/** * Creates a table based on a select on another type and inserts the data (currently not possible to * perform this action 'WITH NO DATA'. * @param sql * @param createAsSelect * @param index * @return the number of rows inserted * @throws SQLException */ public int execute(String sql, CreateTableAsSelect createAsSelect, String index) throws SQLException { if(!createAsSelect.isWithData()) throw new SQLException("Not yet possible to create table as select without data (create emtpy table, " + "insert data and delete it will have the same effect"); // first create the index SqlParser parser = new SqlParser(); int queryIdx = sql.toLowerCase().indexOf(" as "); try{ String createSql = sql.substring(0, queryIdx)+" (_id String)" ; CreateTable create = (CreateTable)parser.createStatement(createSql); this.execute(createSql, create, index); }catch(SQLException sqle) { throw sqle; }catch(Exception e){ throw new SQLException("Unable to create table due to: "+e.getMessage(), e); } // secondly add the documents from the query String insertSql = "INSERT INTO "+createAsSelect.getName().toString()+" "+sql.substring(queryIdx+4); Insert insert = (Insert)parser.createStatement(insertSql); int res = this.execute(insertSql, insert, index); this.statement.getConnection().getTypeMap(); // trigger a reload of the table&column set for the connection return res; }
public InterpretedFilterFunction( Expression predicate, Map<Symbol, Type> symbolTypes, Map<Symbol, Integer> symbolToInputMappings, Metadata metadata, SqlParser sqlParser, Session session) { // pre-compute symbol -> input mappings and replace the corresponding nodes in the tree Expression rewritten = ExpressionTreeRewriter.rewriteWith(new SymbolToInputRewriter(symbolToInputMappings), predicate); // analyze expression so we can know the type of every expression in the tree ImmutableMap.Builder<Integer, Type> inputTypes = ImmutableMap.builder(); for (Map.Entry<Symbol, Integer> entry : symbolToInputMappings.entrySet()) { inputTypes.put(entry.getValue(), symbolTypes.get(entry.getKey())); } IdentityHashMap<Expression, Type> expressionTypes = getExpressionTypesFromInput(session, metadata, sqlParser, inputTypes.build(), rewritten); evaluator = ExpressionInterpreter.expressionInterpreter(rewritten, metadata, session, expressionTypes); }
public InterpretedProjectionFunction( Expression expression, Map<Symbol, Type> symbolTypes, Map<Symbol, Integer> symbolToInputMappings, Metadata metadata, SqlParser sqlParser, Session session) { // pre-compute symbol -> input mappings and replace the corresponding nodes in the tree Expression rewritten = ExpressionTreeRewriter.rewriteWith(new SymbolToInputRewriter(symbolToInputMappings), expression); // analyze expression so we can know the type of every expression in the tree ImmutableMap.Builder<Integer, Type> inputTypes = ImmutableMap.builder(); for (Map.Entry<Symbol, Integer> entry : symbolToInputMappings.entrySet()) { inputTypes.put(entry.getValue(), symbolTypes.get(entry.getKey())); } IdentityHashMap<Expression, Type> expressionTypes = getExpressionTypesFromInput(session, metadata, sqlParser, inputTypes.build(), rewritten); this.type = requireNonNull(expressionTypes.get(rewritten), "type is null"); evaluator = ExpressionInterpreter.expressionInterpreter(rewritten, metadata, session, expressionTypes); InputReferenceExtractor inputReferenceExtractor = new InputReferenceExtractor(); inputReferenceExtractor.process(rewritten, null); this.inputChannels = inputReferenceExtractor.getInputChannels(); this.deterministic = DeterminismEvaluator.isDeterministic(expression); }
/**
 * Analyzes expressions whose free names are symbols, using {@code types} to resolve
 * each unique referenced symbol into an unqualified field.
 */
public static ExpressionAnalysis analyzeExpressionsWithSymbols(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Iterable<? extends Expression> expressions)
{
    // one field per distinct symbol referenced anywhere in the expressions
    List<Field> fields = DependencyExtractor.extractUnique(expressions).stream()
            .map(symbol -> {
                Type symbolType = types.get(symbol);
                checkArgument(symbolType != null, "No type for symbol %s", symbol);
                return Field.newUnqualified(symbol.getName(), symbolType);
            })
            .collect(toImmutableList());

    return analyzeExpressions(session, metadata, sqlParser, new RelationType(fields), expressions);
}
private static ExpressionAnalysis analyzeExpressions( Session session, Metadata metadata, SqlParser sqlParser, RelationType tupleDescriptor, Iterable<? extends Expression> expressions) { // expressions at this point can not have sub queries so deny all access checks // in the future, we will need a full access controller here to verify access to functions ExpressionAnalyzer analyzer = create(new Analysis(), session, metadata, sqlParser, new DenyAllAccessControl(), false); for (Expression expression : expressions) { analyzer.analyze(expression, tupleDescriptor, new AnalysisContext()); } return new ExpressionAnalysis( analyzer.getExpressionTypes(), analyzer.getExpressionCoercions(), analyzer.getSubqueryInPredicates(), analyzer.getScalarSubqueries(), analyzer.getResolvedNames().keySet()); }
/**
 * Creates a statement analyzer; all collaborators except the syntax flag must be non-null.
 */
public StatementAnalyzer(
        Analysis analysis,
        Metadata metadata,
        SqlParser sqlParser,
        AccessControl accessControl,
        Session session,
        boolean experimentalSyntaxEnabled,
        Optional<QueryExplainer> queryExplainer)
{
    this.analysis = requireNonNull(analysis, "analysis is null");
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.accessControl = requireNonNull(accessControl, "accessControl is null");
    this.session = requireNonNull(session, "session is null");
    this.experimentalSyntaxEnabled = experimentalSyntaxEnabled;
    this.queryExplainer = requireNonNull(queryExplainer, "queryExplainer is null");
}
/**
 * Injection entry point: unwraps the experimental-syntax flag from {@code featuresConfig}
 * and delegates to the full constructor.
 */
@Inject
public QueryExplainer(
        List<PlanOptimizer> planOptimizers,
        Metadata metadata,
        AccessControl accessControl,
        SqlParser sqlParser,
        Map<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask,
        FeaturesConfig featuresConfig)
{
    this(planOptimizers,
            metadata,
            accessControl,
            sqlParser,
            dataDefinitionTask,
            featuresConfig.isExperimentalSyntaxEnabled());
}
/**
 * Builds a {@link LocalExecutionPlanner} wired with test doubles for use in unit tests.
 */
public static LocalExecutionPlanner createTestingPlanner()
{
    MetadataManager testMetadata = MetadataManager.createTestMetadataManager();

    // register a stub page source under the "test" connector id
    PageSourceManager pageSources = new PageSourceManager();
    pageSources.addConnectorPageSourceProvider("test", new TestingPageSourceProvider());

    return new LocalExecutionPlanner(
            testMetadata,
            new SqlParser(),
            pageSources,
            new IndexManager(),
            new PageSinkManager(),
            new MockExchangeClientSupplier(),
            new ExpressionCompiler(testMetadata),
            new IndexJoinLookupStats(),
            new CompilerConfig(),
            new TaskManagerConfig());
}
/**
 * A plain SELECT should format with the table name replaced by the mapper output.
 */
@Test
public void testSimpleQuery()
        throws Exception
{
    Statement parsed = new SqlParser().createStatement("select * from testcollection");
    String formatted = formatQuery(parsed, name -> "dummy", '"').trim();
    assertEquals(formatted,
            "SELECT *\n" +
                    " FROM\n" +
                    " dummy");
}
@Test(enabled = false) public void testName() throws Exception { String s = "select count(*) from test where ali = 5"; Statement statement = new SqlParser().createStatement(s); Analysis analysis = new Analysis(statement, ImmutableList.of(), false); Session build = Session.builder(new SessionPropertyManager()) .setQueryId(QueryId.valueOf("test")) .setCatalog("test") .setCatalog("test") .setCatalog("test") .build(); QueryPlanner queryPlanner = new QueryPlanner(analysis, new SymbolAllocator(), new PlanNodeIdAllocator(), null, null, build); RelationPlan plan = queryPlanner.plan((Query) statement); // EffectivePredicateExtractor.extract(plan.getRoot(), ImmutableMap.of(new Symbol("ali"), BIGINT)); EffectivePredicateExtractor.extract(plan.getRoot()); }
/**
 * Executes the {@link BulkRequest} being hold by this state.
 * @return an integer indicator for each executed request: Statement.SUCCESS_NO_INFO for success,
 * else Statement.EXECUTE_FAILED)
 */
public int[] executeBulk(){
	int[] result = new int[bulkList.size()];
	SqlParser parser = new SqlParser();
	for(int i=0; i<bulkList.size(); i++) {
		try{
			String sql = bulkList.get(i);
			com.facebook.presto.sql.tree.Statement st = parser.createStatement(sql);
			if(st instanceof DropTable){
				this.execute(sql, (DropTable)st);
			}else if(st instanceof DropView){
				this.execute(sql, (DropView)st);
			}else if(st instanceof CreateTable){
				this.execute(sql, (CreateTable)st, this.statement.getConnection().getSchema());
			}else if(st instanceof CreateTableAsSelect){
				this.execute(sql, (CreateTableAsSelect)st, this.statement.getConnection().getSchema());
			}else if(st instanceof CreateView){
				this.execute(sql, (CreateView)st, this.statement.getConnection().getSchema());
			}else if(st instanceof Delete){
				this.execute(sql, (Delete)st, this.statement.getConnection().getSchema());
			}else if(st instanceof Insert){
				this.execute(sql, (Insert)st, this.statement.getConnection().getSchema());
			}else{
				// previously an unsupported statement fell through and was reported as
				// SUCCESS_NO_INFO even though nothing was executed; fail it explicitly
				throw new SQLException("Unsupported statement in bulk: "+sql);
			}
			result[i]= Statement.SUCCESS_NO_INFO;
		}catch (Exception e){
			// bulk execution is best-effort: a failed entry is marked, not rethrown
			result[i] = Statement.EXECUTE_FAILED;
		}
	}
	this.clearBulk();
	return result;
}
/**
 * Creates the CREATE VIEW task; the features config is consumed only for its
 * experimental-syntax flag and not retained.
 */
@Inject
public CreateViewTask(
        JsonCodec<ViewDefinition> codec,
        SqlParser sqlParser,
        AccessControl accessControl,
        FeaturesConfig featuresConfig)
{
    this.codec = requireNonNull(codec, "codec is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.accessControl = requireNonNull(accessControl, "accessControl is null");
    requireNonNull(featuresConfig, "featuresConfig is null");
    this.experimentalSyntaxEnabled = featuresConfig.isExperimentalSyntaxEnabled();
}
/**
 * Wires the factory for SQL query executions; the query-manager and features configs
 * are consumed for their settings and not retained.
 */
@Inject
SqlQueryExecutionFactory(QueryManagerConfig config,
        FeaturesConfig featuresConfig,
        Metadata metadata,
        AccessControl accessControl,
        SqlParser sqlParser,
        LocationFactory locationFactory,
        SplitManager splitManager,
        NodeScheduler nodeScheduler,
        List<PlanOptimizer> planOptimizers,
        RemoteTaskFactory remoteTaskFactory,
        TransactionManager transactionManager,
        @ForQueryExecution ExecutorService executor,
        NodeTaskMap nodeTaskMap,
        QueryExplainer queryExplainer,
        Map<String, ExecutionPolicy> executionPolicies)
{
    requireNonNull(config, "config is null");
    this.scheduleSplitBatchSize = config.getScheduleSplitBatchSize();
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.accessControl = requireNonNull(accessControl, "accessControl is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.locationFactory = requireNonNull(locationFactory, "locationFactory is null");
    this.splitManager = requireNonNull(splitManager, "splitManager is null");
    this.nodeScheduler = requireNonNull(nodeScheduler, "nodeScheduler is null");
    this.planOptimizers = requireNonNull(planOptimizers, "planOptimizers is null");
    this.remoteTaskFactory = requireNonNull(remoteTaskFactory, "remoteTaskFactory is null");
    this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
    requireNonNull(featuresConfig, "featuresConfig is null");
    this.experimentalSyntaxEnabled = featuresConfig.isExperimentalSyntaxEnabled();
    this.executor = requireNonNull(executor, "executor is null");
    this.nodeTaskMap = requireNonNull(nodeTaskMap, "nodeTaskMap is null");
    this.queryExplainer = requireNonNull(queryExplainer, "queryExplainer is null");
    // fixed message: previously said "schedulerPolicies is null" for this parameter
    this.executionPolicies = requireNonNull(executionPolicies, "executionPolicies is null");
}
/**
 * Wires the local execution planner; the task-manager and compiler configs are consumed
 * for their settings and not retained.
 */
@Inject
public LocalExecutionPlanner(
        Metadata metadata,
        SqlParser sqlParser,
        PageSourceProvider pageSourceProvider,
        IndexManager indexManager,
        PageSinkManager pageSinkManager,
        ExchangeClientSupplier exchangeClientSupplier,
        ExpressionCompiler compiler,
        IndexJoinLookupStats indexJoinLookupStats,
        CompilerConfig compilerConfig,
        TaskManagerConfig taskManagerConfig)
{
    requireNonNull(compilerConfig, "compilerConfig is null");
    this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
    this.indexManager = requireNonNull(indexManager, "indexManager is null");
    // NOTE: exchangeClientSupplier is intentionally not null-checked (matches original)
    this.exchangeClientSupplier = exchangeClientSupplier;
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.pageSinkManager = requireNonNull(pageSinkManager, "pageSinkManager is null");
    this.compiler = requireNonNull(compiler, "compiler is null");
    this.indexJoinLookupStats = requireNonNull(indexJoinLookupStats, "indexJoinLookupStats is null");
    this.maxIndexMemorySize = requireNonNull(taskManagerConfig, "taskManagerConfig is null").getMaxIndexMemoryUsage();
    this.maxPartialAggregationMemorySize = taskManagerConfig.getMaxPartialAggregationMemoryUsage();
    interpreterEnabled = compilerConfig.isInterpreterEnabled();
}
/**
 * Creates the rewriter with the allocators and services planning requires; every
 * collaborator must be non-null.
 */
private Rewriter(
        SymbolAllocator symbolAllocator,
        PlanNodeIdAllocator idAllocator,
        Metadata metadata,
        SqlParser sqlParser,
        Session session)
{
    this.symbolAllocator = requireNonNull(symbolAllocator, "symbolAllocator is null");
    this.idAllocator = requireNonNull(idAllocator, "idAllocator is null");
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.session = requireNonNull(session, "session is null");
}
/**
 * Creates the visitor with the services and symbol types needed during traversal.
 * NOTE(review): unlike sibling constructors in this codebase, arguments are not
 * null-checked here — confirm whether that is intentional.
 */
public Visitor(Metadata metadata, Session session, Map<Symbol, Type> types, SqlParser parser)
{
    this.metadata = metadata;
    this.session = session;
    this.types = types;
    this.parser = parser;
}
/**
 * Creates the rewriter with the services, symbol types and id allocator used while
 * rewriting the plan.
 * NOTE(review): arguments are not null-checked, unlike sibling constructors — confirm.
 */
public Rewriter(Metadata metadata, SqlParser sqlParser, Session session, Map<Symbol, Type> types, PlanNodeIdAllocator idAllocator)
{
    this.metadata = metadata;
    this.sqlParser = sqlParser;
    this.session = session;
    this.types = types;
    this.idAllocator = idAllocator;
}
/**
 * Extract a normalized simple comparison between a QualifiedNameReference and a native value if possible.
 */
private static Optional<NormalizedSimpleComparison> toNormalizedSimpleComparison(Session session, Metadata metadata, Map<Symbol, Type> types, ComparisonExpression comparison)
{
    IdentityHashMap<Expression, Type> expressionTypes = ExpressionAnalyzer.getExpressionTypes(session, metadata, new SqlParser(), types, comparison);

    // fold each side to a constant where possible; unresolved symbols stay as expressions
    Object leftValue = ExpressionInterpreter.expressionOptimizer(comparison.getLeft(), metadata, session, expressionTypes).optimize(NoOpSymbolResolver.INSTANCE);
    Object rightValue = ExpressionInterpreter.expressionOptimizer(comparison.getRight(), metadata, session, expressionTypes).optimize(NoOpSymbolResolver.INSTANCE);

    // name <op> constant: already normalized
    if (leftValue instanceof QualifiedNameReference && !(rightValue instanceof Expression)) {
        NullableValue constant = new NullableValue(expressionTypes.get(comparison.getRight()), rightValue);
        return Optional.of(new NormalizedSimpleComparison((QualifiedNameReference) leftValue, comparison.getType(), constant));
    }
    // constant <op> name: flip the operator so the name ends up on the left
    if (rightValue instanceof QualifiedNameReference && !(leftValue instanceof Expression)) {
        NullableValue constant = new NullableValue(expressionTypes.get(comparison.getLeft()), leftValue);
        return Optional.of(new NormalizedSimpleComparison((QualifiedNameReference) rightValue, flipComparison(comparison.getType()), constant));
    }
    return Optional.empty();
}
/**
 * Creates the analyzer; all collaborators except the experimental-syntax flag
 * must be non-null.
 */
public Analyzer(Session session,
        Metadata metadata,
        SqlParser sqlParser,
        AccessControl accessControl,
        Optional<QueryExplainer> queryExplainer,
        boolean experimentalSyntaxEnabled)
{
    this.session = requireNonNull(session, "session is null");
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.accessControl = requireNonNull(accessControl, "accessControl is null");
    this.queryExplainer = requireNonNull(queryExplainer, "query explainer is null");
    this.experimentalSyntaxEnabled = experimentalSyntaxEnabled;
}
/**
 * Single-expression convenience overload: wraps the expression and delegates to the
 * iterable variant.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypes(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Expression expression)
{
    return getExpressionTypes(session, metadata, sqlParser, types, ImmutableList.of(expression));
}
/**
 * Analyzes the expressions against symbol types and returns only the resulting
 * expression-to-type map.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypes(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Symbol, Type> types,
        Iterable<? extends Expression> expressions)
{
    ExpressionAnalysis analysis = analyzeExpressionsWithSymbols(session, metadata, sqlParser, types, expressions);
    return analysis.getExpressionTypes();
}
/**
 * Single-expression convenience overload for input-channel typing: wraps the
 * expression and delegates to the iterable variant.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypesFromInput(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Integer, Type> types,
        Expression expression)
{
    return getExpressionTypesFromInput(session, metadata, sqlParser, types, ImmutableList.of(expression));
}
/**
 * Analyzes the expressions against input-channel types and returns only the resulting
 * expression-to-type map.
 */
public static IdentityHashMap<Expression, Type> getExpressionTypesFromInput(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Integer, Type> types,
        Iterable<? extends Expression> expressions)
{
    ExpressionAnalysis analysis = analyzeExpressionsWithInputs(session, metadata, sqlParser, types, expressions);
    return analysis.getExpressionTypes();
}
/**
 * Analyzes expressions whose free names are input-channel references: builds an
 * anonymous field per channel position and delegates to the generic analyzer.
 */
private static ExpressionAnalysis analyzeExpressionsWithInputs(
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        Map<Integer, Type> types,
        Iterable<? extends Expression> expressions)
{
    // one unnamed field per input channel, placed at its channel index
    Field[] fields = new Field[types.size()];
    for (Entry<Integer, Type> channel : types.entrySet()) {
        fields[channel.getKey()] = Field.newUnqualified(Optional.empty(), channel.getValue());
    }
    RelationType tupleDescriptor = new RelationType(fields);

    return analyzeExpressions(session, metadata, sqlParser, tupleDescriptor, expressions);
}
/**
 * Analyzes a single expression, records the derived types, coercions, function
 * signatures and resolved names on {@code analysis}, and returns the result bundle.
 */
public static ExpressionAnalysis analyzeExpression(
        Session session,
        Metadata metadata,
        AccessControl accessControl,
        SqlParser sqlParser,
        RelationType tupleDescriptor,
        Analysis analysis,
        boolean approximateQueriesEnabled,
        AnalysisContext context,
        Expression expression)
{
    ExpressionAnalyzer expressionAnalyzer = create(analysis, session, metadata, sqlParser, accessControl, approximateQueriesEnabled);
    expressionAnalyzer.analyze(expression, tupleDescriptor, context);

    IdentityHashMap<Expression, Type> types = expressionAnalyzer.getExpressionTypes();
    IdentityHashMap<Expression, Type> coercions = expressionAnalyzer.getExpressionCoercions();
    IdentityHashMap<FunctionCall, Signature> functionSignatures = expressionAnalyzer.getResolvedFunctions();

    // side effect: record everything learned about this expression on the analysis
    analysis.addTypes(types);
    analysis.addCoercions(coercions);
    analysis.addFunctionSignatures(functionSignatures);
    analysis.addResolvedNames(expressionAnalyzer.getResolvedNames());

    return new ExpressionAnalysis(
            types,
            coercions,
            expressionAnalyzer.getSubqueryInPredicates(),
            expressionAnalyzer.getScalarSubqueries(),
            expressionAnalyzer.getColumnReferences());
}
/**
 * Builds an {@link ExpressionAnalyzer} whose sub-statement analysis is handled by a
 * lazily constructed {@link StatementAnalyzer}.
 */
public static ExpressionAnalyzer create(
        Analysis analysis,
        Session session,
        Metadata metadata,
        SqlParser sqlParser,
        AccessControl accessControl,
        boolean experimentalSyntaxEnabled)
{
    return new ExpressionAnalyzer(
            metadata.getFunctionRegistry(),
            metadata.getTypeManager(),
            // each statement node gets its own analyzer with no query explainer
            node -> new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, experimentalSyntaxEnabled, Optional.empty()),
            session);
}
/**
 * Full constructor: captures the planning collaborators and a defensive copy of the
 * data-definition task map.
 */
public QueryExplainer(
        List<PlanOptimizer> planOptimizers,
        Metadata metadata,
        AccessControl accessControl,
        SqlParser sqlParser,
        Map<Class<? extends Statement>, DataDefinitionTask<?>> dataDefinitionTask,
        boolean experimentalSyntaxEnabled)
{
    this.planOptimizers = requireNonNull(planOptimizers, "planOptimizers is null");
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.accessControl = requireNonNull(accessControl, "accessControl is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    this.experimentalSyntaxEnabled = experimentalSyntaxEnabled;
    // immutable snapshot so later changes by the caller cannot leak in
    this.dataDefinitionTask = ImmutableMap.copyOf(requireNonNull(dataDefinitionTask, "dataDefinitionTask is null"));
}
/**
 * Verifies that USE statements update the client session's catalog/schema while the
 * previously configured session properties are preserved.
 */
@Test
public void testUpdateSessionParameters()
        throws Exception
{
    ClientOptions options = new ClientOptions();
    ClientSession session = options.toClientSession();
    SqlParser sqlParser = new SqlParser();
    ImmutableMap<String, String> existingProperties = ImmutableMap.of("query_max_memory", "10GB", "distributed_join", "true");

    // catalog.schema form updates both
    session = Console.processSessionParameterChange(sqlParser.createStatement("USE test_catalog.test_schema"), session, existingProperties);
    assertSessionState(session, "test_catalog", "test_schema");

    // schema-only form keeps the current catalog
    session = Console.processSessionParameterChange(sqlParser.createStatement("USE test_schema_b"), session, existingProperties);
    assertSessionState(session, "test_catalog", "test_schema_b");

    // switching catalog again replaces both parts
    session = Console.processSessionParameterChange(sqlParser.createStatement("USE test_catalog_2.test_schema"), session, existingProperties);
    assertSessionState(session, "test_catalog_2", "test_schema");
}

/** Asserts catalog/schema plus the two session properties that must survive every USE. */
private static void assertSessionState(ClientSession session, String expectedCatalog, String expectedSchema)
{
    assertEquals(session.getCatalog(), expectedCatalog);
    assertEquals(session.getSchema(), expectedSchema);
    assertEquals(session.getProperties().get("query_max_memory"), "10GB");
    assertEquals(session.getProperties().get("distributed_join"), "true");
}
public static void assertFormattedSql(SqlParser sqlParser, Node expected) { String formatted = formatSql(expected); // verify round-trip of formatting already-formatted SQL Statement actual = parseFormatted(sqlParser, formatted, expected); assertEquals(formatSql(actual), formatted); // compare parsed tree with parsed tree of formatted SQL if (!actual.equals(expected)) { // simplify finding the non-equal part of the tree assertListEquals(linearizeTree(actual), linearizeTree(expected)); } assertEquals(actual, expected); }
/**
 * Parses formatter output, converting a parse failure into an {@link AssertionError}
 * that includes the SQL and the tree it was produced from.
 */
private static Statement parseFormatted(SqlParser sqlParser, String sql, Node tree)
{
    try {
        return sqlParser.createStatement(sql);
    }
    catch (ParsingException e) {
        String message = format("failed to parse formatted SQL: %s\nerror: %s\ntree: %s", sql, e.getMessage(), tree);
        throw new AssertionError(message);
    }
}
/**
 * A bare comparison should format with the column qualified by the mapper while the
 * table-name mapper is never invoked.
 */
@Test
public void testSimpleExpression()
        throws Exception
{
    Expression parsed = new SqlParser().createExpression("test = 'test'");
    String formatted = formatExpression(parsed,
            name -> { throw new UnsupportedOperationException(); },
            name -> "\"dummy\".\"" + name + "\"",
            '"');
    assertEquals(formatted, "(\"dummy\".\"test\" = 'test')");
}
@Test public void testJoinQuery() throws Exception { Statement statement = new SqlParser().createStatement ("select * from testcollection join anothercollection on (anothercollection.test = testcollection.test)"); // TODO: decide if we should also format expressions in QueryFormatter assertEquals(formatQuery(statement, name -> "dummy", '"').trim(), "SELECT *\n" + " FROM\n" + " (dummy\n" + " INNER JOIN dummy ON (\"anothercollection\".\"test\" = \"testcollection\".\"test\"))"); }
/**
 * Only the table inside the CTE body is mapped; the reference to the CTE name in the
 * outer query stays untouched.
 */
@Test
public void testQueryWithCTE()
        throws Exception
{
    Statement parsed = new SqlParser().createStatement("with test as (select * from collection) select * from test");
    String formatted = formatQuery(parsed, name -> "dummy", '"').trim();
    assertEquals(formatted,
            "WITH\n" +
                    " \"test\" AS (\n" +
                    " SELECT *\n" +
                    " FROM\n" +
                    " dummy\n" +
                    " ) \n" +
                    " SELECT *\n" +
                    " FROM\n" +
                    " test");
}
/**
 * A column alias should be preserved while the table name goes through the mapper.
 */
@Test
public void testAlias()
        throws Exception
{
    Statement parsed = new SqlParser().createStatement("select a as b from test");
    String formatted = formatQuery(parsed, name -> "dummy", '"').trim();
    assertEquals(formatted,
            "SELECT \"a\" \"b\"\n" +
                    " FROM\n" +
                    " dummy");
}
/**
 * A qualified reference that shadows a CTE name must still be sent through the table
 * mapper rather than treated as the CTE.
 */
@Test
public void testQueryWithCTEDuplicateName()
        throws Exception
{
    Statement parsed = new SqlParser().createStatement("with test as (select * from collection) select * from collection.test");
    String formatted = formatQuery(parsed, name -> "dummy", '"').trim();
    assertEquals(formatted,
            "WITH\n" +
                    " \"test\" AS (\n" +
                    " SELECT *\n" +
                    " FROM\n" +
                    " dummy\n" +
                    " ) \n" +
                    " SELECT *\n" +
                    " FROM\n" +
                    " dummy");
}
/**
 * A subquery inside an IN predicate should have its table formatted by the qualified
 * table mapper and its columns by the column mapper.
 */
@Test
public void testExpressionFormatterFormatTable()
        throws Exception
{
    Expression parsed = new SqlParser().createExpression("test in (select id from testcollection)");
    String formatted = formatExpression(parsed,
            name -> "\"schema\"." + name.getParts().stream().map(e -> formatIdentifier(e, '"')).collect(Collectors.joining(".")),
            name -> '"' + name + '"',
            '"');
    assertEquals(formatted,
            "(\"test\" IN (SELECT \"id\"\n" +
                    "FROM\n" +
                    " \"schema\".\"testcollection\"\n" +
                    "))");
}
/**
 * Formats a query using colon-typed identifiers (enabled via SqlParserOptions).
 * NOTE(review): this test only prints the result and asserts nothing — it verifies
 * parsing/formatting do not throw; consider adding an expected-output assertion.
 */
@Test
public void testName()
        throws Exception
{
    String sql = "select selami:timestamp, melami:varchar from deneme where ali:timestamp is not null and veli is null group by demo";
    SqlParserOptions options = new SqlParserOptions().allowIdentifierSymbol(IdentifierSymbol.COLON);
    Statement parsed = new SqlParser(options).createStatement(sql);
    String formatted = RakamSqlFormatter.formatSql(parsed,
            name -> String.format("(SELECT * FROM events WHERE collection_name = '%s')", name.toString()),
            name -> "\"$data\"['" + name + "']",
            '"');
    System.out.println(formatted);
}
public SqlStatement parse(SqlSession session, String sql) { log.debug("Parsing sql: {}", sql); Statement statement = SqlParser.createStatement(sql); QueryExplainer queryExplainer = new QueryExplainer(session.prestoSession, planOptimizers, metadataManager, periodicImportManager, storageManager); // analyze query Analyzer analyzer = new Analyzer(session.prestoSession, metadataManager, Optional.of(queryExplainer)); Analysis analysis = analyzer.analyze(statement); // System.out.println("analysis: " + analysis); PlanNodeIdAllocator idAllocator = new PlanNodeIdAllocator(); // plan query LogicalPlanner logicalPlanner = new LogicalPlanner(session.prestoSession, planOptimizers, idAllocator, metadataManager, periodicImportManager, storageManager); Plan plan = logicalPlanner.plan(analysis); return new SqlStatement(session, sql, plan); // // TableScanCountVisitor visitor = new TableScanCountVisitor(); // plan.getRoot().accept(visitor, 0); // Assert.assertEquals(1, visitor.count); // String p = PlanPrinter.textLogicalPlan(plan.getRoot(), plan.getTypes()); // // System.out.println("plan: " + p); }
private Plan parse(String sql) { InMemoryNodeManager nodeManager = new InMemoryNodeManager(); MetadataManager metadata = buildMetadata(); StorageManager storageManager = new MockStorageManager(); PeriodicImportManager periodicImportManager = new StubPeriodicImportManager(); SplitManager splitManager = buildSplitManager(nodeManager); List<PlanOptimizer> planOptimizers = buildPlanOptimizers(metadata, splitManager); Statement statement = SqlParser.createStatement(sql); // System.out.println("Statement: " + statement); Session session = buildSession(); QueryExplainer queryExplainer = new QueryExplainer(session, planOptimizers, metadata, periodicImportManager, storageManager); // analyze query Analyzer analyzer = new Analyzer(session, metadata, Optional.of(queryExplainer)); Analysis analysis = analyzer.analyze(statement); // System.out.println("analysis: " + analysis); PlanNodeIdAllocator idAllocator = new PlanNodeIdAllocator(); // plan query LogicalPlanner logicalPlanner = new LogicalPlanner(session, planOptimizers, idAllocator, metadata, periodicImportManager, storageManager); Plan plan = logicalPlanner.plan(analysis); return plan; }
/**
 * Executes an SQL UPDATE by translating it into an Elasticsearch id-lookup query
 * followed by batched update requests.
 *
 * Flow: parse the statement with {@code updateRegex}; build the field/value map from
 * the SET clause (dotted field names become nested maps); select the matching _id
 * values; then issue bulk UpdateRequests for those ids.
 *
 * @param update the UPDATE statement text
 * @return the number of documents updated
 * @throws SQLException if the statement cannot be parsed or any step fails
 */
@SuppressWarnings("unchecked")
public int execute(String update) throws SQLException {
	Matcher matcher = updateRegex.matcher(update);
	if(!matcher.find())
		throw new SQLException("Unable to parse UPDATE statement");
	// get index and type to update
	String index = statement.getConnection().getSchema();
	String type = matcher.group(1);
	// when the target is qualified (index.type), group(1) is the index and group(2) the type
	if(matcher.group(2) != null){
		index = type;
		type = matcher.group(2);
	}
	// get fields and values to update
	try{
		Map<String, Object> fieldValues = new HashMap<String, Object>();
		SqlParser parser = new SqlParser();
		// split the SET clause only on commas that start a new "field =" assignment
		String[] parts = matcher.group(3).replaceAll(",\\s*([\"|\\w|\\.]+\\s*=)", "<-SPLIT->$1").split("<-SPLIT->");
		for(String p : parts){
			// each assignment parses as a comparison: left = field, right = literal value
			ComparisonExpression comparison = (ComparisonExpression) parser.createExpression(p);
			String field = comparison.getLeft().toString().replaceAll("\"", "");
			// recover the field's original casing from the raw SET clause
			field = Heading.findOriginal(matcher.group(3), field, "", "\\s*=");
			Object value = getLiteralValue(comparison.getRight());
			if(field.indexOf('.') == -1) {
				fieldValues.put(field, value);
				continue;
			}
			// create nested object: "a.b.c = v" becomes {a: {b: {c: v}}}
			Map<String, Object> map = fieldValues;
			String[] objectDef = field.split("\\.");
			for(int k=0; k<objectDef.length; k++){
				String key = objectDef[k];
				if(k == objectDef.length-1)
					map.put(key, value);
				else{
					if(!map.containsKey(key))
						map.put(key, new HashMap<String, Object>());
					map = (Map<String, Object>)map.get(key);
				}
			}
		}
		// get ID's for documents to be updated
		String select = "SELECT _id FROM "+type+" WHERE "+matcher.group(4);
		Query query = (Query)new SqlParser().createStatement(select);
		this.queryState.buildRequest(select, query.getQueryBody(), index);
		ResultSet rs = this.queryState.execute();
		// execute updates in batch mode based on id's returned
		int maxRequestsPerBulk = Utils.getIntProp(props, Utils.PROP_FETCH_SIZE, 2500);
		List<UpdateRequestBuilder> indexReqs = new ArrayList<UpdateRequestBuilder>();
		int updateCount = 0;
		// outer loop pages through result sets; inner loop walks rows of the current page
		while(rs != null){
			while(rs.next()){
				String id = rs.getString(1);
				indexReqs.add( client.prepareUpdate(index, type, id).setDoc(fieldValues) );
				// flush a bulk batch once it reaches the configured size
				if(indexReqs.size() >= maxRequestsPerBulk){
					updateCount += this.execute(indexReqs, maxRequestsPerBulk);
					indexReqs.clear();
				}
			}
			// NOTE(review): rs.close() is not in a finally block, so a thrown exception
			// leaks the result set — confirm whether queryState cleans up on failure
			rs.close();
			rs = queryState.moreResults(true);
		}
		// flush any remaining requests that did not fill a full batch
		if(indexReqs.size() > 0)
			updateCount += this.execute(indexReqs, maxRequestsPerBulk);
		return updateCount;
	}catch(Exception e){
		throw new SQLException("Unable to execute UPDATE due to "+e.getMessage(),e);
	}
}