/**
 * Contributes bindings and other configurations for this module to {@code binder}.
 *
 * @param binder the Guice binder to register this module's bindings against
 */
@Override
public void configure(Binder binder)
{
    binder.bind(HDFSConnectorId.class).toInstance(new HDFSConnectorId(connectorId));
    binder.bind(TypeManager.class).toInstance(typeManager);
    configBinder(binder).bindConfig(MetaConfig.class);
    binder.bind(HDFSMetadataFactory.class).in(Scopes.SINGLETON);
    binder.bind(HDFSMetadata.class).in(Scopes.SINGLETON);
    binder.bind(FSFactory.class).in(Scopes.SINGLETON);
    binder.bind(HDFSConnector.class).in(Scopes.SINGLETON);
    binder.bind(HDFSSplitManager.class).in(Scopes.SINGLETON);
    binder.bind(HDFSPageSourceProvider.class).in(Scopes.SINGLETON);
    binder.bind(ClassLoader.class).toInstance(HDFSPlugin.getClassLoader());
}
@Override
public void configure(Binder binder)
{
    log.info("AmpoolModule configure() called.");
    binder.bind(TypeManager.class).toInstance(typeManager);
    binder.bind(AmpoolConnectorID.class).toInstance(new AmpoolConnectorID(connectorId));
    binder.bind(AmpoolConnector.class).in(Scopes.SINGLETON);
    binder.bind(AmpoolMetadata.class).in(Scopes.SINGLETON);
    binder.bind(AmpoolClient.class).in(Scopes.SINGLETON);
    binder.bind(AmpoolSplitManager.class).in(Scopes.SINGLETON);
    binder.bind(AmpoolRecordSetProvider.class).in(Scopes.SINGLETON);
    // configBinder(binder).bindConfig(AmpoolConfig.class);
    /*
    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
    jsonCodecBinder(binder).bindMapJsonCodec(String.class, listJsonCodec(AmpoolTable.class));
    */
}
@Test
public void testUnknownCoercion()
        throws Exception
{
    TypeManager typeManager = new TypeRegistry();

    Signature signature = new Signature("foo", SCALAR, ImmutableList.of(typeParameter("T")), "boolean", ImmutableList.of("T", "T"), false);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(UNKNOWN, UNKNOWN), true, typeManager));
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(UNKNOWN, BIGINT), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(BIGINT, VARCHAR), true, typeManager));

    signature = new Signature("foo", SCALAR, ImmutableList.of(comparableTypeParameter("T")), "boolean", ImmutableList.of("T", "T"), false);
    Map<String, Type> boundParameters = signature.bindTypeParameters(ImmutableList.of(UNKNOWN, BIGINT), true, typeManager);
    assertNotNull(boundParameters);
    assertEquals(boundParameters.get("T"), BIGINT);
    assertNull(signature.bindTypeParameters(ImmutableList.of(BIGINT, VARCHAR), true, typeManager));
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type type = types.get("T");
    MethodHandle equalsHandle = functionRegistry
            .getScalarFunctionImplementation(internalOperator(OperatorType.EQUAL, BooleanType.BOOLEAN, ImmutableList.of(type, type)))
            .getMethodHandle();

    MethodHandle methodHandle;
    List<Boolean> nullableArguments;
    if (type.getJavaType() == void.class) {
        nullableArguments = ImmutableList.of(false, true);
        methodHandle = METHOD_HANDLE_UNKNOWN;
    }
    else {
        nullableArguments = ImmutableList.of(false, false);
        methodHandle = methodHandle(ArrayContains.class, "contains", Type.class, MethodHandle.class, Block.class, type.getJavaType());
    }

    return new ScalarFunctionImplementation(true, nullableArguments, methodHandle.bindTo(type).bindTo(equalsHandle), isDeterministic());
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type toType = types.get("E");

    MethodHandle methodHandle;
    if (toType.getJavaType() == long.class) {
        methodHandle = METHOD_HANDLE_LONG;
    }
    else if (toType.getJavaType() == double.class) {
        methodHandle = METHOD_HANDLE_DOUBLE;
    }
    else if (toType.getJavaType() == boolean.class) {
        methodHandle = METHOD_HANDLE_BOOLEAN;
    }
    else if (toType.getJavaType() == Slice.class) {
        methodHandle = METHOD_HANDLE_SLICE;
    }
    else {
        methodHandle = METHOD_HANDLE_OBJECT.asType(METHOD_HANDLE_OBJECT.type().changeReturnType(toType.getJavaType()));
    }

    return new ScalarFunctionImplementation(true, ImmutableList.of(true), methodHandle, isDeterministic());
}
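// A minimal, self-contained sketch (not Presto code) of the asType() adaptation used in the
// fallback branch above: a handle declared to return Object is narrowed to a more specific
// reference return type instead of writing one method per Java type. The "first" helper is
// hypothetical and exists only for this example.
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public class AsTypeSketch
{
    public static Object first(Object[] values)
    {
        return values[0];
    }

    public static void main(String[] args)
            throws Throwable
    {
        MethodHandle generic = MethodHandles.lookup().findStatic(
                AsTypeSketch.class, "first", MethodType.methodType(Object.class, Object[].class));

        // Mirrors METHOD_HANDLE_OBJECT.asType(...changeReturnType(toType.getJavaType())):
        // asType() inserts a checked reference cast on the return value.
        MethodHandle asString = generic.asType(generic.type().changeReturnType(String.class));
        System.out.println((String) asString.invokeExact(new Object[] {"hello"}));
    }
}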
@Inject
public MetadataManager(
        FeaturesConfig featuresConfig,
        TypeManager typeManager,
        JsonCodec<ViewDefinition> viewCodec,
        BlockEncodingSerde blockEncodingSerde,
        SessionPropertyManager sessionPropertyManager,
        TablePropertyManager tablePropertyManager,
        TransactionManager transactionManager)
{
    functions = new FunctionRegistry(typeManager, blockEncodingSerde, featuresConfig.isExperimentalSyntaxEnabled());
    procedures = new ProcedureRegistry();
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.viewCodec = requireNonNull(viewCodec, "viewCodec is null");
    this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null");
    this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null");
    this.tablePropertyManager = requireNonNull(tablePropertyManager, "tablePropertyManager is null");
    this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");

    verifyComparableOrderableContract();
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type type = types.get("E");

    MethodHandle methodHandle;
    if (type.getJavaType() == boolean.class) {
        methodHandle = METHOD_HANDLE_BOOLEAN;
    }
    else if (type.getJavaType() == long.class) {
        methodHandle = METHOD_HANDLE_LONG;
    }
    else if (type.getJavaType() == double.class) {
        methodHandle = METHOD_HANDLE_DOUBLE;
    }
    else if (type.getJavaType() == Slice.class) {
        methodHandle = METHOD_HANDLE_SLICE;
    }
    else {
        methodHandle = METHOD_HANDLE_OBJECT;
    }
    methodHandle = methodHandle.bindTo(type);

    return new ScalarFunctionImplementation(false, ImmutableList.of(false, false), methodHandle, isDeterministic());
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type fromType = types.get("F");
    Type toType = types.get("T");
    Class<?> returnType = Primitives.wrap(toType.getJavaType());

    MethodHandle tryCastHandle;
    if (fromType.equals(UNKNOWN)) {
        tryCastHandle = dropArguments(constant(returnType, null), 0, Void.class);
    }
    else {
        // the resulting method needs to return a boxed type
        Signature signature = functionRegistry.getCoercion(fromType, toType);
        MethodHandle coercion = functionRegistry.getScalarFunctionImplementation(signature).getMethodHandle();
        coercion = coercion.asType(methodType(returnType, coercion.type()));

        MethodHandle exceptionHandler = dropArguments(constant(returnType, null), 0, RuntimeException.class);
        tryCastHandle = catchException(coercion, RuntimeException.class, exceptionHandler);
    }

    return new ScalarFunctionImplementation(true, ImmutableList.of(true), tryCastHandle, isDeterministic());
}
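// A self-contained sketch of the TRY_CAST method-handle pattern above: wrap a coercion
// handle with MethodHandles.catchException so a RuntimeException yields null instead of
// propagating. The parseLong helper below is hypothetical and stands in for a
// registry-provided coercion; this is a sketch of the JDK API, not the Presto code.
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

import static java.lang.invoke.MethodHandles.catchException;
import static java.lang.invoke.MethodHandles.constant;
import static java.lang.invoke.MethodHandles.dropArguments;

public class TryCastSketch
{
    // Stand-in coercion: throws NumberFormatException (a RuntimeException) on bad input.
    public static Long parseLong(String s)
    {
        return Long.valueOf(s);
    }

    public static void main(String[] args)
            throws Throwable
    {
        MethodHandle coercion = MethodHandles.lookup().findStatic(
                TryCastSketch.class, "parseLong", MethodType.methodType(Long.class, String.class));

        // On RuntimeException, return a boxed null of the same return type, ignoring the
        // exception argument -- the same trick as the exceptionHandler in specialize().
        MethodHandle nullHandler = dropArguments(constant(Long.class, null), 0, RuntimeException.class);
        MethodHandle tryCast = catchException(coercion, RuntimeException.class, nullHandler);

        System.out.println((Long) tryCast.invoke("42"));   // 42
        System.out.println((Long) tryCast.invoke("oops")); // null
    }
}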
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    checkArgument(types.size() == 1, "Expected one type, got %s", types);
    Type elementType = types.get("E");

    MethodHandle methodHandle;
    if (METHOD_HANDLES.containsKey(elementType.getJavaType())) {
        methodHandle = METHOD_HANDLES.get(elementType.getJavaType());
    }
    else {
        checkArgument(!elementType.getJavaType().isPrimitive(), "Unsupported primitive type: " + elementType.getJavaType());
        methodHandle = OBJECT_METHOD_HANDLE;
    }
    requireNonNull(methodHandle, "methodHandle is null");
    methodHandle = methodHandle.bindTo(elementType);

    return new ScalarFunctionImplementation(true, ImmutableList.of(false, false), methodHandle, isDeterministic());
}
@Test
public void testArray()
        throws Exception
{
    TypeManager typeManager = new TypeRegistry();

    Signature signature = new Signature("get", SCALAR, ImmutableList.of(typeParameter("T")), "T", ImmutableList.of("array<T>"), false);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<bigint>"))), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(BIGINT), true, typeManager));

    signature = new Signature("contains", SCALAR, ImmutableList.of(comparableTypeParameter("T")), "T", ImmutableList.of("array<T>", "T"), false);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<bigint>")), BIGINT), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<bigint>")), VARCHAR), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<HyperLogLog>")), HYPER_LOG_LOG), true, typeManager));

    signature = new Signature("foo", SCALAR, ImmutableList.of(typeParameter("T")), "T", ImmutableList.of("array<T>", "array<T>"), false);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<bigint>")), typeManager.getType(parseTypeSignature("array<bigint>"))), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("array<bigint>")), typeManager.getType(parseTypeSignature("array<varchar>"))), true, typeManager));
}
@Nullable
public Map<String, Type> bindTypeParameters(Type returnType, List<? extends Type> types, boolean allowCoercion, TypeManager typeManager)
{
    Map<String, Type> boundParameters = new HashMap<>();

    ImmutableMap.Builder<String, TypeParameterRequirement> builder = ImmutableMap.builder();
    for (TypeParameterRequirement parameter : typeParameterRequirements) {
        builder.put(parameter.getName(), parameter);
    }
    ImmutableMap<String, TypeParameterRequirement> parameters = builder.build();

    if (!matchAndBind(boundParameters, parameters, this.returnType, returnType, allowCoercion, typeManager)) {
        return null;
    }
    if (!matchArguments(boundParameters, parameters, argumentTypes, types, allowCoercion, variableArity, typeManager)) {
        return null;
    }

    checkState(boundParameters.keySet().equals(parameters.keySet()),
            "%s matched arguments %s, but type parameters %s are still unbound",
            this, types, Sets.difference(parameters.keySet(), boundParameters.keySet()));
    return boundParameters;
}
public HDFSPageSource(
        ParquetReader parquetReader,
        ParquetDataSource dataSource,
        MessageType fileSchema,
        MessageType requestedSchema,
        long totalBytes,
        List<HDFSColumnHandle> columns,
        TypeManager typeManager)
{
    checkArgument(totalBytes >= 0, "totalBytes is negative");
    this.parquetReader = requireNonNull(parquetReader, "parquetReader is null");
    this.dataSource = requireNonNull(dataSource, "dataSource is null");
    this.fileSchema = requireNonNull(fileSchema, "fileSchema is null");
    this.requestedSchema = requireNonNull(requestedSchema, "requestedSchema is null");
    this.totalBytes = totalBytes;
    this.columnSize = columns.size();
    this.constantBlocks = new Block[columnSize];

    ImmutableList.Builder<String> namesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Type> typesBuilder = ImmutableList.builder();
    for (int columnIndex = 0; columnIndex < columnSize; columnIndex++) {
        HDFSColumnHandle column = columns.get(columnIndex);
        String name = column.getName();
        Type type = typeManager.getType(column.getType().getTypeSignature());
        namesBuilder.add(name);
        typesBuilder.add(type);

        // columns absent from the file schema are backed by a run-length encoded block of nulls
        if (getParquetType(column, fileSchema) == null) {
            constantBlocks[columnIndex] = RunLengthEncodedBlock.create(type, null, MAX_VECTOR_LENGTH);
        }
    }
    columnNames = namesBuilder.build();
    types = typesBuilder.build();
}
@Override
public Connector create(String connectorId, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(connectorId, "connectorId is null");
    requireNonNull(config, "config is null");

    try {
        Bootstrap app = new Bootstrap(
                // new JsonModule(),
                new EthereumConnectorModule(),
                binder -> {
                    binder.bind(EthereumConnectorId.class).toInstance(new EthereumConnectorId(connectorId));
                    binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                    binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                });

        Injector injector = app.strictConfig()
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .initialize();

        return injector.getInstance(EthereumConnector.class);
    }
    catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
public AmpoolModule(String connectorId, AmpoolClient ampoolClient, TypeManager typeManager)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null");
    this.ampoolClient = requireNonNull(ampoolClient, "ampoolClient is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    log.info("AmpoolModule created successfully.");
}
@Test
public void testCoercion()
        throws Exception
{
    TypeManager typeManager = new TypeRegistry();

    Signature signature = new Signature("foo", SCALAR, ImmutableList.of(typeParameter("T")), "boolean", ImmutableList.of("T", "double"), true);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(DOUBLE, DOUBLE), true, typeManager));
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(BIGINT, BIGINT), true, typeManager));
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(VARCHAR, BIGINT), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(BIGINT, VARCHAR), true, typeManager));
}
KafkaConnectorFactory(TypeManager typeManager, NodeManager nodeManager, Optional<Supplier<Map<SchemaTableName, KafkaTopicDescription>>> tableDescriptionSupplier, Map<String, String> optionalConfig)
{
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
    this.optionalConfig = requireNonNull(optionalConfig, "optionalConfig is null");
    this.tableDescriptionSupplier = requireNonNull(tableDescriptionSupplier, "tableDescriptionSupplier is null");
}
@ForMetadata
@Singleton
@Provides
public IDBI createDBI(@ForMetadata ConnectionFactory connectionFactory, TypeManager typeManager)
{
    DBI dbi = new DBI(connectionFactory);
    dbi.registerMapper(new TableColumn.Mapper(typeManager));
    return dbi;
}
@Inject
public RaptorTableProperties(TypeManager typeManager)
{
    tableProperties = ImmutableList.<PropertyMetadata<?>>builder()
            .add(stringListSessionProperty(
                    typeManager,
                    ORDERING_PROPERTY,
                    "Sort order for each shard of the table"))
            .add(lowerCaseStringSessionProperty(
                    TEMPORAL_COLUMN_PROPERTY,
                    "Temporal column of the table"))
            .build();
}
private static PropertyMetadata<?> stringListSessionProperty(TypeManager typeManager, String name, String description)
{
    return new PropertyMetadata<>(
            name,
            description,
            typeManager.getParameterizedType(ARRAY, ImmutableList.of(VARCHAR.getTypeSignature()), ImmutableList.of()),
            List.class,
            ImmutableList.of(),
            false,
            value -> ImmutableList.copyOf(stringList(value).stream()
                    .map(s -> s.toLowerCase(ENGLISH))
                    .collect(toList())));
}
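// A small sketch of the decoder lambda above in isolation: the raw property value arrives
// as an untyped List and is normalized into an immutable list of lower-cased strings.
// The stringList(...) cast below is a hypothetical stand-in for the original helper.
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

import com.google.common.collect.ImmutableList;

public class StringListDecoderSketch
{
    @SuppressWarnings("unchecked")
    private static List<String> stringList(Object value)
    {
        return (List<String>) value; // assumes the engine hands us a List of Strings
    }

    public static void main(String[] args)
    {
        Object raw = Arrays.asList("OrderKey", "CustKey");
        List<String> normalized = ImmutableList.copyOf(stringList(raw).stream()
                .map(s -> s.toLowerCase(Locale.ENGLISH))
                .collect(Collectors.toList()));
        System.out.println(normalized); // [orderkey, custkey]
    }
}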
@Override
public Connector create(String connectorId, Map<String, String> config)
{
    try {
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new MBeanModule(),
                binder -> {
                    CurrentNodeId currentNodeId = new CurrentNodeId(nodeManager.getCurrentNode().getNodeIdentifier());
                    MBeanServer mbeanServer = new RebindSafeMBeanServer(getPlatformMBeanServer());

                    binder.bind(MBeanServer.class).toInstance(mbeanServer);
                    binder.bind(CurrentNodeId.class).toInstance(currentNodeId);
                    binder.bind(NodeManager.class).toInstance(nodeManager);
                    binder.bind(PageSorter.class).toInstance(pageSorter);
                    binder.bind(BlockEncodingSerde.class).toInstance(blockEncodingSerde);
                    binder.bind(TypeManager.class).toInstance(typeManager);
                },
                metadataModule,
                new BackupModule(backupProviders),
                new StorageModule(connectorId),
                new RaptorModule(connectorId));

        Injector injector = app
                .strictConfig()
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .setOptionalConfigurationProperties(optionalConfig)
                .initialize();

        return injector.getInstance(RaptorConnector.class);
    }
    catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
@Inject
public OrcStorageManager(
        CurrentNodeId currentNodeId,
        StorageService storageService,
        Optional<BackupStore> backupStore,
        JsonCodec<ShardDelta> shardDeltaCodec,
        ReaderAttributes readerAttributes,
        StorageManagerConfig config,
        RaptorConnectorId connectorId,
        BackupManager backgroundBackupManager,
        ShardRecoveryManager recoveryManager,
        ShardRecorder shardRecorder,
        TypeManager typeManager)
{
    this(currentNodeId.toString(),
            storageService,
            backupStore,
            shardDeltaCodec,
            readerAttributes,
            backgroundBackupManager,
            recoveryManager,
            shardRecorder,
            typeManager,
            connectorId.toString(),
            config.getDeletionThreads(),
            config.getShardRecoveryTimeout(),
            config.getMaxShardRows(),
            config.getMaxShardSize());
}
public OrcStorageManager(
        String nodeId,
        StorageService storageService,
        Optional<BackupStore> backupStore,
        JsonCodec<ShardDelta> shardDeltaCodec,
        ReaderAttributes readerAttributes,
        BackupManager backgroundBackupManager,
        ShardRecoveryManager recoveryManager,
        ShardRecorder shardRecorder,
        TypeManager typeManager,
        String connectorId,
        int deletionThreads,
        Duration shardRecoveryTimeout,
        long maxShardRows,
        DataSize maxShardSize)
{
    this.nodeId = requireNonNull(nodeId, "nodeId is null");
    this.storageService = requireNonNull(storageService, "storageService is null");
    this.backupStore = requireNonNull(backupStore, "backupStore is null");
    this.shardDeltaCodec = requireNonNull(shardDeltaCodec, "shardDeltaCodec is null");
    this.defaultReaderAttributes = requireNonNull(readerAttributes, "readerAttributes is null");

    backupManager = requireNonNull(backgroundBackupManager, "backgroundBackupManager is null");
    this.recoveryManager = requireNonNull(recoveryManager, "recoveryManager is null");
    this.recoveryTimeout = requireNonNull(shardRecoveryTimeout, "shardRecoveryTimeout is null");

    checkArgument(maxShardRows > 0, "maxShardRows must be > 0");
    this.maxShardRows = min(maxShardRows, MAX_ROWS);
    this.maxShardSize = requireNonNull(maxShardSize, "maxShardSize is null");
    this.shardRecorder = requireNonNull(shardRecorder, "shardRecorder is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.deletionExecutor = newFixedThreadPool(deletionThreads, daemonThreadsNamed("raptor-delete-" + connectorId + "-%s"));
}
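// The deletion executor above names its daemon threads via Airlift's daemonThreadsNamed(...).
// Below is a plain-JDK approximation of that helper, not the Airlift implementation; it shows
// why such pool threads carry a readable, numbered name and do not block JVM shutdown.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicLong;

public class DaemonThreadsSketch
{
    public static ThreadFactory daemonThreadsNamed(String nameFormat)
    {
        AtomicLong counter = new AtomicLong();
        return runnable -> {
            Thread thread = new Thread(runnable, String.format(nameFormat, counter.getAndIncrement()));
            thread.setDaemon(true); // daemon threads are abandoned at JVM exit
            return thread;
        };
    }

    public static void main(String[] args)
            throws Exception
    {
        ExecutorService executor = Executors.newFixedThreadPool(2, daemonThreadsNamed("raptor-delete-demo-%s"));
        executor.submit(() -> System.out.println(Thread.currentThread().getName())).get();
        executor.shutdown();
    }
}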
private static ConnectorFactory createRaptorConnectorFactory(String cacheDir, NodeManager nodeManager)
{
    try {
        File dataDir = new File(cacheDir);
        File databaseDir = new File(dataDir, "db");

        Map<String, String> config = ImmutableMap.<String, String>builder()
                .put("metadata.db.type", "h2")
                .put("metadata.db.filename", databaseDir.getAbsolutePath())
                .put("storage.data-directory", dataDir.getAbsolutePath())
                .put("storage.compress", "false")
                .build();

        TypeManager typeManager = new TypeRegistry();
        BlockEncodingSerde blockEncodingSerde = new BlockEncodingManager(typeManager);

        RaptorPlugin plugin = new RaptorPlugin();
        plugin.setOptionalConfig(config);
        plugin.setNodeManager(nodeManager);
        plugin.setBlockEncodingSerde(blockEncodingSerde);
        plugin.setTypeManager(typeManager);

        return getOnlyElement(plugin.getServices(ConnectorFactory.class));
    }
    catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
@Override
public Connector create(String connectorId, Map<String, String> config)
{
    requireNonNull(connectorId, "connectorId is null");
    requireNonNull(config, "config is null");

    try {
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new RedisConnectorModule(),
                binder -> {
                    binder.bind(RedisConnectorId.class).toInstance(new RedisConnectorId(connectorId));
                    binder.bind(TypeManager.class).toInstance(typeManager);
                    binder.bind(NodeManager.class).toInstance(nodeManager);

                    if (tableDescriptionSupplier.isPresent()) {
                        binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, RedisTableDescription>>>() {}).toInstance(tableDescriptionSupplier.get());
                    }
                    else {
                        binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, RedisTableDescription>>>() {})
                                .to(RedisTableDescriptionSupplier.class)
                                .in(Scopes.SINGLETON);
                    }
                });

        Injector injector = app.strictConfig()
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .setOptionalConfigurationProperties(optionalConfig)
                .initialize();

        return injector.getInstance(RedisConnector.class);
    }
    catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
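// The connector factories above all follow the same pattern: per-connector instances
// (ids, TypeManager, NodeManager) enter the Guice graph through a lambda Module, while
// Airlift's Bootstrap layers configuration binding and lifecycle on top. A minimal sketch
// of just the instance-binding pattern with plain Guice; names here are illustrative.
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;

public class InstanceBindingSketch
{
    public static void main(String[] args)
    {
        String connectorId = "demo-connector";

        // Module has a single abstract method, so the binding can be written inline.
        Module module = binder -> binder.bind(String.class).toInstance(connectorId);

        Injector injector = Guice.createInjector(module);
        System.out.println(injector.getInstance(String.class)); // demo-connector
    }
}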
@Inject
public TransactionsSystemTable(TypeManager typeManager, TransactionManager transactionManager)
{
    this.transactionsTable = tableMetadataBuilder(TRANSACTIONS_TABLE_NAME)
            .column("transaction_id", VARCHAR)
            .column("isolation_level", VARCHAR)
            .column("read_only", BOOLEAN)
            .column("auto_commit_context", BOOLEAN)
            .column("create_time", TIMESTAMP)
            .column("idle_time_secs", BIGINT)
            .column("written_catalog", VARCHAR)
            .column("catalogs", typeManager.getParameterizedType(ARRAY, ImmutableList.of(VARCHAR.getTypeSignature()), ImmutableList.of()))
            .build();
    this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
}
@Override
public InternalAggregationFunction specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type keyType = types.get("K");
    Type valueType = types.get("V");
    return generateAggregation(keyType, valueType);
}
@Override
public InternalAggregationFunction specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type keyType = types.get("K");
    Type valueType = types.get("V");
    return generateAggregation(valueType, keyType);
}
@Test
public void testMap()
        throws Exception
{
    TypeManager typeManager = new TypeRegistry();

    Signature signature = new Signature("get", SCALAR, ImmutableList.of(typeParameter("K"), typeParameter("V")), "V", ImmutableList.of("map<K,V>", "K"), false);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("map<bigint,varchar>")), BIGINT), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(typeManager.getType(parseTypeSignature("map<bigint,varchar>")), VARCHAR), true, typeManager));
}
@Inject
public HivePageSourceProvider(
        HiveClientConfig hiveClientConfig,
        HdfsEnvironment hdfsEnvironment,
        Set<HiveRecordCursorProvider> cursorProviders,
        Set<HivePageSourceFactory> pageSourceFactories,
        TypeManager typeManager)
{
    requireNonNull(hiveClientConfig, "hiveClientConfig is null");
    this.hiveStorageTimeZone = hiveClientConfig.getDateTimeZone();
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.cursorProviders = ImmutableSet.copyOf(requireNonNull(cursorProviders, "cursorProviders is null"));
    this.pageSourceFactories = ImmutableSet.copyOf(requireNonNull(pageSourceFactories, "pageSourceFactories is null"));
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
}
@Override
public InternalAggregationFunction specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type keyType = types.get("K");
    Type valueType = BigintType.BIGINT;
    return generateAggregation(keyType, valueType);
}
public static GenericAggregationFunctionFactory fromAggregationDefinition(Class<?> clazz, TypeManager typeManager)
{
    FunctionListBuilder builder = new FunctionListBuilder(typeManager);
    for (InternalAggregationFunction aggregation : new AggregationCompiler(typeManager).generateAggregationFunctions(clazz)) {
        builder.aggregate(aggregation);
    }
    return new GenericAggregationFunctionFactory(builder.getFunctions());
}
@Override
public InternalAggregationFunction specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    Type type = types.get("E");
    MethodHandle compareMethodHandle = functionRegistry
            .getScalarFunctionImplementation(internalOperator(operatorType, BOOLEAN, ImmutableList.of(type, type)))
            .getMethodHandle();
    return generateAggregation(type, compareMethodHandle);
}
public static MetadataManager createTestMetadataManager()
{
    FeaturesConfig featuresConfig = new FeaturesConfig();
    TypeManager typeManager = new TypeRegistry();
    SessionPropertyManager sessionPropertyManager = new SessionPropertyManager();
    BlockEncodingSerde blockEncodingSerde = new BlockEncodingManager(typeManager);
    TransactionManager transactionManager = createTestTransactionManager();
    return new MetadataManager(featuresConfig, typeManager, blockEncodingSerde, sessionPropertyManager, new TablePropertyManager(), transactionManager);
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    if (arity < 2) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "There must be two or more concatenation arguments");
    }

    Class<?> clazz = generateConcat(arity);
    MethodHandle methodHandle = methodHandle(clazz, "concat", Collections.nCopies(arity, Slice.class).toArray(new Class<?>[arity]));
    List<Boolean> nullableParameters = ImmutableList.copyOf(Collections.nCopies(arity, false));

    return new ScalarFunctionImplementation(false, nullableParameters, methodHandle, isDeterministic());
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    checkArgument(arity == 1, "Expected arity to be 1");
    Type fromType = types.get("F");
    Type toType = types.get("T");

    Signature signature = internalOperator(CAST.name(), toType.getTypeSignature(), ImmutableList.of(fromType.getTypeSignature()));
    ScalarFunctionImplementation function = functionRegistry.getScalarFunctionImplementation(signature);
    Class<?> castOperatorClass = generateArrayCast(typeManager, signature, function);
    MethodHandle methodHandle = methodHandle(castOperatorClass, "castArray", ConnectorSession.class, Block.class);
    return new ScalarFunctionImplementation(false, ImmutableList.of(false), methodHandle, isDeterministic());
}
@Override
public ScalarFunctionImplementation specialize(Map<String, Type> types, int arity, TypeManager typeManager, FunctionRegistry functionRegistry)
{
    checkArgument(arity == 1, "Expected arity to be 1");
    Type keyType = types.get("K");
    Type valueType = types.get("V");
    MethodHandle methodHandle = METHOD_HANDLE.bindTo(keyType);
    methodHandle = methodHandle.bindTo(valueType);
    return new ScalarFunctionImplementation(false, ImmutableList.of(false), methodHandle, isDeterministic());
}
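// A tiny sketch of the bindTo() chaining used above: each bindTo pre-binds the handle's
// leading argument, currying it one parameter at a time -- here with Strings instead of
// Presto Types. The "pair" helper is hypothetical and exists only for this example.
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public class BindToSketch
{
    public static String pair(String key, String value)
    {
        return key + "=" + value;
    }

    public static void main(String[] args)
            throws Throwable
    {
        MethodHandle handle = MethodHandles.lookup().findStatic(
                BindToSketch.class, "pair", MethodType.methodType(String.class, String.class, String.class));

        // Curry the two leading arguments, as specialize() does with keyType and valueType.
        MethodHandle bound = handle.bindTo("k").bindTo("v");
        System.out.println((String) bound.invokeExact()); // k=v
    }
}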
public HiveClientModule(String connectorId, HiveMetastore metastore, TypeManager typeManager, PageIndexerFactory pageIndexerFactory)
{
    this.connectorId = connectorId;
    this.metastore = metastore;
    this.typeManager = typeManager;
    this.pageIndexerFactory = pageIndexerFactory;
}
@Test
public void testVarArgs()
        throws Exception
{
    TypeManager typeManager = new TypeRegistry();

    Signature signature = new Signature("foo", SCALAR, ImmutableList.of(typeParameter("T")), "boolean", ImmutableList.of("T"), true);
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(BIGINT), true, typeManager));
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(VARCHAR), true, typeManager));
    assertNotNull(signature.bindTypeParameters(ImmutableList.of(BIGINT, BIGINT), true, typeManager));
    assertNull(signature.bindTypeParameters(ImmutableList.of(VARCHAR, BIGINT), true, typeManager));
}
@Override
public Optional<HiveRecordCursor> createHiveRecordCursor(
        String clientId,
        Configuration configuration,
        ConnectorSession session,
        Path path,
        long start,
        long length,
        Properties schema,
        List<HiveColumnHandle> columns,
        List<HivePartitionKey> partitionKeys,
        TupleDomain<HiveColumnHandle> effectivePredicate,
        DateTimeZone hiveStorageTimeZone,
        TypeManager typeManager)
{
    if (!isDeserializerClass(schema, LazyBinaryColumnarSerDe.class)) {
        return Optional.empty();
    }

    RecordReader<?, ?> recordReader = HiveUtil.createRecordReader(configuration, path, start, length, schema, columns);

    return Optional.<HiveRecordCursor>of(new ColumnarBinaryHiveRecordCursor<>(
            bytesRecordReader(recordReader),
            length,
            schema,
            partitionKeys,
            columns,
            hiveStorageTimeZone,
            typeManager));
}