/**
 * Builds and prepares a token-range COUNT query over the table's partition key:
 * {@code SELECT COUNT(*) FROM ks.table WHERE Token(pk...) > ? AND Token(pk...) <= ?}.
 *
 * @return prepared statement with the configured consistency level applied
 */
private PreparedStatement prepareStatement() {
    List<ColumnMetadata> partkeys = cluster.getMetadata().getKeyspace(keyspaceName).getTable(tableName).getPartitionKey();
    // Build the comma-separated partition-key list once instead of duplicating the
    // loop (the original emitted ", " in one Token() clause and "," in the other).
    StringBuilder keyList = new StringBuilder(partkeys.get(0).getName());
    for (int i = 1; i < partkeys.size(); i++) {
        keyList.append(", ").append(partkeys.get(i).getName());
    }
    String tokenExpr = "Token(" + keyList + ")";
    StringBuilder sb = new StringBuilder();
    sb.append("SELECT COUNT(*) FROM ");
    sb.append(keyspaceName).append(".").append(tableName);
    sb.append(" WHERE ").append(tokenExpr).append(" > ? AND ").append(tokenExpr).append(" <= ?");
    debugPrint("Query: " + sb.toString(), true, 2);
    return session.prepare(sb.toString()).setConsistencyLevel(consistencyLevel);
}
private TableInfo getTableInfo( @Nonnull @NonNull final QualifiedName name, @Nonnull @NonNull final TableMetadata tableMetadata ) { final ImmutableList.Builder<FieldInfo> fieldInfoBuilder = ImmutableList.builder(); // TODO: Ignores clustering, primary key, index, etc columns. We need to rework TableInfo to support for (final ColumnMetadata column : tableMetadata.getColumns()) { final String dataType = column.getType().toString(); fieldInfoBuilder.add( FieldInfo.builder() .name(column.getName()) .sourceType(dataType) .type(this.typeConverter.toMetacatType(dataType)) .build() ); } return TableInfo.builder() .name(QualifiedName.ofTable(name.getCatalogName(), name.getDatabaseName(), tableMetadata.getName())) .fields(fieldInfoBuilder.build()) .build(); }
/**
 * Buckets the column's name by CQL type when the type needs special handling
 * (counter, map, set, list or user-defined type); all other types are ignored.
 */
private void parseForSpecialDataType(final ColumnMetadata aColumnDefinition) {
    final String columnName = aColumnDefinition.getName();
    switch (aColumnDefinition.getType().getName()) {
        case COUNTER:
            counterColumns.add(columnName);
            break;
        case MAP:
            mapColumns.add(columnName);
            break;
        case SET:
            setColumns.add(columnName);
            break;
        case LIST:
            listColumns.add(columnName);
            break;
        case UDT:
            userDefinedTypeColumns.add(columnName);
            break;
        default:
            // Plain scalar types need no special bookkeeping.
            break;
    }
}
/**
 * Creates a column handle for a Cassandra column, resolving its Presto-side
 * Cassandra type and, for parameterized types (lists, sets, maps), the one or
 * two element/key-value type arguments.
 *
 * @throws IllegalArgumentException when the type declares more than two arguments
 */
private CassandraColumnHandle buildColumnHandle(ColumnMetadata columnMeta, boolean partitionKey, boolean clusteringKey, int ordinalPosition, boolean hidden) {
    CassandraType cassandraType = CassandraType.getCassandraType(columnMeta.getType().getName());
    List<CassandraType> typeArguments = null;
    if (cassandraType != null && cassandraType.getTypeArgumentSize() > 0) {
        List<DataType> typeArgs = columnMeta.getType().getTypeArguments();
        int arity = cassandraType.getTypeArgumentSize();
        if (arity == 1) {
            typeArguments = ImmutableList.of(CassandraType.getCassandraType(typeArgs.get(0).getName()));
        } else if (arity == 2) {
            typeArguments = ImmutableList.of(
                    CassandraType.getCassandraType(typeArgs.get(0).getName()),
                    CassandraType.getCassandraType(typeArgs.get(1).getName()));
        } else {
            throw new IllegalArgumentException("Invalid type arguments: " + typeArgs);
        }
    }
    // A column is searchable when a secondary index exists on it.
    boolean indexed = columnMeta.getIndex() != null;
    return new CassandraColumnHandle(connectorId, columnMeta.getName(), ordinalPosition, cassandraType,
            typeArguments, partitionKey, clusteringKey, indexed, hidden);
}
/**
 * Generates an abstract accessor method {@code getAll} whose CQL query filters
 * by the first {@code desiredColumns} primary-key columns (as named bind
 * parameters) and returns a ListenableFuture of entity results.
 *
 * @param table          table metadata supplying the primary key
 * @param entityTable    generated entity class name
 * @param desiredColumns how many leading primary-key columns to filter on
 */
private static MethodSpec generateSpecificGet(TableMetadata table, ClassName entityTable, int desiredColumns) {
    MethodSpec.Builder builder = MethodSpec.methodBuilder("getAll");
    List<ColumnMetadata> columns = table.getPrimaryKey();
    // Build the WHERE clause with a StringBuilder instead of repeated String concatenation.
    StringBuilder query = new StringBuilder(getBaseQuery(table)).append(" WHERE ");
    for (int i = 0; i < desiredColumns; i++) {
        ColumnMetadata column = columns.get(i);
        String name = column.getName();
        if (i != 0) {
            query.append(" AND ");
        }
        query.append(name).append("=:").append(name);
        builder.addParameter(getSpec(column, true));
    }
    return builder.addModifiers(Modifier.ABSTRACT).addModifiers(Modifier.PUBLIC)
        .returns(ParameterizedTypeName.get(ClassName.get(ListenableFuture.class),
            ParameterizedTypeName.get(ClassName.get(Result.class), entityTable)))
        .addAnnotation(AnnotationSpec.builder(Query.class).addMember("value", "$S", query.toString()).build())
        .build();
}
/**
 * Builds a final method parameter for a primary-key column, converting the
 * column name to lowerCamelCase and optionally attaching a {@code @Param}
 * annotation carrying the original column name.
 *
 * @throws IllegalArgumentException for UDT columns, which are unsupported here
 */
private static ParameterSpec getSpec(ColumnMetadata column, boolean addAnnotation) {
    final String name = column.getName();
    if (Udt.instance.isUdt(column.getType())) {
        throw new IllegalArgumentException("We don't currently support UDT primary keys in the query string, field: " + column.getName());
    }
    final String paramName = CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, name);
    final ParameterSpec.Builder param =
            ParameterSpec.builder(EntityGeneratorHelper.getRawType(column.getType()), paramName);
    if (addAnnotation) {
        param.addAnnotation(AnnotationSpec.builder(Param.class).addMember("value", "$S", name).build());
    }
    return param.addModifiers(Modifier.FINAL).build();
}
/**
 * Returns the table's partition-key column names as a comma-separated list.
 * Quoted keyspace/table names keep their case (surrounding quotes stripped);
 * unquoted names are lower-cased to match Cassandra's metadata representation.
 */
private String getPartitionKey(CqlDelimParser cdp, Session session) {
    String keyspace = cdp.getKeyspace();
    String table = cdp.getTable();
    if (keyspace.startsWith("\"") && keyspace.endsWith("\""))
        keyspace = keyspace.replaceAll("\"", "");
    else
        keyspace = keyspace.toLowerCase();
    if (table.startsWith("\"") && table.endsWith("\""))
        table = table.replaceAll("\"", "");
    else
        table = table.toLowerCase();
    List<ColumnMetadata> lcm = session.getCluster().getMetadata()
            .getKeyspace(keyspace).getTable(table).getPartitionKey();
    // Join with a StringBuilder instead of O(n^2) String concatenation.
    StringBuilder partitionKey = new StringBuilder(lcm.get(0).getName());
    for (int i = 1; i < lcm.size(); i++) {
        partitionKey.append(",").append(lcm.get(i).getName());
    }
    return partitionKey.toString();
}
/**
 * Adds all columns of column family.
 * @param table Teiid table
 * @param columnFamily Column family
 */
private void addColumnsToTable(MetadataFactory factory, Table table, TableMetadata columnFamily) {
    for (ColumnMetadata column : columnFamily.getColumns()) {
        com.datastax.driver.core.DataType.Name cqlTypeName = column.getType().getName();
        // Default mapping: CQL type -> Java class -> Teiid runtime type name.
        String type = TypeFacility.getDataTypeName(
                TypeFacility.getRuntimeType(column.getType().asJavaClass()));
        // Overrides: timestamps map to TIMESTAMP; custom/blob types map to VARBINARY.
        if (cqlTypeName.equals(com.datastax.driver.core.DataType.Name.TIMESTAMP)) {
            type = TypeFacility.RUNTIME_NAMES.TIMESTAMP;
        } else if (cqlTypeName.equals(com.datastax.driver.core.DataType.Name.CUSTOM)
                || cqlTypeName.equals(com.datastax.driver.core.DataType.Name.BLOB)) {
            type = TypeFacility.RUNTIME_NAMES.VARBINARY;
        }
        Column c = factory.addColumn(column.getName(), type, table);
        c.setUpdatable(true);
        // Only indexed columns are searchable.
        c.setSearchType(column.getIndex() != null ? SearchType.Searchable : SearchType.Unsearchable);
    }
}
public List<Column> getSchema(String keySpace, String tableName) { Metadata m = session.getCluster().getMetadata(); KeyspaceMetadata km = m.getKeyspace(keySpace); if (km == null) return null; TableMetadata tm = km.getTable(tableName); if (tm == null) return null; // build schema List<Column> columns = new LinkedList<Column>(); for (ColumnMetadata cm : tm.getColumns()) { if (!meta.contains(cm.getName())) columns.add(Column.newBuilder().setName(cm.getName()) .setType(toSimbaType(cm.getType().toString())).build()); } return columns; }
private void fetchKeys() { // get CF meta data TableMetadata tableMetadata = session.getCluster() .getMetadata() .getKeyspace(Metadata.quote(keyspace)) .getTable(Metadata.quote(cfName)); if (tableMetadata == null) { throw new RuntimeException("No table metadata found for " + keyspace + "." + cfName); } //Here we assume that tableMetadata.getPartitionKey() always //returns the list of columns in order of component_index for (ColumnMetadata partitionKey : tableMetadata.getPartitionKey()) { partitionKeys.add(partitionKey.getName()); } }
/**
 * Builds a fully parameterized INSERT for every column of the table, with an
 * optional USING clause: each "TTL" option adds a ttl bind marker, any other
 * option string adds a timestamp bind marker.
 */
private static String insertQuery(TableMetadata tableMetadata, String... options) {
    Insert insert = QueryBuilder.insertInto(tableMetadata);
    if (options != null) {
        Insert.Options using = insert.using();
        for (String option : options) {
            using.and("TTL".equals(option)
                    ? QueryBuilder.ttl(QueryBuilder.bindMarker())
                    : QueryBuilder.timestamp(QueryBuilder.bindMarker()));
        }
    }
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        insert.value(column.getName(), QueryBuilder.bindMarker());
    }
    return insert.toString();
}
/**
 * Verifies every additional filter targets an existing, indexed column.
 *
 * @throws DeepNoSuchFieldException   when the column does not exist on the table
 * @throws DeepIndexNotFoundException when the column has no secondary index
 */
private void validateAdditionalFilters(TableMetadata tableMetadata) {
    for (Map.Entry<String, Serializable> filter : additionalFilters.entrySet()) {
        final String columnName = filter.getKey();
        /* check if there's an index specified on the provided column */
        final ColumnMetadata columnMetadata = tableMetadata.getColumn(columnName);
        if (columnMetadata == null) {
            throw new DeepNoSuchFieldException("No column with name " + columnName + " has been found on " +
                    "table " + this.catalog + "." + this.table);
        }
        if (columnMetadata.getIndex() == null) {
            throw new DeepIndexNotFoundException("No index has been found on column " + columnMetadata.getName()
                    + " on table " + this.catalog + "." + this.table);
        }
    }
}
/**
 * Deletes the row identified by the entity's primary-key values, outside any
 * transaction. Prepared statements are cached per CQL string and reused.
 *
 * @return whether the delete was applied
 * @throws ODataServiceFault propagated from value binding
 */
private boolean deleteEntityTableNonTransactional(String tableName, ODataEntry entity) throws ODataServiceFault {
    List<ColumnMetadata> tableColumns = this.session.getCluster().getMetadata().getKeyspace(this.keyspace)
            .getTable(tableName).getColumns();
    List<String> primaryKeyNames = this.primaryKeys.get(tableName);
    String query = createDeleteCQL(tableName);
    List<Object> boundValues = new ArrayList<>();
    // Bind only primary-key columns, in the order the entity lists its names.
    for (String columnName : entity.getNames()) {
        if (primaryKeyNames.contains(columnName)) {
            bindParams(columnName, entity.getValue(columnName), boundValues, tableColumns);
        }
    }
    // Reuse a cached PreparedStatement for this query when one exists.
    PreparedStatement statement = this.preparedStatementMap.get(query);
    if (statement == null) {
        statement = this.session.prepare(query);
        this.preparedStatementMap.put(query, statement);
    }
    return this.session.execute(statement.bind(boundValues.toArray())).wasApplied();
}
/**
 * Builds per-table column metadata: table name -> (column name -> DataColumn).
 * Primary-key columns are created with allowNull=false, all others with true.
 */
private Map<String, Map<String, DataColumn>> generateMetaData() {
    Map<String, Map<String, DataColumn>> metadata = new HashMap<>();
    for (String tableName : this.tableList) {
        Map<String, DataColumn> dataColumnMap = new HashMap<>();
        for (ColumnMetadata columnMetadata : this.session.getCluster().getMetadata().getKeyspace(this.keyspace)
                .getTable(tableName).getColumns()) {
            // Single construction path: the two original branches differed only in the boolean.
            boolean isPrimaryKey = this.primaryKeys.get(tableName).contains(columnMetadata.getName());
            DataColumn dataColumn = new DataColumn(columnMetadata.getName(),
                    getDataType(columnMetadata.getType().getName()), !isPrimaryKey);
            dataColumnMap.put(dataColumn.getColumnName(), dataColumn);
        }
        metadata.put(tableName, dataColumnMap);
    }
    return metadata;
}
/**
 * Captures the table state needed for event parsing: the full column list,
 * the primary-key column names, and the parsing options.
 */
public CassandraTable(
        final Session session,
        final TableMetadata table,
        final ConsistencyLevel consistencyLevel,
        final String bodyColumn,
        final boolean ignoreCase) {
    this.session = session;
    this.table = table;
    this.consistencyLevel = consistencyLevel;
    this.bodyColumn = bodyColumn;
    this.ignoreCase = ignoreCase;
    this.columns = table.getColumns();
    this.totalColumns = this.columns.size();
    this.primaryKeys = new ArrayList<String>();
    for (final ColumnMetadata pkColumn : table.getPrimaryKey()) {
        primaryKeys.add(pkColumn.getName());
    }
}
public Map<String, Object> parse(final Event event) { // translate to lowercase for ignorecase option final Map<String, String> headers = ignoreCase ? processHeadersIgnoreCase(event.getHeaders()) : event.getHeaders(); final int maxValues = Math.min(headers.size(), totalColumns); final Map<String, Object> result = new HashMap<String, Object>(maxValues); for (final ColumnMetadata column : columns) { final String columnName = ignoreCase ? column.getName().toLowerCase() : column.getName(); if (headers.containsKey(columnName) && !columnName.equals(bodyColumn)) { result.put(columnName, parseValue(column.getType(), headers.get(columnName))); } else if (columnName.equals(bodyColumn)) { result.put(columnName, parseValue(column.getType(), new String(event.getBody(), Charsets.UTF_8))); } } return result; }
/**
 * Stubs {@code tableMetadata} as "my_keyspace.my_table" with an int "id"
 * primary-key column and a text "text_col" column.
 */
private void mockTableMetadata() {
    final ColumnMetadata idCol = mock(ColumnMetadata.class);
    when(idCol.getName()).thenReturn("id");
    when(idCol.getType()).thenReturn(DataType.cint());

    final ColumnMetadata textCol = mock(ColumnMetadata.class);
    when(textCol.getName()).thenReturn("text_col");
    when(textCol.getType()).thenReturn(DataType.text());

    final KeyspaceMetadata keyspace = mock(KeyspaceMetadata.class);
    when(keyspace.getName()).thenReturn("my_keyspace");

    when(tableMetadata.getName()).thenReturn("my_table");
    when(tableMetadata.getColumns()).thenReturn(ImmutableList.of(idCol, textCol));
    when(tableMetadata.getKeyspace()).thenReturn(keyspace);
    when(tableMetadata.getPrimaryKey()).thenReturn(ImmutableList.of(idCol));
}
/**
 * Validates that the bookkeeping table has the expected schema: a text primary
 * key named "key" and a timestamp column named "executed".
 *
 * @throws IllegalStateException when any expectation is violated
 */
private static void ensureTableSchema(TableMetadata tableMetadata) throws IllegalStateException {
    ColumnMetadata primaryKey = tableMetadata.getPrimaryKey().get(0);
    if (!primaryKey.getName().equals("key")) {
        throw new IllegalStateException(String.format("The name of primary key in table [%s] should be 'key'", TABLE_NAME));
    }
    // Compare types with equals() rather than reference identity; identity only
    // happens to work because the driver caches primitive DataType instances.
    if (!DataType.text().equals(primaryKey.getType())) {
        throw new IllegalStateException(String.format("Primary key in table [%s] should have type 'text'", TABLE_NAME));
    }
    ColumnMetadata executedColumn = tableMetadata.getColumn("executed");
    if (executedColumn == null) {
        throw new IllegalStateException(String.format("Cannot find column 'executed' in table [%s]", TABLE_NAME));
    }
    if (!DataType.timestamp().equals(executedColumn.getType())) {
        throw new IllegalStateException(String.format("Column 'executed' in table [%s] should have type 'timestamp'", TABLE_NAME));
    }
}
/**
 * Builds the main schema from the configured table definitions and, for tables
 * also present in Cassandra's metadata, marks their primary-key columns and
 * records the native CQL type name.
 */
@Override
protected Schema getMainSchema() throws MetaModelException {
    final MutableSchema theSchema = new MutableSchema(getMainSchemaName());
    for (final SimpleTableDef tableDef : tableDefs) {
        final MutableTable table = tableDef.toTable().setSchema(theSchema);
        final TableMetadata cassandraTable = cassandraCluster.getMetadata().getKeyspace(keySpaceName).getTable(table
                .getName());
        if (cassandraTable != null) {
            final List<ColumnMetadata> primaryKeys = cassandraTable.getPrimaryKey();
            for (ColumnMetadata primaryKey : primaryKeys) {
                final MutableColumn column = (MutableColumn) table.getColumnByName(primaryKey.getName());
                // Guard both calls: previously setNativeType() ran outside the null
                // check and threw NPE when the PK column was missing from the table def.
                if (column != null) {
                    column.setPrimaryKey(true);
                    column.setNativeType(primaryKey.getType().getName().name());
                }
            }
        }
        theSchema.addTable(table);
    }
    return theSchema;
}
/**
 * Prepares the family of range/CRUD statements for a clustered table (or
 * materialized view) named {@code tableName} in the mapper's keyspace. The
 * primary key minus its last clustering column forms the "id" equality prefix;
 * the last clustering column is the index column used for range bounds.
 *
 * @throws IllegalArgumentException when neither a table nor a materialized view exists
 */
public ClusteredLoader(Mapper<Data> mapper, Class<Data> dataClass, Class<CKey> ckeyClass, String tableName) {
    MappingManager manager = mapper.getManager();
    session = manager.getSession();
    this.mapper = manager.mapper(dataClass);
    String keyspace = mapper.getTableMetadata().getKeyspace().getName();
    MaterializedViewMetadata mv = mapper.getTableMetadata().getKeyspace().getMaterializedView(tableName);
    AbstractTableMetadata tableMetadata = mv == null
            ? mapper.getTableMetadata().getKeyspace().getTable(tableName)
            : mv;
    if (tableMetadata == null) {
        // Message fix: original concatenation produced "...<name>found" without a space.
        throw new IllegalArgumentException("No table or materialized view " + keyspace + "." + tableName + " found");
    }
    List<ColumnMetadata> primaryKey = tableMetadata.getPrimaryKey();
    // Equality prefix over every PK column except the last clustering column.
    String pkEq = exceptLast(primaryKey).stream()
            .map(c -> c.getName() + "=?")
            .collect(Collectors.joining(" and "));
    List<ColumnMetadata> clusteringColumns = tableMetadata.getClusteringColumns();
    String orderByDesc = orderBy(clusteringColumns, "DESC");
    String orderByAsc = orderBy(clusteringColumns, "ASC");
    String indexColumn = clusteringColumns.get(clusteringColumns.size() - 1).getName();
    indexAccessor = CassandraUtil.findProperty(dataClass, ckeyClass, indexColumn);
    selectUnbounded = prepare(String.format("select * from %s.%s where " + pkEq + " order by %s limit ?", keyspace, tableName, orderByDesc));
    selectBefore = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? order by %s limit ?", keyspace, tableName, indexColumn, orderByDesc));
    selectAfter = prepare(String.format("select * from %s.%s where " + pkEq + " and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, orderByDesc));
    // Rejoined the string literal that was broken across source lines.
    selectBeforeAfter = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, indexColumn, orderByDesc));
    selectUnboundedAsc = prepare(String.format("select * from %s.%s where " + pkEq + " order by %s limit ?", keyspace, tableName, orderByAsc));
    selectBeforeAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? order by %s limit ?", keyspace, tableName, indexColumn, orderByAsc));
    selectAfterAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, orderByAsc));
    selectBeforeAfterAsc = prepare(String.format("select * from %s.%s where " + pkEq + " and %s < ? and %s > ? order by %s limit ?", keyspace, tableName, indexColumn, indexColumn, orderByAsc));
    selectByIdKey = prepare(String.format("select * from %s.%s where " + pkEq + " and %s=?", keyspace, tableName, indexColumn));
    deleteByIdKey = prepare(String.format("delete from %s.%s where " + pkEq + " and %s=?", keyspace, tableName, indexColumn));
    selectAllById = prepare(String.format("select * from %s.%s where " + pkEq, keyspace, tableName));
    deleteAllById = prepare(String.format("delete from %s.%s where " + pkEq, keyspace, tableName));
}
/**
 * Registers all non-primary-key column definitions: records collection
 * columns, stores types for regular (non-PK) columns, and delegates
 * special-type bookkeeping for every column.
 */
private void registerNonPKColumnDefinitions(final TableMetadata tableMetadata) {
    for (ColumnMetadata columnDefinition : tableMetadata.getColumns()) {
        final String columnName = columnDefinition.getName();
        if (columnDefinition.getType().isCollection()) {
            collectionColumns.add(columnName);
        }
        if (!pkColumnNames.contains(columnName)) {
            columnDefinitions.put(columnName, columnDefinition.getType());
            regularColumns.add(columnName);
        }
        parseForSpecialDataType(columnDefinition);
    }
}
/**
 * Registers every primary-key column: stores its type, records its name as a
 * PK column, and delegates special-type bookkeeping.
 */
private void registerPrimaryKeyColumnDefinitions(final TableMetadata tableMetadata) {
    for (ColumnMetadata pkColumn : tableMetadata.getPrimaryKey()) {
        final String columnName = pkColumn.getName();
        columnDefinitions.put(columnName, pkColumn.getType());
        pkColumnNames.add(columnName);
        parseForSpecialDataType(pkColumn);
    }
}
/**
 * Executes the split's CQL over its token range and records the table's
 * partition-key column names as bound columns.
 */
public RowIterator() {
    // Parameterized AbstractType<?> instead of the raw type, matching the
    // otherwise-identical sibling RowIterator in this codebase.
    AbstractType<?> type = partitioner.getTokenValidator();
    ResultSet rs = session.execute(cqlQuery,
            type.compose(type.fromString(split.getStartToken())),
            type.compose(type.fromString(split.getEndToken())));
    for (ColumnMetadata meta : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey()) {
        partitionBoundColumns.put(meta.getName(), Boolean.TRUE);
    }
    rows = rs.iterator();
}
/**
 * Returns the column names of an existing table.
 *
 * @throws IllegalStateException when the table does not exist
 */
public List<String> getColumnNames(String keySpace, String tableName) {
    checkState(tableExists(keySpace, tableName), "table %s.%s does not exist", keySpace, tableName);
    final TableMetadata tableMetadata =
            session.getCluster().getMetadata().getKeyspace(keySpace).getTable(tableName);
    return tableMetadata.getColumns()
            .stream()
            .map(ColumnMetadata::getName)
            .collect(toList());
}
/**
 * Add fields to the class spec: one field plus getter/setter per table column,
 * annotating partition- and clustering-key columns with their key position,
 * then a generated toString over all camel-cased field names.
 */
private static void addFields(TypeSpec.Builder builder, TableMetadata tableMetadata, String className) {
    Map<String, Integer> partitionKeys = getMapOfKeys(tableMetadata.getPartitionKey());
    Map<String, Integer> clusteringKeys = getMapOfKeys(tableMetadata.getClusteringColumns());
    List<String> fieldNames = new ArrayList<>();
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        final String name = column.getName();
        final DataType type = column.getType();
        List<AnnotationSpec> keyAnnotations = new ArrayList<>();
        Integer partitionPosition = partitionKeys.get(name);
        if (partitionPosition != null) {
            keyAnnotations.add(EntityGeneratorHelper.getPartitionKeyAnnotation(partitionPosition));
        }
        Integer clusteringPosition = clusteringKeys.get(name);
        if (clusteringPosition != null) {
            keyAnnotations.add(EntityGeneratorHelper.getClusteringAnnotation(clusteringPosition));
        }
        builder.addField(EntityGeneratorHelper.getFieldSpec(name, type, false, keyAnnotations));
        builder.addMethod(EntityGeneratorHelper.getSetter(name, type));
        builder.addMethod(EntityGeneratorHelper.getGetter(name, type));
        fieldNames.add(CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, name));
    }
    builder.addMethod(EntityGeneratorHelper.getToString(fieldNames, className));
}
/**
 * Map out the keys and their positions.
 * @param keys the keys to map
 * @return map of the keys at their position in the key chain
 */
private static Map<String, Integer> getMapOfKeys(List<ColumnMetadata> keys) {
    Map<String, Integer> positionByName = new HashMap<>();
    for (int position = 0; position < keys.size(); position++) {
        positionByName.put(keys.get(position).getName(), position);
    }
    return positionByName;
}
/**
 * Generates the {@code convertQueryString(RoutingContext)} method: it reads one
 * HTTP query parameter per primary-key column, converts each to the column's
 * Cassandra type inside a try block, and on failure answers BAD_REQUEST and
 * returns null. UDT primary-key columns are rejected at generation time.
 */
private static MethodSpec getQueryConverter(TableMetadata table) {
    List<ColumnMetadata> keys = table.getPrimaryKey();
    int numKeys = keys.size();
    TypeMap typeToConverters = TypeMap.create();
    MethodSpec.Builder builder = MethodSpec.methodBuilder("convertQueryString")
            .addJavadoc("Convert query params to their Cassandra type.\n")
            .addParameter(RoutingContext.class, "context", Modifier.FINAL)
            .addModifiers(Modifier.PUBLIC)
            .returns(Object[].class)
            .addStatement("$T request = context.request()", HttpServerRequest.class)
            .addCode("// if query params aren't valid a HttpResponseStatus.BAD_REQUEST will be sent with the missing field\n")
            .addStatement("$T values = new Object[$L]", Object[].class, keys.size());
    builder.beginControlFlow("try");
    for (int i = 0; i < numKeys; i++) {
        ColumnMetadata column = keys.get(i);
        if (column.getType().equals(DataType.text())) {
            // Text parameters need no conversion; read them straight off the request.
            builder.addStatement("values[$L] = request.getParam($S)", i, column.getName());
        } else if (Udt.instance.isUdt(column.getType())) {
            throw new IllegalArgumentException("We don't currently support UDT primary keys in the query string, field: " + column.getName());
        } else {
            // Emit the registered string-to-type converter expression for this column's raw type.
            String type = EntityGeneratorHelper.getRawType(column.getType()).simpleName();
            builder.addCode("values[$L] = ", i)
                    .addCode(typeToConverters.getTypeConverter(type, CodeBlock.builder().add("request.getParam($S)", column.getName()).build()))
                    .addStatement("");
        }
    }
    // Any conversion failure responds 400 and yields null so callers can bail out.
    return builder.nextControlFlow("catch (Exception ex)")
            .addStatement("$T.processErrorResponse(ex.getMessage(), context.response(), $T.BAD_REQUEST.code())", HttpHelper.class, HttpResponseStatus.class)
            .addStatement("return null")
            .endControlFlow()
            .addStatement("return values").build();
}
/**
 * Kicks off DAL generation.
 * @param tables the cassandra table meta data
 * @throws IOException if write to file fails
 */
public static void generate(Collection<TableMetadata> tables) throws IOException {
    String namespaceToUse = MetaData.instance.getDalNamespace();
    for (TableMetadata table : tables) {
        // Accessor interface name: UpperCamel table name + "Accessor" suffix.
        String rawName = CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, table.getName());
        String name = rawName + "Accessor";
        ClassName entityTable = ClassName.get(MetaData.instance.getTableNamespace(), rawName);
        TypeSpec.Builder accessorBuilder = TypeSpec.interfaceBuilder(name)
                .addModifiers(Modifier.PUBLIC);
        accessorBuilder.addAnnotation(Accessor.class);
        List<ColumnMetadata> pKeyColumns = table.getPrimaryKey();
        List<ColumnMetadata> partitionKeyColumns = table.getPartitionKey();
        int partitionKeys = partitionKeyColumns.size();
        // if there is only 1 key or there's just the partition key without clustering keys
        if (isSingleValueKeyedTable(table)) {
            accessorBuilder.addMethod(generateAll(table, entityTable));
        } else {
            // if there are clustering keys after the partition key we need to start at the partition key
            int count = partitionKeys > 1 ? partitionKeys : 1;
            // One getter per primary-key prefix, up to (not including) the full key.
            while (count < pKeyColumns.size()) {
                accessorBuilder.addMethod(generateSpecificGet(table, entityTable, count));
                count++;
            }
        }
        accessorBuilder.addJavadoc(GeneratorHelper.getJavaDocHeader("Accessor for Cassandra entity - {@link " + ClassName.get(MetaData.instance.getTableNamespace(), rawName) + "}", MetaData.instance.getUpdateTime()));
        JavaFile javaFile = JavaFile.builder(namespaceToUse, accessorBuilder.build()).build();
        // Write the generated accessor interface to disk.
        Disk.outputFile(javaFile);
    }
}
public static List<List<ParameterSpec>> getParametersForAccessors(TableMetadata table) { List<List<ParameterSpec>> methodParamPermutations = new ArrayList<>(); List<ColumnMetadata> pKeys = table.getPrimaryKey(); int partitionKeys = table.getPartitionKey().size(); int primaryKeys = pKeys.size(); // if there are clustering keys after the partition key we need to start at the partition key int count = partitionKeys > 1 ? partitionKeys : 1; while (count < primaryKeys) { methodParamPermutations.add(getParameters(table, count)); count++; } return methodParamPermutations; }
/**
 * Builds parameter specs for the first {@code desiredColumns} primary-key columns.
 */
private static List<ParameterSpec> getParameters(TableMetadata table, int desiredColumns) {
    List<ColumnMetadata> primaryKey = table.getPrimaryKey();
    List<ParameterSpec> params = new ArrayList<>();
    for (int index = 0; index < desiredColumns; index++) {
        params.add(getSpec(primaryKey.get(index)));
    }
    return params;
}
/** * Adds a primary key from columnFamily to given table. * @param table Teiid table * @param columnFamily */ private void addPrimaryKey(MetadataFactory factory, Table table, TableMetadata columnFamily) { List<ColumnMetadata> primaryKeys = new ArrayList<ColumnMetadata>(); primaryKeys = columnFamily.getPrimaryKey(); List<String> PKNames = new ArrayList<String>(); for (ColumnMetadata columnName : primaryKeys){ PKNames.add(columnName.getName()); table.getColumnByName(columnName.getName()).setSearchType(SearchType.Searchable); } factory.addPrimaryKey("PK_" + columnFamily.getName(), PKNames, table); //$NON-NLS-1$ }
/**
 * Validates the configured field-to-column mappings against the target table.
 *
 * @return the mapped column names that do not exist in the table's schema
 * @throws StageException propagated from cluster creation
 */
private List<String> checkColumnMappings() throws StageException {
    List<String> invalidColumnMappings = new ArrayList<>();
    columnMappings = new TreeMap<>();
    for (CassandraFieldMappingConfig column : conf.columnNames) {
        columnMappings.put(column.columnName, column.field);
    }
    final String[] tableNameParts = conf.qualifiedTableName.split("\\.");
    final String keyspace = tableNameParts[0];
    final String table = tableNameParts[1];
    try (Cluster validationCluster = getCluster()) {
        final KeyspaceMetadata keyspaceMetadata = validationCluster.getMetadata().getKeyspace(keyspace);
        final TableMetadata tableMetadata = keyspaceMetadata.getTable(table);
        // Stream instead of Guava's lazy Lists.transform + anonymous Function; the
        // list is only used for contains() lookups, so an eager copy is equivalent
        // and consistent with the stream pipeline below.
        final List<String> columns = tableMetadata.getColumns()
                .stream()
                .map(ColumnMetadata::getName)
                .collect(Collectors.toList());
        invalidColumnMappings.addAll(columnMappings.keySet()
                .stream()
                .filter(columnName -> !columns.contains(columnName))
                .collect(Collectors.toList())
        );
    }
    return invalidColumnMappings;
}
/**
 * Runs the split's range query over [start, end] tokens and marks each
 * partition-key column of the table as a bound column.
 */
public RowIterator() {
    AbstractType<?> tokenType = partitioner.getTokenValidator();
    Object startToken = tokenType.compose(tokenType.fromString(split.getStartToken()));
    Object endToken = tokenType.compose(tokenType.fromString(split.getEndToken()));
    ResultSet resultSet = session.execute(cqlQuery, startToken, endToken);
    for (ColumnMetadata partitionColumn
            : cluster.getMetadata().getKeyspace(quote(keyspace)).getTable(quote(cfName)).getPartitionKey()) {
        partitionBoundColumns.put(partitionColumn.getName(), Boolean.TRUE);
    }
    rows = resultSet.iterator();
}
/**
 * We need to specify the columns by name because we need to add token(partition_keys) in order to count
 * partitions. So if the user specifies '*' then replace it with a list of all columns.
 */
private static String sanitizeColumns(String columns, TableMetadata tableMetadata) {
    if (!columns.equals("*"))
        return columns;
    // Collectors.joining avoids materializing an intermediate List just to join it.
    return tableMetadata.getColumns().stream()
            .map(ColumnMetadata::getName)
            .collect(Collectors.joining(", "));
}
private String buildQuery(TokenRange tokenRange) { Token start = tokenRange.getStart(); Token end = tokenRange.getEnd(); List<String> pkColumns = tableMetadata.getPartitionKey().stream().map(ColumnMetadata::getName).collect(Collectors.toList()); String tokenStatement = String.format("token(%s)", String.join(", ", pkColumns)); StringBuilder ret = new StringBuilder(); ret.append("SELECT "); ret.append(tokenStatement); // add the token(pk) statement so that we can count partitions ret.append(", "); ret.append(columns); ret.append(" FROM "); ret.append(tableMetadata.getName()); if (start != null || end != null) ret.append(" WHERE "); if (start != null) { ret.append(tokenStatement); ret.append(" > "); ret.append(start.toString()); } if (start != null && end != null) ret.append(" AND "); if (end != null) { ret.append(tokenStatement); ret.append(" <= "); ret.append(end.toString()); } return ret.toString(); }
/**
 * Builds a row mapper covering every column of the table, keyed by its
 * ordinal position in the column list.
 */
private static <T> DatastaxMapper<T> selectMapper(Type target, TableMetadata tableMetadata, DatastaxMapperFactory mapperFactory) {
    final DatastaxMapperBuilder<T> builder = mapperFactory.newBuilder(target);
    int index = 0;
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        builder.addMapping(DatastaxColumnKey.of(column, index));
        index++;
    }
    return builder.mapper();
}
/**
 * Builds a bound-statement mapper for the table's primary-key columns, with
 * bind positions starting at {@code offset}.
 */
private static <K> BoundStatementMapper<K> keySetter(Type keyTarget, TableMetadata tableMetadata, DatastaxMapperFactory mapperFactory, int offset) {
    final SettableDataMapperBuilder<K> builder = mapperFactory.newBuilderFrom(keyTarget);
    int position = offset;
    for (ColumnMetadata keyColumn : tableMetadata.getPrimaryKey()) {
        builder.addColumn(DatastaxColumnKey.of(keyColumn, position));
        position++;
    }
    return new BoundStatementMapper<K>(builder.mapper());
}
/**
 * Builds a bound-statement mapper covering all table columns, with bind
 * positions starting at {@code offset}.
 */
private static <T> BoundStatementMapper<T> insertSetter(Type target, TableMetadata tableMetadata, DatastaxMapperFactory mapperFactory, int offset) {
    final SettableDataMapperBuilder<T> builder = mapperFactory.newBuilderFrom(target);
    int position = offset;
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        builder.addColumn(DatastaxColumnKey.of(column, position));
        position++;
    }
    return new BoundStatementMapper<T>(builder.mapper());
}
/**
 * Collects the table's column names into an immutable set.
 */
private static ImmutableSet<String> loadColumnNames(TableMetadata tableMetadata) {
    final Set<String> names = Sets.newHashSet();
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        names.add(column.getName());
    }
    return ImmutableSet.copyOf(names);
}