/**
 * Builds a converter that flattens a thrift {@link CqlResult} into a raw
 * two-dimensional array of column values: one {@code ByteBuffer[]} per row,
 * one wrapped value per column.
 */
@Override
public Function<CqlResult, ByteBuffer[][]> simpleNativeHandler()
{
    return new Function<CqlResult, ByteBuffer[][]>()
    {
        @Override
        public ByteBuffer[][] apply(CqlResult result)
        {
            ByteBuffer[][] converted = new ByteBuffer[result.getRows().size()][];
            int rowIdx = 0;
            for (CqlRow row : result.getRows())
            {
                ByteBuffer[] cells = new ByteBuffer[row.getColumns().size()];
                for (int c = 0; c < cells.length; c++)
                    cells[c] = ByteBuffer.wrap(row.getColumns().get(c).getValue());
                converted[rowIdx++] = cells;
            }
            return converted;
        }
    };
}
/** * getByCql * * @throws Exception */ @Test public void getByCql() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); // String CQL = "select * from student where KEY='Jack'"; // query, compression CqlResult result = client.execute_cql_query( ByteBufferHelper.toByteBuffer(CQL), Compression.NONE); System.out.println(result); for (CqlRow cqlRow : result.getRows()) { for (Column column : cqlRow.getColumns()) { System.out.println(ByteHelper.toString(cqlRow.getKey()) + ", " + ByteHelper.toString(column.getName()) + ": " + ByteHelper.toString(column.getValue()) + ", " + column.getTimestamp()); // Jack, KEY: Jack, -1 // Jack, art: 87, 1380933848350 // Jack, grad: 5, 1380932164492000 // Jack, math: 97, 1380933848305 } } }
/**
 * Return the decoded key value of a row. Assumes that the supplied row comes
 * from the column family that this meta data represents!!
 *
 * @param row a Cassandra row
 * @return the decoded key value
 * @throws KettleException if a deserializer can't be determined
 */
public Object getKeyValue(CqlRow row) throws KettleException {
    ByteBuffer key = row.bufferForKey();

    // Raw BytesType keys are returned undecoded as byte[]. Note: indexOf > 0
    // matches only a fully-qualified validator name (e.g.
    // "org.apache.cassandra.db.marshal.BytesType"); a bare "BytesType" value
    // would fall through to getColumnValue — presumably m_keyValidator is
    // always fully qualified here (TODO confirm).
    if (m_keyValidator.indexOf("BytesType") > 0) {
        return row.getKey();
    }

    return getColumnValue(key, m_keyValidator);
}
/**
 * Create CFMetaData from thrift {@link CqlRow} that contains columns from schema_columnfamilies.
 *
 * @param cf         CqlRow containing the column family row from schema_columnfamilies.
 * @param columnsRes CqlResult whose rows describe the columns of that column family.
 * @return CFMetaData derived from the supplied schema rows
 */
public static CFMetaData fromThriftCqlRow(CqlRow cf, CqlResult columnsRes)
{
    UntypedResultSet.Row cfRow = new UntypedResultSet.Row(convertThriftCqlRow(cf));

    List<Map<String, ByteBuffer>> cols = new ArrayList<>(columnsRes.rows.size());
    for (CqlRow row : columnsRes.rows)
        cols.add(convertThriftCqlRow(row));
    UntypedResultSet colsRow = UntypedResultSet.create(cols);

    return fromSchemaNoTriggers(cfRow, colsRow);
}
/** Converts a thrift {@link CqlRow} into a map of UTF-8 column name to raw value. */
private static Map<String, ByteBuffer> convertThriftCqlRow(CqlRow row)
{
    Map<String, ByteBuffer> converted = new HashMap<>();
    for (org.apache.cassandra.thrift.Column column : row.getColumns())
    {
        String name = UTF8Type.instance.getString(column.bufferForName());
        converted.put(name, column.value);
    }
    return converted;
}
/**
 * Create CFMetaData from thrift {@link CqlRow} that contains columns from schema_columnfamilies.
 *
 * @param row CqlRow containing columns from schema_columnfamilies.
 * @return CFMetaData derived from CqlRow
 */
public static CFMetaData fromThriftCqlRow(CqlRow row)
{
    Map<String, ByteBuffer> columns = new HashMap<>();
    try
    {
        for (org.apache.cassandra.thrift.Column column : row.getColumns())
        {
            String name = ByteBufferUtil.string(column.bufferForName());
            columns.put(name, column.value);
        }
    }
    catch (CharacterCodingException ignore)
    {
        // Deliberate best-effort: a column whose name fails UTF-8 decoding is
        // skipped rather than aborting the conversion.
    }
    UntypedResultSet.Row cql3row = new UntypedResultSet.Row(columns);
    return fromSchemaNoColumnsNoTriggers(cql3row);
}
/**
 * List pending token relocations for all nodes.
 *
 * @return map from each live host to its pending rows in system.range_xfers
 * @throws ShuffleError
 */
private Map<String, List<CqlRow>> listRelocations() throws ShuffleError
{
    final String cqlQuery = "SELECT token_bytes,requested_at FROM system.range_xfers";
    Map<String, List<CqlRow>> relocations = new HashMap<String, List<CqlRow>>();
    for (String host : getLiveNodes())
    {
        CqlResult response = executeCqlQuery(host, thriftPort, thriftFramed, cqlQuery);
        relocations.put(host, response.getRows());
    }
    return relocations;
}
/**
 * Get maximum key from CqlRow list
 * @param rows list of the CqlRow objects; must be non-empty (rows.get(0) is read)
 * @return maximum key value of the list
 */
private int getMaxKey(List<CqlRow> rows)
{
    // Seed with the first key, then fold the rest with Math.max.
    int max = ByteBufferUtil.toInt(rows.get(0).key);
    for (CqlRow row : rows)
        max = Math.max(max, ByteBufferUtil.toInt(row.key));
    return max;
}
/** Logs each row of the result: the row key followed by every column's name and value. */
private void printRows(CqlResult result)
{
    String rule = "-----------------------------------------------";
    for (CqlRow cqlRow : result.getRows())
    {
        getLog().info("Row key: " + keyValidatorVal.getString(cqlRow.key));
        getLog().info(rule);
        for (Column col : cqlRow.getColumns())
        {
            getLog().info(" name: " + comparatorVal.getString(col.name));
            getLog().info(" value: " + defaultValidatorVal.getString(col.value));
            getLog().info(rule);
        }
    }
}
/** * Converts a cassandra row to a Kettle row * * @param metaData meta data on the cassandra column family being read from * @param cassandraRow a row from the column family * @param outputFormatMap a Map of output field names to indexes in the * outgoing Kettle row structure * @return a Kettle row * @throws KettleException if a problem occurs */ public Object[] cassandraRowToKettle(CassandraColumnMetaData metaData, CqlRow cassandraRow, Map<String, Integer> outputFormatMap) throws KettleException { Object[] outputRowData = RowDataUtil .allocateRowData(m_outputRowMeta.size()); Object key = metaData.getKeyValue(cassandraRow); if (key == null) { throw new KettleException("Unable to obtain a key value for the row!"); } String keyName = metaData.getKeyName(); int keyIndex = m_outputRowMeta.indexOfValue(keyName); if (keyIndex < 0) { throw new KettleException("Unable to find the key field name '" + keyName + "' in the output row meta data!"); } outputRowData[keyIndex] = key; // do the columns List<Column> rowColumns = cassandraRow.getColumns(); for (Column aCol : rowColumns) { String colName = metaData.getColumnName(aCol); Integer outputIndex = outputFormatMap.get(colName); if (outputIndex != null) { Object colValue = metaData.getColumnValue(aCol); outputRowData[outputIndex.intValue()] = colValue; } } return outputRowData; }
public CqlResult toThriftResult() { String UTF8 = "UTF8Type"; CqlMetadata schema = new CqlMetadata(new HashMap<ByteBuffer, String>(), new HashMap<ByteBuffer, String>(), // The 2 following ones shouldn't be needed in CQL3 UTF8, UTF8); for (ColumnSpecification name : metadata.names) { ByteBuffer colName = ByteBufferUtil.bytes(name.toString()); schema.name_types.put(colName, UTF8); AbstractType<?> normalizedType = name.type instanceof ReversedType ? ((ReversedType)name.type).baseType : name.type; schema.value_types.put(colName, normalizedType.toString()); } List<CqlRow> cqlRows = new ArrayList<CqlRow>(rows.size()); for (List<ByteBuffer> row : rows) { List<Column> thriftCols = new ArrayList<Column>(metadata.names.size()); for (int i = 0; i < metadata.names.size(); i++) { Column col = new Column(ByteBufferUtil.bytes(metadata.names.get(i).toString())); col.setValue(row.get(i)); thriftCols.add(col); } // The key of CqlRow shoudn't be needed in CQL3 cqlRows.add(new CqlRow(ByteBufferUtil.EMPTY_BYTE_BUFFER, thriftCols)); } CqlResult res = new CqlResult(CqlResultType.ROWS); res.setRows(cqlRows).setSchema(schema); return res; }