private CqlResult executeStatement(Client client, String statement) throws ThriftApiExecutionException { ByteBuffer buf = ByteBufferUtil.bytes(statement); try { if (cqlVersion.charAt(0) >= '3') { return client.execute_cql3_query(buf, Compression.NONE, ConsistencyLevel.ONE); } else { return client.execute_cql_query(buf, Compression.NONE); } } catch (Exception e) { getLog().debug(statement); throw new ThriftApiExecutionException(e); } }
/** * get - read a single column * * @throws Exception */ @Test public void get() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); // read a single column String COLUMN_FAMILY = "student"; ColumnPath columnPath = new ColumnPath(COLUMN_FAMILY); // column name String COLUMN = "grad"; columnPath.setColumn(ByteBufferHelper.toByteBuffer(COLUMN)); String ROW_KEY = "Jack"; // key, column_path, consistency_level ColumnOrSuperColumn cos = client.get( ByteBufferHelper.toByteBuffer(ROW_KEY), columnPath, ConsistencyLevel.ONE);// throws NotFoundException if the column is absent Column column = cos.getColumn(); System.out.println(ROW_KEY + ", " + ByteHelper.toString(column.getName()) + ": " + ByteHelper.toString(column.getValue()) + ", " + column.getTimestamp()); // Jack, grad: 5, 1380932164492000 }
public SettingsCommand(Command type, Options options, Count count, Uncertainty uncertainty) { this.type = type; this.tries = Math.max(1, Integer.parseInt(options.retries.value()) + 1); this.ignoreErrors = options.ignoreErrors.setByUser(); this.consistencyLevel = ConsistencyLevel.valueOf(options.consistencyLevel.value().toUpperCase()); if (count != null) { this.count = Long.parseLong(count.count.value()); this.targetUncertainty = -1; this.minimumUncertaintyMeasurements = -1; this.maximumUncertaintyMeasurements = -1; } else { this.count = -1; this.targetUncertainty = Double.parseDouble(uncertainty.uncertainty.value()); this.minimumUncertaintyMeasurements = Integer.parseInt(uncertainty.minMeasurements.value()); this.maximumUncertaintyMeasurements = Integer.parseInt(uncertainty.maxMeasurements.value()); } }
private String getColumnValue(String ks, String cf, String colName, String key, String validator) throws AuthenticationException, AuthorizationException, InvalidRequestException, UnavailableException, TimedOutException, TException, NotFoundException, IOException { Cassandra.Client client = getClient(); client.set_keyspace(ks); ByteBuffer key_user_id = ByteBufferUtil.bytes(key); long timestamp = System.currentTimeMillis(); ColumnPath cp = new ColumnPath(cf); ColumnParent par = new ColumnParent(cf); cp.column = ByteBufferUtil.bytes(colName); // read ColumnOrSuperColumn got = client.get(key_user_id, cp, ConsistencyLevel.ONE); return parseType(validator).getString(got.getColumn().value); }
public SettingsCommand(Command type, Options options, Count count, Uncertainty uncertainty) { this.type = type; this.tries = Math.max(1, Integer.parseInt(options.retries.value()) + 1); this.ignoreErrors = options.ignoreErrors.setByUser(); this.consistencyLevel = ConsistencyLevel.valueOf(options.consistencyLevel.value().toUpperCase()); this.keysAtOnce = Integer.parseInt(options.atOnce.value()); this.add = options.add.get(); if (count != null) { this.count = Long.parseLong(count.count.value()); this.targetUncertainty = -1; this.minimumUncertaintyMeasurements = -1; this.maximumUncertaintyMeasurements = -1; } else { this.count = -1; this.targetUncertainty = Double.parseDouble(uncertainty.uncertainty.value()); this.minimumUncertaintyMeasurements = Integer.parseInt(uncertainty.minMeasurements.value()); this.maximumUncertaintyMeasurements = Integer.parseInt(uncertainty.maxMeasurements.value()); } }
protected Map<String,Object> getRow(String tableName, String[] columnNameList, Object rowKey) { Connection connection = null; try { connection = connectionPool.acquireConnection(); String primaryKeyName = getTablePrimaryKeyName(tableName); String rowKeyString = rowKey.toString(); List<Map<String,Object>> rowList = connection.getRowsByPrimaryKey(tableName, primaryKeyName, rowKeyString, rowKeyString, columnNameList, ConsistencyLevel.ONE); //if (rowList.size() != 1) // throw new StorageException("Row not found: table = \"" + tableName + "\"; key = \"" + rowKeyString + "\""); if (rowList.size() == 0) return null; return rowList.get(0); } finally { if (connection != null) connectionPool.releaseConnection(connection); } }
/** * Constructor when response count has to be calculated and blocked for. */ public ReadCallback(IResponseResolver<T> resolver, ConsistencyLevel consistencyLevel, IReadCommand command, List<InetAddress> endpoints) { this.command = command; this.blockfor = determineBlockFor(consistencyLevel, command.getKeyspace()); this.resolver = resolver; this.startTime = System.currentTimeMillis(); boolean repair = randomlyReadRepair(); this.endpoints = repair || resolver instanceof RowRepairResolver ? endpoints : preferredEndpoints(endpoints); if (logger.isDebugEnabled()) logger.debug(String.format("Blockfor/repair is %s/%s; setting up requests to %s", blockfor, repair, StringUtils.join(this.endpoints, ","))); }
public int determineBlockFor(ConsistencyLevel consistencyLevel, String table) { switch (consistencyLevel) { case ONE: case ANY: return 1; case TWO: return 2; case THREE: return 3; case QUORUM: return (Table.open(table).getReplicationStrategy().getReplicationFactor() / 2) + 1; case ALL: return Table.open(table).getReplicationStrategy().getReplicationFactor(); default: throw new UnsupportedOperationException("invalid consistency level: " + consistencyLevel); } }
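For reference, the quorum arithmetic above works out to (RF / 2) + 1 live replicas, e.g. RF=3 blocks for 2 and RF=5 blocks for 3. A minimal standalone sketch of the same switch, with the replication factor passed in directly instead of being looked up through Table and the replication strategy:
static int blockFor(ConsistencyLevel consistencyLevel, int replicationFactor) {
    switch (consistencyLevel) {
        case ONE:
        case ANY:
            return 1;
        case TWO:
            return 2;
        case THREE:
            return 3;
        case QUORUM:
            return (replicationFactor / 2) + 1; // RF=3 -> 2, RF=5 -> 3
        case ALL:
            return replicationFactor;
        default:
            throw new UnsupportedOperationException("invalid consistency level: " + consistencyLevel);
    }
}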
/** * Perform the write of a mutation given a WritePerformer. * Gather the list of write endpoints, apply locally and/or forward the mutation to * said write endpoints (delegated to the actual WritePerformer) and wait for the * responses based on consistency level. * * @param mutation the mutation to be applied * @param consistency_level the consistency level for the write operation * @param performer the WritePerformer in charge of applying the mutation * given the list of write endpoints (either standardWritePerformer for * standard writes or counterWritePerformer for counter writes). */ public static IWriteResponseHandler performWrite(IMutation mutation, ConsistencyLevel consistency_level, String localDataCenter, WritePerformer performer) throws UnavailableException, TimeoutException, IOException { String table = mutation.getTable(); AbstractReplicationStrategy rs = Table.open(table).getReplicationStrategy(); Collection<InetAddress> writeEndpoints = getWriteEndpoints(table, mutation.key()); Multimap<InetAddress, InetAddress> hintedEndpoints = rs.getHintedEndpoints(writeEndpoints); IWriteResponseHandler responseHandler = rs.getWriteResponseHandler(writeEndpoints, hintedEndpoints, consistency_level); // exit early if we can't fulfill the CL at this time responseHandler.assureSufficientLiveNodes(); performer.apply(mutation, hintedEndpoints, responseHandler, localDataCenter, consistency_level); return responseHandler; }
/** * Performs the actual reading of a row out of the StorageService, fetching * a specific set of column names from a given column family. */ public static List<Row> read(List<ReadCommand> commands, ConsistencyLevel consistency_level) throws IOException, UnavailableException, TimeoutException, InvalidRequestException { if (StorageService.instance.isBootstrapMode()) throw new UnavailableException(); long startTime = System.nanoTime(); List<Row> rows; try { rows = fetchRows(commands, consistency_level); } finally { readStats.addNano(System.nanoTime() - startTime); } return rows; }
public void assureSufficientLiveNodes() throws UnavailableException { if (consistencyLevel == ConsistencyLevel.ANY) { // ensure there are blockFor distinct living nodes (hints are ok). if (hintedEndpoints.keySet().size() < responses.get()) throw new UnavailableException(); return; } // count destinations that are part of the desired target set int liveNodes = 0; for (InetAddress destination : hintedEndpoints.keySet()) { if (writeEndpoints.contains(destination)) liveNodes++; } if (liveNodes < responses.get()) { throw new UnavailableException(); } }
public List<String> QueryOneMinute(String min) { List<String> userList = new ArrayList<String>(); try { List<ColumnOrSuperColumn> results = client.get_slice(Utils .toByteBuffer(min), columnParent, predicate, ConsistencyLevel.ONE); for (ColumnOrSuperColumn cc : results) { SuperColumn superColumn = cc.getSuper_column(); List<Column> list = superColumn.getColumns(); for (Column c : list) { String columnName = new String(c.getName(), "UTF-8"); if (columnName.equals("username")) { String value = new String(c.getValue(), "UTF-8"); if (!userList.contains(value)) { userList.add(value); } } } } } catch (Exception e) { System.out.println(e); } return userList; }
public List<String> QueryOneMinute(String min){ List<String> bookList = new ArrayList<String>(); try { List<ColumnOrSuperColumn> results = client.get_slice(Utils .toByteBuffer(min), columnParent, predicate, ConsistencyLevel.ONE); for (ColumnOrSuperColumn cc : results) { SuperColumn superColumn = cc.getSuper_column(); List<Column> list = superColumn.getColumns(); for (Column c : list) { String columnName = new String(c.getName(), "UTF-8"); if (columnName.equals("bookno")) { String value = new String(c.getValue(), "UTF-8"); if (!bookList.contains(value)) { bookList.add(value); } } } } } catch (Exception e) { System.out.println(e); } return bookList; }
/** * Insert into the "test" column family */ public boolean InsertTest(String key, int val) { System.out.println("------------InsertTest--------------"); try { ColumnParent parent = new ColumnParent("test"); long timeStamp = System.currentTimeMillis(); Column idColumnPageid = new Column(); idColumnPageid.setName(this.cassandraUtil.toByteBuffer("signal")); idColumnPageid.setValue(this.cassandraUtil.toByteBuffer(String.valueOf(val))); idColumnPageid.setTimestamp(timeStamp); client.insert(this.cassandraUtil.toByteBuffer(key), parent, idColumnPageid, ConsistencyLevel.ONE); } catch (Exception e) { e.printStackTrace(); } return true; }
/** * Insert into the "test" column family */ public boolean InsertTest2(int key, String val) { System.out.println("------------InsertTest--------------"); try { ColumnParent parent = new ColumnParent("test"); long timeStamp = System.currentTimeMillis(); Column idColumnPageid = new Column(); idColumnPageid.setName(this.cassandraUtil.toByteBuffer("content")); idColumnPageid.setValue(this.cassandraUtil.toByteBuffer(val)); idColumnPageid.setTimestamp(timeStamp); client.insert(this.cassandraUtil.toByteBuffer(String.valueOf(key)), parent, idColumnPageid, ConsistencyLevel.ONE); } catch (Exception e) { e.printStackTrace(); } return true; }
/** * Insert into the "test" column family */ public boolean InsertTest3(int userid, int key, String val) { System.out.println("------------InsertTest--------------"); try { ColumnParent parent = new ColumnParent("test"); long timeStamp = System.currentTimeMillis(); Column idColumnPageid = new Column(); idColumnPageid.setName(this.cassandraUtil.toByteBuffer(String.valueOf(key))); idColumnPageid.setValue(this.cassandraUtil.toByteBuffer(val)); idColumnPageid.setTimestamp(timeStamp); client.insert(this.cassandraUtil.toByteBuffer(String.valueOf(userid)), parent, idColumnPageid, ConsistencyLevel.ONE); } catch (Exception e) { e.printStackTrace(); } return true; }
/** * Insert into CF -- "UserChapter" * @param userid the row key, e.g. '119115' * @param signal the chapter's signal (column name), e.g. '23' * @param content the chapter's value, e.g. '"07018720_1.0.0.0.0"' */ public boolean InsertIntoUserChapter(int userid, int signal, String content){ // System.out.println("------------InsertIntoUserChapter--------------"); try { ColumnParent parent = new ColumnParent("UserChapter"); long timeStamp = System.currentTimeMillis(); Column userChapter = new Column(); userChapter.setName(this.cassandraUtil.toByteBuffer(String.valueOf(signal))); userChapter.setValue(this.cassandraUtil.toByteBuffer(content)); userChapter.setTimestamp(timeStamp); client.insert(this.cassandraUtil.toByteBuffer(String.valueOf(userid)), parent, userChapter, ConsistencyLevel.QUORUM); return true; } catch (Exception e) { e.printStackTrace(); return false; } }
/** * Insert into CF -- "SignalChapterMap" * @param signal the chapter's signal (row key), e.g. '23' * @param content the chapter's value, e.g. '"07018720_1.0.0.0.0"' */ public boolean InsertIntoSignalChapterMap(int signal, String content){ // System.out.println("------------InsertIntoSignalChapterMap--------------"); try { ColumnParent parent = new ColumnParent("SignalChapterMap"); long timeStamp = System.currentTimeMillis(); Column signalChapter = new Column(); signalChapter.setName(this.cassandraUtil.toByteBuffer("content")); signalChapter.setValue(this.cassandraUtil.toByteBuffer(content)); signalChapter.setTimestamp(timeStamp); client.insert(this.cassandraUtil.toByteBuffer(String.valueOf(signal)), parent, signalChapter, ConsistencyLevel.QUORUM); return true; } catch (Exception e) { e.printStackTrace(); return false; } }
/** * Update 'maxid' in CF 'ChapterSignalMap' */ public boolean UpdateMaxid(String signal) { try{ ColumnParent parent = new ColumnParent("ChapterSignalMap"); long timeStamp = System.currentTimeMillis(); Column signalCol = new Column(); signalCol.setName(this.cassandraUtil.toByteBuffer("maxid")); signalCol.setValue(this.cassandraUtil.toByteBuffer(signal)); signalCol.setTimestamp(timeStamp); this.client.insert(this.cassandraUtil.toByteBuffer("maxid"), parent, signalCol, ConsistencyLevel.QUORUM); }catch(Exception e) { e.printStackTrace(); } return true; }
public CassandraBase(String instance, Map<String, String> config) { // The configuration has the following: // keyspace // columnParent // readConsistency (optional) // writeConsistency (optional) messageCatalog = new Messages("Cassandra"); // The connection to Cassandra comes from RaptureCASSANDRA.cfg // and has host and port cassHost = MultiValueConfigLoader.getConfig("CASSANDRA-" + instance + ".host"); if (cassHost == null) { cassHost = "localhost"; } String cassPortString = MultiValueConfigLoader.getConfig("CASSANDRA-" + instance + ".port"); if (cassPortString == null) { cassPortString = "9160"; } cassPort = Integer.valueOf(cassPortString); keySpace = config.get(CassandraConstants.KEYSPACECFG); columnFamily = config.get(CassandraConstants.CFCFG); try { getConnection(); } catch (TTransportException e) { throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_INTERNAL_ERROR, messageCatalog.getMessage("DbCommsError"), e); } ensureAllPresent(); if (config.containsKey(CassandraConstants.READ_CONSISTENCY)) { readCL = ConsistencyLevel.valueOf(config.get(CassandraConstants.READ_CONSISTENCY)); } if (config.containsKey(CassandraConstants.WRITE_CONSISTENCY)) { writeCL = ConsistencyLevel.valueOf(config.get(CassandraConstants.WRITE_CONSISTENCY)); } }
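A sketch of the kind of configuration map this constructor expects. The keys come from CassandraConstants as referenced above; the values shown (keyspace, column family, consistency level names) are placeholders, not values taken from the source:
// Passed as the second constructor argument; the instance name selects
// CASSANDRA-<instance>.host / CASSANDRA-<instance>.port in MultiValueConfigLoader.
Map<String, String> config = new HashMap<String, String>();
config.put(CassandraConstants.KEYSPACECFG, "myKeyspace");      // keyspace name (placeholder)
config.put(CassandraConstants.CFCFG, "myColumnFamily");        // column family (placeholder)
config.put(CassandraConstants.READ_CONSISTENCY, "QUORUM");     // optional; must match a ConsistencyLevel name
config.put(CassandraConstants.WRITE_CONSISTENCY, "QUORUM");    // optional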
public void executeCql3Script(String script) { try { for (String cqlStatement : toCqlStatements(script)) { if (StringUtils.isNotBlank(cqlStatement)) { cqlStatement += ";"; _log.info("executing cql3 statement: " + cqlStatement); _client.execute_cql3_query(ByteBuffer.wrap(cqlStatement.getBytes("UTF-8")), Compression.NONE, ConsistencyLevel.LOCAL_QUORUM); } } } catch (Exception e) { throw Throwables.propagate(e); } }
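The toCqlStatements helper is not shown in this snippet; a minimal hypothetical sketch of what such a splitter could look like, assuming statements are separated by semicolons (the real implementation may also strip comments and handle quoted strings):
// Hypothetical sketch only; executeCql3Script above skips blank entries and re-appends the ";".
private static List<String> toCqlStatements(String script) {
    List<String> statements = new ArrayList<String>();
    for (String part : script.split(";")) {
        statements.add(part.trim());
    }
    return statements;
}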
private String getColumnValue(String ks, String cf, String colName, String key, String validator) throws AuthenticationException, AuthorizationException, InvalidRequestException, UnavailableException, TimedOutException, TException, NotFoundException, IOException { Cassandra.Client client = getClient(); client.set_keyspace(ks); ByteBuffer key_user_id = ByteBufferUtil.bytes(key); ColumnPath cp = new ColumnPath(cf); cp.column = ByteBufferUtil.bytes(colName); // read ColumnOrSuperColumn got = client.get(key_user_id, cp, ConsistencyLevel.ONE); return parseType(validator).getString(got.getColumn().value); }
/** * get - read all columns * * @throws Exception */ @Test public void get2() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); // read all columns String COLUMN_FAMILY = "student"; ColumnParent columnParent = new ColumnParent(COLUMN_FAMILY); // predicate SlicePredicate predicate = new SlicePredicate(); // slice range SliceRange sliceRange = new SliceRange(); // sliceRange.setStart(ByteBufferHelper.toByteBuffer(new byte[0]));// start sliceRange.setStart(new byte[0]);// start sliceRange.setFinish(new byte[0]);// finish sliceRange.setCount(100);// max number of columns // predicate.setSlice_range(sliceRange); String ROW_KEY = "Jack"; // results // key, column_parent, predicate, consistency_level List<ColumnOrSuperColumn> results = client.get_slice( ByteBufferHelper.toByteBuffer(ROW_KEY), columnParent, predicate, ConsistencyLevel.ONE); for (ColumnOrSuperColumn cos : results) { Column column = cos.getColumn(); System.out.println(ROW_KEY + ", " + ByteHelper.toString(column.getName()) + ": " + ByteHelper.toString(column.getValue()) + ", " + column.getTimestamp()); // Jack, art, 87, 1380788003220 // Jack, grad, 5, 1380788003203 // Jack, math, 97, 1380788003214 } }
/** * remove * * Note: the deletion timestamp must be comparable with the timestamps used for inserts * (microseconds since epoch); if a much larger clock such as System.nanoTime() is used, the * tombstone shadows later inserts and the row cannot be re-created until the tombstone is gone. * * @throws Exception */ @Test public void remove() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); // String COLUMN_FAMILY = "student"; ColumnPath columnPath = new ColumnPath(COLUMN_FAMILY); // String ROW_KEY = "Jack"; // key, column_path, timestamp (microseconds), consistency_level client.remove(ByteBufferHelper.toByteBuffer(ROW_KEY), columnPath, System.currentTimeMillis() * 1000, ConsistencyLevel.ONE); }
/** * update * * @throws Exception */ @Test public void update() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); List<Mutation> mutations = new LinkedList<Mutation>(); // <columnFamily,mutations> Map<String, List<Mutation>> columnFamilyMutations = new HashMap<String, List<Mutation>>();// keyMutations // <rowKey,keyMutations> Map<ByteBuffer, Map<String, List<Mutation>>> rowKeyMutations = new HashMap<ByteBuffer, Map<String, List<Mutation>>>(); // timestamp in microseconds long timestamp = System.currentTimeMillis() * 1000; Column column = new Column(); column.setName(ByteBufferHelper.toByteBuffer("grad")); column.setValue(ByteBufferHelper.toByteBuffer("9")); column.setTimestamp(timestamp); // ColumnOrSuperColumn cos = new ColumnOrSuperColumn(); cos.setColumn(column); // Mutation mutation = new Mutation(); mutation.setColumn_or_supercolumn(cos); mutations.add(mutation); String COLUMN_FAMILY = "student"; columnFamilyMutations.put(COLUMN_FAMILY, mutations); String ROW_KEY = "Jack"; rowKeyMutations.put(ByteBufferHelper.toByteBuffer(ROW_KEY), columnFamilyMutations); // mutation_map, consistency_level client.batch_mutate(rowKeyMutations, ConsistencyLevel.ONE); }
/** * delete * * @throws Exception */ @Test public void delete() throws Exception { String KEYSPACE = "mock"; client.set_keyspace(KEYSPACE); List<Mutation> mutations = new ArrayList<Mutation>(); // <columnFamily,mutations> Map<String, List<Mutation>> columnFamilyMutations = new HashMap<String, List<Mutation>>();// keyMutations // <rowKey,keyMutations> Map<ByteBuffer, Map<String, List<Mutation>>> rowKeyMutations = new HashMap<ByteBuffer, Map<String, List<Mutation>>>(); // List<ByteBuffer> columns = new ArrayList<ByteBuffer>(); // add as many column names as you want here columns.add(ByteBufferHelper.toByteBuffer("grad")); columns.add(ByteBufferHelper.toByteBuffer("math")); // SlicePredicate predicate = new SlicePredicate(); predicate.setColumn_names(columns); // delete Deletion deletion = new Deletion(); deletion.setPredicate(predicate); // timestamp in microseconds long timestamp = System.currentTimeMillis() * 1000; deletion.setTimestamp(timestamp); Mutation mutation = new Mutation(); mutation.setDeletion(deletion); mutations.add(mutation); String COLUMN_FAMILY = "student"; columnFamilyMutations.put(COLUMN_FAMILY, mutations); String ROW_KEY = "Jack"; rowKeyMutations.put(ByteBufferHelper.toByteBuffer(ROW_KEY), columnFamilyMutations); // mutation_map, consistency_level client.batch_mutate(rowKeyMutations, ConsistencyLevel.ONE); }
public void insertDataToDB(String rowKey, String entity, String category) { Mutator mutator = Pelops.createMutator(_pool); mutator.writeColumns( _colFamily, rowKey, mutator.newColumnList( mutator.newColumn("entity", entity), mutator.newColumn("category", category) ) ); mutator.execute(ConsistencyLevel.ONE); }
public void queryDB(String rowKey) { Selector selector = Pelops.createSelector(_pool); List<Column> columns = selector.getColumnsFromRow(_colFamily, rowKey, false, ConsistencyLevel.ONE); LOG.info("Entity: " + Selector.getColumnStringValue(columns, "entity")); LOG.info("Category: " + Selector.getColumnStringValue(columns, "category")); }
private void getNextBatchOfRows(CassandraConnection conn) throws Exception { // reset the column range (if necessary) if (m_requestedCols == null) { m_sliceRange = m_sliceRange.setStart(ByteBuffer.wrap(new byte[0])); m_sliceRange = m_sliceRange.setFinish(ByteBuffer.wrap(new byte[0])); m_slicePredicate.setSlice_range(m_sliceRange); } // set the key range start to the last key from the last batch of rows m_keyRange.setStart_key(m_cassandraRows.get(m_cassandraRows.size() - 1) .getKey()); m_cassandraRows = conn.getClient().get_range_slices(m_colParent, m_slicePredicate, m_keyRange, ConsistencyLevel.ONE); m_colCount = 0; // key ranges are *inclusive* of the start key - we will have already // processed the first // row in the last batch. Hence start at index 1 of this batch m_rowIndex = 1; if (m_cassandraRows == null || m_cassandraRows.size() <= 1 || m_rowCount == m_sliceRowsMax) { // indicate done m_currentCols = null; m_cassandraRows = null; } else { advanceToNonEmptyRow(); } }
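The inclusive-start-key handling above is the usual Thrift paging idiom. A minimal, hypothetical sketch of the same pattern in isolation; client, columnParent, predicate and the process() handler are assumed to exist, as in the snippet above:
// Hypothetical paging loop over get_range_slices; each batch after the first starts at the
// previous batch's last key, and that repeated row is skipped when processing.
KeyRange keyRange = new KeyRange();
keyRange.setStart_key(new byte[0]);
keyRange.setEnd_key(new byte[0]);
keyRange.setCount(100);
boolean firstBatch = true;
while (true) {
    List<KeySlice> batch = client.get_range_slices(columnParent, predicate, keyRange, ConsistencyLevel.ONE);
    for (int i = firstBatch ? 0 : 1; i < batch.size(); i++) {
        process(batch.get(i)); // hypothetical per-row handler
    }
    if (batch.size() < keyRange.getCount()) {
        break; // fewer rows than requested: end of the range
    }
    keyRange.setStart_key(batch.get(batch.size() - 1).getKey());
    firstBatch = false;
}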
private void insertColumnValue(String parentName, String id, String name, String value, long ts) throws TException, TimedOutException, UnavailableException, InvalidRequestException, UnsupportedEncodingException { ColumnParent parent = new ColumnParent(parentName); Column column = new Column(toByteBuffer(name)); column.setValue(toByteBuffer(value)); column.setTimestamp(ts); getClient().insert(toByteBuffer(id), parent, column, ConsistencyLevel.ONE); }
protected List<Map<String,Object>> getAllRows(String tableName, String[] columnNameList) { Connection connection = null; try { connection = connectionPool.acquireConnection(); String primaryKeyName = getTablePrimaryKeyName(tableName); List<Map<String,Object>> rowList = connection.getRowsByPrimaryKey(tableName, primaryKeyName, null, null, columnNameList, ConsistencyLevel.ONE); return rowList; } finally { if (connection != null) connectionPool.releaseConnection(connection); } }
protected List<Map<String,Object>> executeEqualityQuery(String tableName, String[] columnNameList, String columnName, Comparable<?> columnValue) { Connection connection = null; try { connection = connectionPool.acquireConnection(); String primaryKeyName = getTablePrimaryKeyName(tableName); List<Map<String,Object>> rowList = connection.getRowsByIndexedColumn(tableName, primaryKeyName, columnName, columnValue, columnNameList, ConsistencyLevel.ONE); return rowList; } finally { if (connection != null) connectionPool.releaseConnection(connection); } }
protected void insertRows(String tableName, List<Map<String,Object>> insertRowList) { Connection connection = null; try { connection = connectionPool.acquireConnection(); String primaryKeyName = getTablePrimaryKeyName(tableName); connection.updateRows(tableName, primaryKeyName, insertRowList); connection.commit(ConsistencyLevel.ONE); } finally { if (connection != null) connectionPool.releaseConnection(connection); } }