@Override
public Number getKey() throws InvalidDataAccessApiUsageException, DataRetrievalFailureException {
    if (this.keyList.size() == 0) {
        return null;
    }
    if (this.keyList.size() > 1 || this.keyList.get(0).size() > 1) {
        throw new InvalidDataAccessApiUsageException(
                "The getKey method should only be used when a single key is returned. " +
                "The current key entry contains multiple keys: " + this.keyList);
    }
    Iterator<Object> keyIter = this.keyList.get(0).values().iterator();
    if (keyIter.hasNext()) {
        Object key = keyIter.next();
        if (!(key instanceof Number)) {
            throw new DataRetrievalFailureException(
                    "The generated key is not of a supported numeric type. " +
                    "Unable to cast [" + (key != null ? key.getClass().getName() : null) +
                    "] to [" + Number.class.getName() + "]");
        }
        return (Number) key;
    }
    else {
        throw new DataRetrievalFailureException("Unable to retrieve the generated key. " +
                "Check that the table has an identity column enabled.");
    }
}
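// Minimal usage sketch for getKey() above, assuming a configured Spring JdbcTemplate and a table
// with a single numeric identity column; the table name, column names, and values are illustrative.
KeyHolder keyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(con -> {
    PreparedStatement ps = con.prepareStatement(
            "INSERT INTO person (name) VALUES (?)", new String[] {"id"});
    ps.setString(1, "Alice");
    return ps;
}, keyHolder);
// Returns the single generated key, or throws DataRetrievalFailureException if it is non-numeric.
Number generatedId = keyHolder.getKey();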
private String executeSubQuery(Junction junction, List<Criterion> criteria) {
    List<String> indices = getIndexNames(junction, entityPersister);
    if (indices.isEmpty()) {
        throw new DataRetrievalFailureException("Unsupported Redis query");
    }
    if (indices.size() == 1) {
        return indices.get(0);
    }
    final String[] keyArray = indices.toArray(new String[indices.size()]);
    String finalKey;
    if (junction instanceof Conjunction) {
        finalKey = formulateConjunctionKey(indices);
        template.sinterstore(finalKey, keyArray);
    } else {
        finalKey = formulateDisjunctionKey(indices);
        template.sunionstore(finalKey, keyArray);
    }
    // since the keys used for queries are temporary we set Redis to kill them after a while
    // template.expire(finalKey, 1000);
    return finalKey;
}
@Test
public void singleKeyNonNumeric() {
    kh.getKeyList().addAll(singletonList(singletonMap("key", "1")));

    exception.expect(DataRetrievalFailureException.class);
    exception.expectMessage(startsWith("The generated key is not of a supported numeric type."));
    kh.getKey().intValue();
}
private VistaDataChunk fetchPatientDemographics(String vistaId, String pid, boolean isIcn) {
    Timer.Context timer = metricRegistry.timer(MetricRegistry.name("vpr.fetch.patient")).time();
    try {
        Map rpcArg = new HashMap();
        rpcArg.put("patientId", (isIcn ? ";" + pid : pid));
        rpcArg.put("domain", "patient");
        rpcArg.put("extractSchema", EXTRACT_SCHEMA);
        RpcResponse response = synchronizationRpcTemplate.execute(
                VISTA_RPC_BROKER_SCHEME + "://" + vistaId + VPR_GET_VISTA_DATA_JSON_RPC_URI, rpcArg);
        JsonNode json = jsonExtractor.extractData(response);
        JsonNode patientJsonNode = json.path("data").path("items").path(0);
        if (patientJsonNode.isNull())
            throw new DataRetrievalFailureException("missing 'data.items[0]' node in JSON RPC response");
        VistaDataChunk patientChunk = VistaDataChunk.createVistaDataChunk(vistaId, response.getRequestUri(),
                patientJsonNode, "patient", 0, 1, null,
                VistaDataChunk.getProcessorParams(vistaId, pid, isIcn));
        patientChunk.getParams().put(SyncMessageConstants.DIVISION, response.getDivision());
        patientChunk.getParams().put(SyncMessageConstants.DIVISION_NAME, response.getDivisionName());
        if (!isIcn)
            patientChunk.setLocalPatientId(pid);
        return patientChunk;
    } catch (RuntimeException e) {
        throw e;
    } finally {
        timer.stop();
    }
}
public VistaDataChunk fetchOneByUid(String vistaId, String pid, String uid) {
    Timer.Context timer = metricRegistry.timer(MetricRegistry.name("vpr.fetch.patient")).time();
    String domain = UidUtils.getCollectionNameFromUid(uid);
    PatientDemographics pt = patientDao.findByPid(pid);
    try {
        Map rpcArg = new HashMap();
        rpcArg.put("uid", uid);
        RpcResponse response = synchronizationRpcTemplate.execute(
                VISTA_RPC_BROKER_SCHEME + "://" + vistaId + VPR_GET_VISTA_DATA_JSON_RPC_URI, rpcArg);
        JsonNode json = jsonExtractor.extractData(response);
        JsonNode jsonNode = json.path("data").path("items").path(0);
        if (jsonNode.isNull())
            throw new DataRetrievalFailureException("missing 'data.items[0]' node in JSON RPC response");
        VistaDataChunk chunk = VistaDataChunk.createVistaDataChunk(vistaId, response.getRequestUri(),
                jsonNode, domain, 0, 1, pt);
        return chunk;
    } catch (RuntimeException e) {
        throw e;
    } finally {
        timer.stop();
    }
}
public static List<VistaDataChunk> createVistaDataChunks(String vistaId, String rpcUri, JsonNode jsonResponse,
        String domain, PatientDemographics pt, Map processorParams) {
    JsonNode itemsNode = jsonResponse.path("data").path("items");
    if (itemsNode.isNull())
        throw new DataRetrievalFailureException("missing 'data.items' node in JSON RPC response");
    List<VistaDataChunk> chunks = new ArrayList<VistaDataChunk>(itemsNode.size());
    for (int i = 0; i < itemsNode.size(); i++) {
        JsonNode item = itemsNode.get(i);
        if (vistaId == null) {
            vistaId = UidUtils.getSystemIdFromPatientUid(item.path("uid").asText());
        }
        if (processorParams == null) {
            processorParams = getProcessorParams(vistaId, pt.getPid(), pt.getIcn() != null);
        }
        chunks.add(createVistaDataChunk(vistaId, rpcUri, item, domain, i, itemsNode.size(), pt, processorParams));
    }
    return chunks;
}
private <T> List<T> createListFromJsonCResponse(Class<T> type, JsonNode jsonc) {
    if (!Map.class.isAssignableFrom(type) && !IPOMObject.class.isAssignableFrom(type))
        throw new IllegalArgumentException("[Assertion failed] - type must be of type " + Map.class
                + " or of type " + IPOMObject.class);
    if (jsonc == null) {
        throw new DataRetrievalFailureException("Unable to fetch all " + type.getName() + "s from "
                + VPR_GET_OPERATIONAL_DATA_RPC + " because response JSON was null");
    }
    JsonNode dataNode = jsonc.path("data");
    if (dataNode.isNull()) {
        String message = jsonc.path("error").path("message").textValue();
        throw new DataRetrievalFailureException("Unable to fetch all " + type.getName() + "s from "
                + VPR_GET_OPERATIONAL_DATA_RPC + ": " + message);
    }
    JsonNode itemsNode = jsonc.path("data").path("items");
    List<T> ret = (List<T>) (Map.class.isAssignableFrom(type)
            ? createListOfMaps(type.asSubclass(Map.class), itemsNode)
            : createList(type.asSubclass(IPOMObject.class), itemsNode));
    return ret;
}
@Test
public void testCountUnknownCollection() throws Exception {
    Map<String, Object> mockFooCount = new HashMap<String, Object>();
    mockFooCount.put("topic", "bar");
    mockFooCount.put("count", 23);
    when(mockJdsTemplate.getForJsonC("/data/all/count/collection"))
            .thenReturn(JsonCCollection.create(singletonList(mockFooCount)));
    try {
        dao.count(Foo.class);
        fail("expected " + DataRetrievalFailureException.class);
    } catch (DataRetrievalFailureException e) {
        // NOOP
    }
    verify(mockJdsTemplate).getForJsonC("/data/all/count/collection");
}
public Object getCache(final Cache cache, final String cacheName) {
    Element element = null;
    try {
        element = cache.get(cacheName);
    } catch (CacheException cacheException) {
        throw new DataRetrievalFailureException("Cache failure: " + cacheException.getMessage());
    }
    if (logger.isTraceEnabled()) {
        logger.trace("Cache hit: " + (element != null) + "; Cache name: " + cacheName
                + "; Name: " + cache.getName() + ", size: " + cache.getSize());
    }
    if (element == null) {
        return null;
    } else {
        return element.getValue();
    }
}
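// Hedged usage sketch for getCache(...) above, assuming an Ehcache 2.x CacheManager; the cache
// name "methodCache", the owner object cacheHelper, the key, and loadExpensiveValue() are illustrative only.
Cache cache = cacheManager.getCache("methodCache");
Object cached = cacheHelper.getCache(cache, "someKey"); // null signals a cache miss
if (cached == null) {
    Object value = loadExpensiveValue();
    cache.put(new Element("someKey", value));
}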
@Override
@Transactional(readOnly = true)
public List<LocationUpdate> findLocations(String deviceId, Date startDate, Date endDate) {
    logger.info("findLocations:" + deviceId + "," + startDate + "," + endDate);
    DeviceInfo device = deviceInfoRepository.findByDeviceId(deviceId);
    if (device == null) {
        throw new DataRetrievalFailureException("No DeviceInfo found for deviceId: " + deviceId);
    }
    List<LocationUpdate> result = new ArrayList<LocationUpdate>();
    Iterable<LocationUpdate> resultSet = locationUpdateRepository
            .findAll(locationUpdate.device.eq(device).and(locationUpdate.locTime.between(startDate, endDate)));
    for (LocationUpdate loc : resultSet) {
        result.add(loc);
    }
    return result;
}
/**
 * Convenience method for parsing the users.xml file.
 *
 * <p>Only one thread at a time should call this method, so that the users.xml file is
 * parsed and the <code>m_users</code> instance variable is populated consistently.</p>
 */
private void parseUsers() throws DataRetrievalFailureException {
    final HashMap<String, OnmsUser> users = new HashMap<String, OnmsUser>();
    try {
        for (final OnmsUser user : m_userManager.getOnmsUserList()) {
            users.put(user.getUsername(), user);
        }
    } catch (final Throwable t) {
        throw new DataRetrievalFailureException("Unable to get user list.", t);
    }
    log().debug("Loaded the users.xml file with " + users.size() + " users");
    m_usersLastModified = m_userManager.getLastModified();
    m_userFileSize = m_userManager.getFileSize();
    m_users = users;
}
public Ticket get(String ticketId) {
    try {
        Properties props = getProperties();
        MyQuickBaseClient qdb = createClient(getUserName(props), getPassword(props), getUrl(props));
        String dbId = qdb.findDbByName(getApplicationName(props));
        HashMap<String, String> record = qdb.getRecordInfo(dbId, ticketId);
        Ticket ticket = new Ticket();
        ticket.setId(ticketId);
        ticket.setModificationTimestamp(record.get(getModificationTimeStampFile(props)));
        ticket.setSummary(record.get(getSummaryField(props)));
        ticket.setDetails(record.get(getDetailsField(props)));
        ticket.setState(getTicketStateValue(record.get(getStateField(props)), props));
        return ticket;
    } catch (Throwable e) {
        throw new DataRetrievalFailureException("Failed to retrieve ticket " + ticketId
                + " from QuickBase: " + e.getMessage(), e);
    }
}
private void scheduleBackgroundInitTask() {
    ReadyRunnable interfaceScheduler = new ReadyRunnable() {

        public boolean isReady() {
            return true;
        }

        public void run() {
            try {
                scheduleExistingInterfaces();
            } catch (DataRetrievalFailureException sqlE) {
                log().error("start: Failed to schedule existing interfaces", sqlE);
            } finally {
                setSchedulingCompleted(true);
            }
        }
    };

    m_scheduler.schedule(interfaceScheduler, 0);
}
@Nullable
@Override
public DataAccessException translateExceptionIfPossible(RuntimeException exception) {
    Assert.notNull(exception, "Exception must not be null!");
    if (exception instanceof DataAccessException) {
        return (DataAccessException) exception;
    }
    if (exception instanceof NoSuchElementException || exception instanceof IndexOutOfBoundsException
            || exception instanceof IllegalStateException) {
        return new DataRetrievalFailureException(exception.getMessage(), exception);
    }
    if (exception.getClass().getName().startsWith("java")) {
        return new UncategorizedKeyValueException(exception.getMessage(), exception);
    }
    return null;
}
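// Hedged sketch of how an exception translator like the one above is typically consulted;
// the translator variable is illustrative, and a null return means "not translatable here".
RuntimeException raw = new NoSuchElementException("no value present");
DataAccessException translated = translator.translateExceptionIfPossible(raw);
if (translated != null) {
    throw translated; // e.g. DataRetrievalFailureException wrapping the NoSuchElementException
}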
public Stage stageById(long id) {
    String key = cacheKeyForStageById(id);
    Stage stage = (Stage) goCache.get(key);
    if (stage == null) {
        synchronized (key) {
            stage = (Stage) goCache.get(key);
            if (stage == null) {
                stage = (Stage) getSqlMapClientTemplate().queryForObject("getStageById", id);
                if (stage == null) {
                    throw new DataRetrievalFailureException("Unable to load related stage data for id " + id);
                }
                goCache.put(key, stage);
            }
        }
    }
    return cloner.deepClone(stage);
}
public Map getMethodMapFromCache(String mapName) {
    Element element = null;
    try {
        element = cache.get(mapName);
    } catch (CacheException cacheException) {
        throw new DataRetrievalFailureException("Cache failure: " + cacheException.getMessage());
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Cache hit: " + (element != null) + "; mapName: " + mapName);
    }
    if (element == null) {
        return null;
    } else {
        return (Map) element.getValue();
    }
}
private static List<ClassAttributeMap> getClassAttributeMapListFromCache(String userOrGroupName) {
    Element element = null;
    try {
        element = cache.get(userOrGroupName);
    } catch (CacheException cacheException) {
        throw new DataRetrievalFailureException("Cache failure: " + cacheException.getMessage());
    }
    if (element == null) {
        return null;
    } else {
        return (List<ClassAttributeMap>) element.getValue();
    }
}
protected List<? extends T> doDelete(Iterable<? extends T> entities) {
    try {
        int count = Iterables.size(entities);
        List<T> result = Lists.newArrayListWithExpectedSize(count);
        String cql = cqlGen.buildDeleteStatement(count);
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
        for (T entity : entities) {
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(entity);
            for (String column : spec.getKeyColumns()) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            result.add(entity);
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return result;
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
protected List<T> doFindAll(Iterable<ID> ids) {
    try {
        int count = Iterables.size(ids);
        List<T> result = Lists.newArrayListWithExpectedSize(count);
        for (ID id : ids) {
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
            List<String> columnsSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
            String cql = cqlGen.buildFindAllStatement(columnsSet);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            for (String column : columnsSet) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> cqlResult = opResult.getResult();
            result.addAll(spec.map(cqlResult.getRows()));
        }
        return result;
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
protected <S extends T> List<S> doSave(Iterable<S> entities) {
    try {
        int count = Iterables.size(entities);
        List<S> result = Lists.newArrayListWithCapacity(count);
        String cql = cqlGen.buildSaveStatement(count);
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
        for (S entity : entities) {
            List<ByteBuffer> serializedEntity = spec.map(entity);
            for (ByteBuffer buf : serializedEntity) {
                preparedStatement = preparedStatement.withValue(buf);
            }
            result.add(entity);
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return result;
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
protected List<T> doFindAll(ID restrict, Sort sort) {
    try {
        Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(restrict);
        List<String> keysSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
        String keysCql = cqlGen.buildLimitedFindAllKeysStatement(keysSet, sort, 0);
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(keysCql);
        for (String column : keysSet) {
            preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
        }
        OperationResult<CqlResult<String, String>> keysResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", keysResult.getAttemptsCount(),
                keysResult.getLatency(TimeUnit.MILLISECONDS));
        CqlResult<String, String> cqlKeysResult = keysResult.getResult();
        Rows<String, String> keysSetRows = cqlKeysResult.getRows();
        List<T> keysAsEnts = spec.map(keysSetRows);
        List<ID> keys = spec.getKey(keysAsEnts);
        return findAll(keys);
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
protected long doCount(Predicate predicate) {
    try {
        Map<String, ByteBuffer> serializedValues = Maps.newLinkedHashMap();
        predicate.accept(new PredicateSerializerVisitor(), serializedValues);
        String cql = cqlGen.buildCountStatement(Lists.newArrayList(serializedValues.keySet()));
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
        for (Map.Entry<String, ByteBuffer> entry : serializedValues.entrySet()) {
            preparedStatement = preparedStatement.withValue(entry.getValue());
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return opResult.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count").getLongValue();
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public synchronized void delete(ID id) {
    try {
        String cql = cqlGen.buildDeleteStatement();
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
        Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
        for (String column : spec.getKeyColumns()) {
            preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public synchronized boolean exists(ID id) {
    try {
        String cql = cqlGen.buildExistsStatement();
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
        Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
        for (String column : spec.getKeyColumns()) {
            preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return opResult.getResult().getRows().size() > 0;
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public synchronized <S extends T> S save(S entity) {
    try {
        String cql = cqlGen.buildSaveStatement();
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
        List<ByteBuffer> serializedEntity = spec.map(entity);
        for (ByteBuffer buf : serializedEntity) {
            preparedStatement = preparedStatement.withValue(buf);
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return entity;
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public synchronized long count(ID restrict) {
    try {
        Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(restrict);
        List<String> keysSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
        String cql = cqlGen.buildCountStatement(keysSet);
        PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
        for (String column : keysSet) {
            preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
        }
        OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
        LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                opResult.getLatency(TimeUnit.MILLISECONDS));
        return opResult.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count").getLongValue();
    } catch (ConnectionException e) {
        throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
    }
}
/** {@inheritDoc} */
public void walkMatchingNodes(final String rule, final EntityVisitor visitor) {
    Assert.state(m_nodeDao != null, "property nodeDao cannot be null");
    SortedMap<Integer, String> map;
    try {
        map = getNodeMap(rule);
    } catch (final FilterParseException e) {
        throw new DataRetrievalFailureException("Could not parse rule '" + rule + "': " + e.getLocalizedMessage(), e);
    }
    LogUtils.debugf(this, "FilterDao.walkMatchingNodes(%s, visitor) got %d results", rule, map.size());
    for (final Integer nodeId : map.keySet()) {
        final OnmsNode node = getNodeDao().load(nodeId);
        visitor.visitNode(node);
    }
}
/**
 * Convenience method for parsing the users.xml file.
 *
 * <p>Only one thread at a time should call this method, so that the users.xml file is
 * parsed and the <code>m_users</code> instance variable is populated consistently.</p>
 */
private void parseUsers() throws DataRetrievalFailureException {
    final HashMap<String, OnmsUser> users = new HashMap<String, OnmsUser>();
    try {
        for (final OnmsUser user : m_userManager.getOnmsUserList()) {
            users.put(user.getUsername(), user);
        }
    } catch (final Throwable t) {
        throw new DataRetrievalFailureException("Unable to get user list.", t);
    }
    log().debug("Loaded the users.xml file with " + users.size() + " users");
    m_usersLastModified = m_userManager.getLastModified();
    m_users = users;
}
@Override
protected final Record createInputRecord(RecordFactory recordFactory, Object inObject) {
    try {
        return new CommAreaRecord(objectToBytes(inObject));
    } catch (IOException ex) {
        throw new DataRetrievalFailureException("I/O exception during bytes conversion", ex);
    }
}
@Override
protected final Object extractOutputData(Record record) throws DataAccessException {
    CommAreaRecord commAreaRecord = (CommAreaRecord) record;
    try {
        return bytesToObject(commAreaRecord.toByteArray());
    } catch (IOException ex) {
        throw new DataRetrievalFailureException("I/O exception during bytes conversion", ex);
    }
}
/**
 * Test if a transaction (begin / rollback) is executed on the LocalTransaction
 * when CciLocalTransactionManager is specified as transaction manager and a
 * non-checked exception is thrown.
 */
@Test
public void testLocalTransactionRollback() throws ResourceException {
    final ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
    Connection connection = mock(Connection.class);
    Interaction interaction = mock(Interaction.class);
    LocalTransaction localTransaction = mock(LocalTransaction.class);
    final Record record = mock(Record.class);
    final InteractionSpec interactionSpec = mock(InteractionSpec.class);

    given(connectionFactory.getConnection()).willReturn(connection);
    given(connection.getLocalTransaction()).willReturn(localTransaction);
    given(connection.createInteraction()).willReturn(interaction);
    given(interaction.execute(interactionSpec, record, record)).willReturn(true);
    given(connection.getLocalTransaction()).willReturn(localTransaction);

    CciLocalTransactionManager tm = new CciLocalTransactionManager();
    tm.setConnectionFactory(connectionFactory);
    TransactionTemplate tt = new TransactionTemplate(tm);

    try {
        tt.execute(new TransactionCallback<Void>() {
            @Override
            public Void doInTransaction(TransactionStatus status) {
                assertTrue("Has thread connection",
                        TransactionSynchronizationManager.hasResource(connectionFactory));
                CciTemplate ct = new CciTemplate(connectionFactory);
                ct.execute(interactionSpec, record, record);
                throw new DataRetrievalFailureException("error");
            }
        });
    } catch (Exception ex) {
        // expected: the DataRetrievalFailureException should trigger a rollback
    }

    verify(localTransaction).begin();
    verify(interaction).close();
    verify(localTransaction).rollback();
    verify(connection).close();
}