private boolean checkNewProject(Project helpProject) {
    Project existingProject = projectDao.loadProject(helpProject.getName());
    boolean newProject = false;
    if (helpProject.getId() == null) {
        helpProject.setCreatedBy(AuthUtils.getCurrentUserId());
        helpProject.setCreatedDate(new Date());
        // for new project ensure that there is no project with this name
        Assert.isNull(existingProject,
                MessageHelper.getMessage(MessagesConstants.ERROR_PROJECT_NAME_EXISTS, helpProject.getName()));
        newProject = true;
    } else {
        // for updated one - ensure that if there is a project with that name,
        // its ID is equal to this project's id
        Assert.isTrue(existingProject == null || existingProject.getId().equals(helpProject.getId()),
                MessageHelper.getMessage(MessagesConstants.ERROR_PROJECT_NAME_EXISTS, helpProject.getName()));
    }
    return newProject;
}
/**
 * Determine whether the supplied {@link Tag} has any ancestor tag
 * of the supplied type.
 * @param tag the tag whose ancestors are to be checked
 * @param ancestorTagClass the ancestor {@link Class} being searched for
 * @return {@code true} if the supplied {@link Tag} has any ancestor tag
 * of the supplied type
 * @throws IllegalArgumentException if either of the supplied arguments is {@code null};
 * or if the supplied {@code ancestorTagClass} is not type-assignable to
 * the {@link Tag} class
 */
public static boolean hasAncestorOfType(Tag tag, Class<?> ancestorTagClass) {
    Assert.notNull(tag, "Tag cannot be null");
    Assert.notNull(ancestorTagClass, "Ancestor tag class cannot be null");
    if (!Tag.class.isAssignableFrom(ancestorTagClass)) {
        throw new IllegalArgumentException(
                "Class '" + ancestorTagClass.getName() + "' is not a valid Tag type");
    }
    Tag ancestor = tag.getParent();
    while (ancestor != null) {
        if (ancestorTagClass.isAssignableFrom(ancestor.getClass())) {
            return true;
        }
        ancestor = ancestor.getParent();
    }
    return false;
}
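// A minimal usage sketch for hasAncestorOfType. FormTag, InputTag and LoopTag are
// hypothetical implementations of javax.servlet.jsp.tagext.Tag used only to
// illustrate the call, and TagUtils stands for whatever class hosts the static
// method above.
Tag input = new InputTag();
input.setParent(new FormTag());
boolean nestedInForm = TagUtils.hasAncestorOfType(input, FormTag.class); // true
boolean nestedInLoop = TagUtils.hasAncestorOfType(input, LoopTag.class); // false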
/**
 * Opens a commission edit window.
 *
 * @param commission the commission to edit
 * @param isAdmin whether the current user has admin rights
 */
public void editCommission(final Commission commission, final Boolean isAdmin) {
    Assert.notNull(commission, applicationContext.getMessage("assert.notNull", null, UI.getCurrent().getLocale()));
    /* Lock */
    if (!lockController.getLockOrNotify(commission, null)) {
        return;
    }
    if (commission.getI18nCommentRetourComm() == null) {
        commission.setI18nCommentRetourComm(
                new I18n(i18nController.getTypeTraduction(NomenclatureUtils.TYP_TRAD_COMM_COMMENT_RETOUR)));
    }
    CtrCandCommissionWindow window = new CtrCandCommissionWindow(commission, isAdmin);
    window.addCloseListener(e -> lockController.releaseLock(commission));
    UI.getCurrent().addWindow(window);
}
@Override
public List<?> findByCriteria(final DetachedCriteria criteria, final int firstResult, final int maxResults)
        throws DataAccessException {

    Assert.notNull(criteria, "DetachedCriteria must not be null");
    return executeWithNativeSession(new HibernateCallback<List<?>>() {
        @Override
        @SuppressWarnings("unchecked")
        public List<?> doInHibernate(Session session) throws HibernateException {
            Criteria executableCriteria = criteria.getExecutableCriteria(session);
            prepareCriteria(executableCriteria);
            if (firstResult >= 0) {
                executableCriteria.setFirstResult(firstResult);
            }
            if (maxResults > 0) {
                executableCriteria.setMaxResults(maxResults);
            }
            return executableCriteria.list();
        }
    });
}
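// Usage sketch for findByCriteria, assuming an injected HibernateTemplate and a
// mapped Order entity (both illustrative); pagination follows the checks above:
// firstResult is applied when >= 0, maxResults when > 0.
DetachedCriteria criteria = DetachedCriteria.forClass(Order.class)
        .add(Restrictions.eq("status", "OPEN"));
List<?> firstPage = hibernateTemplate.findByCriteria(criteria, 0, 20);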
/**
 * Creates a Query object.
 * @param hql the HQL statement
 * @param values optional positional parameter values
 * @return Query
 */
protected Query createQuery(final String hql, final Object... values) {
    Assert.hasText(hql);
    Query query = (Query) getHibernateTemplate().execute(new HibernateCallback() {
        public Query doInHibernate(Session session) throws HibernateException {
            return session.createQuery(hql);
        }
    });
    for (int i = 0; i < values.length; i++) {
        query.setParameter(i, values[i]);
    }
    return query;
}
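// Usage sketch for createQuery with legacy Hibernate-style positional parameters
// (bound by index starting at 0, as in the loop above); the User entity and the
// values are illustrative.
Query query = createQuery("from User u where u.name = ? and u.age > ?", "alice", 18);
List<?> users = query.list();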
/**
 * This implementation will simply delete all ACEs in the database and recreate them on each invocation of
 * this method. A more comprehensive implementation might use dirty state checking, or more likely use ORM
 * capabilities for create, update and delete operations of {@link MutableAcl}.
 */
@Override
public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
    Assert.notNull(acl.getId(), "Object Identity doesn't provide an identifier");

    // Delete this ACL's ACEs in the acl_entry table
    aclDao.deleteEntries(retrieveObjectIdentityPrimaryKey(acl.getObjectIdentity()));

    // Create this ACL's ACEs in the acl_entry table
    createEntries(acl);

    // Change the mutable columns in acl_object_identity
    updateObjectIdentity(acl);

    // Clear the cache, including children
    clearCacheIncludingChildren(acl.getObjectIdentity());

    // Retrieve the ACL via superclass (ensures cache registration, proper retrieval etc)
    return (MutableAcl) readAclById(acl.getObjectIdentity());
}
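// Usage sketch: the standard Spring Security ACL read-modify-write cycle that ends
// in updateAcl; the Document class and the "bob" principal are illustrative, and
// aclService is assumed to be this MutableAclService implementation.
ObjectIdentity oid = new ObjectIdentityImpl(Document.class, 42L);
MutableAcl acl = (MutableAcl) aclService.readAclById(oid);
acl.insertAce(acl.getEntries().size(), BasePermission.READ, new PrincipalSid("bob"), true);
aclService.updateAcl(acl);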
/**
 * Perform a scan within the specified base packages,
 * returning the registered bean definitions.
 * <p>This method does <i>not</i> register an annotation config processor
 * but rather leaves this up to the caller.
 * @param basePackages the packages to check for annotated classes
 * @return set of beans registered if any for tooling registration purposes (never {@code null})
 */
protected Set<BeanDefinitionHolder> doScan(String... basePackages) {
    Assert.notEmpty(basePackages, "At least one base package must be specified");
    Set<BeanDefinitionHolder> beanDefinitions = new LinkedHashSet<BeanDefinitionHolder>();
    for (String basePackage : basePackages) {
        Set<BeanDefinition> candidates = findCandidateComponents(basePackage);
        for (BeanDefinition candidate : candidates) {
            ScopeMetadata scopeMetadata = this.scopeMetadataResolver.resolveScopeMetadata(candidate);
            candidate.setScope(scopeMetadata.getScopeName());
            String beanName = this.beanNameGenerator.generateBeanName(candidate, this.registry);
            if (candidate instanceof AbstractBeanDefinition) {
                postProcessBeanDefinition((AbstractBeanDefinition) candidate, beanName);
            }
            if (candidate instanceof AnnotatedBeanDefinition) {
                AnnotationConfigUtils.processCommonDefinitionAnnotations((AnnotatedBeanDefinition) candidate);
            }
            if (checkCandidate(beanName, candidate)) {
                BeanDefinitionHolder definitionHolder = new BeanDefinitionHolder(candidate, beanName);
                definitionHolder = AnnotationConfigUtils.applyScopedProxyMode(scopeMetadata, definitionHolder, this.registry);
                beanDefinitions.add(definitionHolder);
                registerBeanDefinition(definitionHolder, this.registry);
            }
        }
    }
    return beanDefinitions;
}
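// Usage sketch: doScan is protected, so callers normally go through a scanner's
// public API; this assumes the method belongs to a scanner in the style of
// Spring's ClassPathBeanDefinitionScanner, and the package names are illustrative.
GenericApplicationContext registry = new GenericApplicationContext();
ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(registry);
int registered = scanner.scan("com.example.service", "com.example.repository");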
/**
 * Process all Hibernate Sessions that have been registered for deferred close
 * for the given SessionFactory.
 * @param sessionFactory the Hibernate SessionFactory to process deferred close for
 * @see #initDeferredClose
 * @see #releaseSession
 */
public static void processDeferredClose(SessionFactory sessionFactory) {
    Assert.notNull(sessionFactory, "No SessionFactory specified");
    Map<SessionFactory, Set<Session>> holderMap = deferredCloseHolder.get();
    if (holderMap == null || !holderMap.containsKey(sessionFactory)) {
        throw new IllegalStateException("Deferred close not active for SessionFactory [" + sessionFactory + "]");
    }
    logger.debug("Processing deferred close of Hibernate Sessions");
    Set<Session> sessions = holderMap.remove(sessionFactory);
    for (Session session : sessions) {
        closeSession(session);
    }
    if (holderMap.isEmpty()) {
        deferredCloseHolder.remove();
    }
}
/**
 * Add a declared parameter to the list of parameters for the call.
 * Only parameters declared as {@code SqlParameter} and {@code SqlInOutParameter}
 * will be used to provide input values. This is different from the {@code StoredProcedure} class
 * which for backwards compatibility reasons allows input values to be provided for parameters declared
 * as {@code SqlOutParameter}.
 * @param parameter the {@link SqlParameter} to add
 */
public void addDeclaredParameter(SqlParameter parameter) {
    Assert.notNull(parameter, "The supplied parameter must not be null");
    if (!StringUtils.hasText(parameter.getName())) {
        throw new InvalidDataAccessApiUsageException(
                "You must specify a parameter name when declaring parameters for \"" + getProcedureName() + "\"");
    }
    this.declaredParameters.add(parameter);
    if (logger.isDebugEnabled()) {
        logger.debug("Added declared parameter for [" + getProcedureName() + "]: " + parameter.getName());
    }
}
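// Usage sketch: declaring IN and OUT parameters before the call is compiled; the
// parameter names are illustrative, and java.sql.Types supplies the JDBC type
// constants.
addDeclaredParameter(new SqlParameter("in_customer_id", Types.INTEGER));
addDeclaredParameter(new SqlOutParameter("out_balance", Types.NUMERIC));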
public AbstractTemplateProvider(CustomResourceLoader customResourceLoader) {
    Assert.notNull(customResourceLoader, "CustomResourceLoader must not be null!");
    this.postfix = customResourceLoader.getPostfix();
    this.debug = true;
    this.excludeClasses = new Class[]{};
    this.overwrite = customResourceLoader.isOverwrite();
}
/**
 * Saves metadata information about a reference genome that should become available in the
 * system.
 *
 * @param reference {@code Reference} represents a reference genome metadata that should be
 *                  stored in the system.
 * @return {@code Reference} the same instance that was passed to this call; after a successful
 *         call it provides access to ID values
 * @throws IllegalArgumentException will be thrown if reference ID isn't specified or reference
 *                                  doesn't provide information about related chromosomes
 */
@Transactional(propagation = Propagation.REQUIRED)
public Reference register(final Reference reference) {
    Assert.isTrue(CollectionUtils.isNotEmpty(reference.getChromosomes()),
            getMessage("error.reference.aborted.saving.chromosomes"));
    Assert.notNull(reference.getId(), getMessage(MessageCode.UNKNOWN_REFERENCE_ID));
    biologicalDataItemDao.createBiologicalDataItem(reference.getIndex());
    if (reference.getCreatedDate() == null) {
        reference.setCreatedDate(new Date());
    }
    reference.setCreatedBy(AuthUtils.getCurrentUserId());
    if (reference.getType() == null) {
        reference.setType(BiologicalDataItemResourceType.FILE);
    }
    if (reference.getBioDataItemId() == null) {
        final Long referenceId = reference.getId();
        biologicalDataItemDao.createBiologicalDataItem(reference);
        referenceGenomeDao.createReferenceGenome(reference, referenceId);
    } else {
        referenceGenomeDao.createReferenceGenome(reference);
    }
    referenceGenomeDao.saveChromosomes(reference.getId(), reference.getChromosomes());
    return reference;
}
/**
 * Get all bean names for the given type, including those defined in ancestor
 * factories. Will return unique names in case of overridden bean definitions.
 * <p>Does consider objects created by FactoryBeans if the "allowEagerInit"
 * flag is set, which means that FactoryBeans will get initialized. If the
 * object created by the FactoryBean doesn't match, the raw FactoryBean itself
 * will be matched against the type. If "allowEagerInit" is not set,
 * only raw FactoryBeans will be checked (which doesn't require initialization
 * of each FactoryBean).
 * @param lbf the bean factory
 * @param type the type that beans must match
 * @param includeNonSingletons whether to include prototype or scoped beans too
 * or just singletons (also applies to FactoryBeans)
 * @param allowEagerInit whether to initialize <i>lazy-init singletons</i> and
 * <i>objects created by FactoryBeans</i> (or by factory methods with a
 * "factory-bean" reference) for the type check. Note that FactoryBeans need to be
 * eagerly initialized to determine their type: So be aware that passing in "true"
 * for this flag will initialize FactoryBeans and "factory-bean" references.
 * @return the array of matching bean names, or an empty array if none
 */
public static String[] beanNamesForTypeIncludingAncestors(
        ListableBeanFactory lbf, Class<?> type, boolean includeNonSingletons, boolean allowEagerInit) {

    Assert.notNull(lbf, "ListableBeanFactory must not be null");
    String[] result = lbf.getBeanNamesForType(type, includeNonSingletons, allowEagerInit);
    if (lbf instanceof HierarchicalBeanFactory) {
        HierarchicalBeanFactory hbf = (HierarchicalBeanFactory) lbf;
        if (hbf.getParentBeanFactory() instanceof ListableBeanFactory) {
            String[] parentResult = beanNamesForTypeIncludingAncestors(
                    (ListableBeanFactory) hbf.getParentBeanFactory(), type, includeNonSingletons, allowEagerInit);
            List<String> resultList = new ArrayList<String>();
            resultList.addAll(Arrays.asList(result));
            for (String beanName : parentResult) {
                if (!resultList.contains(beanName) && !hbf.containsLocalBean(beanName)) {
                    resultList.add(beanName);
                }
            }
            result = StringUtils.toStringArray(resultList);
        }
    }
    return result;
}
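// Usage sketch: collecting DataSource bean names across a parent/child context
// hierarchy; BeanFactoryUtils stands for the class hosting the static method
// above, and childContext is assumed to be an existing ConfigurableApplicationContext.
String[] names = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(
        childContext.getBeanFactory(), DataSource.class, true, false);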
/**
 * Updates the mainStem synchronization status data.
 */
public void single(MainStemEventData data) {
    Assert.notNull(data);
    Long nid = ArbitrateConfigUtils.getCurrentNid();
    if (!check()) {
        return;
    }
    data.setNid(nid); // set the current nid
    String path = StagePathUtils.getMainStem(data.getPipelineId());
    byte[] bytes = JsonUtils.marshalToByte(data); // the initialized data object
    try {
        zookeeper.writeData(path, bytes);
    } catch (ZkException e) {
        throw new ArbitrateException("mainStem_single", data.toString(), e);
    }
    activeData = data;
}
/**
 * Instantiates a new handler result.
 *
 * @param source the source authentication handler
 * @param metaData the credential metadata
 * @param p the authenticated principal
 * @param warnings the warnings
 */
public DefaultHandlerResult(
        final AuthenticationHandler source,
        final CredentialMetaData metaData,
        final Principal p,
        final List<MessageDescriptor> warnings) {

    Assert.notNull(source, "Source cannot be null.");
    Assert.notNull(metaData, "Credential metadata cannot be null.");
    this.handlerName = source.getName();
    if (!StringUtils.hasText(this.handlerName)) {
        this.handlerName = source.getClass().getSimpleName();
    }
    this.credentialMetaData = metaData;
    this.principal = p;
    this.warnings = warnings;
}
/**
 * Constructs a new <code>BootstrappingDependenciesEvent</code> instance.
 *
 * @param source the application context that generated the event
 */
public BootstrappingDependenciesEvent(ApplicationContext source, Bundle bundle,
        Collection<OsgiServiceDependencyEvent> nestedEvents, Filter filter, long timeLeft) {
    super(source, bundle);
    Assert.notNull(nestedEvents);
    this.dependencyEvents = nestedEvents;
    this.dependenciesFilter = filter;
    this.timeLeft = timeLeft;

    List<String> depFilters = new ArrayList<String>(dependencyEvents.size());
    for (OsgiServiceDependencyEvent dependency : nestedEvents) {
        depFilters.add(dependency.getServiceDependency().getServiceFilter().toString());
    }
    dependencyFilters = Collections.unmodifiableCollection(depFilters);
}
/**
 * {@inheritDoc}
 *
 * @see MessageManagementService#putMessage(java.lang.String, java.util.Locale,
 *      java.lang.String)
 */
@Override
public void putMessage(String codeId, Locale locale, String message, String type) throws I18nException {
    Assert.hasLength(codeId);
    Assert.notNull(locale);
    LocaleUtils.toLocale(locale.toString()); // this validates the locale
    Assert.hasLength(message);

    MessageResource persisted = this.messageSourceDao.findTopByCodeAndLang(codeId, locale.toString());
    if (persisted != null) {
        // update case
        persisted.message(message).type(type);
    } else {
        // insert case
        persisted = new MessageResource()
                .code(codeId)
                .lang(locale.toString())
                .message(message)
                .type(type);
        persisted.id((type == null ? "" : type) + codeId + locale.toString());
    }
    this.messageSourceDao.save(persisted);
}
private String getGroupByField(List<VcfFile> files, String groupBy) throws IOException {
    IndexSortField sortField = IndexSortField.getByName(groupBy);
    if (sortField == null) {
        VcfFilterInfo info = vcfManager.getFiltersInfo(
                files.stream().map(BaseEntity::getId).collect(Collectors.toList()));
        InfoItem infoItem = info.getInfoItemMap().get(groupBy);
        Assert.notNull(infoItem, "Unknown sort field: " + groupBy);
        if (infoItem.getType() == VCFHeaderLineType.Integer || infoItem.getType() == VCFHeaderLineType.Float) {
            return FeatureIndexFields.getGroupName(infoItem.getName().toLowerCase());
        } else {
            return infoItem.getName().toLowerCase();
        }
    } else {
        if (sortField.getType() == SortField.Type.INT || sortField.getType() == SortField.Type.FLOAT) {
            return sortField.getField().getGroupName();
        } else {
            return sortField.getField().fieldName;
        }
    }
}
@Override
protected Collection<Cache> loadCaches() {
    net.sf.ehcache.CacheManager cacheManager = getCacheManager();
    Assert.notNull(cacheManager, "A backing EhCache CacheManager is required");
    Status status = cacheManager.getStatus();
    Assert.isTrue(Status.STATUS_ALIVE.equals(status),
            "An 'alive' EhCache CacheManager is required - current cache is " + status.toString());

    String[] names = cacheManager.getCacheNames();
    Collection<Cache> caches = new LinkedHashSet<Cache>(names.length);
    for (String name : names) {
        caches.add(new EhCacheCache(cacheManager.getEhcache(name)));
    }
    return caches;
}
public DefaultFrameworkTemplate(Object target, Log log) {
    if (OsgiPlatformDetector.isR42()) {
        Assert.isInstanceOf(Framework.class, target);
        fwk = (Framework) target;
    } else {
        throw new IllegalStateException("Cannot use OSGi 4.2 Framework API in an OSGi 4.1 environment");
    }
    this.log = log;
}
/**
 * Deletes a {@code Reference} instance from the server and database
 * @param reference an instance to delete
 */
@Transactional(propagation = Propagation.REQUIRED)
public void unregister(final Reference reference) {
    Assert.notNull(reference, MessagesConstants.ERROR_INVALID_PARAM);
    Assert.notNull(reference.getId(), MessagesConstants.ERROR_INVALID_PARAM);
    if (reference.getType() != BiologicalDataItemResourceType.GA4GH) {
        List<Project> projectsWhereFileInUse = projectDao.loadProjectsByBioDataItemId(
                reference.getBioDataItemId());
        Assert.isTrue(projectsWhereFileInUse.isEmpty(), getMessage(MessagesConstants.ERROR_FILE_IN_USE,
                reference.getName(), reference.getId(), projectsWhereFileInUse.stream().map(BaseEntity::getName)
                        .collect(Collectors.joining(", "))));
        List<BaseEntity> fileList = loadAllFile(reference.getId());
        Assert.isTrue(fileList.isEmpty(), getMessage(MessagesConstants.ERROR_FILE_IN_LINK,
                reference.getName(), reference.getId(), fileList.stream().map(BaseEntity::getName)
                        .collect(Collectors.joining(", "))));
    }
    referenceGenomeDao.unregisterReferenceGenome(reference.getId());
    biologicalDataItemDao.deleteBiologicalDataItem(reference.getBioDataItemId());
}
/**
 * Binds the parameters to the given {@link Query}.
 *
 * @param query must not be {@literal null}.
 * @return the query with all bindable parameter values applied
 */
public Object bind(Object query) {
    Assert.notNull(query, "Query must not be null!");

    int bindableParameterIndex = 0;
    int queryParameterPosition = 1;
    for (Parameter parameter : parameters) {
        if (canBindParameter(parameter)) {
            Object value = accessor.getBindableValue(bindableParameterIndex);
            bind(query, parameter, value, queryParameterPosition++);
            bindableParameterIndex++;
        }
    }
    return query;
}
@Override
public long getLatestCount(short catalog, Integer subCatalog, Long date, Integer noAds, Integer noVirus,
        Integer official) {
    Assert.isTrue(catalog > 0, "catalog must be positive!");
    if (subCatalog != null) {
        Assert.isTrue(subCatalog > -1, "subCatalog negative!");
    }
    if (date != null) {
        Assert.isTrue(date > -1, "date negative!");
    }
    Boolean bNoAds = null, bOfficial = null;
    if (noAds != null && noAds.intValue() == 1) {
        bNoAds = Boolean.TRUE;
    }
    if (official != null && official.intValue() == 1) {
        bOfficial = Boolean.TRUE;
    }
    Date startDate = null;
    Date endDate = null;
    if (date != null) {
        HighAndLowDate hld = new HighAndLowDate(date);
        startDate = hld.getLow();
        endDate = hld.getHigh();
    }
    return appMapper.getLatestCount(catalog, subCatalog, startDate, endDate, bNoAds, bOfficial);
}
/**
 * Loads a specific read from a BAM file, specified by a ReadQuery object
 * @param query a {@link ReadQuery} object that specifies a {@link Read} to load
 * @return a {@link Read} object
 * @throws IOException if something goes wrong with the filesystem
 */
public Read loadRead(final ReadQuery query, String fileUrl, String indexUrl) throws IOException {
    Assert.notNull(query, MessagesConstants.ERROR_NULL_PARAM);
    Assert.isTrue(query.getId() != null ||
            (StringUtils.isNotBlank(fileUrl) && StringUtils.isNotBlank(indexUrl)), MessagesConstants.ERROR_NULL_PARAM);
    Assert.notNull(query.getChromosomeId(), MessagesConstants.ERROR_NULL_PARAM);
    Assert.notNull(query.getStartIndex(), MessagesConstants.ERROR_NULL_PARAM);
    Assert.notNull(query.getEndIndex(), MessagesConstants.ERROR_NULL_PARAM);
    Assert.notNull(query.getName(), MessagesConstants.ERROR_NULL_PARAM);
    final Chromosome chromosome = referenceGenomeManager.loadChromosome(query.getChromosomeId());
    BamFile bamFile;
    if (query.getId() != null) {
        bamFile = bamFileManager.loadBamFile(query.getId());
    } else {
        bamFile = bamHelper.makeUrlBamFile(fileUrl, indexUrl, chromosome.getReferenceId());
    }
    return getReadFromBamFile(query, chromosome, bamFile);
}
/**
 * Validates and parses the given path to make sure that all mandatory properties,
 * describing genome data, are provided.
 * <p>
 * Default values will be assigned in cases when it is possible to do so. E.g., if a
 * custom name for a genome is omitted, the original name of the corresponding file
 * without its extension is used.
 *
 * @param path {@code File} Path to fasta file
 * @param name {@code String} Alternative name
 */
private String parse(final String path, final String name) {
    Assert.notNull(path, getMessage(MessageCode.RESOURCE_NOT_FOUND));
    // checks that an original file name is provided, because it is used as a name
    // for a genome if custom name isn't specified
    String fileName = StringUtils.trimToNull(FilenameUtils.getName(path));
    Assert.notNull(fileName, getMessage(MessageCode.MANDATORY_FILE_NAME));
    // checks that file is in one of supported formats
    boolean supported = false;
    final Collection<String> formats = FastaUtils.getFastaExtensions();
    for (final String ext : formats) {
        if (fileName.endsWith(ext)) {
            supported = true;
            fileName = Utils.removeFileExtension(fileName, ext);
            break;
        }
    }
    if (!supported) {
        throw new IllegalArgumentException(getMessage("error.reference.illegal.file.type",
                StringUtils.join(formats, ", ")));
    }
    // if no custom name is provided for a genome, then a file name without extension should be
    // used by default
    return StringUtils.defaultString(StringUtils.trimToNull(name), fileName);
}
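// Illustration of the default-name rule above (paths and names are hypothetical,
// assuming ".fa" is among the supported fasta extensions):
// parse("/data/genomes/GRCh38.fa", null)      -> "GRCh38"   (file name, extension stripped)
// parse("/data/genomes/GRCh38.fa", "human38") -> "human38"  (explicit name wins)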
/**
 * Returns the entity with attributes only on the requested vertex. No parent attributes are included.
 */
public E getEntity(final ZoneEntity zone, final String identifier) {
    try {
        GraphTraversal<Vertex, Vertex> traversal = this.graphTraversal.V().has(ZONE_ID_KEY, zone.getName())
                .has(getEntityIdKey(), identifier);
        if (!traversal.hasNext()) {
            return null;
        }
        Vertex vertex = traversal.next();
        E entity = vertexToEntity(vertex);

        // There should be only one entity with a given entity id.
        Assert.isTrue(!traversal.hasNext(),
                String.format("There are two entities with the same %s.", getEntityIdKey()));
        return entity;
    } finally {
        this.graphTraversal.tx().commit();
    }
}
@Transactional
@Override
@SuppressWarnings("unchecked")
public void delete(ID id) {
    Assert.notNull(id, "The given id must not be null!");
//        CriteriaBuilder builder = this.entityManager.getCriteriaBuilder();
//        CriteriaDelete<T> query = builder.createCriteriaDelete(domainClass);
//
//        Root<T> root = query.from(domainClass);
//        this.entityManager.createQuery(query.where(
//                builder.equal(root.get("id"), id)
//        )).executeUpdate();
    T entity = findOne(id);
    if (entity == null) {
        throw new EmptyResultDataAccessException(
                String.format("No %s entity with id %s exists!", domainClass, id), 1);
    }
    delete((SelfReference<T>) entity);
}
/**
 * Set the template used for method entry log messages.
 * This template can contain any of the following placeholders:
 * <ul>
 * <li>{@code $[targetClassName]}</li>
 * <li>{@code $[targetClassShortName]}</li>
 * <li>{@code $[argumentTypes]}</li>
 * <li>{@code $[arguments]}</li>
 * </ul>
 */
public void setEnterMessage(String enterMessage) throws IllegalArgumentException {
    Assert.hasText(enterMessage, "'enterMessage' must not be empty");
    checkForInvalidPlaceholders(enterMessage);
    Assert.doesNotContain(enterMessage, PLACEHOLDER_RETURN_VALUE,
            "enterMessage cannot contain placeholder [" + PLACEHOLDER_RETURN_VALUE + "]");
    Assert.doesNotContain(enterMessage, PLACEHOLDER_EXCEPTION,
            "enterMessage cannot contain placeholder [" + PLACEHOLDER_EXCEPTION + "]");
    Assert.doesNotContain(enterMessage, PLACEHOLDER_INVOCATION_TIME,
            "enterMessage cannot contain placeholder [" + PLACEHOLDER_INVOCATION_TIME + "]");
    this.enterMessage = enterMessage;
}
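// Usage sketch, assuming this setter belongs to a trace interceptor in the style
// of Spring's CustomizableTraceInterceptor; the template uses only placeholders
// listed in the Javadoc above.
CustomizableTraceInterceptor interceptor = new CustomizableTraceInterceptor();
interceptor.setEnterMessage("Entering $[targetClassShortName] with arguments $[arguments]");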
public void afterPropertiesSet() throws Exception {
    Assert.hasText(persistentId, "persistentId property is required");
    Assert.notNull(bundleContext, "bundleContext property is required");
    Assert.isTrue(initTimeout >= 0, "a non-negative initTimeout is required");

    if (!initLazy) {
        createProperties();
    }
}
public AbstractCacheHealthIndicator(final RedisConnectionFactory redisConnectionFactory, final String cacheType,
        final boolean cachingEnabled) {
    Assert.notNull(redisConnectionFactory, "ConnectionFactory must not be null");
    this.redisConnectionFactory = redisConnectionFactory;
    this.cacheType = cacheType;
    this.cachingEnabled = cachingEnabled;
}
@Override
public void delete(String[] paths) {
    Assert.notEmpty(paths, "Paths must not be empty");
    for (String path : paths) {
        LOGGER.info("delete path: " + rootLocation.resolve(path).toAbsolutePath());
        FileSystemUtils.deleteRecursively(rootLocation.resolve(path).toFile());
    }
}
private void checkReference(Reference reference, List<BiologicalDataItem> projectItems) {
    for (BiologicalDataItem item : projectItems) {
        if (FeatureFile.class.isAssignableFrom(item.getClass())) {
            FeatureFile file = (FeatureFile) item;
            Assert.isTrue(reference.getId().equals(file.getReferenceId()),
                    MessageHelper.getMessage(MessagesConstants.ERROR_PROJECT_NON_MATCHING_REFERENCE, file.getName()));
        }
    }
}
/**
 * Finds one entity by id within the given partition.
 *
 * @param id the entity id
 * @param partitionKeyValue the partition key value
 * @return the matching entity
 */
public T findOne(ID id, String partitionKeyValue) {
    Assert.notNull(id, "id must not be null");
    Assert.notNull(partitionKeyValue, "partitionKeyValue must not be null");
    return documentDbOperations.findById(
            entityInformation.getCollectionName(), id, entityInformation.getJavaType(), partitionKeyValue);
}
/**
 * Reads a long value from the stream
 * @return the long value assembled from 8 bytes in big-endian order
 * @throws IOException if no value was read
 */
public final long readLong() throws IOException {
    final byte[] readBuffer = new byte[LONG_SIZE];
    final int countByte = read(readBuffer);
    Assert.isTrue(countByte == readBuffer.length);
    return ((long) readBuffer[OFFSET_0] << SHIFT56)
            + ((long) (readBuffer[OFFSET_1] & BYTE_MASK) << SHIFT48)
            + ((long) (readBuffer[OFFSET_2] & BYTE_MASK) << SHIFT40)
            + ((long) (readBuffer[OFFSET_3] & BYTE_MASK) << SHIFT32)
            + ((long) (readBuffer[OFFSET_4] & BYTE_MASK) << SHIFT24)
            + ((readBuffer[OFFSET_5] & BYTE_MASK) << TWO_BYTE_SIZE)
            + ((readBuffer[OFFSET_6] & BYTE_MASK) << BYTE_SIZE)
            + (readBuffer[OFFSET_7] & BYTE_MASK);
}
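// A self-contained check of the same big-endian decoding, using ByteBuffer for
// comparison (the byte values are illustrative): the bytes
// 00 00 00 00 00 00 01 2C decode to (0x01 << 8) + 0x2C = 300.
long expected = ByteBuffer.wrap(new byte[] {0, 0, 0, 0, 0, 0, 1, 44}).getLong(); // 300L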
@Override
public <X> List<X> find(String hql, Map<String, ?> params, Pageable pageable, Class<X> clazz) {
    Assert.hasText(hql, "hql can not be null or blank");
    Query query = entityManager.createQuery(hql);
    if (query != null) {
        query.unwrap(QueryImpl.class).setResultTransformer(Transformers.aliasToBean(clazz));
        query.setFirstResult(pageable.getOffset());
        query.setMaxResults(pageable.getPageSize());
        if (params != null) {
            params.entrySet().forEach(param -> query.setParameter(param.getKey(), param.getValue()));
        }
        return query.getResultList();
    }
    return Lists.newArrayList();
}
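// Usage sketch for the HQL + named-parameter + paging variant above; Customer and
// CustomerView are illustrative, and PageRequest is used here in its pre-2.x
// Spring Data constructor form, matching the int-based setFirstResult call.
Map<String, Object> params = new HashMap<>();
params.put("status", "ACTIVE");
List<CustomerView> page = find(
        "select c.id as id, c.name as name from Customer c where c.status = :status",
        params, new PageRequest(0, 20), CustomerView.class);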
/**
 * Persists a BiologicalDataItem instance into the database
 * @param item BiologicalDataItem to persist
 */
@Transactional(propagation = Propagation.MANDATORY)
public void createBiologicalDataItem(BiologicalDataItem item) {
    if (!item.getFormat().isIndex() ||
            (item.getFormat().isIndex() && !StringUtils.isEmpty(item.getName()))) {
        Assert.isTrue(!StringUtils.isEmpty(item.getName()), "File name is required for registration.");
        List<BiologicalDataItem> items = loadFilesByNameStrict(item.getName());
        Assert.isTrue(items.isEmpty(), MessageHelper
                .getMessage(MessagesConstants.ERROR_FILE_NAME_EXISTS, item.getName()));
        item.setId(daoHelper.createId(biologicalDataItemSequenceName));
    } else {
        item.setId(daoHelper.createId(biologicalDataItemSequenceName));
        item.setName("INDEX " + item.getId());
    }

    final MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue(BiologicalDataItemParameters.BIO_DATA_ITEM_ID.name(), item.getId());
    params.addValue(BiologicalDataItemParameters.NAME.name(), item.getName());
    params.addValue(BiologicalDataItemParameters.TYPE.name(), item.getType().getId());
    params.addValue(BiologicalDataItemParameters.PATH.name(), item.getPath());
    params.addValue(BiologicalDataItemParameters.FORMAT.name(), item.getFormat().getId());
    params.addValue(BiologicalDataItemParameters.CREATED_DATE.name(), item.getCreatedDate());
    params.addValue(BiologicalDataItemParameters.CREATED_BY.name(), item.getCreatedBy());
    params.addValue(BiologicalDataItemParameters.BUCKET_ID.name(), item.getBucketId());

    getNamedParameterJdbcTemplate().update(insertBiologicalDataItemQuery, params);
}
/**
 * Adds a child element (node) to the given root element.
 */
private void addElement(Element rootElement, String elementName, Object obj, boolean required) {
    if (obj == null && !required) {
        return;
    }
    Assert.notNull(obj, elementName);
    Class<?> clazz = obj.getClass();
    Element element = rootElement.addElement(elementName);
    if (clazz == Integer.class || clazz == Long.class) {
        element.setText(obj.toString());
    } else {
        element.addCDATA(obj.toString());
    }
}
public DocumentDbTemplate(DocumentDbFactory documentDbFactory,
        MappingDocumentDbConverter mappingDocumentDbConverter,
        String dbName) {
    Assert.notNull(documentDbFactory, "DocumentDbFactory must not be null!");
    Assert.notNull(mappingDocumentDbConverter, "MappingDocumentDbConverter must not be null!");

    this.databaseName = dbName;
    this.documentDbFactory = documentDbFactory;
    this.mappingDocumentDbConverter = mappingDocumentDbConverter;
    this.collectionCache = new ArrayList<>();
}
/**
 * Returns references to <em>all</em> services matching the given class name and OSGi filter.
 *
 * @param bundleContext OSGi bundle context
 * @param clazz fully qualified class name (can be <code>null</code>)
 * @param filter valid OSGi filter (can be <code>null</code>)
 * @return non-<code>null</code> array of references to matching services
 */
public static ServiceReference[] getServiceReferences(BundleContext bundleContext, String clazz, String filter) {
    Assert.notNull(bundleContext, "bundleContext should be not null");
    try {
        ServiceReference[] refs = bundleContext.getServiceReferences(clazz, filter);
        return (refs == null ? new ServiceReference[0] : refs);
    } catch (InvalidSyntaxException ise) {
        throw (RuntimeException) new IllegalArgumentException("invalid filter: " + ise.getFilter()).initCause(ise);
    }
}
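// Usage sketch: looking up all registered DataSource services that match an
// LDAP-style OSGi filter; the filter string is illustrative, and the hosting class
// is assumed to be a static utility in the style of Spring DM's
// OsgiServiceReferenceUtils.
ServiceReference[] refs = OsgiServiceReferenceUtils.getServiceReferences(
        bundleContext, "javax.sql.DataSource", "(db=orders)");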
static ExchangeFilterFunction loggingFilter(Logger logger, LogLevel logLevel) {
    Assert.notNull(logger, "'logger' must not be null");
    Assert.notNull(logLevel, "'logLevel' must not be null");
    return (clientRequest, exchangeFunction) ->
            logClientRequest(clientRequest, exchangeFunction, logger, logLevel);
}
public static boolean execute(Runnable task, long waitTime, TaskExecutor taskExecutor) {
    Assert.notNull(task);

    Counter counter = new Counter("counter for task: " + task);
    Runnable wrapper = new MonitoredRunnable(task, counter);

    boolean internallyManaged = false;
    if (taskExecutor == null) {
        taskExecutor = new SimpleTaskExecutor();
        internallyManaged = true;
    }
    counter.increment();
    taskExecutor.execute(wrapper);
    if (counter.waitForZero(waitTime)) {
        log.error(task + " did not finish in " + waitTime
                + "ms; consider taking a snapshot and then shutdown the VM in case the thread still hangs");
        //log.error("Current Thread dump***\n" + ThreadDump.dumpThreads());
        if (internallyManaged) {
            try {
                ((DisposableBean) taskExecutor).destroy();
            } catch (Exception e) {
                log.error("Exception thrown while destroying internally managed thread executor", e);
            }
        }
        return true;
    }
    return false;
}
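// Usage sketch: run a cleanup task with a 5-second grace period on an internally
// managed executor (a null taskExecutor triggers the simple one); a true result
// means the task did not finish in time. The context variable is illustrative.
boolean timedOut = execute(() -> context.close(), 5000L, null);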