/**
 * Build the default collection Type for a plural attribute binding,
 * based on the attribute's nature (SET or BAG).
 */
private Type determineDefaultCollectionInformation(AbstractPluralAttributeBinding attributeBinding) {
	final TypeFactory typeFactory = metadata.getTypeResolver().getTypeFactory();
	final String attributeName = attributeBinding.getAttribute().getName();
	final String referencedPropertyName = attributeBinding.getReferencedPropertyName();
	switch ( attributeBinding.getAttribute().getNature() ) {
		case SET:
			return typeFactory.set( attributeName, referencedPropertyName );
		case BAG:
			return typeFactory.bag( attributeName, referencedPropertyName );
		default:
			throw new UnsupportedOperationException(
					"Collection type not supported yet:" + attributeBinding.getAttribute().getNature()
			);
	}
}
/**
 * Parse a {@code filter-def} element, resolving each filter parameter's
 * Hibernate type heuristically, and register the resulting FilterDefinition.
 */
private static void parseFilterDef(Element element, Mappings mappings) {
	final String filterName = element.attributeValue( "name" );
	log.debug( "Parsing filter-def [" + filterName + "]" );
	// the default condition may appear as element text or as a "condition" attribute
	String defaultCondition = element.getTextTrim();
	if ( StringHelper.isEmpty( defaultCondition ) ) {
		defaultCondition = element.attributeValue( "condition" );
	}
	final HashMap parameterTypes = new HashMap();
	final Iterator parameterElements = element.elementIterator( "filter-param" );
	while ( parameterElements.hasNext() ) {
		final Element parameterElement = (Element) parameterElements.next();
		final String paramName = parameterElement.attributeValue( "name" );
		final String paramType = parameterElement.attributeValue( "type" );
		log.debug( "adding filter parameter : " + paramName + " -> " + paramType );
		final Type heuristicType = TypeFactory.heuristicType( paramType );
		log.debug( "parameter heuristic type : " + heuristicType );
		parameterTypes.put( paramName, heuristicType );
	}
	log.debug( "Parsed filter-def [" + filterName + "]" );
	mappings.addFilterDefinition( new FilterDefinition( filterName, defaultCondition, parameterTypes ) );
}
/**
 * Build the mapping Type for this one-to-one association: the "special"
 * variant when columns are present, otherwise the standard one-to-one type.
 */
public Type getType() throws MappingException {
	final boolean hasColumns = getColumnIterator().hasNext();
	if ( !hasColumns ) {
		return TypeFactory.oneToOne(
				getReferencedEntityName(),
				foreignKeyType,
				referencedPropertyName,
				isLazy(),
				isUnwrapProxy(),
				isEmbedded(),
				entityName,
				propertyName
		);
	}
	return new SpecialOneToOneType(
			getReferencedEntityName(),
			foreignKeyType,
			referencedPropertyName,
			isLazy(),
			isUnwrapProxy(),
			entityName,
			propertyName
	);
}
/**
 * Locate the property-indices of all properties considered to be dirty.
 *
 * @param currentState The current state of the entity (the state to be checked).
 * @param previousState The previous state of the entity (the state to be checked against).
 * @param entity The entity for which we are checking state dirtiness.
 * @param session The session in which the check is occurring.
 * @return <tt>null</tt> or the indices of the dirty properties
 * @throws HibernateException
 */
public int[] findDirty(Object[] currentState, Object[] previousState, Object entity, SessionImplementor session)
		throws HibernateException {
	// delegate the per-property dirty comparison to TypeFactory; uninitialized
	// lazy properties are signalled so they can be skipped
	int[] props = TypeFactory.findDirty(
			entityMetamodel.getProperties(),
			currentState,
			previousState,
			propertyColumnUpdateable,
			hasUninitializedLazyProperties( entity, session.getEntityMode() ),
			session
	);
	if ( props == null ) {
		return null;
	}
	else {
		// only log when something was actually found dirty
		logDirtyProperties( props );
		return props;
	}
}
/**
 * Locate the property-indices of all properties considered to be dirty.
 *
 * @param old The old state of the entity.
 * @param current The current state of the entity.
 * @param entity The entity for which we are checking state modification.
 * @param session The session in which the check is occurring.
 * @return <tt>null</tt> or the indices of the modified properties
 * @throws HibernateException
 */
public int[] findModified(Object[] old, Object[] current, Object entity, SessionImplementor session)
		throws HibernateException {
	// note the argument order: (current, old) — mirrors findDirty's
	// (currentState, previousState) delegation
	int[] props = TypeFactory.findModified(
			entityMetamodel.getProperties(),
			current,
			old,
			propertyColumnUpdateable,
			hasUninitializedLazyProperties( entity, session.getEntityMode() ),
			session
	);
	if ( props == null ) {
		return null;
	}
	else {
		logDirtyProperties( props );
		return props;
	}
}
public String render(List args, SessionFactoryImplementor factory) throws QueryException { if ( args.size()!=2 ) { throw new QueryException("cast() requires two arguments"); } String type = (String) args.get(1); int[] sqlTypeCodes = TypeFactory.heuristicType(type).sqlTypes(factory); if ( sqlTypeCodes.length!=1 ) { throw new QueryException("invalid Hibernate type for cast()"); } String sqlType = factory.getDialect().getCastTypeName( sqlTypeCodes[0] ); if (sqlType==null) { //TODO: never reached, since getTypeName() actually throws an exception! sqlType = type; } /*else { //trim off the length/precision/scale int loc = sqlType.indexOf('('); if (loc>-1) { sqlType = sqlType.substring(0, loc); } }*/ return "cast(" + args.get(0) + " as " + sqlType + ')'; }
/**
 * Build a second-level cache entry from the entity's property state.
 * State is disassembled so the cache holds values, not live references.
 */
public CacheEntry(
		final Object[] state,
		final EntityPersister persister,
		final boolean unfetched,
		final Object version,
		final SessionImplementor session,
		final Object owner)
		throws HibernateException {
	//disassembled state gets put in a new array (we write to cache by value!)
	this.disassembledState = TypeFactory.disassemble(
			state,
			persister.getPropertyTypes(),
			// when lazy properties are cacheable, disassemble everything (null mask);
			// otherwise pass the laziness mask so lazy properties are skipped
			persister.isLazyPropertiesCacheable() ? null : persister.getPropertyLaziness(),
			session,
			owner
	);
	subclass = persister.getEntityName();
	// if lazy properties are not cacheable at all, treat them as unfetched
	lazyPropertiesAreUnfetched = unfetched || !persister.isLazyPropertiesCacheable();
	this.version = version;
}
/**
 * Copy the property values of {@code entity} onto {@code target},
 * replacing association/value references via the merge copyCache.
 */
protected void copyValues(
	final EntityPersister persister,
	final Object entity,
	final Object target,
	final SessionImplementor source,
	final Map copyCache
) {
	final Object[] sourceValues = persister.getPropertyValues( entity, source.getEntityMode() );
	final Object[] targetValues = persister.getPropertyValues( target, source.getEntityMode() );
	final Object[] mergedValues = TypeFactory.replace(
			sourceValues,
			targetValues,
			persister.getPropertyTypes(),
			source,
			target,
			copyCache
	);
	persister.setPropertyValues( target, mergedValues, source.getEntityMode() );
}
/**
 * Heuristically determine the Hibernate Type for a class: prefer a basic
 * type; if none (or only the serializable fallback) is found, check whether
 * the class is a mapped entity.
 *
 * @param clazz the class to type
 * @return the resolved Type
 * @throws HibernateException if no type can be determined at all
 */
private Type guessType(Class clazz) throws HibernateException {
	final String typename = clazz.getName();
	final Type type = TypeFactory.heuristicType( typename );
	// instanceof already implies non-null, so the extra null check was redundant
	final boolean serializable = type instanceof SerializableType;
	if ( type == null || serializable ) {
		try {
			// probe whether the class is actually a mapped entity
			session.getFactory().getEntityPersister( clazz.getName() );
		}
		catch ( MappingException me ) {
			if ( serializable ) {
				// not an entity, but we can fall back to the serializable type
				return type;
			}
			throw new HibernateException( "Could not determine a type for class: " + typename );
		}
		return Hibernate.entity( clazz );
	}
	return type;
}
/** * Find the function return type given the function name and the first argument expression node. * * @param functionName The function name. * @param first The first argument expression. * @return the function return type given the function name and the first argument expression node. */ public Type findFunctionReturnType(String functionName, AST first) { // locate the registered function by the given name SQLFunction sqlFunction = requireSQLFunction( functionName ); // determine the type of the first argument... Type argumentType = null; if ( first != null ) { if ( "cast".equals(functionName) ) { argumentType = TypeFactory.heuristicType( first.getNextSibling().getText() ); } else if ( first instanceof SqlNode ) { argumentType = ( (SqlNode) first ).getDataType(); } } return sqlFunction.getReturnType( argumentType, sfi ); }
@Override public Type getType() throws MappingException { // TODO : temporary initial step towards HHH-1907 final ComponentMetamodel metamodel = new ComponentMetamodel( this ); final TypeFactory factory = getMappings().getTypeResolver().getTypeFactory(); return isEmbedded() ? factory.embeddedComponent( metamodel ) : factory.component( metamodel ); }
/**
 * Bind an &lt;any&gt; mapping element: resolve its identifier type, its
 * meta-type, any explicit meta-value (discriminator value -&gt; entity name)
 * mappings, and its columns.
 */
public static void bindAny(Element node, Any any, boolean isNullable, Mappings mappings)
		throws MappingException {
	any.setIdentifierType( getTypeFromXML( node ) );
	Attribute metaAttribute = node.attribute( "meta-type" );
	if ( metaAttribute != null ) {
		any.setMetaType( metaAttribute.getValue() );
		// explicit <meta-value> children map discriminator values to entity names
		Iterator iter = node.elementIterator( "meta-value" );
		if ( iter.hasNext() ) {
			HashMap values = new HashMap();
			org.hibernate.type.Type metaType = TypeFactory.heuristicType( any.getMetaType() );
			while ( iter.hasNext() ) {
				Element metaValue = (Element) iter.next();
				try {
					// the meta-type must be a DiscriminatorType to convert the string value
					Object value = ( (DiscriminatorType) metaType ).stringToObject( metaValue
							.attributeValue( "value" ) );
					String entityName = getClassName( metaValue.attribute( "class" ), mappings );
					values.put( value, entityName );
				}
				catch (ClassCastException cce) {
					throw new MappingException( "meta-type was not a DiscriminatorType: "
							+ metaType.getName() );
				}
				catch (Exception e) {
					throw new MappingException( "could not interpret meta-value", e );
				}
			}
			any.setMetaValues( values );
		}
	}
	bindColumns( node, any, isNullable, false, null, mappings );
}
/**
 * Build a ResultSetMappingDefinition given a containing element for the "return-XXX" elements
 *
 * @param resultSetElem The element containing the return definitions.
 * @param path Prefix (if any) used to qualify the result-set name.
 * @param mappings The current processing state.
 * @return The description of the mappings...
 */
protected static ResultSetMappingDefinition buildResultSetMappingDefinition(Element resultSetElem, String path, Mappings mappings) {
	String resultSetName = resultSetElem.attribute( "name" ).getValue();
	if ( path != null ) {
		resultSetName = path + '.' + resultSetName;
	}
	ResultSetMappingDefinition definition = new ResultSetMappingDefinition( resultSetName );
	int cnt = 0;
	Iterator returns = resultSetElem.elementIterator();
	while ( returns.hasNext() ) {
		cnt++;
		Element returnElem = (Element) returns.next();
		String name = returnElem.getName();
		if ( "return-scalar".equals( name ) ) {
			String column = returnElem.attributeValue( "column" );
			String typeFromXML = HbmBinder.getTypeFromXML( returnElem );
			Type type = null;
			if ( typeFromXML != null ) {
				type = TypeFactory.heuristicType( typeFromXML );
				if ( type == null ) {
					// report the unresolvable type NAME; previously the message
					// concatenated `type`, which is provably null at this point
					throw new MappingException( "could not determine type " + typeFromXML );
				}
			}
			definition.addQueryReturn( new NativeSQLQueryScalarReturn( column, type ) );
		}
		else if ( "return".equals( name ) ) {
			definition.addQueryReturn( bindReturn( returnElem, mappings, cnt ) );
		}
		else if ( "return-join".equals( name ) ) {
			definition.addQueryReturn( bindReturnJoin( returnElem, mappings ) );
		}
		else if ( "load-collection".equals( name ) ) {
			definition.addQueryReturn( bindLoadCollection( returnElem, mappings ) );
		}
	}
	return definition;
}
/**
 * Resolve the collection Type: a custom collection type when an explicit
 * type name was configured, otherwise the subclass-specific default.
 */
public CollectionType getCollectionType() {
	if ( typeName != null ) {
		return TypeFactory.customCollection(
				typeName,
				typeParameters,
				role,
				referencedPropertyName,
				isEmbedded()
		);
	}
	return getDefaultCollectionType();
}
/**
 * Build a many-to-one type for the referenced entity with no referenced
 * property name and with lazy / proxy-unwrapping both disabled.
 */
private EntityType getEntityType() {
	final String referencedEntityName = getReferencedEntityName();
	return TypeFactory.manyToOne(
			referencedEntityName,
			null,   // no referenced property name
			false,  // not lazy
			false,  // no proxy unwrapping
			isEmbedded(),
			isIgnoreNotFound()
	);
}
/**
 * Default map type: sorted map when a comparator applies, ordered map
 * when an ordering is configured, plain map otherwise.
 */
public CollectionType getDefaultCollectionType() {
	if ( isSorted() ) {
		return TypeFactory.sortedMap( getRole(), getReferencedPropertyName(), isEmbedded(), getComparator() );
	}
	if ( hasOrder() ) {
		return TypeFactory.orderedMap( getRole(), getReferencedPropertyName(), isEmbedded() );
	}
	return TypeFactory.map( getRole(), getReferencedPropertyName(), isEmbedded() );
}
/**
 * Default set type: sorted set when a comparator applies, ordered set
 * when an ordering is configured, plain set otherwise.
 */
public CollectionType getDefaultCollectionType() {
	if ( isSorted() ) {
		return TypeFactory.sortedSet( getRole(), getReferencedPropertyName(), isEmbedded(), getComparator() );
	}
	if ( hasOrder() ) {
		return TypeFactory.orderedSet( getRole(), getReferencedPropertyName(), isEmbedded() );
	}
	return TypeFactory.set( getRole(), getReferencedPropertyName(), isEmbedded() );
}
/**
 * Build the AnyType from the configured meta and identifier type names.
 * When explicit meta-values are present the meta type is wrapped in a
 * MetaType carrying the value-to-entity mapping.
 */
public Type getType() throws MappingException {
	final Type metaType = TypeFactory.heuristicType( metaTypeName );
	final Type discriminatorType = ( metaValues == null )
			? metaType
			: new MetaType( metaValues, metaType );
	return new AnyType( discriminatorType, TypeFactory.heuristicType( identifierTypeName ) );
}
/**
 * Resolve this value's Type heuristically from its type name.
 *
 * @throws MappingException if no type name is set or the name cannot be resolved
 */
public Type getType() throws MappingException {
	if ( typeName == null ) {
		throw new MappingException( "No type name" );
	}
	final Type result = TypeFactory.heuristicType( typeName, typeParameters );
	if ( result != null ) {
		return result;
	}
	// build a diagnostic message, including the columns when known
	String msg = "Could not determine type for: " + typeName;
	if ( columns != null && columns.size() > 0 ) {
		msg += ", for columns: " + columns;
	}
	throw new MappingException( msg );
}
/**
 * Build the many-to-one mapping Type for this association.
 */
public Type getType() throws MappingException {
	final String referencedEntityName = getReferencedEntityName();
	final String referencedPropertyName = getReferencedPropertyName();
	return TypeFactory.manyToOne(
			referencedEntityName,
			referencedPropertyName,
			isLazy(),
			isUnwrapProxy(),
			isEmbedded(),
			isIgnoreNotFound()
	);
}
/**
 * Build a join walker for a Criteria query: projection queries take their
 * result types from the translator; entity queries return the root entity
 * as a single many-to-one result type.
 */
public CriteriaJoinWalker(
		final OuterJoinLoadable persister,
		final CriteriaQueryTranslator translator,
		final SessionFactoryImplementor factory,
		final CriteriaImpl criteria,
		final String rootEntityName,
		final Map enabledFilters)
		throws HibernateException {
	super(persister, factory, enabledFilters);
	this.translator = translator;
	querySpaces = translator.getQuerySpaces();
	if ( translator.hasProjection() ) {
		resultTypes = translator.getProjectedTypes();
		initProjection(
				translator.getSelect(),
				translator.getWhereCondition(),
				translator.getOrderBy(),
				translator.getGroupBy(),
				LockMode.NONE
		);
	}
	else {
		// no projection: the query returns the root entity itself
		resultTypes = new Type[] { TypeFactory.manyToOne( persister.getEntityName() ) };
		initAll( translator.getWhereCondition(), translator.getOrderBy(), LockMode.NONE );
	}
	userAliasList.add( criteria.getAlias() ); //root entity comes *last*
	userAliases = ArrayHelper.toStringArray(userAliasList);
}
/**
 * Determine the Hibernate Type of a result-set column from its JDBC metadata.
 *
 * @param columnPos the 1-based column position
 * @return the heuristically resolved Hibernate type
 * @throws SQLException on metadata access failure
 */
public Type getHibernateType(int columnPos) throws SQLException {
	int columnType = resultSetMetaData.getColumnType( columnPos );
	int scale = resultSetMetaData.getScale( columnPos );
	int precision = resultSetMetaData.getPrecision( columnPos );
	// NOTE(review): precision is passed twice — once in the "length" slot and
	// once in the "precision" slot of getHibernateTypeName. JDBC metadata has
	// no separate length, and for character columns the reported precision is
	// the column length, so this is presumably intentional — confirm against
	// Dialect.getHibernateTypeName's parameter order.
	return TypeFactory.heuristicType(
			factory.getDialect().getHibernateTypeName(
					columnType,
					precision,
					precision,
					scale
			)
	);
}
/**
 * Re-assemble an entity's property state from cached (disassembled) values,
 * fire the pre-load listeners, then inject the state into the instance.
 */
private static Object[] assemble(
		final Serializable[] values,
		final Object result,
		final Serializable id,
		final EntityPersister persister,
		final Interceptor interceptor,
		final EventSource session) throws HibernateException {
	//assembled state gets put in a new array (we read from cache by value!)
	Object[] assembledProps = TypeFactory.assemble(
			values,
			persister.getPropertyTypes(),
			session, result
	);
	//persister.setIdentifier(result, id); //before calling interceptor, for consistency with normal load
	//TODO: reuse the PreLoadEvent
	PreLoadEvent preLoadEvent = new PreLoadEvent( session )
			.setEntity(result)
			.setState(assembledProps)
			.setId(id)
			.setPersister(persister);
	// listeners see the assembled state BEFORE it is pushed into the entity
	PreLoadEventListener[] listeners = session.getListeners().getPreLoadEventListeners();
	for ( int i = 0; i < listeners.length; i++ ) {
		listeners[i].onPreLoad(preLoadEvent);
	}
	persister.setPropertyValues(
			result,
			assembledProps,
			session.getEntityMode()
	);
	return assembledProps;
}
/**
 * Store query results in the query cache: slot 0 of the cacheable list holds
 * the caching timestamp; each subsequent slot holds a disassembled result row.
 *
 * @return false when an empty natural-key lookup result was not cached, true otherwise
 */
public boolean put(
		QueryKey key,
		Type[] returnTypes,
		List result,
		boolean isNaturalKeyLookup,
		SessionImplementor session) throws HibernateException {
	// an empty natural-key lookup result is never cached
	if ( isNaturalKeyLookup && result.size() == 0 ) {
		return false;
	}
	final Long ts = new Long( session.getTimestamp() );
	if ( log.isDebugEnabled() ) {
		log.debug( "caching query results in region: " + regionName + "; timestamp=" + ts );
	}
	final List cacheable = new ArrayList( result.size() + 1 );
	cacheable.add( ts );
	final boolean singleColumn = returnTypes.length == 1;
	for ( int i = 0; i < result.size(); i++ ) {
		final Object row = result.get( i );
		if ( singleColumn ) {
			cacheable.add( returnTypes[0].disassemble( row, session, null ) );
		}
		else {
			cacheable.add( TypeFactory.disassemble( (Object[]) row, returnTypes, null, session, null ) );
		}
	}
	queryCache.put( key, cacheable );
	return true;
}
private Object[] createDeletedState(EntityPersister persister, Object[] currentState, EventSource session) { Type[] propTypes = persister.getPropertyTypes(); final Object[] deletedState = new Object[propTypes.length]; // TypeFactory.deepCopy( currentState, propTypes, persister.getPropertyUpdateability(), deletedState, session ); boolean[] copyability = new boolean[propTypes.length]; java.util.Arrays.fill( copyability, true ); TypeFactory.deepCopy( currentState, propTypes, copyability, deletedState, session ); return deletedState; }
protected void copyValues( final EntityPersister persister, final Object entity, final Object target, final SessionImplementor source, final Map copyCache, final ForeignKeyDirection foreignKeyDirection) { final Object[] copiedValues; if ( foreignKeyDirection == ForeignKeyDirection.FOREIGN_KEY_TO_PARENT ) { // this is the second pass through on a merge op, so here we limit the // replacement to associations types (value types were already replaced // during the first pass) copiedValues = TypeFactory.replaceAssociations( persister.getPropertyValues( entity, source.getEntityMode() ), persister.getPropertyValues( target, source.getEntityMode() ), persister.getPropertyTypes(), source, target, copyCache, foreignKeyDirection ); } else { copiedValues = TypeFactory.replace( persister.getPropertyValues( entity, source.getEntityMode() ), persister.getPropertyValues( target, source.getEntityMode() ), persister.getPropertyTypes(), source, target, copyCache, foreignKeyDirection ); } persister.setPropertyValues( target, copiedValues, source.getEntityMode() ); }
public void setText(String s) { // for some reason the antlr.CommonAST initialization routines force // this method to get called twice. The first time with an empty string if ( StringHelper.isNotEmpty( s ) ) { constantExpression = s; constantValue = ReflectHelper.getConstantValue( s ); heuristicType = TypeFactory.heuristicType( constantValue.getClass().getName() ); super.setText( s ); } }
/**
 * Continue path parsing from the far side of a many-to-many association:
 * allocate an alias for the target entity, register its type, and add the
 * join on the given columns.
 *
 * @return the alias allocated for the target entity
 */
String continueFromManyToMany(String entityName, String[] joinColumns, QueryTranslatorImpl q)
		throws QueryException {
	start( q );
	continuation = true;
	// allocate a fresh alias for the association target entity
	currentName = q.createNameFor( entityName );
	q.addType( currentName, entityName );
	Queryable classPersister = q.getEntityPersister( entityName );
	//QueryJoinFragment join = q.createJoinFragment(useThetaStyleJoin);
	addJoin( currentName, TypeFactory.manyToOne( entityName ), joinColumns );
	// subsequent property resolution happens against the target persister
	currentPropertyMapping = classPersister;
	return currentName;
}
/**
 * Default collection type for an array mapping, parameterized on the
 * array's element class.
 */
public CollectionType getDefaultCollectionType() throws MappingException {
	final String role = getRole();
	final String referencedPropertyName = getReferencedPropertyName();
	return TypeFactory.array( role, referencedPropertyName, isEmbedded(), getElementClass() );
}
/**
 * Default collection type for a list mapping.
 */
public CollectionType getDefaultCollectionType() throws MappingException {
	final String role = getRole();
	final String referencedPropertyName = getReferencedPropertyName();
	return TypeFactory.list( role, referencedPropertyName, isEmbedded() );
}
/**
 * Default collection type for a bag mapping.
 */
public CollectionType getDefaultCollectionType() {
	final String role = getRole();
	final String referencedPropertyName = getReferencedPropertyName();
	return TypeFactory.bag( role, referencedPropertyName, isEmbedded() );
}
/**
 * Default collection type for an id-bag mapping.
 */
public CollectionType getDefaultCollectionType() {
	final String role = getRole();
	final String referencedPropertyName = getReferencedPropertyName();
	return TypeFactory.idbag( role, referencedPropertyName, isEmbedded() );
}
/**
 * Look up cached query results: slot 0 of the cached list is the caching
 * timestamp (checked for freshness against the query spaces, except for
 * natural-key lookups); remaining slots are disassembled rows which are
 * first before-assembled, then assembled into the result list.
 *
 * @return the assembled results, or null on miss / staleness / unresolvable rows
 */
public List get(
		QueryKey key,
		Type[] returnTypes,
		boolean isNaturalKeyLookup,
		Set spaces,
		SessionImplementor session) throws HibernateException {
	if ( log.isDebugEnabled() ) {
		log.debug("checking cached query results in region: " + regionName);
	}
	List cacheable = (List) queryCache.get(key);
	if (cacheable==null) {
		log.debug("query results were not found in cache");
		return null;
	}
	Long timestamp = (Long) cacheable.get(0);
	// natural-key lookups skip the up-to-date check
	if ( !isNaturalKeyLookup && !isUpToDate(spaces, timestamp) ) {
		log.debug("cached query results were not up to date");
		return null;
	}
	log.debug("returning cached query results");
	// first pass: before-assemble every row (index 0 is the timestamp)
	for ( int i=1; i<cacheable.size(); i++ ) {
		if ( returnTypes.length==1 ) {
			returnTypes[0].beforeAssemble( (Serializable) cacheable.get(i), session );
		}
		else {
			TypeFactory.beforeAssemble( (Serializable[]) cacheable.get(i), returnTypes, session );
		}
	}
	// second pass: assemble rows into the result list
	List result = new ArrayList( cacheable.size()-1 );
	for ( int i=1; i<cacheable.size(); i++ ) {
		try {
			if ( returnTypes.length==1 ) {
				result.add( returnTypes[0].assemble( (Serializable) cacheable.get(i), session, null ) );
			}
			else {
				result.add( TypeFactory.assemble( (Serializable[]) cacheable.get(i), returnTypes, session, null ) );
			}
		}
		catch (UnresolvableObjectException uoe) {
			if (isNaturalKeyLookup) {
				//TODO: not really completely correct, since
				//      the uoe could occur while resolving
				//      associations, leaving the PC in an
				//      inconsistent state
				log.debug("could not reassemble cached result set");
				// evict the stale entry and report a miss
				queryCache.remove(key);
				return null;
			}
			else {
				throw uoe;
			}
		}
	}
	return result;
}
/**
 * Materialize an entity instance from a second-level cache entry: register
 * an uninitialized placeholder (circular-reference safe), assemble and
 * deep-copy the property state, add the persistence-context entry, then
 * fire the post-load listeners.
 */
private Object assembleCacheEntry(
		final CacheEntry entry,
		final Serializable id,
		final EntityPersister persister,
		final LoadEvent event) throws HibernateException {
	final Object optionalObject = event.getInstanceToLoad();
	final EventSource session = event.getSession();
	final SessionFactoryImplementor factory = session.getFactory();

	if ( log.isTraceEnabled() ) {
		log.trace(
				"assembling entity from second-level cache: " +
				MessageHelper.infoString( persister, id, factory )
		);
	}

	// use the subclass persister recorded in the cache entry, which may be
	// more specific than the requested persister
	EntityPersister subclassPersister = factory.getEntityPersister( entry.getSubclass() );
	Object result = optionalObject == null ?
			session.instantiate( subclassPersister, id ) : optionalObject;

	// make it circular-reference safe
	TwoPhaseLoad.addUninitializedCachedEntity(
			new EntityKey( id, subclassPersister, session.getEntityMode() ),
			result,
			subclassPersister,
			LockMode.NONE,
			entry.areLazyPropertiesUnfetched(),
			entry.getVersion(),
			session
	);

	Type[] types = subclassPersister.getPropertyTypes();
	Object[] values = entry.assemble( result, id, subclassPersister, session.getInterceptor(), session ); // intializes result by side-effect
	TypeFactory.deepCopy(
			values,
			types,
			subclassPersister.getPropertyUpdateability(),
			values,
			session
	);

	Object version = Versioning.getVersion( values, subclassPersister );
	if ( log.isTraceEnabled() ) log.trace( "Cached Version: " + version );

	final PersistenceContext persistenceContext = session.getPersistenceContext();
	persistenceContext.addEntry(
			result,
			Status.MANAGED,
			values,
			null,
			id,
			version,
			LockMode.NONE,
			true,
			subclassPersister,
			false,
			entry.areLazyPropertiesUnfetched()
	);
	subclassPersister.afterInitialize( result, entry.areLazyPropertiesUnfetched(), session );
	persistenceContext.initializeNonLazyCollections();
	// upgrade the lock if necessary:
	//lock(result, lockMode);

	//PostLoad is needed for EJB3
	//TODO: reuse the PostLoadEvent...
	PostLoadEvent postLoadEvent = new PostLoadEvent(session).setEntity(result)
			.setId(id).setPersister(persister);
	PostLoadEventListener[] listeners = session.getListeners().getPostLoadEventListeners();
	for ( int i = 0; i < listeners.length; i++ ) {
		listeners[i].onPostLoad(postLoadEvent);
	}

	return result;
}
/**
 * Associates a given entity (either transient or associated with another session) to
 * the given session.
 *
 * @param event The event triggering the re-association
 * @param object The entity to be associated
 * @param id The id of the entity.
 * @param persister The entity's persister instance.
 *
 * @return An EntityEntry representing the entity within this session.
 */
protected final EntityEntry reassociate(AbstractEvent event, Object object, Serializable id, EntityPersister persister) {
	if ( log.isTraceEnabled() ) {
		log.trace(
				"reassociating transient instance: " +
				MessageHelper.infoString( persister, id, event.getSession().getFactory() )
		);
	}
	EventSource source = event.getSession();
	EntityKey key = new EntityKey( id, persister, source.getEntityMode() );
	// fail fast if another instance with this key is already associated
	source.getPersistenceContext().checkUniqueness( key, object );
	//get a snapshot
	Object[] values = persister.getPropertyValues( object, source.getEntityMode() );
	// deep-copy in place so the snapshot is detached from the live values
	TypeFactory.deepCopy(
			values,
			persister.getPropertyTypes(),
			persister.getPropertyUpdateability(),
			values,
			source
	);
	Object version = Versioning.getVersion( values, persister );
	EntityEntry newEntry = source.getPersistenceContext().addEntity(
			object,
			Status.MANAGED,
			values,
			key,
			version,
			LockMode.NONE,
			true,
			persister,
			false,
			true //will be ignored, using the existing Entry instead
	);
	// cascade the lock/reassociation to reachable collections
	new OnLockVisitor( source, id, object ).process( object, persister );
	persister.afterReassociate( object, source );
	return newEntry;
}
/**
 * Resolve the select type for this from-element's entity, or null when no
 * entity type applies; shallow queries get a shallow many-to-one type.
 */
public Type getSelectType() {
	if ( entityType == null ) {
		return null;
	}
	final boolean shallow = fromElement.getFromClause().getWalker().isShallowQuery();
	return TypeFactory.manyToOne( entityType.getAssociatedEntityName(), shallow );
}
/**
 * Replace a dot-node path with the resolved constant value: pick the SQL
 * token type matching the value's Java class, derive the Hibernate type,
 * and render the value as a SQL literal for the current dialect.
 */
private void setConstantValue(DotNode node, String text, Object value) {
	if ( log.isDebugEnabled() ) {
		log.debug( "setConstantValue() " + text + " -> " + value + " " + value.getClass().getName() );
	}
	node.setFirstChild( null );	// Chop off the rest of the tree.
	// map the constant's Java class to the matching SQL token type
	if ( value instanceof String ) {
		node.setType( SqlTokenTypes.QUOTED_STRING );
	}
	else if ( value instanceof Character ) {
		node.setType( SqlTokenTypes.QUOTED_STRING );
	}
	else if ( value instanceof Byte ) {
		node.setType( SqlTokenTypes.NUM_INT );
	}
	else if ( value instanceof Short ) {
		node.setType( SqlTokenTypes.NUM_INT );
	}
	else if ( value instanceof Integer ) {
		node.setType( SqlTokenTypes.NUM_INT );
	}
	else if ( value instanceof Long ) {
		node.setType( SqlTokenTypes.NUM_LONG );
	}
	else if ( value instanceof Double ) {
		node.setType( SqlTokenTypes.NUM_DOUBLE );
	}
	else if ( value instanceof Float ) {
		node.setType( SqlTokenTypes.NUM_FLOAT );
	}
	else {
		node.setType( SqlTokenTypes.CONSTANT );
	}
	Type type;
	try {
		type = TypeFactory.heuristicType( value.getClass().getName() );
	}
	catch ( MappingException me ) {
		throw new QueryException( me );
	}
	if ( type == null ) {
		throw new QueryException( QueryTranslator.ERROR_CANNOT_DETERMINE_TYPE + node.getText() );
	}
	try {
		// render the value as a dialect-specific SQL literal
		LiteralType literalType = ( LiteralType ) type;
		Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
		node.setText( literalType.objectToSQLString( value, dialect ) );
	}
	catch ( Exception e ) {
		throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + node.getText(), e );
	}
	node.setDataType( type );
	node.setResolvedConstant( text );
}
/**
 * Dispatch a single HQL token: path expression, named parameter, class-name
 * discriminator test, Java constant literal, or (possibly negated) plain token.
 */
private void doToken(String token, QueryTranslatorImpl q) throws QueryException {
	if ( q.isName( StringHelper.root( token ) ) ) { //path expression
		doPathExpression( q.unalias( token ), q );
	}
	else if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) { //named query parameter
		q.addNamedParameter( token.substring( 1 ) );
		appendToken( q, "?" );
	}
	else {
		Queryable persister = q.getEntityPersisterUsingImports( token );
		if ( persister != null ) { // the name of a class
			// a bare class name becomes a discriminator-value comparison
			final String discrim = persister.getDiscriminatorSQLValue();
			if ( InFragment.NULL.equals(discrim) || InFragment.NOT_NULL.equals(discrim) ) {
				throw new QueryException( "subclass test not allowed for null or not null discriminator" );
			}
			else {
				appendToken( q, discrim );
			}
		}
		else {
			Object constant;
			// a dotted token may resolve to a Java constant (e.g. an enum/static field)
			if (
					token.indexOf( '.' ) > -1 &&
					( constant = ReflectHelper.getConstantValue( token ) ) != null
			) {
				Type type;
				try {
					type = TypeFactory.heuristicType( constant.getClass().getName() );
				}
				catch ( MappingException me ) {
					throw new QueryException( me );
				}
				if ( type == null ) throw new QueryException( QueryTranslator.ERROR_CANNOT_DETERMINE_TYPE + token );
				try {
					// render the constant as a dialect-specific SQL literal
					appendToken( q, ( ( LiteralType ) type ).objectToSQLString( constant, q.getFactory().getDialect() ) );
				}
				catch ( Exception e ) {
					throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + token, e );
				}
			}
			else { //anything else
				// apply the negation rewrite unless we are inside a BETWEEN
				// special case that would wrongly flip "or"
				String negatedToken = negated ? ( String ) NEGATIONS.get( token.toLowerCase() ) : null;
				if ( negatedToken != null && ( !betweenSpecialCase || !"or".equals( negatedToken ) ) ) {
					appendToken( q, negatedToken );
				}
				else {
					appendToken( q, token );
				}
			}
		}
	}
}
/**
 * Build a custom Hibernate Type for the given enum-type implementation,
 * parameterized with the enum class name.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
protected static <E extends AbstractEnumType<?>, T extends Enum<?>> Type getType(final Class<E> enumType, final Class<T> enumClass) {
	final Properties parameters = new Properties();
	parameters.setProperty( "enumClassName", enumClass.getName() );
	return new TypeFactory().custom( (Class) enumType, parameters );
}