/**
 * Handles a collection encountered during flush whose owner was never
 * loaded/referenced in this session: treat the loaded persister/key as the
 * current ones and hand the collection to {@code prepareCollectionForUpdate}.
 */
private static void processNeverReferencedCollection(PersistentCollection coll, SessionImplementor session)
		throws HibernateException {
	final PersistenceContext context = session.getPersistenceContext();
	final CollectionEntry collectionEntry = context.getCollectionEntry( coll );

	if ( LOG.isDebugEnabled() ) {
		LOG.debugf(
				"Found collection with unloaded owner: %s",
				MessageHelper.collectionInfoString(
						collectionEntry.getLoadedPersister(),
						coll,
						collectionEntry.getLoadedKey(),
						session
				)
		);
	}

	// the owner was never referenced, so the "current" state is simply the loaded state
	collectionEntry.setCurrentPersister( collectionEntry.getLoadedPersister() );
	collectionEntry.setCurrentKey( collectionEntry.getLoadedKey() );

	prepareCollectionForUpdate( coll, collectionEntry, session.getFactory() );
}
/**
 * Fires the post-load event for an entity.
 * <p>
 * PostLoad cannot occur during initializeEntity, as that call occurs *before*
 * the Set collections are added to the persistence context by Loader.
 * Without the split, LazyInitializationExceptions can occur in the Entity's
 * postLoad if it acts upon the collection (HHH-6043).
 *
 * @param entity The entity
 * @param session The Session
 * @param postLoadEvent The (re-used) post-load event
 */
public static void postLoad(
		final Object entity,
		final SessionImplementor session,
		final PostLoadEvent postLoadEvent) {
	if ( session.isEventSource() ) {
		final EntityEntry entry = session.getPersistenceContext().getEntry( entity );

		// populate the re-used event before dispatching it
		postLoadEvent.setEntity( entity )
				.setId( entry.getId() )
				.setPersister( entry.getPersister() );

		final EventListenerGroup<PostLoadEventListener> listeners = session.getFactory()
				.getServiceRegistry()
				.getService( EventListenerRegistry.class )
				.getEventListenerGroup( EventType.POST_LOAD );
		for ( PostLoadEventListener listener : listeners.listeners() ) {
			listener.onPostLoad( postLoadEvent );
		}
	}
}
/**
 * Given a proxy already present in the session cache, initialize it and/or
 * narrow it provided either is necessary.
 *
 * @param event The initiating load request event
 * @param persister The persister corresponding to the entity to be loaded
 * @param keyToLoad The key of the entity to be loaded
 * @param options The defined load options
 * @param persistenceContext The originating session's persistence context
 * @param proxy The proxy to narrow
 *
 * @return The created/existing proxy
 */
private Object returnNarrowedProxy(
		final LoadEvent event,
		final EntityPersister persister,
		final EntityKey keyToLoad,
		final LoadEventListener.LoadType options,
		final PersistenceContext persistenceContext,
		final Object proxy) {
	LOG.trace( "Entity proxy found in session cache" );

	final LazyInitializer initializer = ( (HibernateProxy) proxy ).getHibernateLazyInitializer();
	if ( initializer.isUnwrap() ) {
		return initializer.getImplementation();
	}

	Object implementation = null;
	if ( !options.isAllowProxyCreation() ) {
		// a real instance is required; force the load now
		implementation = load( event, persister, keyToLoad, options );
		if ( implementation == null ) {
			event.getSession()
					.getFactory()
					.getEntityNotFoundDelegate()
					.handleEntityNotFound( persister.getEntityName(), keyToLoad.getIdentifier() );
		}
	}

	return persistenceContext.narrowProxy( proxy, persister, keyToLoad, implementation );
}
/**
 * Logs (at debug level) a summary of the queued actions and the managed
 * entities/collections after a flush.
 */
@SuppressWarnings( value = {"unchecked"} )
private void logFlushResults(FlushEvent event) {
	if ( !LOG.isDebugEnabled() ) {
		return;
	}
	final EventSource session = event.getSession();
	final PersistenceContext context = session.getPersistenceContext();
	LOG.debugf(
			"Flushed: %s insertions, %s updates, %s deletions to %s objects",
			session.getActionQueue().numberOfInsertions(),
			session.getActionQueue().numberOfUpdates(),
			session.getActionQueue().numberOfDeletions(),
			context.getNumberOfManagedEntities()
	);
	LOG.debugf(
			"Flushed: %s (re)creations, %s updates, %s removals to %s collections",
			session.getActionQueue().numberOfCollectionCreations(),
			session.getActionQueue().numberOfCollectionUpdates(),
			session.getActionQueue().numberOfCollectionRemovals(),
			context.getCollectionEntries().size()
	);
	new EntityPrinter( session.getFactory() ).toString( context.getEntitiesByKey().entrySet() );
}
/** * process cascade save/update at the start of a flush to discover * any newly referenced entity that must be passed to saveOrUpdate(), * and also apply orphan delete */ private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext) throws HibernateException { LOG.debug( "Processing flush-time cascades" ); final Object anything = getAnything(); //safe from concurrent modification because of how concurrentEntries() is implemented on IdentityMap for ( Map.Entry<Object,EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) { // for ( Map.Entry me : IdentityMap.concurrentEntries( persistenceContext.getEntityEntries() ) ) { EntityEntry entry = (EntityEntry) me.getValue(); Status status = entry.getStatus(); if ( status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY ) { cascadeOnFlush( session, entry.getPersister(), me.getKey(), anything ); } } }
public List doQueryAndInitializeNonLazyCollections( final SessionImplementor session, final QueryParameters queryParameters, final boolean returnProxies, final ResultTransformer forcedResultTransformer) throws HibernateException, SQLException { final PersistenceContext persistenceContext = session.getPersistenceContext(); boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly(); if ( queryParameters.isReadOnlyInitialized() ) { // The read-only/modifiable mode for the query was explicitly set. // Temporarily set the default read-only/modifiable setting to the query's setting. persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() ); } else { // The read-only/modifiable setting for the query was not initialized. // Use the default read-only/modifiable from the persistence context instead. queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() ); } persistenceContext.beforeLoad(); List result; try { try { result = doQuery( session, queryParameters, returnProxies, forcedResultTransformer ); } finally { persistenceContext.afterLoad(); } persistenceContext.initializeNonLazyCollections(); } finally { // Restore the original default persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig ); } return result; }
private CollectionInitializer getSubselectInitializer(Serializable key, SessionImplementor session) { if ( !isSubselectLoadable() ) { return null; } final PersistenceContext persistenceContext = session.getPersistenceContext(); SubselectFetch subselect = persistenceContext.getBatchFetchQueue() .getSubselect( session.generateEntityKey( key, getOwnerEntityPersister() ) ); if ( subselect == null ) { return null; } else { // Take care of any entities that might have // been evicted! Iterator iter = subselect.getResult().iterator(); while ( iter.hasNext() ) { if ( !persistenceContext.containsEntity( (EntityKey) iter.next() ) ) { iter.remove(); } } // Run a subquery loader return createSubselectInitializer( subselect, session ); } }
@Override public void setIdentifier(Object entity, Serializable id, EntityMode entityMode, SessionImplementor session) { final Object[] extractedValues = mappedIdentifierType.getPropertyValues( id, entityMode ); final Object[] injectionValues = new Object[ extractedValues.length ]; final PersistenceContext persistenceContext = session.getPersistenceContext(); for ( int i = 0; i < virtualIdComponent.getSubtypes().length; i++ ) { final Type virtualPropertyType = virtualIdComponent.getSubtypes()[i]; final Type idClassPropertyType = mappedIdentifierType.getSubtypes()[i]; if ( virtualPropertyType.isEntityType() && ! idClassPropertyType.isEntityType() ) { if ( session == null ) { throw new AssertionError( "Deprecated version of getIdentifier (no session) was used but session was required" ); } final String associatedEntityName = ( (EntityType) virtualPropertyType ).getAssociatedEntityName(); final EntityKey entityKey = session.generateEntityKey( (Serializable) extractedValues[i], session.getFactory().getEntityPersister( associatedEntityName ) ); // it is conceivable there is a proxy, so check that first Object association = persistenceContext.getProxy( entityKey ); if ( association == null ) { // otherwise look for an initialized version association = persistenceContext.getEntity( entityKey ); } injectionValues[i] = association; } else { injectionValues[i] = extractedValues[i]; } } virtualIdComponent.setPropertyValues( entity, injectionValues, entityMode ); }
/**
 * Fires a ResolveNaturalIdEvent for the given natural-id parameter values and
 * returns the resolved entity id, or null when the event reports the
 * INVALID_NATURAL_ID_REFERENCE sentinel.
 */
protected final Serializable resolveNaturalId(Map<String, Object> naturalIdParameters) {
	performAnyNeededCrossReferenceSynchronizations();

	final ResolveNaturalIdEvent event =
			new ResolveNaturalIdEvent( naturalIdParameters, entityPersister, SessionImpl.this );
	fireResolveNaturalId( event );

	return event.getEntityId() == PersistenceContext.NaturalIdHelper.INVALID_NATURAL_ID_REFERENCE
			? null
			: event.getEntityId();
}
/**
 * Handle the given flush event: when the persistence context holds managed
 * entities or collection entries, flush everything to the execution queues,
 * perform the executions, and run the post-flush work.
 *
 * @param event The flush event to be handled.
 * @throws HibernateException
 */
public void onFlush(FlushEvent event) throws HibernateException {
	final EventSource source = event.getSession();
	final PersistenceContext persistenceContext = source.getPersistenceContext();

	final boolean hasWork = persistenceContext.getNumberOfManagedEntities() > 0
			|| persistenceContext.getCollectionEntries().size() > 0;
	if ( !hasWork ) {
		// nothing managed - nothing to flush
		return;
	}

	try {
		source.getEventListenerManager().flushStart();

		flushEverythingToExecutions( event );
		performExecutions( source );
		postFlush( source );
	}
	finally {
		source.getEventListenerManager().flushEnd(
				event.getNumberOfEntitiesProcessed(),
				event.getNumberOfCollectionsProcessed()
		);
	}

	postPostFlush( source );

	if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
		source.getFactory().getStatisticsImplementor().flush();
	}
}
/** * Based on configured options, will either return a pre-existing proxy, * generate a new proxy, or perform an actual load. * * @param event The initiating load request event * @param persister The persister corresponding to the entity to be loaded * @param keyToLoad The key of the entity to be loaded * @param options The defined load options * * @return The result of the proxy/load operation. */ protected Object proxyOrLoad( final LoadEvent event, final EntityPersister persister, final EntityKey keyToLoad, final LoadEventListener.LoadType options) { if ( LOG.isTraceEnabled() ) { LOG.tracev( "Loading entity: {0}", MessageHelper.infoString( persister, event.getEntityId(), event.getSession().getFactory() ) ); } // this class has no proxies (so do a shortcut) if ( !persister.hasProxy() ) { return load( event, persister, keyToLoad, options ); } final PersistenceContext persistenceContext = event.getSession().getPersistenceContext(); // look for a proxy Object proxy = persistenceContext.getProxy( keyToLoad ); if ( proxy != null ) { return returnNarrowedProxy( event, persister, keyToLoad, options, persistenceContext, proxy ); } if ( options.isAllowProxyCreation() ) { return createProxyIfNecessary( event, persister, keyToLoad, options, persistenceContext ); } // return a newly loaded object return load( event, persister, keyToLoad, options ); }
/** * If there is already a corresponding proxy associated with the * persistence context, return it; otherwise create a proxy, associate it * with the persistence context, and return the just-created proxy. * * @param event The initiating load request event * @param persister The persister corresponding to the entity to be loaded * @param keyToLoad The key of the entity to be loaded * @param options The defined load options * @param persistenceContext The originating session * * @return The created/existing proxy */ private Object createProxyIfNecessary( final LoadEvent event, final EntityPersister persister, final EntityKey keyToLoad, final LoadEventListener.LoadType options, final PersistenceContext persistenceContext) { Object existing = persistenceContext.getEntity( keyToLoad ); if ( existing != null ) { // return existing object or initialized proxy (unless deleted) LOG.trace( "Entity found in session cache" ); if ( options.isCheckDeleted() ) { EntityEntry entry = persistenceContext.getEntry( existing ); Status status = entry.getStatus(); if ( status == Status.DELETED || status == Status.GONE ) { return null; } } return existing; } LOG.trace( "Creating new proxy for entity" ); // return new uninitialized proxy Object proxy = persister.createProxy( event.getEntityId(), event.getSession() ); persistenceContext.getBatchFetchQueue().addBatchLoadableEntityKey( keyToLoad ); persistenceContext.addProxy( keyToLoad, proxy ); return proxy; }
/** * Coordinates the processing necessary to get things ready for executions * as db calls by preping the session caches and moving the appropriate * entities and collections to their respective execution queues. * * @param event The flush event. * @throws HibernateException Error flushing caches to execution queues. */ protected void flushEverythingToExecutions(FlushEvent event) throws HibernateException { LOG.trace( "Flushing session" ); EventSource session = event.getSession(); final PersistenceContext persistenceContext = session.getPersistenceContext(); session.getInterceptor().preFlush( new LazyIterator( persistenceContext.getEntitiesByKey() ) ); prepareEntityFlushes( session, persistenceContext ); // we could move this inside if we wanted to // tolerate collection initializations during // collection dirty checking: prepareCollectionFlushes( persistenceContext ); // now, any collections that are initialized // inside this block do not get updated - they // are ignored until the next flush persistenceContext.setFlushing( true ); try { int entityCount = flushEntities( event, persistenceContext ); int collectionCount = flushCollections( session, persistenceContext ); event.setNumberOfEntitiesProcessed( entityCount ); event.setNumberOfCollectionsProcessed( collectionCount ); } finally { persistenceContext.setFlushing(false); } //some statistics logFlushResults( event ); }
/** * Initialize the flags of the CollectionEntry, including the * dirty check. */ private void prepareCollectionFlushes(PersistenceContext persistenceContext) throws HibernateException { // Initialize dirty flags for arrays + collections with composite elements // and reset reached, doupdate, etc. LOG.debug( "Dirty checking collections" ); for ( Map.Entry<PersistentCollection,CollectionEntry> entry : IdentityMap.concurrentEntries( (Map<PersistentCollection,CollectionEntry>) persistenceContext.getCollectionEntries() )) { entry.getValue().preFlush( entry.getKey() ); } }
/** * 1. detect any dirty entities * 2. schedule any entity updates * 3. search out any reachable collections */ private int flushEntities(final FlushEvent event, final PersistenceContext persistenceContext) throws HibernateException { LOG.trace( "Flushing entities and processing referenced collections" ); final EventSource source = event.getSession(); final Iterable<FlushEntityEventListener> flushListeners = source.getFactory().getServiceRegistry() .getService( EventListenerRegistry.class ) .getEventListenerGroup( EventType.FLUSH_ENTITY ) .listeners(); // Among other things, updateReachables() will recursively load all // collections that are moving roles. This might cause entities to // be loaded. // So this needs to be safe from concurrent modification problems. final Map.Entry<Object,EntityEntry>[] entityEntries = persistenceContext.reentrantSafeEntityEntries(); final int count = entityEntries.length; for ( Map.Entry<Object,EntityEntry> me : entityEntries ) { // Update the status of the object and if necessary, schedule an update EntityEntry entry = me.getValue(); Status status = entry.getStatus(); if ( status != Status.LOADING && status != Status.GONE ) { final FlushEntityEvent entityEvent = new FlushEntityEvent( source, me.getKey(), entry ); for ( FlushEntityEventListener listener : flushListeners ) { listener.onFlushEntity( entityEvent ); } } } source.getActionQueue().sortActions(); return count; }
/**
 * 1. Recreate the collection key -> collection map
 * 2. rebuild the collection entries
 * 3. call Interceptor.postFlush()
 */
protected void postFlush(SessionImplementor session) throws HibernateException {
	LOG.trace( "Post flush" );

	final PersistenceContext persistenceContext = session.getPersistenceContext();
	persistenceContext.getCollectionsByKey().clear();

	// the database has changed now, so the subselect results need to be invalidated
	// the batch fetching queues should also be cleared - especially the collection batch fetching one
	persistenceContext.getBatchFetchQueue().clear();

	// NOTE: entries are removed from the underlying collection-entries map
	// while iterating; this relies on IdentityMap.concurrentEntries()
	// yielding entries that tolerate that removal (the iterated entrySet is
	// not backed by the map - see the comment below)
	for ( Map.Entry<PersistentCollection, CollectionEntry> me : IdentityMap.concurrentEntries( persistenceContext.getCollectionEntries() ) ) {
		CollectionEntry collectionEntry = me.getValue();
		PersistentCollection persistentCollection = me.getKey();
		collectionEntry.postFlush(persistentCollection);
		if ( collectionEntry.getLoadedPersister() == null ) {
			//if the collection is dereferenced, remove from the session cache
			//iter.remove(); //does not work, since the entrySet is not backed by the set
			persistenceContext.getCollectionEntries()
					.remove(persistentCollection);
		}
		else {
			//otherwise recreate the mapping between the collection and its key
			CollectionKey collectionKey = new CollectionKey(
					collectionEntry.getLoadedPersister(),
					collectionEntry.getLoadedKey()
			);
			persistenceContext.getCollectionsByKey().put(collectionKey, persistentCollection);
		}
	}
}
/** * Load an instance by a unique key that is not the primary key. * * @param entityName The name of the entity to load * @param uniqueKeyPropertyName The name of the property defining the uniqie key. * @param key The unique key property value. * @param session The originating session. * @return The loaded entity * @throws HibernateException generally indicates problems performing the load. */ public Object loadByUniqueKey( String entityName, String uniqueKeyPropertyName, Object key, SessionImplementor session) throws HibernateException { final SessionFactoryImplementor factory = session.getFactory(); UniqueKeyLoadable persister = ( UniqueKeyLoadable ) factory.getEntityPersister( entityName ); //TODO: implement caching?! proxies?! EntityUniqueKey euk = new EntityUniqueKey( entityName, uniqueKeyPropertyName, key, getIdentifierOrUniqueKeyType( factory ), persister.getEntityMode(), session.getFactory() ); final PersistenceContext persistenceContext = session.getPersistenceContext(); Object result = persistenceContext.getEntity( euk ); if ( result == null ) { result = persister.loadByUniqueKey( uniqueKeyPropertyName, key, session ); } return result == null ? null : persistenceContext.proxyFor( result ); }
public List doEntityBatchFetch( SessionImplementor session, QueryParameters queryParameters, Serializable[] ids) { final String sql = StringHelper.expandBatchIdPlaceholder( sqlTemplate, ids, alias, persister.getKeyColumnNames(), getFactory().getDialect() ); try { final PersistenceContext persistenceContext = session.getPersistenceContext(); boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly(); if ( queryParameters.isReadOnlyInitialized() ) { // The read-only/modifiable mode for the query was explicitly set. // Temporarily set the default read-only/modifiable setting to the query's setting. persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() ); } else { // The read-only/modifiable setting for the query was not initialized. // Use the default read-only/modifiable from the persistence context instead. queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() ); } persistenceContext.beforeLoad(); List results; try { try { results = doTheLoad( sql, queryParameters, session ); } finally { persistenceContext.afterLoad(); } persistenceContext.initializeNonLazyCollections(); log.debug( "Done batch load" ); return results; } finally { // Restore the original default persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig ); } } catch ( SQLException sqle ) { throw session.getFactory().getSQLExceptionHelper().convert( sqle, "could not load an entity batch: " + MessageHelper.infoString( getEntityPersisters()[0], ids, session.getFactory() ), sql ); } }
/** * For missing objects associated by one-to-one with another object in the * result set, register the fact that the the object is missing with the * session. */ private void registerNonExists( final EntityKey[] keys, final Loadable[] persisters, final SessionImplementor session) { final int[] owners = getOwners(); if ( owners != null ) { EntityType[] ownerAssociationTypes = getOwnerAssociationTypes(); for ( int i = 0; i < keys.length; i++ ) { int owner = owners[i]; if ( owner > -1 ) { EntityKey ownerKey = keys[owner]; if ( keys[i] == null && ownerKey != null ) { final PersistenceContext persistenceContext = session.getPersistenceContext(); /*final boolean isPrimaryKey; final boolean isSpecialOneToOne; if ( ownerAssociationTypes == null || ownerAssociationTypes[i] == null ) { isPrimaryKey = true; isSpecialOneToOne = false; } else { isPrimaryKey = ownerAssociationTypes[i].getRHSUniqueKeyPropertyName()==null; isSpecialOneToOne = ownerAssociationTypes[i].getLHSPropertyName()!=null; }*/ //TODO: can we *always* use the "null property" approach for everything? /*if ( isPrimaryKey && !isSpecialOneToOne ) { persistenceContext.addNonExistantEntityKey( new EntityKey( ownerKey.getIdentifier(), persisters[i], session.getEntityMode() ) ); } else if ( isSpecialOneToOne ) {*/ boolean isOneToOneAssociation = ownerAssociationTypes!=null && ownerAssociationTypes[i]!=null && ownerAssociationTypes[i].isOneToOne(); if ( isOneToOneAssociation ) { persistenceContext.addNullProperty( ownerKey, ownerAssociationTypes[i].getPropertyName() ); } /*} else { persistenceContext.addNonExistantEntityUniqueKey( new EntityUniqueKey( persisters[i].getEntityName(), ownerAssociationTypes[i].getRHSUniqueKeyPropertyName(), ownerKey.getIdentifier(), persisters[owner].getIdentifierType(), session.getEntityMode() ) ); }*/ } } } } }
/**
 * Read one collection element from the current row of the JDBC result set
 */
private void readCollectionElement(
		final Object optionalOwner,
		final Serializable optionalKey,
		final CollectionPersister persister,
		final CollectionAliases descriptor,
		final ResultSet rs,
		final SessionImplementor session)
		throws HibernateException, SQLException {

	final PersistenceContext persistenceContext = session.getPersistenceContext();

	// a non-null key indicates the current row holds an element of this collection
	final Serializable collectionRowKey = (Serializable) persister.readKey(
			rs,
			descriptor.getSuffixedKeyAliases(),
			session
	);

	if ( collectionRowKey != null ) {
		// we found a collection element in the result set

		if ( LOG.isDebugEnabled() ) {
			LOG.debugf(
					"Found row of collection: %s",
					MessageHelper.collectionInfoString( persister, collectionRowKey, getFactory() )
			);
		}

		Object owner = optionalOwner;
		if ( owner == null ) {
			owner = persistenceContext.getCollectionOwner( collectionRowKey, persister );
			if ( owner == null ) {
				//TODO: This is assertion is disabled because there is a bug that means the
				//	  original owner of a transient, uninitialized collection is not known
				//	  if the collection is re-referenced by a different object associated
				//	  with the current Session
				//throw new AssertionFailure("bug loading unowned collection");
			}
		}

		PersistentCollection rowCollection = persistenceContext.getLoadContexts()
				.getCollectionLoadContext( rs )
				.getLoadingCollection( persister, collectionRowKey );

		// null here means this collection is not currently being loaded in this context
		if ( rowCollection != null ) {
			rowCollection.readFrom( rs, persister, descriptor, owner );
		}
	}
	else if ( optionalKey != null ) {
		// we did not find a collection element in the result set, so we
		// ensure that a collection is created with the owner's identifier,
		// since what we have is an empty collection

		if ( LOG.isDebugEnabled() ) {
			LOG.debugf(
					"Result set contains (possibly empty) collection: %s",
					MessageHelper.collectionInfoString( persister, optionalKey, getFactory() )
			);
		}

		persistenceContext.getLoadContexts()
				.getCollectionLoadContext( rs )
				.getLoadingCollection( persister, optionalKey ); // handle empty collection
	}

	// else no collection element, but also no owner
}
private List getResultFromQueryCache( final SessionImplementor session, final QueryParameters queryParameters, final Set<Serializable> querySpaces, final Type[] resultTypes, final QueryCache queryCache, final QueryKey key) { List result = null; if ( session.getCacheMode().isGetEnabled() ) { boolean isImmutableNaturalKeyLookup = queryParameters.isNaturalKeyLookup() && resultTypes.length == 1 && resultTypes[0].isEntityType() && getEntityPersister( EntityType.class.cast( resultTypes[0] ) ) .getEntityMetamodel() .hasImmutableNaturalId(); final PersistenceContext persistenceContext = session.getPersistenceContext(); boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly(); if ( queryParameters.isReadOnlyInitialized() ) { // The read-only/modifiable mode for the query was explicitly set. // Temporarily set the default read-only/modifiable setting to the query's setting. persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() ); } else { // The read-only/modifiable setting for the query was not initialized. // Use the default read-only/modifiable from the persistence context instead. queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() ); } try { result = queryCache.get( key, key.getResultTransformer().getCachedResultTypes( resultTypes ), isImmutableNaturalKeyLookup, querySpaces, session ); } finally { persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig ); } if ( factory.getStatistics().isStatisticsEnabled() ) { if ( result == null ) { factory.getStatisticsImplementor() .queryCacheMiss( getQueryIdentifier(), queryCache.getRegion().getName() ); } else { factory.getStatisticsImplementor() .queryCacheHit( getQueryIdentifier(), queryCache.getRegion().getName() ); } } } return result; }
public final void doBatchedCollectionLoad( final SessionImplementor session, final Serializable[] ids, final Type type) throws HibernateException { if ( LOG.isDebugEnabled() ) LOG.debugf( "Batch loading collection: %s", MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ) ); final Type[] idTypes = new Type[ids.length]; Arrays.fill( idTypes, type ); final QueryParameters queryParameters = new QueryParameters( idTypes, ids, ids ); final String sql = StringHelper.expandBatchIdPlaceholder( sqlTemplate, ids, alias, collectionPersister().getKeyColumnNames(), getFactory().getDialect() ); try { final PersistenceContext persistenceContext = session.getPersistenceContext(); boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly(); if ( queryParameters.isReadOnlyInitialized() ) { // The read-only/modifiable mode for the query was explicitly set. // Temporarily set the default read-only/modifiable setting to the query's setting. persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() ); } else { // The read-only/modifiable setting for the query was not initialized. // Use the default read-only/modifiable from the persistence context instead. queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() ); } persistenceContext.beforeLoad(); try { try { doTheLoad( sql, queryParameters, session ); } finally { persistenceContext.afterLoad(); } persistenceContext.initializeNonLazyCollections(); } finally { // Restore the original default persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig ); } } catch ( SQLException e ) { throw getFactory().getSQLExceptionHelper().convert( e, "could not initialize a collection batch: " + MessageHelper.collectionInfoString( collectionPersister(), ids, getFactory() ), sql ); } LOG.debug( "Done batch load" ); }
/**
 * Reads this reference's collection element from the current result-set row:
 * when a collection key is present, feeds the row into the corresponding
 * loading collection; when absent but an owner key can be found, registers
 * an (empty) loading collection for that owner.
 */
@Override
public void finishUpRow(ResultSet resultSet, ResultSetProcessingContextImpl context) {
	try {
		// read the collection key for this reference for the current row.
		final PersistenceContext persistenceContext = context.getSession().getPersistenceContext();
		final Serializable collectionRowKey = (Serializable) collectionReference.getCollectionPersister().readKey(
				resultSet,
				aliases.getCollectionColumnAliases().getSuffixedKeyAliases(),
				context.getSession()
		);

		if ( collectionRowKey != null ) {
			// we found a collection element in the result set

			if ( log.isDebugEnabled() ) {
				log.debugf(
						"Found row of collection: %s",
						MessageHelper.collectionInfoString(
								collectionReference.getCollectionPersister(),
								collectionRowKey,
								context.getSession().getFactory()
						)
				);
			}

			Object collectionOwner = findCollectionOwner( collectionRowKey, resultSet, context );

			PersistentCollection rowCollection = persistenceContext.getLoadContexts()
					.getCollectionLoadContext( resultSet )
					.getLoadingCollection( collectionReference.getCollectionPersister(), collectionRowKey );

			// null here means this collection is not currently being loaded in this context
			if ( rowCollection != null ) {
				rowCollection.readFrom(
						resultSet,
						collectionReference.getCollectionPersister(),
						aliases.getCollectionColumnAliases(),
						collectionOwner
				);
			}
		}
		else {
			final Serializable optionalKey = findCollectionOwnerKey( context );
			if ( optionalKey != null ) {
				// we did not find a collection element in the result set, so we
				// ensure that a collection is created with the owner's identifier,
				// since what we have is an empty collection

				if ( log.isDebugEnabled() ) {
					log.debugf(
							"Result set contains (possibly empty) collection: %s",
							MessageHelper.collectionInfoString(
									collectionReference.getCollectionPersister(),
									optionalKey,
									context.getSession().getFactory()
							)
					);
				}

				// handle empty collection
				persistenceContext.getLoadContexts()
						.getCollectionLoadContext( resultSet )
						.getLoadingCollection( collectionReference.getCollectionPersister(), optionalKey );
			}
		}
		// else no collection element, but also no owner
	}
	catch ( SQLException sqle ) {
		// TODO: would be nice to have the SQL string that failed...
		throw context.getSession().getFactory().getSQLExceptionHelper().convert(
				sqle,
				"could not read next row of results"
		);
	}
}
protected List executeLoad( SessionImplementor session, QueryParameters queryParameters, LoadQueryDetails loadQueryDetails, boolean returnProxies, ResultTransformer forcedResultTransformer, List<AfterLoadAction> afterLoadActions) throws SQLException { final PersistenceContext persistenceContext = session.getPersistenceContext(); final boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly(); if ( queryParameters.isReadOnlyInitialized() ) { // The read-only/modifiable mode for the query was explicitly set. // Temporarily set the default read-only/modifiable setting to the query's setting. persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() ); } else { // The read-only/modifiable setting for the query was not initialized. // Use the default read-only/modifiable from the persistence context instead. queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() ); } persistenceContext.beforeLoad(); try { List results = null; final String sql = loadQueryDetails.getSqlStatement(); SqlStatementWrapper wrapper = null; try { wrapper = executeQueryStatement( sql, queryParameters, false, afterLoadActions, session ); results = loadQueryDetails.getResultSetProcessor().extractResults( wrapper.getResultSet(), session, queryParameters, new NamedParameterContext() { @Override public int[] getNamedParameterLocations(String name) { return AbstractLoadPlanBasedLoader.this.getNamedParameterLocs( name ); } }, returnProxies, queryParameters.isReadOnly(), forcedResultTransformer, afterLoadActions ); } finally { if ( wrapper != null ) { session.getTransactionCoordinator().getJdbcCoordinator().release( wrapper.getResultSet(), wrapper.getStatement() ); session.getTransactionCoordinator().getJdbcCoordinator().release( wrapper.getStatement() ); } persistenceContext.afterLoad(); } persistenceContext.initializeNonLazyCollections(); return results; } finally { // Restore the original default persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig ); } }
/**
 * Extract the identifier from the given entity by instantiating the mapped id class
 * and copying the entity's "virtual" identifier property values into it.
 * <p/>
 * For properties implementing the JPA 2 {@code @MapsId} + {@code @IdClass} pattern
 * (an association on the entity backed by a simple value in the id class), the
 * associated entity is resolved to its own identifier -- persisting it first, if
 * necessary, via an implicit derived-identity cascade.
 *
 * @param entity The entity from which to extract the identifier
 * @param entityMode The entity mode in effect
 * @param session The session; required for the derived-identity cascade
 *
 * @return The populated identifier instance
 *
 * @throws HibernateException If any identifier part is null, or the implicit
 * derived-identity cascade cannot produce an identifier
 */
@Override
public Object getIdentifier(Object entity, EntityMode entityMode, SessionImplementor session) {
	final Object id = mappedIdentifierType.instantiate( entityMode );
	final Object[] propertyValues = virtualIdComponent.getPropertyValues( entity, entityMode );
	final Type[] subTypes = virtualIdComponent.getSubtypes();
	final Type[] copierSubTypes = mappedIdentifierType.getSubtypes();
	final Iterable<PersistEventListener> persistEventListeners = persistEventListeners( session );
	final PersistenceContext persistenceContext = session.getPersistenceContext();
	final int length = subTypes.length;
	for ( int i = 0 ; i < length; i++ ) {
		if ( propertyValues[i] == null ) {
			throw new HibernateException( "No part of a composite identifier may be null" );
		}
		//JPA 2 @MapsId + @IdClass points to the pk of the entity
		if ( subTypes[i].isAssociationType() && ! copierSubTypes[i].isAssociationType() ) {
			// we need a session to handle this use case
			if ( session == null ) {
				throw new AssertionError(
						"Deprecated version of getIdentifier (no session) was used but session was required"
				);
			}
			final Object subId;
			if ( HibernateProxy.class.isInstance( propertyValues[i] ) ) {
				// A proxy already knows its identifier; no persistence-context lookup needed.
				subId = ( (HibernateProxy) propertyValues[i] ).getHibernateLazyInitializer().getIdentifier();
			}
			else {
				EntityEntry pcEntry = session.getPersistenceContext().getEntry( propertyValues[i] );
				if ( pcEntry != null ) {
					// The associated entity is managed; use its known id.
					subId = pcEntry.getId();
				}
				else {
					LOG.debug( "Performing implicit derived identity cascade" );
					// Persist the associated entity so that an identifier becomes available.
					final PersistEvent event = new PersistEvent( null, propertyValues[i], (EventSource) session );
					for ( PersistEventListener listener : persistEventListeners ) {
						listener.onPersist( event );
					}
					pcEntry = persistenceContext.getEntry( propertyValues[i] );
					if ( pcEntry == null || pcEntry.getId() == null ) {
						throw new HibernateException( "Unable to process implicit derived identity cascade" );
					}
					else {
						subId = pcEntry.getId();
					}
				}
			}
			// Replace the association value with its identifier for the id-class copy.
			propertyValues[i] = subId;
		}
	}
	mappedIdentifierType.setPropertyValues( id, propertyValues, entityMode );
	return id;
}
/**
 * Process a collection that is no longer referenced by its owner: enforce the
 * orphan-delete rule, then null out the entry's "current" persister/key so the
 * upcoming flush treats the collection as dereferenced.
 *
 * @param coll The dereferenced collection
 * @param session The session
 */
private static void processDereferencedCollection(PersistentCollection coll, SessionImplementor session) {
	final PersistenceContext persistenceContext = session.getPersistenceContext();
	final CollectionEntry entry = persistenceContext.getCollectionEntry( coll );
	final CollectionPersister loadedPersister = entry.getLoadedPersister();

	if ( loadedPersister != null && LOG.isDebugEnabled() ) {
		LOG.debugf(
				"Collection dereferenced: %s",
				MessageHelper.collectionInfoString( loadedPersister, coll, entry.getLoadedKey(), session )
		);
	}

	// do a check
	final boolean hasOrphanDelete = loadedPersister != null && loadedPersister.hasOrphanDelete();
	if ( hasOrphanDelete ) {
		Serializable ownerId = loadedPersister.getOwnerEntityPersister().getIdentifier( coll.getOwner(), session );
		if ( ownerId == null ) {
			// the owning entity may have been deleted and its identifier unset due to
			// identifier-rollback; in which case, try to look up its identifier from
			// the persistence context
			if ( session.getFactory().getSettings().isIdentifierRollbackEnabled() ) {
				final EntityEntry ownerEntry = persistenceContext.getEntry( coll.getOwner() );
				if ( ownerEntry != null ) {
					ownerId = ownerEntry.getId();
				}
			}
			if ( ownerId == null ) {
				throw new AssertionFailure( "Unable to determine collection owner identifier for orphan-delete processing" );
			}
		}
		final EntityKey key = session.generateEntityKey( ownerId, loadedPersister.getOwnerEntityPersister() );
		final Object owner = persistenceContext.getEntity( key );
		if ( owner == null ) {
			throw new AssertionFailure(
					"collection owner not associated with session: " + loadedPersister.getRole()
			);
		}
		final EntityEntry e = persistenceContext.getEntry( owner );
		//only collections belonging to deleted entities are allowed to be dereferenced in the case of orphan delete
		if ( e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE ) {
			throw new HibernateException(
					"A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: "
							+ loadedPersister.getRole()
			);
		}
	}

	// do the work
	entry.setCurrentPersister( null );
	entry.setCurrentKey( null );
	prepareCollectionForUpdate( coll, entry, session.getFactory() );
}
@Override
public PersistenceContext getPersistenceContext() {
	// Fail fast if the session has been closed, and check the transaction
	// synchronization status, before exposing the persistence context.
	errorIfClosed();
	checkTransactionSynchStatus();
	return persistenceContext;
}
@Override
public PersistenceContext getPersistenceContext() {
	// This implementation always exposes its temporary persistence context,
	// with no open/closed guard.
	return temporaryPersistenceContext;
}
final Object processArrayOrNewCollection(Object collection, CollectionType collectionType) throws HibernateException { final SessionImplementor session = getSession(); if ( collection == null ) { //do nothing return null; } else { CollectionPersister persister = session.getFactory().getCollectionPersister( collectionType.getRole() ); final PersistenceContext persistenceContext = session.getPersistenceContext(); //TODO: move into collection type, so we can use polymorphism! if ( collectionType.hasHolder() ) { if ( collection == CollectionType.UNFETCHED_COLLECTION ) { return null; } PersistentCollection ah = persistenceContext.getCollectionHolder( collection ); if ( ah == null ) { ah = collectionType.wrap( session, collection ); persistenceContext.addNewCollection( persister, ah ); persistenceContext.addCollectionHolder( ah ); } return null; } else { PersistentCollection persistentCollection = collectionType.wrap( session, collection ); persistenceContext.addNewCollection( persister, persistentCollection ); if ( LOG.isTraceEnabled() ) { LOG.tracev( "Wrapped collection in role: {0}", collectionType.getRole() ); } return persistentCollection; //Force a substitution! } } }
/** * Try to initialize a collection from the cache * * @param id The id of the collection of initialize * @param persister The collection persister * @param collection The collection to initialize * @param source The originating session * * @return true if we were able to initialize the collection from the cache; * false otherwise. */ private boolean initializeCollectionFromCache( Serializable id, CollectionPersister persister, PersistentCollection collection, SessionImplementor source) { if ( !source.getLoadQueryInfluencers().getEnabledFilters().isEmpty() && persister.isAffectedByEnabledFilters( source ) ) { LOG.trace( "Disregarding cached version (if any) of collection due to enabled filters" ); return false; } final boolean useCache = persister.hasCache() && source.getCacheMode().isGetEnabled(); if ( !useCache ) { return false; } final SessionFactoryImplementor factory = source.getFactory(); final CacheKey ck = source.generateCacheKey( id, persister.getKeyType(), persister.getRole() ); final Object ce = CacheHelper.fromSharedCache( source, ck, persister.getCacheAccessStrategy() ); if ( factory.getStatistics().isStatisticsEnabled() ) { if ( ce == null ) { factory.getStatisticsImplementor() .secondLevelCacheMiss( persister.getCacheAccessStrategy().getRegion().getName() ); } else { factory.getStatisticsImplementor() .secondLevelCacheHit( persister.getCacheAccessStrategy().getRegion().getName() ); } } if ( ce == null ) { return false; } CollectionCacheEntry cacheEntry = (CollectionCacheEntry) persister.getCacheEntryStructure().destructure( ce, factory ); final PersistenceContext persistenceContext = source.getPersistenceContext(); cacheEntry.assemble( collection, persister, persistenceContext.getCollectionOwner( id, persister ) ); persistenceContext.getCollectionEntry( collection ).postInitialize( collection ); // addInitializedCollection(collection, persister, id); return true; }
/**
 * Perform the entity deletion: lock the second-level cache entry (if cached),
 * issue the SQL DELETE (unless vetoed by a pre-delete listener or handled by a
 * database-level cascade), then evict the entity from the persistence context
 * and caches and fire the post-delete callbacks.
 *
 * @throws HibernateException Indicates a problem performing the deletion
 */
@Override
public void execute() throws HibernateException {
	final Serializable id = getId();
	final EntityPersister persister = getPersister();
	final SessionImplementor session = getSession();
	final Object instance = getInstance();

	// Pre-delete listeners can veto the actual SQL delete (see use of `veto` below).
	final boolean veto = preDelete();

	Object version = this.version;
	if ( persister.isVersionPropertyGenerated() ) {
		// we need to grab the version value from the entity, otherwise
		// we have issues with generated-version entities that may have
		// multiple actions queued during the same flush
		version = persister.getVersion( instance );
	}

	final CacheKey ck;
	if ( persister.hasCache() ) {
		// Lock the second-level cache entry before touching the database row.
		ck = session.generateCacheKey( id, persister.getIdentifierType(), persister.getRootEntityName() );
		lock = persister.getCacheAccessStrategy().lockItem( ck, version );
	}
	else {
		ck = null;
	}

	// Skip the SQL delete when the database cascades the delete or a listener vetoed it.
	if ( !isCascadeDeleteEnabled && !veto ) {
		persister.delete( id, version, instance, session );
	}

	//postDelete:
	// After actually deleting a row, record the fact that the instance no longer
	// exists on the database (needed for identity-column key generation), and
	// remove it from the session cache
	final PersistenceContext persistenceContext = session.getPersistenceContext();
	final EntityEntry entry = persistenceContext.removeEntry( instance );
	if ( entry == null ) {
		throw new AssertionFailure( "possible nonthreadsafe access to session" );
	}
	entry.postDelete();

	persistenceContext.removeEntity( entry.getEntityKey() );
	persistenceContext.removeProxy( entry.getEntityKey() );

	if ( persister.hasCache() ) {
		persister.getCacheAccessStrategy().remove( ck );
	}

	// Drop any shared natural-id cross reference held for this entity.
	persistenceContext.getNaturalIdHelper().removeSharedNaturalIdCrossReference( persister, id, naturalIdValues );

	postDelete();

	if ( getSession().getFactory().getStatistics().isStatisticsEnabled() && !veto ) {
		getSession().getFactory().getStatisticsImplementor().deleteEntity( getPersister().getEntityName() );
	}
}
/**
 * instantiate a collection wrapper (called when loading an object)
 *
 * @param key The collection owner key
 * @param session The session from which the request is originating.
 * @param owner The collection owner
 * @return The wrapper's underlying value (via {@code getValue()}), suitable for
 * injection into the owning entity
 */
public Object getCollection(Serializable key, SessionImplementor session, Object owner) {
	CollectionPersister persister = getPersister( session );
	final PersistenceContext persistenceContext = session.getPersistenceContext();
	final EntityMode entityMode = persister.getOwnerEntityPersister().getEntityMode();

	// check if collection is currently being loaded
	PersistentCollection collection =
			persistenceContext.getLoadContexts().locateLoadingCollection( persister, key );

	if ( collection == null ) {

		// check if it is already completely loaded, but unowned
		collection = persistenceContext.useUnownedCollection( new CollectionKey(persister, key, entityMode) );

		if ( collection == null ) {
			// create a new collection wrapper, to be initialized later
			collection = instantiate( session, persister, key );
			collection.setOwner(owner);

			persistenceContext.addUninitializedCollection( persister, collection, key );

			// some collections are not lazy:
			if ( initializeImmediately() ) {
				session.initializeCollection( collection, false );
			}
			else if ( !persister.isLazy() ) {
				// non-lazy but not immediate: queue it for later initialization
				persistenceContext.addNonLazyCollection( collection );
			}

			if ( hasHolder() ) {
				// holder-based types also register a collection holder
				session.getPersistenceContext().addCollectionHolder( collection );
			}
		}

		if ( LOG.isTraceEnabled() ) {
			LOG.tracef(
					"Created collection wrapper: %s",
					MessageHelper.collectionInfoString( persister, collection, key, session )
			);
		}

	}

	// (re)assign the owner in every path, including wrappers found via the load
	// contexts or the unowned-collection lookup above
	collection.setOwner(owner);

	return collection.getValue();
}
/**
 * Another problem with
 * http://build.e-monocot.org/bugzilla/show_bug.cgi?id=262 Unexpected Taxon
 * Exception in DwC Harvesting even though the taxon is expected. Comparing
 * HibernateProxies with non-proxies means you can't use o1.getClass() ==
 * o2.getClass().
 * @throws Exception if there is a problem
 */
@Test
public final void testEqualsWithHibernateProxies() throws Exception {
	// Two logically equal instances: same identifier string and same id.
	b2.setIdentifier("test");
	b2.setId(1L);
	b1.setIdentifier("test");
	b1.setId(1L);

	// Mock just enough of the Hibernate session machinery for a proxy to
	// resolve its target (via immediateLoad) when equals() touches its state.
	SessionImplementor sessionImplementor = EasyMock
			.createMock(SessionImplementor.class);
	SessionFactoryImplementor sessionFactoryImplementor = EasyMock
			.createMock(SessionFactoryImplementor.class);
	EntityPersister entityPersister = EasyMock
			.createMock(EntityPersister.class);
	PersistenceContext persistenceContext = EasyMock
			.createMock(PersistenceContext.class);

	EasyMock.expect(sessionImplementor.getFactory())
			.andReturn(sessionFactoryImplementor).anyTimes();
	EasyMock.expect(
			sessionFactoryImplementor.getEntityPersister((String) EasyMock
					.eq("Annotation"))).andReturn(entityPersister)
			.anyTimes();
	EasyMock.expect(sessionImplementor.getPersistenceContext()).andReturn(
			persistenceContext);
	EasyMock.expect(persistenceContext.isDefaultReadOnly())
			.andReturn(Boolean.TRUE).anyTimes();
	EasyMock.expect(entityPersister.isMutable()).andReturn(Boolean.TRUE)
			.anyTimes();
	EasyMock.expect(sessionImplementor.isClosed()).andReturn(Boolean.FALSE)
			.anyTimes();
	EasyMock.expect(sessionImplementor.isOpen()).andReturn(Boolean.TRUE)
			.anyTimes();
	EasyMock.expect(sessionImplementor.isConnected())
			.andReturn(Boolean.TRUE).anyTimes();
	// Loading the proxied entity yields the fully-populated b2.
	EasyMock.expect(
			sessionImplementor.immediateLoad(EasyMock.eq("Annotation"),
					EasyMock.eq(1L))).andReturn(b2).anyTimes();
	EasyMock.replay(sessionImplementor, sessionFactoryImplementor,
			entityPersister, persistenceContext);

	JavassistProxyFactory javassistProxyFactory = new JavassistProxyFactory();
	// FIX: was a raw Set/HashSet; parameterize to avoid unchecked warnings.
	// (postInstantiate declares a raw Set parameter, so this remains compatible.)
	Set<Class<?>> interfaces = new HashSet<Class<?>>();
	interfaces.add(HibernateProxy.class);
	interfaces.add(Serializable.class);
	interfaces.add(Identifiable.class);
	interfaces.add(SecuredObject.class);
	javassistProxyFactory.postInstantiate("Annotation", Annotation.class,
			interfaces, Annotation.class.getDeclaredMethod("getId"),
			Annotation.class.getDeclaredMethod("setId", Long.class), null);
	b3 = javassistProxyFactory.getProxy(1L, sessionImplementor);
	EasyMock.verify(sessionImplementor, sessionFactoryImplementor,
			entityPersister, persistenceContext);

	assertTrue("Equals should return true", b1.equals(b3));
}
@Override
public PersistenceContext getPersistenceContext() {
	// Pure delegation to the wrapped session.
	return target.getPersistenceContext();
}
/**
 * Perform the second step of 2-phase load: fully initialize the entity instance.
 * <p/>
 * After processing a JDBC result set, we "resolve" all the associations between
 * the entities which were instantiated and had their state "hydrated" into an array.
 *
 * @param entity The entity being loaded
 * @param readOnly Is the entity being loaded as read-only
 * @param session The Session
 * @param preLoadEvent The (re-used) pre-load event
 */
public static void initializeEntity(
		final Object entity,
		final boolean readOnly,
		final SessionImplementor session,
		final PreLoadEvent preLoadEvent) {
	// An entity reaching this point must already have an entry in the context;
	// a missing entry indicates the session was used from multiple threads.
	final EntityEntry entry = session.getPersistenceContext().getEntry( entity );
	if ( entry == null ) {
		throw new AssertionFailure( "possible non-threadsafe access to the session" );
	}
	doInitializeEntity( entity, entry, readOnly, session, preLoadEvent );
}
/**
 * Creates and binds this to the given persistence context.
 *
 * @param persistenceContext The persistence context to which this
 * will be bound.
 */
public LoadContexts(PersistenceContext persistenceContext) {
	// NOTE(review): no null check here -- callers are assumed to pass a live context.
	this.persistenceContext = persistenceContext;
}
/**
 * Retrieves the persistence context to which this is bound.
 *
 * @return The persistence context to which this is bound; never changes after
 * construction.
 */
public PersistenceContext getPersistenceContext() {
	return persistenceContext;
}