/**
 * Add an uninitialized instance of an entity class, as a placeholder to ensure object
 * identity. Must be called before <tt>postHydrate()</tt>.
 *
 * Create a "temporary" entry for a newly instantiated entity. The entity is uninitialized,
 * but we need the mapping from id to instance in order to guarantee uniqueness.
 *
 * @param key The entity key
 * @param object The entity instance
 * @param persister The entity persister
 * @param lockMode The lock mode
 * @param lazyPropertiesAreUnFetched Are lazy properties still un-fetched?
 * @param session The Session
 */
public static void addUninitializedEntity(
        final EntityKey key,
        final Object object,
        final EntityPersister persister,
        final LockMode lockMode,
        final boolean lazyPropertiesAreUnFetched,
        final SessionImplementor session) {
    session.getPersistenceContext().addEntity(
            object,
            Status.LOADING,
            null,
            key,
            null,
            lockMode,
            true,
            persister,
            false,
            lazyPropertiesAreUnFetched
    );
}
/**
 * Same as {@link #addUninitializedEntity}, but here for an entity from the second level cache
 *
 * @param key The entity key
 * @param object The entity instance
 * @param persister The entity persister
 * @param lockMode The lock mode
 * @param lazyPropertiesAreUnFetched Are lazy properties still un-fetched?
 * @param version The version
 * @param session The Session
 */
public static void addUninitializedCachedEntity(
        final EntityKey key,
        final Object object,
        final EntityPersister persister,
        final LockMode lockMode,
        final boolean lazyPropertiesAreUnFetched,
        final Object version,
        final SessionImplementor session) {
    session.getPersistenceContext().addEntity(
            object,
            Status.LOADING,
            null,
            key,
            version,
            lockMode,
            true,
            persister,
            false,
            lazyPropertiesAreUnFetched
    );
}
@Override
public LockMode getCurrentLockMode(Object object) throws HibernateException {
    errorIfClosed();
    checkTransactionSynchStatus();
    if ( object == null ) {
        throw new NullPointerException( "null object passed to getCurrentLockMode()" );
    }
    if ( object instanceof HibernateProxy ) {
        object = ( (HibernateProxy) object ).getHibernateLazyInitializer().getImplementation( this );
        if ( object == null ) {
            return LockMode.NONE;
        }
    }
    EntityEntry e = persistenceContext.getEntry( object );
    if ( e == null ) {
        throw new TransientObjectException( "Given object not associated with the session" );
    }
    if ( e.getStatus() != Status.MANAGED ) {
        throw new ObjectDeletedException(
                "The given object was deleted",
                e.getId(),
                e.getPersister().getEntityName()
        );
    }
    return e.getLockMode();
}
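/*
 * A hedged usage sketch of the public Session.getCurrentLockMode(Object) API that the
 * method above implements. The SessionFactory, entity class and identifier are supplied
 * by the caller and are assumptions for illustration only.
 */
import java.io.Serializable;

import org.hibernate.LockMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class CurrentLockModeExample {

    public static LockMode show(SessionFactory sessionFactory, Class<?> entityClass, Serializable id) {
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            Object entity = session.get( entityClass, id );
            // Only a managed (non-deleted) instance reports a lock mode; a freshly
            // loaded instance typically reports LockMode.READ.
            LockMode mode = session.getCurrentLockMode( entity );
            tx.commit();
            return mode;
        }
        finally {
            session.close();
        }
    }
}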
protected Serializable performSaveOrUpdate(SaveOrUpdateEvent event) {
    // this implementation is supposed to tolerate incorrect unsaved-value
    // mappings, for the purpose of backward-compatibility
    EntityEntry entry = event.getSession().getPersistenceContext().getEntry( event.getEntity() );
    if ( entry != null ) {
        if ( entry.getStatus() == Status.DELETED ) {
            throw new ObjectDeletedException( "deleted instance passed to update()", null, event.getEntityName() );
        }
        else {
            return entityIsPersistent( event );
        }
    }
    else {
        entityIsDetached( event );
        return null;
    }
}
private Object[] getValues(Object entity, EntityEntry entry, boolean mightBeDirty, SessionImplementor session) {
    final Object[] loadedState = entry.getLoadedState();
    final Status status = entry.getStatus();
    final EntityPersister persister = entry.getPersister();

    final Object[] values;
    if ( status == Status.DELETED ) {
        //grab its state saved at deletion
        values = entry.getDeletedState();
    }
    else if ( !mightBeDirty && loadedState != null ) {
        values = loadedState;
    }
    else {
        checkId( entity, persister, entry.getId(), session );
        // grab its current state
        values = persister.getPropertyValues( entity );
        checkNaturalId( persister, entry, values, loadedState, session );
    }
    return values;
}
/**
 * Performs all necessary checking to determine if an entity needs an SQL update
 * to synchronize its state to the database. Modifies the event by side-effect!
 * Note: this method is quite slow, avoid calling if possible!
 */
protected final boolean isUpdateNecessary(FlushEntityEvent event) throws HibernateException {
    EntityPersister persister = event.getEntityEntry().getPersister();
    Status status = event.getEntityEntry().getStatus();

    if ( !event.isDirtyCheckPossible() ) {
        return true;
    }
    else {
        int[] dirtyProperties = event.getDirtyProperties();
        if ( dirtyProperties != null && dirtyProperties.length != 0 ) {
            return true; //TODO: suck into event class
        }
        else {
            return hasDirtyCollections( event, persister, status );
        }
    }
}
/**
 * The given save-update event named a transient entity.
 * <p/>
 * Here, we will perform the save processing.
 *
 * @param event The save event to be handled.
 *
 * @return The entity's identifier after saving.
 */
protected Serializable entityIsTransient(SaveOrUpdateEvent event) {
    LOG.trace( "Saving transient instance" );

    final EventSource source = event.getSession();
    EntityEntry entityEntry = event.getEntry();
    if ( entityEntry != null ) {
        if ( entityEntry.getStatus() == Status.DELETED ) {
            source.forceFlush( entityEntry );
        }
        else {
            throw new AssertionFailure( "entity was persistent" );
        }
    }

    Serializable id = saveWithGeneratedOrRequestedId( event );
    source.getPersistenceContext().reassociateProxy( event.getObject(), id );
    return id;
}
/**
 * process cascade save/update at the start of a flush to discover
 * any newly referenced entity that must be passed to saveOrUpdate(),
 * and also apply orphan delete
 */
private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext) throws HibernateException {
    LOG.debug( "Processing flush-time cascades" );

    final Object anything = getAnything();
    //safe from concurrent modification because of how concurrentEntries() is implemented on IdentityMap
    for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
//  for ( Map.Entry me : IdentityMap.concurrentEntries( persistenceContext.getEntityEntries() ) ) {
        EntityEntry entry = (EntityEntry) me.getValue();
        Status status = entry.getStatus();
        if ( status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY ) {
            cascadeOnFlush( session, entry.getPersister(), me.getKey(), anything );
        }
    }
}
/**
 * Make the entity "managed" by the persistence context.
 */
public final void makeEntityManaged() {
    nullifyTransientReferencesIfNotAlready();
    final Object version = Versioning.getVersion( getState(), getPersister() );
    getSession().getPersistenceContext().addEntity(
            getInstance(),
            ( getPersister().isMutable() ? Status.MANAGED : Status.READ_ONLY ),
            getState(),
            getEntityKey(),
            version,
            LockMode.WRITE,
            isExecuted,
            getPersister(),
            isVersionIncrementDisabled,
            false
    );
}
/**
 * Sets the target object to read-write, allowing Hibernate to dirty check it and auto-flush changes.
 *
 * @see #setObjectToReadOnly(Object, org.hibernate.SessionFactory)
 *
 * @param target The target object
 * @param sessionFactory The SessionFactory instance
 */
public static void setObjectToReadWrite(final Object target, SessionFactory sessionFactory) {
    Session session = sessionFactory.getCurrentSession();
    if ( !canModifyReadWriteState( session, target ) ) {
        return;
    }

    SessionImplementor sessionImpl = (SessionImplementor) session;
    EntityEntry ee = sessionImpl.getPersistenceContext().getEntry( target );
    if ( ee == null || ee.getStatus() != Status.READ_ONLY ) {
        return;
    }

    Object actualTarget = target;
    if ( target instanceof HibernateProxy ) {
        actualTarget = ( (HibernateProxy) target ).getHibernateLazyInitializer().getImplementation();
    }

    session.setReadOnly( actualTarget, false );
    session.setFlushMode( FlushMode.AUTO );
    incrementVersion( target );
}
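/*
 * A minimal sketch of the Session-level calls the helper above builds on: flipping a
 * READ_ONLY instance back to read-write and restoring automatic flushing. The entity
 * passed in is assumed to already be associated with the current session.
 */
import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class ReadWriteToggleExample {

    public static void makeWritable(SessionFactory sessionFactory, Object entity) {
        Session session = sessionFactory.getCurrentSession();
        if ( session.isReadOnly( entity ) ) {
            // allow dirty checking again and let changes flush automatically
            session.setReadOnly( entity, false );
            session.setFlushMode( FlushMode.AUTO );
        }
    }
}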
@Override
public EntityEntry addEntity(
        final Object entity,
        final Status status,
        final Object[] loadedState,
        final EntityKey entityKey,
        final Object version,
        final LockMode lockMode,
        final boolean existsInDatabase,
        final EntityPersister persister,
        final boolean disableVersionIncrement,
        boolean lazyPropertiesAreUnfetched) {
    addEntity( entityKey, entity );
    return addEntry(
            entity,
            status,
            loadedState,
            null,
            entityKey.getIdentifier(),
            version,
            lockMode,
            existsInDatabase,
            persister,
            disableVersionIncrement,
            lazyPropertiesAreUnfetched
    );
}
@Override
public EntityEntry addEntry(
        final Object entity,
        final Status status,
        final Object[] loadedState,
        final Object rowId,
        final Serializable id,
        final Object version,
        final LockMode lockMode,
        final boolean existsInDatabase,
        final EntityPersister persister,
        final boolean disableVersionIncrement,
        boolean lazyPropertiesAreUnfetched) {
    final EntityEntry e = new EntityEntry(
            status,
            loadedState,
            rowId,
            id,
            version,
            lockMode,
            existsInDatabase,
            persister,
            disableVersionIncrement,
            lazyPropertiesAreUnfetched,
            this
    );
    entityEntryContext.addEntityEntry( entity, e );
//  entityEntries.put( entity, e );
    setHasNonReadOnlyEnties( status );
    return e;
}
/**
 * Register the "hydrated" state of an entity instance, after the first step of 2-phase loading.
 *
 * Add the "hydrated state" (an array) of an uninitialized entity to the session. We don't try
 * to resolve any associations yet, because there might be other entities waiting to be
 * read from the JDBC result set we are currently processing
 *
 * @param persister The persister for the hydrated entity
 * @param id The entity identifier
 * @param values The entity values
 * @param rowId The rowId for the entity
 * @param object An optional instance for the entity being loaded
 * @param lockMode The lock mode
 * @param lazyPropertiesAreUnFetched Whether properties defined as lazy are yet un-fetched
 * @param session The Session
 */
public static void postHydrate(
        final EntityPersister persister,
        final Serializable id,
        final Object[] values,
        final Object rowId,
        final Object object,
        final LockMode lockMode,
        final boolean lazyPropertiesAreUnFetched,
        final SessionImplementor session) {
    final Object version = Versioning.getVersion( values, persister );
    session.getPersistenceContext().addEntry(
            object,
            Status.LOADING,
            values,
            rowId,
            id,
            version,
            lockMode,
            true,
            persister,
            false,
            lazyPropertiesAreUnFetched
    );

    if ( version != null && LOG.isTraceEnabled() ) {
        final String versionStr = persister.isVersioned()
                ? persister.getVersionType().toLoggableString( version, session.getFactory() )
                : "null";
        LOG.tracef( "Version: %s", versionStr );
    }
}
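/*
 * A sketch of the ordering contract between addUninitializedEntity() and postHydrate(),
 * using the signatures shown above. The TwoPhaseLoad import assumes the Hibernate 4.x
 * package layout; all arguments are placeholders supplied by a hypothetical custom loader.
 */
import java.io.Serializable;

import org.hibernate.LockMode;
import org.hibernate.engine.internal.TwoPhaseLoad;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.persister.entity.EntityPersister;

public class TwoPhaseLoadSketch {

    public static void hydrateRow(
            SessionImplementor session,
            EntityPersister persister,
            EntityKey key,
            Serializable id,
            Object instance,
            Object[] hydratedValues,
            Object rowId) {
        // 1. placeholder entry: guarantees identity-map uniqueness while the row is read
        TwoPhaseLoad.addUninitializedEntity( key, instance, persister, LockMode.NONE, false, session );

        // ... read column values from the current ResultSet row into hydratedValues ...

        // 2. register the hydrated state; associations are resolved later, in the second
        //    phase, once the whole result set has been processed
        TwoPhaseLoad.postHydrate( persister, id, hydratedValues, rowId, instance, LockMode.NONE, false, session );
    }
}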
@Override
public boolean contains(Object object) {
    errorIfClosed();
    checkTransactionSynchStatus();

    if ( object instanceof HibernateProxy ) {
        //do not use proxiesByKey, since not all
        //proxies that point to this session's
        //instances are in that collection!
        LazyInitializer li = ( (HibernateProxy) object ).getHibernateLazyInitializer();
        if ( li.isUninitialized() ) {
            //if it is an uninitialized proxy associated
            //with this session, then when it is accessed,
            //the underlying instance will be "contained"
            return li.getSession() == this;
        }
        else {
            //if it is initialized, see if the underlying
            //instance is contained, since we need to
            //account for the fact that it might have been
            //evicted
            object = li.getImplementation();
        }
    }

    // A session is considered to contain an entity only if the entity has
    // an entry in the session's persistence context and the entry reports
    // that the entity has not been removed
    EntityEntry entry = persistenceContext.getEntry( object );
    delayedAfterCompletion();
    return entry != null && entry.getStatus() != Status.DELETED && entry.getStatus() != Status.GONE;
}
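/*
 * A hedged sketch of the contains() semantics above: a loaded instance has a live
 * persistence-context entry, and evicting it removes that entry. Entity class and id
 * are placeholders for anything mapped in the given SessionFactory.
 */
import java.io.Serializable;

import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class ContainsExample {

    public static void show(SessionFactory sessionFactory, Class<?> entityClass, Serializable id) {
        Session session = sessionFactory.openSession();
        try {
            Object entity = session.get( entityClass, id );
            assert session.contains( entity );   // managed: non-deleted entry present

            session.evict( entity );             // detach from the persistence context
            assert !session.contains( entity );  // entry removed
        }
        finally {
            session.close();
        }
    }
}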
protected Serializable performSaveOrUpdate(SaveOrUpdateEvent event) {
    // this implementation is supposed to tolerate incorrect unsaved-value
    // mappings, for the purpose of backward-compatibility
    EntityEntry entry = event.getSession().getPersistenceContext().getEntry( event.getEntity() );
    if ( entry != null && entry.getStatus() != Status.DELETED ) {
        return entityIsPersistent( event );
    }
    else {
        return entityIsTransient( event );
    }
}
/**
 * Flushes a single entity's state to the database, by scheduling
 * an update action, if necessary
 */
public void onFlushEntity(FlushEntityEvent event) throws HibernateException {
    final Object entity = event.getEntity();
    final EntityEntry entry = event.getEntityEntry();
    final EventSource session = event.getSession();
    final EntityPersister persister = entry.getPersister();
    final Status status = entry.getStatus();
    final Type[] types = persister.getPropertyTypes();

    final boolean mightBeDirty = entry.requiresDirtyCheck( entity );

    final Object[] values = getValues( entity, entry, mightBeDirty, session );

    event.setPropertyValues( values );

    //TODO: avoid this for non-new instances where mightBeDirty==false
    boolean substitute = wrapCollections( session, persister, types, values );

    if ( isUpdateNecessary( event, mightBeDirty ) ) {
        substitute = scheduleUpdate( event ) || substitute;
    }

    if ( status != Status.DELETED ) {
        // now update the object .. has to be outside the main if block above (because of collections)
        if ( substitute ) {
            persister.setPropertyValues( entity, values );
        }

        // Search for collections by reachability, updating their role.
        // We don't want to touch collections reachable from a deleted object
        if ( persister.hasCollections() ) {
            new FlushVisitor( session, entity ).processEntityPropertyValues( values, types );
        }
    }
}
private boolean isUpdateNecessary(final FlushEntityEvent event, final boolean mightBeDirty) {
    final Status status = event.getEntityEntry().getStatus();
    if ( mightBeDirty || status == Status.DELETED ) {
        // compare to cached state (ignoring collections unless versioned)
        dirtyCheck( event );
        if ( isUpdateNecessary( event ) ) {
            return true;
        }
        else {
            if ( event.getEntityEntry().getPersister().getInstrumentationMetadata().isInstrumented() ) {
                event.getEntityEntry()
                        .getPersister()
                        .getInstrumentationMetadata()
                        .extractInterceptor( event.getEntity() )
                        .clearDirty();
            }
            event.getSession()
                    .getFactory()
                    .getCustomEntityDirtinessStrategy()
                    .resetDirty( event.getEntity(), event.getEntityEntry().getPersister(), event.getSession() );
            return false;
        }
    }
    else {
        return hasDirtyCollections( event, event.getEntityEntry().getPersister(), status );
    }
}
private void performReplication(
        Object entity,
        Serializable id,
        Object version,
        EntityPersister persister,
        ReplicationMode replicationMode,
        EventSource source) throws HibernateException {
    if ( LOG.isTraceEnabled() ) {
        LOG.tracev( "Replicating changes to {0}", MessageHelper.infoString( persister, id, source.getFactory() ) );
    }

    new OnReplicateVisitor( source, id, entity, true ).process( entity, persister );

    source.getPersistenceContext().addEntity(
            entity,
            ( persister.isMutable() ? Status.MANAGED : Status.READ_ONLY ),
            null,
            source.generateEntityKey( id, persister ),
            version,
            LockMode.NONE,
            true,
            persister,
            true,
            false
    );

    cascadeAfterReplicate( entity, persister, replicationMode, source );
}
/**
 * If there is already a corresponding proxy associated with the
 * persistence context, return it; otherwise create a proxy, associate it
 * with the persistence context, and return the just-created proxy.
 *
 * @param event The initiating load request event
 * @param persister The persister corresponding to the entity to be loaded
 * @param keyToLoad The key of the entity to be loaded
 * @param options The defined load options
 * @param persistenceContext The persistence context of the originating session
 *
 * @return The created/existing proxy
 */
private Object createProxyIfNecessary(
        final LoadEvent event,
        final EntityPersister persister,
        final EntityKey keyToLoad,
        final LoadEventListener.LoadType options,
        final PersistenceContext persistenceContext) {
    Object existing = persistenceContext.getEntity( keyToLoad );
    if ( existing != null ) {
        // return existing object or initialized proxy (unless deleted)
        LOG.trace( "Entity found in session cache" );
        if ( options.isCheckDeleted() ) {
            EntityEntry entry = persistenceContext.getEntry( existing );
            Status status = entry.getStatus();
            if ( status == Status.DELETED || status == Status.GONE ) {
                return null;
            }
        }
        return existing;
    }

    LOG.trace( "Creating new proxy for entity" );
    // return new uninitialized proxy
    Object proxy = persister.createProxy( event.getEntityId(), event.getSession() );
    persistenceContext.getBatchFetchQueue().addBatchLoadableEntityKey( keyToLoad );
    persistenceContext.addProxy( keyToLoad, proxy );
    return proxy;
}
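/*
 * A hedged sketch of the proxy path above from the public API: load() hands back an
 * uninitialized proxy when the entity is not yet associated, while a later get() on the
 * same key reuses and initializes it. Assumes the entity class supports lazy proxies.
 */
import java.io.Serializable;

import org.hibernate.Hibernate;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class ProxyExample {

    public static void show(SessionFactory sessionFactory, Class<?> entityClass, Serializable id) {
        Session session = sessionFactory.openSession();
        try {
            Object proxy = session.load( entityClass, id );
            assert !Hibernate.isInitialized( proxy );   // uninitialized proxy, no SQL issued yet

            Object loaded = session.get( entityClass, id );
            // the existing proxy is reused and initialized rather than a second instance created
            assert loaded != null && Hibernate.isInitialized( proxy );
        }
        finally {
            session.close();
        }
    }
}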
/**
 * Attempts to locate the entity in the session-level cache.
 * <p/>
 * If allowed to return nulls, then if the entity happens to be found in
 * the session cache, we check the entity type for proper handling
 * of entity hierarchies.
 * <p/>
 * If checkDeleted was set to true, then if the entity is found in the
 * session-level cache, its current status within the session cache
 * is checked to see if it has previously been scheduled for deletion.
 *
 * @param event The load event
 * @param keyToLoad The EntityKey representing the entity to be loaded.
 * @param options The load options.
 *
 * @return The entity from the session-level cache, or null.
 *
 * @throws HibernateException Generally indicates problems applying a lock-mode.
 */
protected Object loadFromSessionCache(
        final LoadEvent event,
        final EntityKey keyToLoad,
        final LoadEventListener.LoadType options) throws HibernateException {

    SessionImplementor session = event.getSession();
    Object old = session.getEntityUsingInterceptor( keyToLoad );

    if ( old != null ) {
        // this object was already loaded
        EntityEntry oldEntry = session.getPersistenceContext().getEntry( old );
        if ( options.isCheckDeleted() ) {
            Status status = oldEntry.getStatus();
            if ( status == Status.DELETED || status == Status.GONE ) {
                return REMOVED_ENTITY_MARKER;
            }
        }
        if ( options.isAllowNulls() ) {
            final EntityPersister persister = event.getSession()
                    .getFactory()
                    .getEntityPersister( keyToLoad.getEntityName() );
            if ( !persister.isInstance( old ) ) {
                return INCONSISTENT_RTN_CLASS_MARKER;
            }
        }
        upgradeLock( old, oldEntry, event.getLockOptions(), event.getSession() );
    }

    return old;
}
/**
 * 1. detect any dirty entities
 * 2. schedule any entity updates
 * 3. search out any reachable collections
 */
private int flushEntities(final FlushEvent event, final PersistenceContext persistenceContext) throws HibernateException {
    LOG.trace( "Flushing entities and processing referenced collections" );

    final EventSource source = event.getSession();
    final Iterable<FlushEntityEventListener> flushListeners = source.getFactory().getServiceRegistry()
            .getService( EventListenerRegistry.class )
            .getEventListenerGroup( EventType.FLUSH_ENTITY )
            .listeners();

    // Among other things, updateReachables() will recursively load all
    // collections that are moving roles. This might cause entities to
    // be loaded.
    // So this needs to be safe from concurrent modification problems.

    final Map.Entry<Object, EntityEntry>[] entityEntries = persistenceContext.reentrantSafeEntityEntries();
    final int count = entityEntries.length;

    for ( Map.Entry<Object, EntityEntry> me : entityEntries ) {
        // Update the status of the object and if necessary, schedule an update
        EntityEntry entry = me.getValue();
        Status status = entry.getStatus();

        if ( status != Status.LOADING && status != Status.GONE ) {
            final FlushEntityEvent entityEvent = new FlushEntityEvent( source, me.getKey(), entry );
            for ( FlushEntityEventListener listener : flushListeners ) {
                listener.onFlushEntity( entityEvent );
            }
        }
    }

    source.getActionQueue().sortActions();

    return count;
}
/**
 * Determine whether the entity is persistent, detached, or transient
 *
 * @param entity The entity to check
 * @param entityName The name of the entity
 * @param entry The entity's entry in the persistence context
 * @param source The originating session.
 *
 * @return The state.
 */
protected EntityState getEntityState(
        Object entity,
        String entityName,
        EntityEntry entry, //pass this as an argument only to avoid a second lookup
        SessionImplementor source) {
    final boolean traceEnabled = LOG.isTraceEnabled();

    if ( entry != null ) { // the object is persistent

        //the entity is associated with the session, so check its status
        if ( entry.getStatus() != Status.DELETED ) {
            // do nothing for persistent instances
            if ( traceEnabled ) {
                LOG.tracev( "Persistent instance of: {0}", getLoggableName( entityName, entity ) );
            }
            return EntityState.PERSISTENT;
        }

        // i.e. entry.getStatus() == Status.DELETED
        if ( traceEnabled ) {
            LOG.tracev( "Deleted instance of: {0}", getLoggableName( entityName, entity ) );
        }
        return EntityState.DELETED;
    }

    // the object is transient or detached

    // the entity is not associated with the session, so
    // try interceptor and unsaved-value

    if ( ForeignKeys.isTransient( entityName, entity, getAssumedUnsaved(), source ) ) {
        if ( traceEnabled ) {
            LOG.tracev( "Transient instance of: {0}", getLoggableName( entityName, entity ) );
        }
        return EntityState.TRANSIENT;
    }

    if ( traceEnabled ) {
        LOG.tracev( "Detached instance of: {0}", getLoggableName( entityName, entity ) );
    }
    return EntityState.DETACHED;
}
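/*
 * A sketch of the three states from the caller's side: a new instance is transient,
 * save() makes it persistent, and closing the managing session leaves it detached.
 * The entity argument is assumed to be a new, unsaved instance of a mapped class.
 */
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class EntityStateExample {

    public static void show(SessionFactory sessionFactory, Object entity) {
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            assert !session.contains( entity );  // TRANSIENT: no persistence-context entry

            session.save( entity );
            assert session.contains( entity );   // PERSISTENT: entry with a live status

            tx.commit();
        }
        finally {
            session.close();                     // afterwards the instance is DETACHED
        }
    }
}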
private static void processDereferencedCollection(PersistentCollection coll, SessionImplementor session) {
    final PersistenceContext persistenceContext = session.getPersistenceContext();
    final CollectionEntry entry = persistenceContext.getCollectionEntry( coll );
    final CollectionPersister loadedPersister = entry.getLoadedPersister();

    if ( loadedPersister != null && LOG.isDebugEnabled() ) {
        LOG.debugf(
                "Collection dereferenced: %s",
                MessageHelper.collectionInfoString( loadedPersister, coll, entry.getLoadedKey(), session )
        );
    }

    // do a check
    final boolean hasOrphanDelete = loadedPersister != null && loadedPersister.hasOrphanDelete();
    if ( hasOrphanDelete ) {
        Serializable ownerId = loadedPersister.getOwnerEntityPersister().getIdentifier( coll.getOwner(), session );
        if ( ownerId == null ) {
            // the owning entity may have been deleted and its identifier unset due to
            // identifier-rollback; in which case, try to look up its identifier from
            // the persistence context
            if ( session.getFactory().getSettings().isIdentifierRollbackEnabled() ) {
                final EntityEntry ownerEntry = persistenceContext.getEntry( coll.getOwner() );
                if ( ownerEntry != null ) {
                    ownerId = ownerEntry.getId();
                }
            }
            if ( ownerId == null ) {
                throw new AssertionFailure( "Unable to determine collection owner identifier for orphan-delete processing" );
            }
        }
        final EntityKey key = session.generateEntityKey( ownerId, loadedPersister.getOwnerEntityPersister() );
        final Object owner = persistenceContext.getEntity( key );
        if ( owner == null ) {
            throw new AssertionFailure(
                    "collection owner not associated with session: " + loadedPersister.getRole()
            );
        }
        final EntityEntry e = persistenceContext.getEntry( owner );
        //only collections belonging to deleted entities are allowed to be dereferenced in the case of orphan delete
        if ( e != null && e.getStatus() != Status.DELETED && e.getStatus() != Status.GONE ) {
            throw new HibernateException(
                    "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the owning entity instance: "
                            + loadedPersister.getRole()
            );
        }
    }

    // do the work
    entry.setCurrentPersister( null );
    entry.setCurrentKey( null );
    prepareCollectionForUpdate( coll, entry, session.getFactory() );
}
@Override
public void setEntryStatus(EntityEntry entry, Status status) {
    entry.setStatus( status );
    setHasNonReadOnlyEnties( status );
}
private void setHasNonReadOnlyEnties(Status status) {
    if ( status == Status.DELETED || status == Status.MANAGED || status == Status.SAVING ) {
        hasNonReadOnlyEntities = true;
    }
}
/**
 * Retrieve the collection that is being loaded as part of processing this
 * result set.
 * <p/>
 * Basically, there are two valid return values from this method:<ul>
 * <li>an instance of {@link org.hibernate.collection.spi.PersistentCollection} which indicates to
 * continue loading the result set row data into that returned collection
 * instance; this may be either an instance already associated and in the
 * midst of being loaded, or a newly instantiated instance as a matching
 * associated collection was not found.</li>
 * <li><i>null</i> indicates to ignore the corresponding result set row
 * data relating to the requested collection; this indicates that either
 * the collection was found to already be associated with the persistence
 * context in a fully loaded state, or it was found in a loading state
 * associated with another result set processing context.</li>
 * </ul>
 *
 * @param persister The persister for the collection being requested.
 * @param key The key of the collection being requested.
 *
 * @return The loading collection (see discussion above).
 */
public PersistentCollection getLoadingCollection(final CollectionPersister persister, final Serializable key) {
    final EntityMode em = persister.getOwnerEntityPersister().getEntityMetamodel().getEntityMode();
    final CollectionKey collectionKey = new CollectionKey( persister, key, em );
    if ( LOG.isTraceEnabled() ) {
        LOG.tracev(
                "Starting attempt to find loading collection [{0}]",
                MessageHelper.collectionInfoString( persister.getRole(), key )
        );
    }
    final LoadingCollectionEntry loadingCollectionEntry = loadContexts.locateLoadingCollectionEntry( collectionKey );
    if ( loadingCollectionEntry == null ) {
        // look for existing collection as part of the persistence context
        PersistentCollection collection = loadContexts.getPersistenceContext().getCollection( collectionKey );
        if ( collection != null ) {
            if ( collection.wasInitialized() ) {
                LOG.trace( "Collection already initialized; ignoring" );
                // ignore this row of results! Note the early exit
                return null;
            }
            LOG.trace( "Collection not yet initialized; initializing" );
        }
        else {
            final Object owner = loadContexts.getPersistenceContext().getCollectionOwner( key, persister );
            final boolean newlySavedEntity = owner != null
                    && loadContexts.getPersistenceContext().getEntry( owner ).getStatus() != Status.LOADING;
            if ( newlySavedEntity ) {
                // important, to account for newly saved entities in query
                // todo : some kind of check for new status...
                LOG.trace( "Owning entity already loaded; ignoring" );
                return null;
            }
            // create one
            LOG.tracev( "Instantiating new collection [key={0}, rs={1}]", key, resultSet );
            collection = persister.getCollectionType().instantiate(
                    loadContexts.getPersistenceContext().getSession(),
                    persister,
                    key
            );
        }
        collection.beforeInitialize( persister, -1 );
        collection.beginRead();
        localLoadingCollectionKeys.add( collectionKey );
        loadContexts.registerLoadingCollectionXRef(
                collectionKey,
                new LoadingCollectionEntry( resultSet, persister, key, collection )
        );
        return collection;
    }

    if ( loadingCollectionEntry.getResultSet() == resultSet ) {
        LOG.trace( "Found loading collection bound to current result set processing; reading row" );
        return loadingCollectionEntry.getCollection();
    }

    // ignore this row, the collection is in process of
    // being loaded somewhere further "up" the stack
    LOG.trace( "Collection is already being initialized; ignoring row" );
    return null;
}
protected void performAnyNeededCrossReferenceSynchronizations() {
    if ( !synchronizationEnabled ) {
        // synchronization (this process) was disabled
        return;
    }
    if ( entityPersister.getEntityMetamodel().hasImmutableNaturalId() ) {
        // only mutable natural-ids need this processing
        return;
    }
    if ( !isTransactionInProgress() ) {
        // not in a transaction so skip synchronization
        return;
    }

    final boolean debugEnabled = LOG.isDebugEnabled();
    for ( Serializable pk : getPersistenceContext().getNaturalIdHelper().getCachedPkResolutions( entityPersister ) ) {
        final EntityKey entityKey = generateEntityKey( pk, entityPersister );
        final Object entity = getPersistenceContext().getEntity( entityKey );
        final EntityEntry entry = getPersistenceContext().getEntry( entity );

        if ( entry == null ) {
            if ( debugEnabled ) {
                LOG.debug(
                        "Cached natural-id/pk resolution linked to null EntityEntry in persistence context : "
                                + MessageHelper.infoString( entityPersister, pk, getFactory() )
                );
            }
            continue;
        }

        if ( !entry.requiresDirtyCheck( entity ) ) {
            continue;
        }

        // MANAGED is the only status we care about here...
        if ( entry.getStatus() != Status.MANAGED ) {
            continue;
        }

        getPersistenceContext().getNaturalIdHelper().handleSynchronization( entityPersister, pk, entity );
    }
}
/**
 * Performs a pessimistic lock upgrade on a given entity, if needed.
 *
 * @param object The entity for which to upgrade the lock.
 * @param entry The entity's EntityEntry instance.
 * @param lockOptions contains the requested lock mode.
 * @param source The session which is the source of the event being processed.
 */
protected void upgradeLock(Object object, EntityEntry entry, LockOptions lockOptions, EventSource source) {
    LockMode requestedLockMode = lockOptions.getLockMode();
    if ( requestedLockMode.greaterThan( entry.getLockMode() ) ) {
        // The user requested a "greater" (i.e. more restrictive) form of
        // pessimistic lock

        if ( entry.getStatus() != Status.MANAGED ) {
            throw new ObjectDeletedException(
                    "attempted to lock a deleted instance",
                    entry.getId(),
                    entry.getPersister().getEntityName()
            );
        }

        final EntityPersister persister = entry.getPersister();

        if ( log.isTraceEnabled() ) {
            log.tracev(
                    "Locking {0} in mode: {1}",
                    MessageHelper.infoString( persister, entry.getId(), source.getFactory() ),
                    requestedLockMode
            );
        }

        final SoftLock lock;
        final CacheKey ck;
        if ( persister.hasCache() ) {
            ck = source.generateCacheKey( entry.getId(), persister.getIdentifierType(), persister.getRootEntityName() );
            lock = persister.getCacheAccessStrategy().lockItem( ck, entry.getVersion() );
        }
        else {
            ck = null;
            lock = null;
        }

        try {
            if ( persister.isVersioned() && requestedLockMode == LockMode.FORCE ) {
                // todo : should we check the current isolation mode explicitly?
                Object nextVersion = persister.forceVersionIncrement( entry.getId(), entry.getVersion(), source );
                entry.forceLocked( object, nextVersion );
            }
            else {
                persister.lock( entry.getId(), entry.getVersion(), object, lockOptions, source );
            }
            entry.setLockMode( requestedLockMode );
        }
        finally {
            // the database now holds a lock + the object is flushed from the cache,
            // so release the soft lock
            if ( persister.hasCache() ) {
                persister.getCacheAccessStrategy().unlockItem( ck, lock );
            }
        }
    }
}
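/*
 * A hedged sketch of requesting a stronger lock on an already-managed instance, which is
 * the public-API route into the upgrade logic above. Entity class and id are placeholders.
 */
import java.io.Serializable;

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class LockUpgradeExample {

    public static void show(SessionFactory sessionFactory, Class<?> entityClass, Serializable id) {
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            Object entity = session.get( entityClass, id );  // initially held with a weaker lock

            // request a "greater" pessimistic lock; a deleted instance would be rejected
            session.buildLockRequest( new LockOptions( LockMode.PESSIMISTIC_WRITE ) ).lock( entity );

            tx.commit();
        }
        finally {
            session.close();
        }
    }
}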
private void checkNaturalId(
        EntityPersister persister,
        EntityEntry entry,
        Object[] current,
        Object[] loaded,
        SessionImplementor session) {
    if ( persister.hasNaturalIdentifier() && entry.getStatus() != Status.READ_ONLY ) {
        if ( !persister.getEntityMetamodel().hasImmutableNaturalId() ) {
            // SHORT-CUT: if the natural id is mutable (!immutable), no need to do the below checks
            // EARLY EXIT!!!
            return;
        }

        final int[] naturalIdentifierPropertiesIndexes = persister.getNaturalIdentifierProperties();
        final Type[] propertyTypes = persister.getPropertyTypes();
        final boolean[] propertyUpdateability = persister.getPropertyUpdateability();

        final Object[] snapshot = loaded == null
                ? session.getPersistenceContext().getNaturalIdSnapshot( entry.getId(), persister )
                : session.getPersistenceContext().getNaturalIdHelper().extractNaturalIdValues( loaded, persister );

        for ( int i = 0; i < naturalIdentifierPropertiesIndexes.length; i++ ) {
            final int naturalIdentifierPropertyIndex = naturalIdentifierPropertiesIndexes[i];
            if ( propertyUpdateability[naturalIdentifierPropertyIndex] ) {
                // if the given natural id property is updatable (mutable), there is nothing to check
                continue;
            }

            final Type propertyType = propertyTypes[naturalIdentifierPropertyIndex];
            if ( !propertyType.isEqual( current[naturalIdentifierPropertyIndex], snapshot[i] ) ) {
                throw new HibernateException(
                        String.format(
                                "An immutable natural identifier of entity %s was altered from %s to %s",
                                persister.getEntityName(),
                                propertyTypes[naturalIdentifierPropertyIndex].toLoggableString(
                                        snapshot[i],
                                        session.getFactory()
                                ),
                                propertyTypes[naturalIdentifierPropertyIndex].toLoggableString(
                                        current[naturalIdentifierPropertyIndex],
                                        session.getFactory()
                                )
                        )
                );
            }
        }
    }
}
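/*
 * A sketch of the kind of mapping that arms the check above: a natural id declared
 * immutable. Altering accountNumber on a managed instance and flushing would raise the
 * "immutable natural identifier ... was altered" exception. The Account entity is hypothetical.
 */
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.NaturalId;

@Entity
public class Account {

    @Id
    private Long id;

    // immutable natural id: the flush-time check rejects any in-place change
    @NaturalId(mutable = false)
    private String accountNumber;

    private String owner;

    protected Account() {
    }

    public Account(Long id, String accountNumber, String owner) {
        this.id = id;
        this.accountNumber = accountNumber;
        this.owner = owner;
    }

    public String getAccountNumber() {
        return accountNumber;
    }

    public void setOwner(String owner) {
        this.owner = owner;
    }
}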
private boolean scheduleUpdate(final FlushEntityEvent event) {
    final EntityEntry entry = event.getEntityEntry();
    final EventSource session = event.getSession();
    final Object entity = event.getEntity();
    final Status status = entry.getStatus();
    final EntityPersister persister = entry.getPersister();
    final Object[] values = event.getPropertyValues();

    if ( LOG.isTraceEnabled() ) {
        if ( status == Status.DELETED ) {
            if ( !persister.isMutable() ) {
                LOG.tracev(
                        "Updating immutable, deleted entity: {0}",
                        MessageHelper.infoString( persister, entry.getId(), session.getFactory() )
                );
            }
            else if ( !entry.isModifiableEntity() ) {
                LOG.tracev(
                        "Updating non-modifiable, deleted entity: {0}",
                        MessageHelper.infoString( persister, entry.getId(), session.getFactory() )
                );
            }
            else {
                LOG.tracev(
                        "Updating deleted entity: {0}",
                        MessageHelper.infoString( persister, entry.getId(), session.getFactory() )
                );
            }
        }
        else {
            LOG.tracev(
                    "Updating entity: {0}",
                    MessageHelper.infoString( persister, entry.getId(), session.getFactory() )
            );
        }
    }

    final boolean intercepted = !entry.isBeingReplicated() && handleInterception( event );

    // increment the version number (if necessary)
    final Object nextVersion = getNextVersion( event );

    // if it was dirtied by a collection only
    int[] dirtyProperties = event.getDirtyProperties();
    if ( event.isDirtyCheckPossible() && dirtyProperties == null ) {
        if ( !intercepted && !event.hasDirtyCollection() ) {
            throw new AssertionFailure( "dirty, but no dirty properties" );
        }
        dirtyProperties = ArrayHelper.EMPTY_INT_ARRAY;
    }

    // check nullability but do not doAfterTransactionCompletion command execute
    // we'll use scheduled updates for that.
    new Nullability( session ).checkNullability( values, persister, true );

    // schedule the update
    // note that we intentionally do _not_ pass in currentPersistentState!
    session.getActionQueue().addAction(
            new EntityUpdateAction(
                    entry.getId(),
                    values,
                    dirtyProperties,
                    event.hasDirtyCollection(),
                    ( status == Status.DELETED && !entry.isModifiableEntity()
                            ? persister.getPropertyValues( entity )
                            : entry.getLoadedState() ),
                    entry.getVersion(),
                    nextVersion,
                    entity,
                    entry.getRowId(),
                    persister,
                    session
            )
    );

    return intercepted;
}
private boolean isCollectionDirtyCheckNecessary(EntityPersister persister, Status status) {
    return ( status == Status.MANAGED || status == Status.READ_ONLY )
            && persister.isVersioned()
            && persister.hasCollections();
}
protected Serializable entityIsPersistent(SaveOrUpdateEvent event) throws HibernateException {
    final boolean traceEnabled = LOG.isTraceEnabled();
    if ( traceEnabled ) {
        LOG.trace( "Ignoring persistent instance" );
    }

    EntityEntry entityEntry = event.getEntry();
    if ( entityEntry == null ) {
        throw new AssertionFailure( "entity was transient or detached" );
    }

    if ( entityEntry.getStatus() == Status.DELETED ) {
        throw new AssertionFailure( "entity was deleted" );
    }

    final SessionFactoryImplementor factory = event.getSession().getFactory();

    Serializable requestedId = event.getRequestedId();
    Serializable savedId;
    if ( requestedId == null ) {
        savedId = entityEntry.getId();
    }
    else {
        // true when the requested id does not match the id already associated with the instance
        final boolean idMismatch = !entityEntry.getPersister().getIdentifierType()
                .isEqual( requestedId, entityEntry.getId(), factory );
        if ( idMismatch ) {
            throw new PersistentObjectException(
                    "object passed to save() was already persistent: "
                            + MessageHelper.infoString( entityEntry.getPersister(), requestedId, factory )
            );
        }
        savedId = requestedId;
    }

    if ( traceEnabled ) {
        LOG.tracev(
                "Object already associated with session: {0}",
                MessageHelper.infoString( entityEntry.getPersister(), savedId, factory )
        );
    }

    return savedId;
}
protected void performUpdate(
        SaveOrUpdateEvent event,
        Object entity,
        EntityPersister persister) throws HibernateException {

    final boolean traceEnabled = LOG.isTraceEnabled();
    if ( traceEnabled && !persister.isMutable() ) {
        LOG.trace( "Immutable instance passed to performUpdate()" );
    }

    if ( traceEnabled ) {
        LOG.tracev(
                "Updating {0}",
                MessageHelper.infoString( persister, event.getRequestedId(), event.getSession().getFactory() )
        );
    }

    final EventSource source = event.getSession();
    final EntityKey key = source.generateEntityKey( event.getRequestedId(), persister );

    source.getPersistenceContext().checkUniqueness( key, entity );

    if ( invokeUpdateLifecycle( entity, persister, source ) ) {
        reassociate( event, event.getObject(), event.getRequestedId(), persister );
        return;
    }

    // this is a transient object with existing persistent state not loaded by the session

    new OnUpdateVisitor( source, event.getRequestedId(), entity ).process( entity, persister );

    // TODO: put this stuff back in to read snapshot from
    // the second-level cache (needs some extra work)
    /*Object[] cachedState = null;
    if ( persister.hasCache() ) {
        CacheEntry entry = (CacheEntry) persister.getCache()
                .get( event.getRequestedId(), source.getTimestamp() );
        cachedState = entry == null
                ? null
                : entry.getState(); //TODO: half-assemble this stuff
    }*/

    source.getPersistenceContext().addEntity(
            entity,
            ( persister.isMutable() ? Status.MANAGED : Status.READ_ONLY ),
            null, // cachedState,
            key,
            persister.getVersion( entity ),
            LockMode.NONE,
            true,
            persister,
            false,
            true // assume true, since we don't really know, and it doesn't matter
    );

    persister.afterReassociate( entity, source );

    if ( traceEnabled ) {
        LOG.tracev(
                "Updating {0}",
                MessageHelper.infoString( persister, event.getRequestedId(), source.getFactory() )
        );
    }

    cascadeOnUpdate( event, persister, entity );
}
/**
 * Associates a given entity (either transient or associated with another session) to
 * the given session.
 *
 * @param event The event triggering the re-association
 * @param object The entity to be associated
 * @param id The id of the entity.
 * @param persister The entity's persister instance.
 *
 * @return An EntityEntry representing the entity within this session.
 */
protected final EntityEntry reassociate(AbstractEvent event, Object object, Serializable id, EntityPersister persister) {
    if ( log.isTraceEnabled() ) {
        log.tracev(
                "Reassociating transient instance: {0}",
                MessageHelper.infoString( persister, id, event.getSession().getFactory() )
        );
    }

    final EventSource source = event.getSession();
    final EntityKey key = source.generateEntityKey( id, persister );

    source.getPersistenceContext().checkUniqueness( key, object );

    //get a snapshot
    Object[] values = persister.getPropertyValues( object );
    TypeHelper.deepCopy(
            values,
            persister.getPropertyTypes(),
            persister.getPropertyUpdateability(),
            values,
            source
    );
    Object version = Versioning.getVersion( values, persister );

    EntityEntry newEntry = source.getPersistenceContext().addEntity(
            object,
            ( persister.isMutable() ? Status.MANAGED : Status.READ_ONLY ),
            values,
            key,
            version,
            LockMode.NONE,
            true,
            persister,
            false,
            true //will be ignored, using the existing Entry instead
    );

    new OnLockVisitor( source, id, object ).process( object, persister );

    persister.afterReassociate( object, source );

    return newEntry;
}
/**
 * Prepares the save call by checking the session caches for a pre-existing
 * entity and performing any lifecycle callbacks.
 *
 * @param entity The entity to be saved.
 * @param id The id by which to save the entity.
 * @param persister The entity's persister instance.
 * @param useIdentityColumn Is an identity column being used?
 * @param anything Generally cascade-specific information.
 * @param source The session from which the event originated.
 * @param requiresImmediateIdAccess does the event context require
 * access to the identifier immediately after execution of this method (if
 * not, post-insert style id generators may be postponed if we are outside
 * a transaction).
 *
 * @return The id used to save the entity; may be null depending on the
 * type of id generator used and the requiresImmediateIdAccess value
 */
protected Serializable performSave(
        Object entity,
        Serializable id,
        EntityPersister persister,
        boolean useIdentityColumn,
        Object anything,
        EventSource source,
        boolean requiresImmediateIdAccess) {

    if ( LOG.isTraceEnabled() ) {
        LOG.tracev( "Saving {0}", MessageHelper.infoString( persister, id, source.getFactory() ) );
    }

    final EntityKey key;
    if ( !useIdentityColumn ) {
        key = source.generateEntityKey( id, persister );
        Object old = source.getPersistenceContext().getEntity( key );
        if ( old != null ) {
            if ( source.getPersistenceContext().getEntry( old ).getStatus() == Status.DELETED ) {
                source.forceFlush( source.getPersistenceContext().getEntry( old ) );
            }
            else {
                throw new NonUniqueObjectException( id, persister.getEntityName() );
            }
        }
        persister.setIdentifier( entity, id, source );
    }
    else {
        key = null;
    }

    if ( invokeSaveLifecycle( entity, persister, source ) ) {
        return id; //EARLY EXIT
    }

    return performSaveOrReplicate(
            entity,
            key,
            persister,
            useIdentityColumn,
            anything,
            source,
            requiresImmediateIdAccess
    );
}
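/*
 * A hedged sketch of the public entry point that funnels into performSave(): save() on a
 * transient instance returns the assigned or generated identifier. Whether an identity
 * column forces an immediate INSERT depends on the entity's id generator.
 */
import java.io.Serializable;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class SaveExample {

    public static Serializable saveNew(SessionFactory sessionFactory, Object transientEntity) {
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            Serializable id = session.save( transientEntity );
            tx.commit();
            return id;
        }
        finally {
            session.close();
        }
    }
}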
/**
 * Given a collection of entity instances that used to
 * belong to the collection, and a collection of instances
 * that currently belong, return a collection of orphans
 */
@SuppressWarnings({"JavaDoc", "unchecked"})
protected static Collection getOrphans(
        Collection oldElements,
        Collection currentElements,
        String entityName,
        SessionImplementor session) throws HibernateException {

    // short-circuit(s)
    if ( currentElements.size() == 0 ) {
        // no current elements, so every old element is an orphan
        return oldElements;
    }
    if ( oldElements.size() == 0 ) {
        // no old elements, so there can be no orphans
        return oldElements;
    }

    final EntityPersister entityPersister = session.getFactory().getEntityPersister( entityName );
    final Type idType = entityPersister.getIdentifierType();

    // create the collection holding the orphans
    final Collection res = new ArrayList();

    // collect EntityIdentifier(s) of the *current* elements - add them into a HashSet for fast access
    final java.util.Set currentIds = new HashSet();
    final java.util.Set currentSaving = new IdentitySet();
    for ( Object current : currentElements ) {
        if ( current != null && ForeignKeys.isNotTransient( entityName, current, null, session ) ) {
            final EntityEntry ee = session.getPersistenceContext().getEntry( current );
            if ( ee != null && ee.getStatus() == Status.SAVING ) {
                currentSaving.add( current );
            }
            else {
                final Serializable currentId = ForeignKeys.getEntityIdentifierIfNotUnsaved(
                        entityName,
                        current,
                        session
                );
                currentIds.add( new TypedValue( idType, currentId ) );
            }
        }
    }

    // iterate over the *old* list
    for ( Object old : oldElements ) {
        if ( !currentSaving.contains( old ) ) {
            final Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
            if ( !currentIds.contains( new TypedValue( idType, oldId ) ) ) {
                res.add( old );
            }
        }
    }

    return res;
}
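/*
 * A minimal, Hibernate-free sketch of the same "old minus current, compared by identifier"
 * idea used above; the idExtractor stands in for the identifier-type comparison the real
 * method performs with TypedValue.
 */
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class OrphanSketch {

    // Returns the old elements whose identifier no longer appears among the current elements.
    public static <E, ID> Collection<E> orphans(
            Collection<E> oldElements,
            Collection<E> currentElements,
            Function<E, ID> idExtractor) {
        if ( currentElements.isEmpty() ) {
            return oldElements;     // everything old is an orphan
        }
        if ( oldElements.isEmpty() ) {
            return oldElements;     // nothing old, so nothing can be orphaned
        }
        Set<ID> currentIds = new HashSet<>();
        for ( E current : currentElements ) {
            currentIds.add( idExtractor.apply( current ) );
        }
        List<E> result = new ArrayList<>();
        for ( E old : oldElements ) {
            if ( !currentIds.contains( idExtractor.apply( old ) ) ) {
                result.add( old );
            }
        }
        return result;
    }
}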
/**
 * Resolve any dependencies on {@code managedEntity}.
 *
 * @param managedEntity - the managed entity instance
 * @param session - the session
 *
 * @return the insert actions that depended only on the specified entity.
 *
 * @throws IllegalArgumentException if {@code managedEntity} did not have managed or read-only status.
 */
@SuppressWarnings({ "unchecked" })
public Set<AbstractEntityInsertAction> resolveDependentActions(Object managedEntity, SessionImplementor session) {
    final EntityEntry entityEntry = session.getPersistenceContext().getEntry( managedEntity );
    if ( entityEntry.getStatus() != Status.MANAGED && entityEntry.getStatus() != Status.READ_ONLY ) {
        throw new IllegalArgumentException( "EntityEntry did not have status MANAGED or READ_ONLY: " + entityEntry );
    }

    final boolean traceEnabled = LOG.isTraceEnabled();

    // Find out if there are any unresolved insertions that are waiting for the
    // specified entity to be resolved.
    final Set<AbstractEntityInsertAction> dependentActions = dependentActionsByTransientEntity.remove( managedEntity );
    if ( dependentActions == null ) {
        if ( traceEnabled ) {
            LOG.tracev(
                    "No unresolved entity inserts that depended on [{0}]",
                    MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
            );
        }
        // NOTE EARLY EXIT!
        return Collections.emptySet();
    }

    final Set<AbstractEntityInsertAction> resolvedActions = new IdentitySet();
    if ( traceEnabled ) {
        LOG.tracev(
                "Unresolved inserts before resolving [{0}]: [{1}]",
                MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),
                toString()
        );
    }
    for ( AbstractEntityInsertAction dependentAction : dependentActions ) {
        if ( traceEnabled ) {
            LOG.tracev(
                    "Resolving insert [{0}] dependency on [{1}]",
                    MessageHelper.infoString( dependentAction.getEntityName(), dependentAction.getId() ),
                    MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
            );
        }
        final NonNullableTransientDependencies dependencies = dependenciesByAction.get( dependentAction );
        dependencies.resolveNonNullableTransientEntity( managedEntity );
        if ( dependencies.isEmpty() ) {
            if ( traceEnabled ) {
                LOG.tracev(
                        "Resolving insert [{0}] (only depended on [{1}])",
                        dependentAction,
                        MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() )
                );
            }
            // dependentAction only depended on managedEntity..
            dependenciesByAction.remove( dependentAction );
            resolvedActions.add( dependentAction );
        }
    }
    if ( traceEnabled ) {
        LOG.tracev(
                "Unresolved inserts after resolving [{0}]: [{1}]",
                MessageHelper.infoString( entityEntry.getEntityName(), entityEntry.getId() ),
                toString()
        );
    }
    return resolvedActions;
}
private boolean deleted(FlushEntityEvent event) {
    return event.getEntityEntry().getStatus() == Status.DELETED;
}
private void incrementRootVersion(FlushEntityEvent event, Object root) {
    EntityEntry entityEntry = event.getSession().getPersistenceContext().getEntry( Hibernate.unproxy( root ) );
    if ( entityEntry.getStatus() != Status.DELETED ) {
        event.getSession().lock( root, LockMode.OPTIMISTIC_FORCE_INCREMENT );
    }
}