@Override
public void cascade(
		EventSource session,
		Object child,
		String entityName,
		Object anything,
		boolean isCascadeDeleteEnabled) {
	LOG.tracev( "Cascading to lock: {0}", entityName );
	LockMode lockMode = LockMode.NONE;
	LockOptions lr = new LockOptions();
	if ( anything instanceof LockOptions ) {
		LockOptions lockOptions = (LockOptions) anything;
		lr.setTimeOut( lockOptions.getTimeOut() );
		lr.setScope( lockOptions.getScope() );
		if ( lockOptions.getScope() ) {
			lockMode = lockOptions.getLockMode();
		}
	}
	lr.setLockMode( lockMode );
	session.buildLockRequest( lr ).lock( entityName, child );
}
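// Usage sketch (illustrative, not part of the listener above): with lock
// scope enabled, the requested lock mode is propagated to cascaded
// associations; without it, children are cascaded with LockMode.NONE (a
// reattach only). "invoice" is a hypothetical managed entity.
LockOptions options = new LockOptions( LockMode.PESSIMISTIC_WRITE );
options.setScope( true );                       // extend the lock mode to cascaded associations
session.buildLockRequest( options ).lock( invoice );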
/**
 * Prepares the save call using the given requested id.
 *
 * @param entity The entity to be saved.
 * @param requestedId The id to which to associate the entity.
 * @param entityName The name of the entity being saved.
 * @param anything Generally cascade-specific information.
 * @param source The session which is the source of this save event.
 *
 * @return The id used to save the entity.
 */
protected Serializable saveWithRequestedId(
		Object entity,
		Serializable requestedId,
		String entityName,
		Object anything,
		EventSource source) {
	return performSave(
			entity,
			requestedId,
			source.getEntityPersister( entityName, entity ),
			false,
			anything,
			source,
			true
	);
}
/**
 * Handles the calls needed to perform pre-save cascades for the given entity.
 *
 * @param source The session from which the save event originated.
 * @param persister The entity's persister instance.
 * @param entity The entity to be saved.
 * @param anything Generally cascade-specific data
 */
protected void cascadeBeforeSave(
		EventSource source,
		EntityPersister persister,
		Object entity,
		Object anything) {
	// cascade-save to many-to-one BEFORE the parent is saved
	source.getPersistenceContext().incrementCascadeLevel();
	try {
		new Cascade( getCascadeAction(), CascadePoint.BEFORE_INSERT_AFTER_DELETE, source )
				.cascade( persister, entity, anything );
	}
	finally {
		source.getPersistenceContext().decrementCascadeLevel();
	}
}
public IteratorImpl(
		ResultSet rs,
		PreparedStatement ps,
		EventSource sess,
		boolean readOnly,
		Type[] types,
		String[][] columnNames,
		HolderInstantiator holderInstantiator) throws HibernateException, SQLException {
	this.rs = rs;
	this.ps = ps;
	this.session = sess;
	this.readOnly = readOnly;
	this.types = types;
	this.names = columnNames;
	this.holderInstantiator = holderInstantiator;
	single = types.length == 1;
	postNext();
}
/**
 * We encountered a delete request on a transient instance.
 * <p/>
 * This is a deviation from historical Hibernate (pre-3.2) behavior to
 * align with the JPA spec, which states that transient entities can be
 * passed to the remove operation, in which case cascades still need to be
 * performed.
 *
 * @param session The session which is the source of the event
 * @param entity The entity being deleted
 * @param cascadeDeleteEnabled Is cascading of deletes enabled
 * @param persister The entity persister
 * @param transientEntities A cache of already visited transient entities
 * (to avoid infinite recursion).
 */
protected void deleteTransientEntity(
		EventSource session,
		Object entity,
		boolean cascadeDeleteEnabled,
		EntityPersister persister,
		Set transientEntities) {
	LOG.handlingTransientEntity();
	if ( transientEntities.contains( entity ) ) {
		LOG.trace( "Already handled transient entity; skipping" );
		return;
	}
	transientEntities.add( entity );
	cascadeBeforeDelete( session, persister, entity, null, transientEntities );
	cascadeAfterDelete( session, persister, entity, transientEntities );
}
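// Illustrative sketch of the behavior described above (Parent/Child are
// hypothetical entities; Parent.children is assumed mapped with
// CascadeType.ALL): deleting a never-persisted Parent writes nothing for the
// parent itself, but the delete still cascades to persistent children.
Parent neverSaved = new Parent();               // transient instance
neverSaved.getChildren().add( managedChild );   // managedChild is persistent
session.delete( neverSaved );                   // routed to deleteTransientEntity(...)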
protected void cascadeBeforeDelete(
		EventSource session,
		EntityPersister persister,
		Object entity,
		EntityEntry entityEntry,
		Set transientEntities) throws HibernateException {
	CacheMode cacheMode = session.getCacheMode();
	session.setCacheMode( CacheMode.GET );
	session.getPersistenceContext().incrementCascadeLevel();
	try {
		// cascade-delete to collections BEFORE the collection owner is deleted
		new Cascade( CascadingActions.DELETE, CascadePoint.AFTER_INSERT_BEFORE_DELETE, session )
				.cascade( persister, entity, transientEntities );
	}
	finally {
		session.getPersistenceContext().decrementCascadeLevel();
		session.setCacheMode( cacheMode );
	}
}
protected void cascadeAfterDelete(
		EventSource session,
		EntityPersister persister,
		Object entity,
		Set transientEntities) throws HibernateException {
	CacheMode cacheMode = session.getCacheMode();
	session.setCacheMode( CacheMode.GET );
	session.getPersistenceContext().incrementCascadeLevel();
	try {
		// cascade-delete to many-to-one AFTER the parent was deleted
		new Cascade( CascadingActions.DELETE, CascadePoint.BEFORE_INSERT_AFTER_DELETE, session )
				.cascade( persister, entity, transientEntities );
	}
	finally {
		session.getPersistenceContext().decrementCascadeLevel();
		session.setCacheMode( cacheMode );
	}
}
private boolean wrapCollections(
		EventSource session,
		EntityPersister persister,
		Type[] types,
		Object[] values) {
	if ( persister.hasCollections() ) {
		// wrap up any new collections directly referenced by the object
		// or its components
		// NOTE: we need to do the wrap here even if it's not "dirty",
		// because collections need wrapping but changes to _them_
		// don't dirty the container. Also, for versioned data, we
		// need to wrap before calling searchForDirtyCollections
		WrapVisitor visitor = new WrapVisitor( session );
		// substitutes into values by side effect
		visitor.processEntityPropertyValues( values, types );
		return visitor.isSubstitutionRequired();
	}
	else {
		return false;
	}
}
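// Observable effect of the wrapping above (a sketch; Order and its "items"
// bag are a hypothetical mapping): a plain ArrayList assigned to a managed
// entity is replaced by a PersistentCollection wrapper during flush, so
// element-level changes can be tracked without dirtying the owning entity.
Order order = (Order) session.get( Order.class, orderId );
order.setItems( new ArrayList<>() );    // plain, unwrapped collection
session.flush();                        // WrapVisitor substitutes the wrapper
// order.getItems() is now a PersistentCollection (e.g. a PersistentBag)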
@SuppressWarnings({"unchecked"}) private void entityIsDeleted(PersistEvent event, Map createCache) { final EventSource source = event.getSession(); final Object entity = source.getPersistenceContext().unproxy( event.getObject() ); final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); LOG.tracef( "un-scheduling entity deletion [%s]", MessageHelper.infoString( persister, persister.getIdentifier( entity, source ), source.getFactory() ) ); if ( createCache.put( entity, entity ) == null ) { justCascade( createCache, source, entity, persister ); } }
/**
 * The given save-update event named a transient entity.
 * <p/>
 * Here, we will perform the save processing.
 *
 * @param event The save event to be handled.
 *
 * @return The entity's identifier after saving.
 */
protected Serializable entityIsTransient(SaveOrUpdateEvent event) {
	LOG.trace( "Saving transient instance" );

	final EventSource source = event.getSession();
	EntityEntry entityEntry = event.getEntry();
	if ( entityEntry != null ) {
		if ( entityEntry.getStatus() == Status.DELETED ) {
			source.forceFlush( entityEntry );
		}
		else {
			throw new AssertionFailure( "entity was persistent" );
		}
	}

	Serializable id = saveWithGeneratedOrRequestedId( event );
	source.getPersistenceContext().reassociateProxy( event.getObject(), id );
	return id;
}
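// Usage sketch for the transient branch above (public API; Person is a
// hypothetical entity with a generated id): saveOrUpdate() on a
// never-persisted instance takes the save path and assigns an identifier.
Person person = new Person( "Ada" );    // transient: no id, no EntityEntry
session.saveOrUpdate( person );         // dispatched to entityIsTransient(...)
Serializable id = session.getIdentifier( person );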
@Override
public void entityCopyDetected(
		Object managedEntity,
		Object mergeEntity1,
		Object mergeEntity2,
		EventSource session) {
	if ( mergeEntity1 == managedEntity && mergeEntity2 == managedEntity ) {
		throw new AssertionFailure(
				"entity1 and entity2 are the same as managedEntity; must be different."
		);
	}
	final String managedEntityString = MessageHelper.infoString(
			session.getEntityName( managedEntity ),
			session.getIdentifier( managedEntity )
	);
	throw new IllegalStateException(
			"Multiple representations of the same entity " + managedEntityString + " are being merged. " +
					getManagedOrDetachedEntityString( managedEntity, mergeEntity1 ) + "; " +
					getManagedOrDetachedEntityString( managedEntity, mergeEntity2 )
	);
}
@SuppressWarnings( value = {"unchecked"} ) private void logFlushResults(FlushEvent event) { if ( !LOG.isDebugEnabled() ) { return; } final EventSource session = event.getSession(); final PersistenceContext persistenceContext = session.getPersistenceContext(); LOG.debugf( "Flushed: %s insertions, %s updates, %s deletions to %s objects", session.getActionQueue().numberOfInsertions(), session.getActionQueue().numberOfUpdates(), session.getActionQueue().numberOfDeletions(), persistenceContext.getNumberOfManagedEntities() ); LOG.debugf( "Flushed: %s (re)creations, %s updates, %s removals to %s collections", session.getActionQueue().numberOfCollectionCreations(), session.getActionQueue().numberOfCollectionUpdates(), session.getActionQueue().numberOfCollectionRemovals(), persistenceContext.getCollectionEntries().size() ); new EntityPrinter( session.getFactory() ).toString( persistenceContext.getEntitiesByKey().entrySet() ); }
/**
 * Process cascade save/update at the start of a flush to discover
 * any newly referenced entity that must be passed to saveOrUpdate(),
 * and also apply orphan delete.
 */
private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext) throws HibernateException {
	LOG.debug( "Processing flush-time cascades" );

	final Object anything = getAnything();
	// safe from concurrent modification because of how the entries are copied by reentrantSafeEntityEntries()
	for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
		EntityEntry entry = me.getValue();
		Status status = entry.getStatus();
		if ( status == Status.MANAGED || status == Status.SAVING || status == Status.READ_ONLY ) {
			cascadeOnFlush( session, entry.getPersister(), me.getKey(), anything );
		}
	}
}
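// Sketch of the flush-time cascade this method performs (hypothetical
// Parent/Child mapping with CascadeType.ALL on Parent.children): a transient
// Child added to an already-managed Parent is discovered and saved
// automatically when the flush cascades over the managed entities.
Parent parent = (Parent) session.get( Parent.class, parentId ); // MANAGED
parent.getChildren().add( new Child() );                        // transient child
session.flush();                                                // cascadeOnFlush saves the child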
private void saveTransientEntity(
		Object entity,
		String entityName,
		Serializable requestedId,
		EventSource source,
		Map copyCache) {
	// this bit is only *really* absolutely necessary for handling
	// requestedId, but is also good if we merge multiple object
	// graphs, since it helps ensure uniqueness
	if ( requestedId == null ) {
		saveWithGeneratedId( entity, entityName, copyCache, source, false );
	}
	else {
		saveWithRequestedId( entity, requestedId, entityName, copyCache, source );
	}
}
private boolean isVersionChanged(Object entity, EventSource source, EntityPersister persister, Object target) {
	if ( !persister.isVersioned() ) {
		return false;
	}
	// for merging of versioned entities, we consider the version having
	// been changed only when:
	// 1) the two version values are different;
	//    *AND*
	// 2) the target actually represents database state!
	//
	// This second condition is a special case which allows
	// an entity to be merged during the same transaction
	// (though during a separate operation) in which it was
	// originally persisted/saved
	boolean changed = !persister.getVersionType().isSame(
			persister.getVersion( target ),
			persister.getVersion( entity )
	);

	// TODO : perhaps we should additionally require that the incoming entity
	// version be equivalent to the defined unsaved-value?
	return changed && existsInDatabase( target, source, persister );
}
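// Sketch of the stale-version case this check detects (Document is a
// hypothetical entity with an @Version attribute; loadAndDetach is a
// hypothetical helper): merging a detached copy whose version lags the
// current database row counts as a version change and surfaces as an
// optimistic-locking failure.
Document detached = loadAndDetach( documentId );    // version = 1 at detach time
// ... a concurrent transaction updates the row, bumping the version to 2 ...
try {
	session.merge( detached );                      // isVersionChanged(...) -> true
}
catch (StaleObjectStateException e) {
	// reload and reapply the changes, or report the conflict
}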
/**
 * Perform any cascades needed as part of this copy event.
 *
 * @param source The merge event being processed.
 * @param persister The persister of the entity being copied.
 * @param entity The entity being copied.
 * @param copyCache A cache of already copied instances.
 */
protected void cascadeOnMerge(
		final EventSource source,
		final EntityPersister persister,
		final Object entity,
		final Map copyCache) {
	source.getPersistenceContext().incrementCascadeLevel();
	try {
		new Cascade( getCascadeAction(), CascadePoint.BEFORE_MERGE, source )
				.cascade( persister, entity, copyCache );
	}
	finally {
		source.getPersistenceContext().decrementCascadeLevel();
	}
}
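// Usage sketch of merge cascading (hypothetical mapping with
// @OneToMany(cascade = CascadeType.MERGE) from Parent to Child;
// fetchAndDetachParent is a hypothetical helper): merging a detached parent
// also produces managed copies of the reachable children.
Parent detachedParent = fetchAndDetachParent();
detachedParent.getChildren().get( 0 ).setName( "updated" );
Parent managedCopy = (Parent) session.merge( detachedParent );
// the child copy inside managedCopy carries the update as well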
protected void coordinateSharedCacheCleanup(SessionImplementor session) {
	final BulkOperationCleanupAction action = new BulkOperationCleanupAction(
			session,
			getCustomQuery().getQuerySpaces()
	);

	if ( session.isEventSource() ) {
		( (EventSource) session ).getActionQueue().addAction( action );
	}
	else {
		action.getAfterTransactionCompletionProcess().doAfterTransactionCompletion( true, session );
	}
}
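// Sketch of when the cleanup above runs (public API; Person is a
// hypothetical cached entity): a bulk HQL statement bypasses the persistence
// context, so the affected second-level cache regions and query spaces are
// invalidated via the scheduled action instead of per-entity events.
int updated = session.createQuery( "update Person set active = false" )
		.executeUpdate();   // schedules a BulkOperationCleanupAction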
@Override public void cascade( EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled) { LOG.tracev( "Cascading to delete: {0}", entityName ); session.delete( entityName, child, isCascadeDeleteEnabled, (Set) anything ); }
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// delete does cascade to uninitialized collections
	return getAllElementsIterator( session, collectionType, collection );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// lock doesn't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public void cascade(
		EventSource session,
		Object child,
		String entityName,
		Object anything,
		boolean isCascadeDeleteEnabled) throws HibernateException {
	LOG.tracev( "Cascading to refresh: {0}", entityName );
	session.refresh( entityName, child, (Map) anything );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// refresh doesn't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public void cascade(
		EventSource session,
		Object child,
		String entityName,
		Object anything,
		boolean isCascadeDeleteEnabled) throws HibernateException {
	LOG.tracev( "Cascading to evict: {0}", entityName );
	session.evict( child );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// evicts don't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// saves / updates don't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public void cascade(
		EventSource session,
		Object child,
		String entityName,
		Object anything,
		boolean isCascadeDeleteEnabled) throws HibernateException {
	LOG.tracev( "Cascading to merge: {0}", entityName );
	session.merge( entityName, child, (Map) anything );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// merges don't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public void cascade(
		EventSource session,
		Object child,
		String entityName,
		Object anything,
		boolean isCascadeDeleteEnabled) throws HibernateException {
	LOG.tracev( "Cascading to persist: {0}", entityName );
	session.persist( entityName, child, (Map) anything );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// persist does cascade to uninitialized collections
	return getAllElementsIterator( session, collectionType, collection );
}
private void markInterceptorDirty(Object entity, EntityPersister persister, EventSource source) {
	if ( persister.getInstrumentationMetadata().isInstrumented() ) {
		FieldInterceptor interceptor = persister.getInstrumentationMetadata().injectInterceptor(
				entity,
				persister.getEntityName(),
				null,
				source
		);
		interceptor.dirty();
	}
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// persists don't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
private boolean isInManagedState(Object child, EventSource session) {
	EntityEntry entry = session.getPersistenceContext().getEntry( child );
	return entry != null &&
			( entry.getStatus() == Status.MANAGED ||
					entry.getStatus() == Status.READ_ONLY ||
					entry.getStatus() == Status.SAVING );
}
@Override
public Iterator getCascadableChildrenIterator(
		EventSource session,
		CollectionType collectionType,
		Object collection) {
	// replicate doesn't cascade to uninitialized collections
	return getLoadedElementsIterator( session, collectionType, collection );
}
@Override
public void lock(Serializable id, Object version, Object object, int timeout, SessionImplementor session) {
	if ( !lockable.isVersioned() ) {
		throw new OptimisticLockException(
				object,
				"[" + lockMode + "] not supported for non-versioned entities [" + lockable.getEntityName() + "]"
		);
	}
	final EntityEntry entry = session.getPersistenceContext().getEntry( object );
	// Register the EntityVerifyVersionProcess action to run just prior to transaction commit.
	( (EventSource) session ).getActionQueue().registerProcess(
			new EntityVerifyVersionProcess( object, entry )
	);
}
@Override
public void lock(Serializable id, Object version, Object object, int timeout, SessionImplementor session) {
	if ( !lockable.isVersioned() ) {
		throw new HibernateException(
				"[" + lockMode + "] not supported for non-versioned entities [" + lockable.getEntityName() + "]"
		);
	}
	final EntityEntry entry = session.getPersistenceContext().getEntry( object );
	// Register the EntityIncrementVersionProcess action to run just prior to transaction commit.
	( (EventSource) session ).getActionQueue().registerProcess(
			new EntityIncrementVersionProcess( object, entry )
	);
}
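// Usage sketch for the two strategies above (public API; "invoice" is a
// hypothetical entity assumed to carry an @Version attribute): OPTIMISTIC
// re-verifies the version at commit time, while OPTIMISTIC_FORCE_INCREMENT
// additionally bumps it just before the transaction completes.
session.buildLockRequest( new LockOptions( LockMode.OPTIMISTIC ) )
		.lock( invoice );                   // registers EntityVerifyVersionProcess
session.buildLockRequest( new LockOptions( LockMode.OPTIMISTIC_FORCE_INCREMENT ) )
		.lock( invoice );                   // registers EntityIncrementVersionProcess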
/**
 * Assemble the previously disassembled state represented by this entry into the given entity instance.
 *
 * Additionally manages the PreLoadEvent callbacks.
 *
 * @param instance The entity instance
 * @param id The entity identifier
 * @param persister The entity persister
 * @param interceptor (currently unused)
 * @param session The session
 *
 * @return The assembled state
 *
 * @throws HibernateException Indicates a problem performing assembly or calling the PreLoadEventListeners.
 *
 * @see org.hibernate.type.Type#assemble
 * @see org.hibernate.type.Type#disassemble
 */
public Object[] assemble(
		final Object instance,
		final Serializable id,
		final EntityPersister persister,
		final Interceptor interceptor,
		final EventSource session) throws HibernateException {
	if ( !persister.getEntityName().equals( subclass ) ) {
		throw new AssertionFailure( "Tried to assemble a different subclass instance" );
	}

	// assembled state gets put in a new array (we read from cache by value!)
	final Object[] assembledProps = TypeHelper.assemble(
			disassembledState,
			persister.getPropertyTypes(),
			session,
			instance
	);

	//persister.setIdentifier(instance, id); //before calling interceptor, for consistency with normal load

	//TODO: reuse the PreLoadEvent
	final PreLoadEvent preLoadEvent = new PreLoadEvent( session )
			.setEntity( instance )
			.setState( assembledProps )
			.setId( id )
			.setPersister( persister );

	final EventListenerGroup<PreLoadEventListener> listenerGroup = session
			.getFactory()
			.getServiceRegistry()
			.getService( EventListenerRegistry.class )
			.getEventListenerGroup( EventType.PRE_LOAD );
	for ( PreLoadEventListener listener : listenerGroup.listeners() ) {
		listener.onPreLoad( preLoadEvent );
	}

	persister.setPropertyValues( instance, assembledProps );

	return assembledProps;
}
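// Sketch of observing the PRE_LOAD callbacks fired above (the listener body
// is an assumption; registration is typically done from an
// org.hibernate.integrator.spi.Integrator, and sessionFactory is assumed to
// be a SessionFactoryImplementor):
EventListenerRegistry registry = sessionFactory
		.getServiceRegistry()
		.getService( EventListenerRegistry.class );
registry.appendListeners( EventType.PRE_LOAD, new PreLoadEventListener() {
	@Override
	public void onPreLoad(PreLoadEvent event) {
		// inspect event.getEntity() and event.getState() before the
		// assembled values are injected into the instance
	}
} );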
@Override
Object processCollection(Object collection, CollectionType type) throws HibernateException {
	if ( collection == CollectionType.UNFETCHED_COLLECTION ) {
		return null;
	}

	EventSource session = getSession();
	CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );

	final Serializable collectionKey = extractCollectionKeyFromOwner( persister );
	if ( collection instanceof PersistentCollection ) {
		PersistentCollection wrapper = (PersistentCollection) collection;
		if ( wrapper.setCurrentSession( session ) ) {
			// a "detached" collection!
			if ( !isOwnerUnchanged( wrapper, persister, collectionKey ) ) {
				// if the collection belonged to a different entity,
				// clean up the existing state of the collection
				removeCollection( persister, collectionKey, session );
			}
			reattachCollection( wrapper, type );
		}
		else {
			// a collection loaded in the current session
			// cannot possibly be the collection belonging
			// to the entity passed to update()
			removeCollection( persister, collectionKey, session );
		}
	}
	else {
		// null or brand-new collection: this will also (inefficiently)
		// handle arrays, which have no snapshot, so we can't do any better
		removeCollection( persister, collectionKey, session );
	}
	return null;
}
protected void doEvict(
		final Object object,
		final EntityKey key,
		final EntityPersister persister,
		final EventSource session) throws HibernateException {
	if ( LOG.isTraceEnabled() ) {
		LOG.tracev( "Evicting {0}", MessageHelper.infoString( persister ) );
	}

	if ( persister.hasNaturalIdentifier() ) {
		session.getPersistenceContext().getNaturalIdHelper().handleEviction(
				object,
				persister,
				key.getIdentifier()
		);
	}

	// remove all collections for the entity from the session-level cache
	if ( persister.hasCollections() ) {
		new EvictVisitor( session ).process( object, persister );
	}

	// remove any snapshot, not really for memory management purposes, but
	// rather because it might now be stale, and there is no longer any
	// EntityEntry to take precedence
	// This is now handled by removeEntity()
	//session.getPersistenceContext().removeDatabaseSnapshot(key);

	new Cascade( CascadingActions.EVICT, CascadePoint.AFTER_EVICT, session )
			.cascade( persister, object );
}
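// Usage sketch for the eviction above (public API; Customer is a
// hypothetical entity): evicting a managed instance detaches it and drops
// its collections from the session-level cache, so later changes to the
// instance are no longer tracked or flushed.
Customer customer = (Customer) session.get( Customer.class, customerId );
session.evict( customer );              // drives doEvict(...) via the evict event
customer.setName( "ignored" );          // not flushed: the instance is detached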
@Override
public void entityCopyDetected(
		Object managedEntity,
		Object mergeEntity1,
		Object mergeEntity2,
		EventSource session) {
	final String entityName = session.getEntityName( managedEntity );
	LOG.trace(
			String.format(
					"More than one representation of the same persistent entity being merged for: %s",
					MessageHelper.infoString( entityName, session.getIdentifier( managedEntity ) )
			)
	);
	Set<Object> detachedEntitiesForManaged = null;
	if ( managedToMergeEntitiesXref == null ) {
		// This is the first time multiple representations have been found;
		// instantiate managedToMergeEntitiesXref.
		managedToMergeEntitiesXref = new IdentityHashMap<Object, Set<Object>>();
	}
	else {
		// Get any existing representations that have already been found.
		detachedEntitiesForManaged = managedToMergeEntitiesXref.get( managedEntity );
	}
	if ( detachedEntitiesForManaged == null ) {
		// There were no existing representations for this particular managed entity.
		detachedEntitiesForManaged = new IdentitySet();
		managedToMergeEntitiesXref.put( managedEntity, detachedEntitiesForManaged );
		incrementEntityNameCount( entityName );
	}
	// Now add the detached representations for the managed entity.
	detachedEntitiesForManaged.add( mergeEntity1 );
	detachedEntitiesForManaged.add( mergeEntity2 );
}
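// Sketch of enabling this observer (the configuration property is real
// Hibernate configuration; accepted values are "disallow" (the default),
// "allow", and "log"): with "allow" or "log", multiple detached
// representations of one persistent entity may appear in a single merge
// and are recorded by the method above instead of failing fast.
Map<String, Object> settings = new HashMap<>();
settings.put( "hibernate.event.merge.entity_copy_observer", "log" );
// e.g. passed as integration settings when building the EntityManagerFactory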
/**
 * Handle the given auto-flush event.
 *
 * @param event The auto-flush event to be handled.
 *
 * @throws HibernateException if the flush fails
 */
public void onAutoFlush(AutoFlushEvent event) throws HibernateException {
	final EventSource source = event.getSession();
	try {
		source.getEventListenerManager().partialFlushStart();

		if ( flushMightBeNeeded( source ) ) {
			// Need to get the number of collection removals before flushing to executions
			// (because flushing to executions can add collection removal actions to the action queue).
			final int oldSize = source.getActionQueue().numberOfCollectionRemovals();
			flushEverythingToExecutions( event );
			if ( flushIsReallyNeeded( event, source ) ) {
				LOG.trace( "Need to execute flush" );

				// note: performExecutions() clears the collection-action
				// queues in the session
				performExecutions( source );
				postFlush( source );
				postPostFlush( source );

				if ( source.getFactory().getStatistics().isStatisticsEnabled() ) {
					source.getFactory().getStatisticsImplementor().flush();
				}
			}
			else {
				LOG.trace( "Don't need to execute flush" );
				source.getActionQueue().clearFromFlushNeededCheck( oldSize );
			}

			event.setFlushRequired( flushIsReallyNeeded( event, source ) );
		}
	}
	finally {
		source.getEventListenerManager().partialFlushEnd(
				event.getNumberOfEntitiesProcessed(),
				event.getNumberOfCollectionsProcessed()
		);
	}
}
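// Sketch of what triggers this listener (public API; Person is a
// hypothetical entity): with the default FlushMode.AUTO, running a query
// whose query spaces overlap pending changes forces a partial flush first,
// so the query sees the session's own unflushed writes.
session.persist( new Person( "Ada" ) );         // pending insert, not yet flushed
List<?> people = session.createQuery( "from Person" )
		.list();                                // AutoFlushEvent fires; the insert is flushed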