@@ -18,8 +18,8 @@
  */
 abstract class AbstractTransactionCompletionProcessQueue<T extends CompletionCallback> {
 	SharedSessionContractImplementor session;
-	// Concurrency handling required when transaction completion process is dynamically registered
-	// inside event listener (HHH-7478).
+	// Concurrency handling required when the transaction completion process
+	// is dynamically registered inside an event listener (HHH-7478).
 	ConcurrentLinkedQueue<@NonNull T> processes = new ConcurrentLinkedQueue<>();
 
 	AbstractTransactionCompletionProcessQueue(SharedSessionContractImplementor session) {
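Note on the comment above: HHH-7478 covers the case where a completion callback is registered from inside an event listener while the queue may already be draining, which is why the field is a ConcurrentLinkedQueue rather than a plain list. A minimal standalone sketch of that pattern, with illustrative names only (this is not Hibernate's CompletionCallback API):

import java.util.concurrent.ConcurrentLinkedQueue;

class CallbackQueueSketch {
	// Illustrative callback type, standing in for Hibernate's CompletionCallback.
	interface Callback {
		void completed(boolean success);
	}

	private final ConcurrentLinkedQueue<Callback> processes = new ConcurrentLinkedQueue<>();

	// May be called at any time, even from a callback that is currently running,
	// because ConcurrentLinkedQueue tolerates concurrent add and poll.
	void register(Callback callback) {
		processes.add( callback );
	}

	void drain(boolean success) {
		Callback process;
		// poll() observes callbacks added while the loop is running, so a late
		// registration made from inside a listener is still processed.
		while ( (process = processes.poll()) != null ) {
			process.completed( success );
		}
	}
}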
@@ -5,17 +5,16 @@
 package org.hibernate.engine.internal;
 
 import org.hibernate.HibernateException;
-import org.hibernate.action.internal.BulkOperationCleanupAction;
+import org.hibernate.action.internal.BulkOperationCleanupAction.BulkOperationCleanUpAfterTransactionCompletionProcess;
 import org.hibernate.cache.CacheException;
-import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 import org.hibernate.engine.spi.TransactionCompletionCallbacks.AfterCompletionCallback;
 
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.Set;
 
 import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER;
+import static org.hibernate.internal.util.collections.ArrayHelper.EMPTY_STRING_ARRAY;
 
 /**
  * Encapsulates behavior needed for after transaction processing
@@ -41,59 +40,54 @@ boolean hasActions() {
 	void afterTransactionCompletion(boolean success) {
 		AfterCompletionCallback process;
 		while ( (process = processes.poll()) != null ) {
-			try {
-				process.doAfterTransactionCompletion( success, session );
-			}
-			catch (CacheException ce) {
-				CORE_LOGGER.unableToReleaseCacheLock( ce );
-				// continue loop
-			}
-			catch (Exception e) {
-				throw new HibernateException(
-						"Unable to perform afterTransactionCompletion callback: " + e.getMessage(), e );
-			}
+			callAfterCompletion( success, process );
 		}
+		invalidateCaches();
+	}
 
-		final SessionFactoryImplementor factory = session.getFactory();
-		if ( factory.getSessionFactoryOptions().isQueryCacheEnabled() ) {
-			factory.getCache().getTimestampsCache()
-					.invalidate( querySpacesToInvalidate.toArray( new String[0] ), session );
+	void executePendingBulkOperationCleanUpActions() {
+		if ( performBulkOperationCallbacks() ) {
+			invalidateCaches();
 		}
-		querySpacesToInvalidate.clear();
 	}
 
-	void executePendingBulkOperationCleanUpActions() {
+	private boolean performBulkOperationCallbacks() {
 		boolean hasPendingBulkOperationCleanUpActions = false;
-		Iterator<AfterCompletionCallback> iterator = processes.iterator();
+		var iterator = processes.iterator();
 		while ( iterator.hasNext() ) {
-			AfterCompletionCallback process = iterator.next();
-			if ( process instanceof BulkOperationCleanupAction.BulkOperationCleanUpAfterTransactionCompletionProcess ) {
-				try {
-					hasPendingBulkOperationCleanUpActions = true;
-					process.doAfterTransactionCompletion( true, session );
+			var process = iterator.next();
+			if ( process instanceof BulkOperationCleanUpAfterTransactionCompletionProcess ) {
+				hasPendingBulkOperationCleanUpActions = true;
+				if ( callAfterCompletion( true, process ) ) {
 					iterator.remove();
 				}
-				catch (CacheException ce) {
-					CORE_LOGGER.unableToReleaseCacheLock( ce );
-					// continue loop
-				}
-				catch (Exception e) {
-					throw new HibernateException(
-							"Unable to perform afterTransactionCompletion callback: " + e.getMessage(),
-							e
-					);
-				}
 			}
 		}
+		return hasPendingBulkOperationCleanUpActions;
+	}
 
-		if ( hasPendingBulkOperationCleanUpActions ) {
-			if ( session.getFactory().getSessionFactoryOptions().isQueryCacheEnabled() ) {
-				session.getFactory().getCache().getTimestampsCache().invalidate(
-						querySpacesToInvalidate.toArray( new String[0] ),
-						session
-				);
-			}
-			querySpacesToInvalidate.clear();
+	private boolean callAfterCompletion(boolean success, AfterCompletionCallback process) {
+		try {
+			process.doAfterTransactionCompletion( success, session );
+			return true;
 		}
+		catch (CacheException ce) {
+			CORE_LOGGER.unableToReleaseCacheLock( ce );
+			// continue loop
+			return false;
+		}
+		catch (Exception e) {
+			throw new HibernateException(
+					"Unable to perform afterTransactionCompletion callback: " + e.getMessage(), e );
+		}
 	}
+
+	private void invalidateCaches() {
+		final var factory = session.getFactory();
+		if ( factory.getSessionFactoryOptions().isQueryCacheEnabled() ) {
+			factory.getCache().getTimestampsCache().
+					invalidate( querySpacesToInvalidate.toArray( EMPTY_STRING_ARRAY ), session );
+		}
+		querySpacesToInvalidate.clear();
+	}
 }
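The refactoring above funnels every callback through one helper that reports success: a CacheException is logged and answered with false, so a pending bulk clean-up entry stays in the queue, while any other failure is rethrown wrapped in a HibernateException. A compact standalone sketch of that shape in plain Java, with illustrative names rather than the Hibernate types:

import java.util.Iterator;
import java.util.List;

class SuccessFlagSketch {
	// Stand-in for the one recoverable failure (CacheException in the code above).
	static class RecoverableException extends RuntimeException {
	}

	static boolean callSafely(Runnable callback) {
		try {
			callback.run();
			return true;
		}
		catch (RecoverableException e) {
			// recoverable: the real code logs here; the caller keeps the entry queued
			return false;
		}
		catch (RuntimeException e) {
			throw new IllegalStateException( "Unable to perform callback: " + e.getMessage(), e );
		}
	}

	static void runPending(List<Runnable> pending) {
		Iterator<Runnable> iterator = pending.iterator();
		while ( iterator.hasNext() ) {
			Runnable callback = iterator.next();
			// drop the entry only once its callback actually completed
			if ( callSafely( callback ) ) {
				iterator.remove();
			}
		}
	}
}

Returning the flag instead of removing inside the helper keeps the queue-mutation decision with the caller, which is what lets afterTransactionCompletion and executePendingBulkOperationCleanUpActions share it.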
@@ -89,20 +89,15 @@ class StatefulPersistenceContext implements PersistenceContext {
 
 	private static final int INIT_COLL_SIZE = 8;
 
-	/*
-		Eagerly Initialized Fields
-		the following fields are used in all circumstances, and are not worth (or not suited) to being converted into lazy
-	 */
+	// Eagerly initialized fields. The following fields are used in every circumstance
+	// and are not worth (or not suited) to being converted to lazy initialization.
+
 	private final SharedSessionContractImplementor session;
 	private EntityEntryContext entityEntryContext;
 
-	/*
-		Everything else below should be carefully initialized only on first need;
-		this optimisation is very effective as null checks are free, while allocation costs
-		are very often the dominating cost of an application using ORM.
-		This is not general advice, but it's worth the added maintenance burden in this case
-		as this is a very central component of our library.
-	 */
+	// Everything else below should be carefully initialized only on first need.
+	// This optimization is very effective as null checks are free, while allocation
+	// costs are very often the dominating cost of an application using ORM.
 
 	// Loaded entity instances, by EntityKey
 	private HashMap<EntityKey, EntityHolderImpl> entitiesByKey;
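The comment above states the initialization policy for the remaining fields: leave them null until first use, because a null check is free while the allocations would be paid by every persistence context whether or not it ever needs them. A minimal sketch of that lazy-on-first-need pattern, with illustrative names (not the actual PersistenceContext fields):

import java.util.HashMap;

class LazyFieldSketch<K, V> {
	private static final int INIT_SIZE = 8;

	// left null until the first write, mirroring the fields below
	private HashMap<K, V> entries;

	void put(K key, V value) {
		if ( entries == null ) {
			entries = new HashMap<>( INIT_SIZE );
		}
		entries.put( key, value );
	}

	V get(K key) {
		// read paths tolerate the map never having been created
		return entries == null ? null : entries.get( key );
	}
}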
@@ -113,8 +108,7 @@ the following fields are used in all circumstances, and are not worth (or not su
 	// Loaded entity instances, by EntityUniqueKey
 	private HashMap<EntityUniqueKey, Object> entitiesByUniqueKey;
 
-
-	// Snapshots of current database state for entities
+	// Snapshots of the current database state for entities
 	// that have *not* been loaded
 	private HashMap<EntityKey, Object> entitySnapshotsByKey;
 
@@ -136,7 +130,7 @@ the following fields are used in all circumstances, and are not worth (or not su
 	// Set of EntityKeys of deleted unloaded proxies
 	private HashSet<EntityKey> deletedUnloadedEntityKeys;
 
-	// properties that we have tried to load, and not found in the database
+	// properties that we have tried to load and not found in the database
 	private HashSet<AssociationKey> nullAssociations;
 
 	// A list of collection wrappers that were instantiating during result set
@@ -209,14 +203,6 @@ public boolean hasLoadContext() {
 		return loadContexts != null;
 	}
 
-//	@Override
-//	public void addUnownedCollection(CollectionKey key, PersistentCollection collection) {
-//		if ( unownedCollections == null ) {
-//			unownedCollections = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
-//		}
-//		unownedCollections.put( key, collection );
-//	}
-//
 	@Override
 	public PersistentCollection<?> useUnownedCollection(CollectionKey key) {
 		return unownedCollections == null ? null : unownedCollections.remove( key );
@@ -935,7 +921,7 @@ else if ( ownerPersister.isInstance( key ) ) {
 		}
 		else {
 			// b) try by EntityKey, which means we need to resolve owner-key -> collection-key
-			// IMPL NOTE : yes if we get here this impl is very non-performant, but PersistenceContext
+			// IMPL NOTE: yes if we get here this impl is very non-performant, but PersistenceContext
 			// was never designed to handle this case; adding that capability for real means splitting
 			// the notions of:
 			// 1) collection key
@@ -1228,14 +1214,6 @@ public Object removeProxy(EntityKey key) {
 		return removeProxyByKey( key );
 	}
 
-//	@Override
-//	public HashSet getNullifiableEntityKeys() {
-//		if ( nullifiableEntityKeys == null ) {
-//			nullifiableEntityKeys = new HashSet<>();
-//		}
-//		return nullifiableEntityKeys;
-//	}
-
 	/**
 	 * @deprecated this will be removed: it provides too wide access, making it hard to optimise the internals
 	 * for specific access needs. Consider using #iterateEntities instead.
@@ -178,8 +178,7 @@ private BindableType<T> getBindableType(QueryParameterBinding<T> binding) {
 			return type;
 		}
 		else if ( binding != null ) {
-			//noinspection unchecked
-			return (BindableType<T>) binding.getBindType();
+			return binding.getBindType();
 		}
 		else {
 			return null;
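The cast disappears above because the change only compiles if the binding's getBindType() now returns a BindableType<T>-compatible type for the binding's T, leaving the //noinspection unchecked suppression with nothing to silence. A generic sketch of the same idea, using hypothetical interfaces rather than Hibernate's:

// Hypothetical "before": a loosely typed accessor forces callers into an unchecked cast.
interface LooselyTypedBinding {
	Object getBindType();
}

// Hypothetical "after": the type parameter flows through the accessor, so no cast is needed.
interface BindType<T> {
	Class<T> getJavaType();
}

interface TypedBinding<T> {
	BindType<T> getBindType();
}

class BindTypeResolver {
	// mirrors the shape of getBindableType(...) above: the result is already BindType<T>
	static <T> BindType<T> resolve(TypedBinding<T> binding) {
		return binding != null ? binding.getBindType() : null;
	}
}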