Projects >> hibernate-orm >> b9249e548ae503d1ddef5a09ce575361e567323d

Chunk
Conflicting content
import org.hibernate.cache.spi.CacheDataDescription;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
<<<<<<< HEAD
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.PluralAttributeBinding;
import org.hibernate.persister.entity.EntityPersister;
=======
import org.hibernate.metamodel.binding.EntityBinding;
import org.hibernate.metamodel.binding.PluralAttributeBinding;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.type.VersionType;

/**
Solution content
import org.hibernate.cache.spi.CacheDataDescription;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.PluralAttributeBinding;
import org.hibernate.type.VersionType;

/**
File
CacheDataDescriptionImpl.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
	private static Comparator getVersionComparator(EntityBinding model ) {
		if ( model.isVersioned() ) {
<<<<<<< HEAD
			versionComparator = (
					( VersionType ) model.getHierarchyDetails()
							.getEntityVersion()
							.getVersioningAttributeBinding()
							.getHibernateTypeDescriptor()
							.getResolvedTypeMapping()
			).getComparator();
=======
			final VersionType versionType = (VersionType) model.getHierarchyDetails()
					.getVersioningAttributeBinding()
					.getHibernateTypeDescriptor()
					.getResolvedTypeMapping();

			return versionType.getComparator();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		}

		return null;
Solution content
	private static Comparator getVersionComparator(EntityBinding model ) {
		if ( model.isVersioned() ) {
			return (
					( VersionType ) model.getHierarchyDetails()
							.getEntityVersion()
							.getVersioningAttributeBinding()
							.getHibernateTypeDescriptor()
							.getResolvedTypeMapping()
			).getComparator();
		}

		return null;
File
CacheDataDescriptionImpl.java
Developer's decision
Manual
Kind of conflict
Attribute
Cast expression
Method invocation
Return statement
Variable
Chunk
Conflicting content
 *
 * @author Steve Ebersole
 */
<<<<<<< HEAD
public class NoCachingRegionFactory extends AbstractRegionFactory {
	public static NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();

	@Override
	public void start() {
=======
public class NoCachingRegionFactory implements RegionFactory {
	/**
	 * Singleton access
	 */
	public static final NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();

	/**
	 * Constructs a NoCachingRegionFactory.  Although access should generally use {@link #INSTANCE}
	 */
	public NoCachingRegionFactory() {
	}

	@Override
	public void start(Settings settings, Properties properties) throws CacheException {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	@Override
Solution content
 *
 * @author Steve Ebersole
 */
public class NoCachingRegionFactory extends AbstractRegionFactory {
	/**
	 * Singleton access
	 */
	public static final NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();

	/**
	 * Constructs a NoCachingRegionFactory.  Although access should generally use {@link #INSTANCE}
	 */
	public NoCachingRegionFactory() {
	}

	@Override
	public void start() {
	}

	@Override
File
NoCachingRegionFactory.java
Developer's decision
Combination
Kind of conflict
Annotation
Attribute
Class signature
Comment
Method declaration
Method invocation
Method signature
Chunk
Conflicting content
	@Override
	public void stop() {
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public boolean isMinimalPutsEnabledByDefault() {
		return false;
Solution content
	@Override
	public void stop() {
	}

	@Override
	public boolean isMinimalPutsEnabledByDefault() {
		return false;
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
	public boolean isMinimalPutsEnabledByDefault() {
		return false;
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public AccessType getDefaultAccessType() {
		return null;
Solution content
	public boolean isMinimalPutsEnabledByDefault() {
		return false;
	}

	@Override
	public AccessType getDefaultAccessType() {
		return null;
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
	public AccessType getDefaultAccessType() {
		return null;
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public long nextTimestamp() {
		return System.currentTimeMillis() / 100;
Solution content
	public AccessType getDefaultAccessType() {
		return null;
	}

	@Override
	public long nextTimestamp() {
		return System.currentTimeMillis() / 100;
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
	public long nextTimestamp() {
		return System.currentTimeMillis() / 100;
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
Solution content
	public long nextTimestamp() {
		return System.currentTimeMillis() / 100;
	}

	@Override
	public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
Solution content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}

	@Override
	public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
Solution content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}

	@Override
	public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
Solution content
			throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}

	@Override
	public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
	public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
Solution content
	public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
	}

	@Override
	public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
		throw new NoCacheRegionFactoryAvailableException();
File
NoCachingRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
 */
package org.hibernate.cache.internal;

<<<<<<< HEAD
import org.jboss.logging.Logger;

import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceInitiator;
=======
import java.util.Map;
import java.util.Properties;

import org.hibernate.HibernateException;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.jboss.logging.Logger;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * Initiator for the {@link RegionFactory} service.
Solution content
 */
package org.hibernate.cache.internal;

import org.jboss.logging.Logger;

import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import java.util.Map;
import java.util.Properties;

import org.hibernate.HibernateException;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.internal.util.config.ConfigurationHelper;

/**
 * Initiator for the {@link RegionFactory} service.
File
RegionFactoryInitiator.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
 * @author Hardy Ferentschik
 * @author Brett Meyer
 */
<<<<<<< HEAD
public class RegionFactoryInitiator implements SessionFactoryServiceInitiator {
	public static final RegionFactoryInitiator INSTANCE = new RegionFactoryInitiator();
	private static final String DEFAULT_IMPL = NoCachingRegionFactory.class.getName();
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			RegionFactoryInitiator.class.getName()
	);
=======
public class RegionFactoryInitiator implements StandardServiceInitiator {

	private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class,
			RegionFactoryInitiator.class.getName() );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	/**
	 * Singleton access
Solution content
 * @author Hardy Ferentschik
 * @author Brett Meyer
 */
public class RegionFactoryInitiator implements StandardServiceInitiator {

	private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class,
			RegionFactoryInitiator.class.getName() );

	/**
	 * Singleton access
File
RegionFactoryInitiator.java
Developer's decision
Version 2
Kind of conflict
Attribute
Class signature
Method invocation
Chunk
Conflicting content
	/**
	 * Singleton access
	 */
<<<<<<< HEAD
	public static final String IMPL_NAME = AvailableSettings.CACHE_REGION_FACTORY;
=======
	public static final RegionFactoryInitiator INSTANCE = new RegionFactoryInitiator();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	@Override
	public Class getServiceInitiated() {
Solution content
	/**
	 * Singleton access
	 */
	public static final RegionFactoryInitiator INSTANCE = new RegionFactoryInitiator();

	@Override
	public Class getServiceInitiated() {
File
RegionFactoryInitiator.java
Developer's decision
Version 2
Kind of conflict
Attribute
Method invocation
Chunk
Conflicting content
	@Override
	}

	@Override
<<<<<<< HEAD
	public RegionFactory initiateService(SessionFactoryImplementor sessionFactory, Configuration configuration, ServiceRegistryImplementor registry) {
		return initiateService(sessionFactory, registry);
	}

	public RegionFactory initiateService(SessionFactoryImplementor sessionFactory, MetadataImplementor metadata, ServiceRegistryImplementor registry) {
		return initiateService(sessionFactory, registry);
	}

	private RegionFactory initiateService(SessionFactoryImplementor sessionFactory, ServiceRegistryImplementor registry){
		boolean isCacheEnabled = isCacheEnabled( registry );
		if ( !isCacheEnabled ) {
			LOG.debugf(
					"Second level cache has been disabled, so using % as cache region factory",
					NoCachingRegionFactory.class.getName()
			);
			return NoCachingRegionFactory.INSTANCE;
		}

		final Object setting = registry.getService( ConfigurationService.class ).getSettings().get( IMPL_NAME );
		return registry.getService( StrategySelector.class ).resolveDefaultableStrategy(
				RegionFactory.class,
				setting,
				NoCachingRegionFactory.INSTANCE
		);
	}

	private static boolean isCacheEnabled(ServiceRegistryImplementor serviceRegistry) {
		final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
		final boolean useSecondLevelCache = configurationService.getSetting(
				AvailableSettings.USE_SECOND_LEVEL_CACHE,
				StandardConverters.BOOLEAN,
				true
		);
		final boolean useQueryCache = configurationService.getSetting(
				AvailableSettings.USE_QUERY_CACHE,
				StandardConverters.BOOLEAN,
				false
		);
		return useSecondLevelCache || useQueryCache;
=======
	@SuppressWarnings({ "unchecked" })
	public RegionFactory initiateService(Map configurationValues, ServiceRegistryImplementor registry) {
		Properties p = new Properties();
		if (configurationValues != null) {
			p.putAll( configurationValues );
		}
		
		boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE,
				configurationValues, true );
		boolean useQueryCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_QUERY_CACHE, configurationValues );

		RegionFactory regionFactory = NoCachingRegionFactory.INSTANCE;

		// The cache provider is needed when we either have second-level cache enabled
		// or query cache enabled.  Note that useSecondLevelCache is enabled by default
		final String setting = ConfigurationHelper.getString( AvailableSettings.CACHE_REGION_FACTORY,
				configurationValues, null );
		if ( ( useSecondLevelCache || useQueryCache ) && setting != null ) {
			try {
				Class regionFactoryClass = registry.getService( StrategySelector.class )
						.selectStrategyImplementor( RegionFactory.class, setting );
				try {
					regionFactory = regionFactoryClass.getConstructor( Properties.class ).newInstance( p );
				}
				catch ( NoSuchMethodException e ) {
					// no constructor accepting Properties found, try no arg constructor
					LOG.debugf(
							"%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
							regionFactoryClass.getSimpleName() );
					regionFactory = regionFactoryClass.getConstructor().newInstance();
				}
			}
			catch ( Exception e ) {
				throw new HibernateException( "could not instantiate RegionFactory [" + setting + "]", e );
			}
		}

		LOG.debugf( "Cache region factory : %s", regionFactory.getClass().getName() );

		return regionFactory;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	/**
Solution content
//					LOG.debugf(
	}

	@Override
	@SuppressWarnings({ "unchecked" })
	public RegionFactory initiateService(Map configurationValues, ServiceRegistryImplementor registry) {
		Properties p = new Properties();
		if (configurationValues != null) {
			p.putAll( configurationValues );
		}
		
		boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE,
				configurationValues, true );
		boolean useQueryCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_QUERY_CACHE, configurationValues );

		RegionFactory regionFactory = NoCachingRegionFactory.INSTANCE;

		// The cache provider is needed when we either have second-level cache enabled
		// or query cache enabled.  Note that useSecondLevelCache is enabled by default
		final Object  setting = configurationValues.get( AvailableSettings.CACHE_REGION_FACTORY )   ;
//		ConfigurationHelper.get( AvailableSettings.CACHE_REGION_FACTORY,
//				configurationValues, null );
		if ( ( useSecondLevelCache || useQueryCache ) && setting != null ) {
			try {
				regionFactory = registry.getService( StrategySelector.class )
						.resolveStrategy( RegionFactory.class, setting );
//				try {
//					regionFactory = regionFactoryClass.getConstructor( Properties.class ).newInstance( p );
//				}
//				catch ( NoSuchMethodException e ) {
//					// no constructor accepting Properties found, try no arg constructor
//							"%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
//							regionFactoryClass.getSimpleName() );
//					regionFactory = regionFactoryClass.getConstructor().newInstance();
//				}
			}
			catch ( Exception e ) {
				throw new HibernateException( "could not instantiate RegionFactory [" + setting + "]", e );
			}
		}

		LOG.debugf( "Cache region factory : %s", regionFactory.getClass().getName() );

		return regionFactory;
	}

	/**
File
RegionFactoryInitiator.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
Comment
If statement
Method declaration
Method invocation
Method signature
Return statement
Variable
Chunk
Conflicting content
	private QueryResultsRegion cacheRegion;
	private UpdateTimestampsCache updateTimestampsCache;

<<<<<<< HEAD
	public StandardQueryCache(SessionFactoryImplementor sessionFactoryImplementor, UpdateTimestampsCache updateTimestampsCache, String regionName) {
		if ( regionName == null ) {
			regionName = StandardQueryCache.class.getName();
		}
		String prefix = sessionFactoryImplementor.getServiceRegistry()
				.getService( ConfigurationService.class )
				.getSetting(
						AvailableSettings.CACHE_REGION_PREFIX, StandardConverters.STRING, null
				);
=======
	/**
	 * Constructs a StandardQueryCache instance
	 *
	 * @param settings The SessionFactory settings.
	 * @param props Any properties
	 * @param updateTimestampsCache The update-timestamps cache to use.
	 * @param regionName The base query cache region name
	 */
	public StandardQueryCache(
			final Settings settings,
			final Properties props,
			final UpdateTimestampsCache updateTimestampsCache,
			final String regionName) {
		String regionNameToUse = regionName;
		if ( regionNameToUse == null ) {
			regionNameToUse = StandardQueryCache.class.getName();
		}
		final String prefix = settings.getCacheRegionPrefix();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		if ( prefix != null ) {
			regionNameToUse = prefix + '.' + regionNameToUse;
		}
Solution content
	private QueryResultsRegion cacheRegion;
	private UpdateTimestampsCache updateTimestampsCache;
	/**
	 * Constructs a StandardQueryCache instance
	 *
	 * @param sessionFactoryImplementor The SessionFactory.
	 * @param updateTimestampsCache The update-timestamps cache to use.
	 * @param regionName The base query cache region name
	 */
	public StandardQueryCache(SessionFactoryImplementor sessionFactoryImplementor, UpdateTimestampsCache updateTimestampsCache, String regionName) {
		String regionNameToUse = regionName;
		if ( regionNameToUse == null ) {
			regionNameToUse = StandardQueryCache.class.getName();
		}
		String prefix = sessionFactoryImplementor.getServiceRegistry()
				.getService( ConfigurationService.class )
				.getSetting(
						AvailableSettings.CACHE_REGION_PREFIX, StandardConverters.STRING, null
				);

		if ( prefix != null ) {
			regionNameToUse = prefix + '.' + regionNameToUse;
		}
File
StandardQueryCache.java
Developer's decision
Manual
Kind of conflict
Comment
If statement
Method invocation
Method signature
Variable
Chunk
Conflicting content
	}
		}
		LOG.startingQueryCache( regionNameToUse );

<<<<<<< HEAD
		this.cacheRegion = sessionFactoryImplementor.getServiceRegistry()
				.getService( RegionFactory.class )
				.buildQueryResultsRegion( regionName, sessionFactoryImplementor.getProperties() );
		this.updateTimestampsCache = updateTimestampsCache;
	}

=======
		this.cacheRegion = settings.getRegionFactory().buildQueryResultsRegion( regionNameToUse, props );
		this.updateTimestampsCache = updateTimestampsCache;

	@Override
	public QueryResultsRegion getRegion() {
		return cacheRegion;
	}

	@Override
	public void destroy() {
		try {
			cacheRegion.destroy();
		}
		catch ( Exception e ) {
			LOG.unableToDestroyQueryCache( cacheRegion.getName(), e.getMessage() );
		}
	}

	@Override
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	public void clear() throws CacheException {
		cacheRegion.evictAll();
	}
Solution content
		}
		LOG.startingQueryCache( regionNameToUse );

		this.cacheRegion = sessionFactoryImplementor.getServiceRegistry()
				.getService( RegionFactory.class )
				.buildQueryResultsRegion( regionNameToUse, sessionFactoryImplementor.getProperties() );
		this.updateTimestampsCache = updateTimestampsCache;
	}


	@Override
	public QueryResultsRegion getRegion() {
		return cacheRegion;
	}

	@Override
	public void destroy() {
		try {
			cacheRegion.destroy();
		}
		catch ( Exception e ) {
			LOG.unableToDestroyQueryCache( cacheRegion.getName(), e.getMessage() );
		}
	}

	@Override
	public void clear() throws CacheException {
		cacheRegion.evictAll();
	}
File
StandardQueryCache.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
Method declaration
Method invocation
Chunk
Conflicting content
		cacheRegion.evictAll();
	}

<<<<<<< HEAD
=======
	@Override
	@SuppressWarnings({ "unchecked" })
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	public boolean put(
			final QueryKey key,
			final Type[] returnTypes,
Solution content
		cacheRegion.evictAll();
	}

	@Override
	@SuppressWarnings({ "unchecked" })
	public boolean put(
			final QueryKey key,
			final Type[] returnTypes,
File
StandardQueryCache.java
Developer's decision
Version 2
Kind of conflict
Annotation
Chunk
Conflicting content
 */
public class StandardQueryCacheFactory implements QueryCacheFactory {
	@Override
<<<<<<< HEAD
	public QueryCache getQueryCache(String regionName, UpdateTimestampsCache updateTimestampsCache, SessionFactoryImplementor sessionFactoryImplementor)
			throws HibernateException {
		return new StandardQueryCache( sessionFactoryImplementor, updateTimestampsCache, regionName );
=======
	public QueryCache getQueryCache(
			final String regionName,
			final UpdateTimestampsCache updateTimestampsCache,
			final Settings settings,
			final Properties props) throws HibernateException {
		return new StandardQueryCache(settings, props, updateTimestampsCache, regionName);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}
}
Solution content
 */
public class StandardQueryCacheFactory implements QueryCacheFactory {
	@Override
	public QueryCache getQueryCache(String regionName, UpdateTimestampsCache updateTimestampsCache, SessionFactoryImplementor sessionFactoryImplementor)
			throws HibernateException {
		return new StandardQueryCache( sessionFactoryImplementor, updateTimestampsCache, regionName );
	}
}
File
StandardQueryCacheFactory.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Method signature
Return statement
Chunk
Conflicting content
	public static final StructuredMapCacheEntry INSTANCE = new StructuredMapCacheEntry();

	@Override
<<<<<<< HEAD
	public Map structure(CollectionCacheEntry entry) {
		final Serializable[] states = entry.getState();
		final Map map = new HashMap( states.length );
		for ( final Serializable state : states ) {
			map.put( state, state );
=======
	@SuppressWarnings("unchecked")
	public Object structure(Object item) {
		final CollectionCacheEntry entry = (CollectionCacheEntry) item;
		final Serializable[] state = entry.getState();
		final Map map = new HashMap( state.length );
		int i = 0;
		while ( i < state.length ) {
			map.put( state[i++], state[i++] );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		}
		return map;
	}
Solution content
	public static final StructuredMapCacheEntry INSTANCE = new StructuredMapCacheEntry();

	@Override
	public Map structure(CollectionCacheEntry entry) {
		final Serializable[] states = entry.getState();
		final Map map = new HashMap( states.length );
		for ( final Serializable state : states ) {
			map.put( state, state );
		}
		return map;
	}
File
StructuredMapCacheEntry.java
Developer's decision
Version 1
Kind of conflict
Annotation
Cast expression
For statement
Method invocation
Method signature
Variable
While statement
Chunk
Conflicting content
			settings.setQueryCacheFactory( createQueryCacheFactory( properties, serviceRegistry ) );
		}

<<<<<<< HEAD
		String prefix = properties.getProperty( Environment.CACHE_REGION_PREFIX );
=======
		settings.setRegionFactory( serviceRegistry.getService( RegionFactory.class ) );

		boolean useMinimalPuts = ConfigurationHelper.getBoolean(
				AvailableSettings.USE_MINIMAL_PUTS, properties, settings.getRegionFactory().isMinimalPutsEnabledByDefault()
		);
		if ( debugEnabled ) {
			LOG.debugf( "Optimize cache for minimal puts: %s", enabledDisabled(useMinimalPuts) );
		}
		settings.setMinimalPutsEnabled( useMinimalPuts );

		String prefix = properties.getProperty( AvailableSettings.CACHE_REGION_PREFIX );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		if ( StringHelper.isEmpty(prefix) ) {
			prefix=null;
		}
Solution content
			settings.setQueryCacheFactory( createQueryCacheFactory( properties, serviceRegistry ) );
		}

		String prefix = properties.getProperty( Environment.CACHE_REGION_PREFIX );
		if ( StringHelper.isEmpty(prefix) ) {
			prefix=null;
		}
File
SettingsFactory.java
Developer's decision
Version 1
Kind of conflict
If statement
Method invocation
Variable
Chunk
Conflicting content
			throw new HibernateException( "could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, e );
		}
	}
<<<<<<< HEAD

=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	//todo remove this once we move to new metamodel
	public static RegionFactory createRegionFactory(Properties properties, boolean cachingEnabled) {
		// todo : REMOVE!  THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away
Solution content
			throw new HibernateException( "could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, e );
		}
	}

	//todo remove this once we move to new metamodel
	public static RegionFactory createRegionFactory(Properties properties, boolean cachingEnabled) {
		// todo : REMOVE!  THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away
File
SettingsFactory.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
		// iterate over the *old* list
		for ( Object old : oldElements ) {
			if ( !currentSaving.contains( old ) ) {
<<<<<<< HEAD
				Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
=======
				final Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
				if ( !currentIds.contains( new TypedValue( idType, oldId ) ) ) {
					res.add( old );
				}
Solution content
		// iterate over the *old* list
		for ( Object old : oldElements ) {
			if ( !currentSaving.contains( old ) ) {
				final Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
				if ( !currentIds.contains( new TypedValue( idType, oldId ) ) ) {
					res.add( old );
				}
File
AbstractPersistentCollection.java
Developer's decision
Version 2
Kind of conflict
Method invocation
Variable
Chunk
Conflicting content
 */
package org.hibernate.context.internal;

<<<<<<< HEAD
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
Solution content
 */
package org.hibernate.context.internal;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
File
JTASessionContext.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
				}
				value = string;
			}
<<<<<<< HEAD
			list.add( new TypedValue(type, value) );
=======
			list.add( new TypedValue( type, value ) );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		}
	}
Solution content
				}
				value = string;
			}
			list.add( new TypedValue( type, value ) );
		}
	}
File
Example.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Chunk
Conflicting content
		final ArrayList list = new ArrayList();
		final Type type = criteriaQuery.getTypeUsingProjection( criteria, propertyName );
		if ( type.isComponentType() ) {
<<<<<<< HEAD
			CompositeType actype = (CompositeType) type;
			Type[] types = actype.getSubtypes();
			for ( int j=0; j>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
				}
			}
		}
Solution content
		final ArrayList list = new ArrayList();
		final Type type = criteriaQuery.getTypeUsingProjection( criteria, propertyName );
		if ( type.isComponentType() ) {
			final CompositeType compositeType = (CompositeType) type;
			final Type[] subTypes = compositeType.getSubtypes();
			for ( Object value : values ) {
				for ( int i = 0; i < subTypes.length; i++ ) {
					final Object subValue = value == null
							? null
							: compositeType.getPropertyValues( value, EntityMode.POJO )[i];
					list.add( new TypedValue( subTypes[i], subValue ) );
				}
			}
		}
File
InExpression.java
Developer's decision
Version 2
Kind of conflict
Array access
Cast expression
For statement
Method invocation
Variable
Chunk
Conflicting content
			}
		}
		else {
<<<<<<< HEAD
			for ( int j=0; j>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
			}
		}
Solution content
			}
		}
		else {
			for ( Object value : values ) {
				list.add( new TypedValue( type, value ) );
			}
		}
File
InExpression.java
Developer's decision
Version 2
Kind of conflict
For statement
Method invocation
Chunk
Conflicting content
	public String toString() {
		return sql;
	}
<<<<<<< HEAD

	protected SQLCriterion(String sql, Object[] values, Type[] types) {
		this.sql = sql;
		typedValues = new TypedValue[values.length];
		for ( int i=0; i>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
Solution content
	public String toString() {
		return sql;
	}
}
File
SQLCriterion.java
Developer's decision
Version 2
Kind of conflict
Method declaration
Chunk
Conflicting content
		super( op, quantifier, dc );
		this.value = value;
	}
<<<<<<< HEAD
	
	
	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) 
	throws HibernateException {
		TypedValue[] superTv = super.getTypedValues(criteria, criteriaQuery);
		TypedValue[] result = new TypedValue[superTv.length+1];
		System.arraycopy(superTv, 0, result, 1, superTv.length);
=======

	@Override
	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
		final TypedValue[] subQueryTypedValues = super.getTypedValues( criteria, criteriaQuery );
		final TypedValue[] result = new TypedValue[subQueryTypedValues.length+1];
		System.arraycopy( subQueryTypedValues, 0, result, 1, subQueryTypedValues.length );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		result[0] = new TypedValue( getTypes()[0], value );
		return result;
	}
Solution content
		super( op, quantifier, dc );
		this.value = value;
	}

	@Override
	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
		final TypedValue[] subQueryTypedValues = super.getTypedValues( criteria, criteriaQuery );
		final TypedValue[] result = new TypedValue[subQueryTypedValues.length+1];
		System.arraycopy( subQueryTypedValues, 0, result, 1, subQueryTypedValues.length );
		result[0] = new TypedValue( getTypes()[0], value );
		return result;
	}
File
SimpleSubqueryExpression.java
Developer's decision
Version 2
Kind of conflict
Annotation
Method invocation
Method signature
Variable
Chunk
Conflicting content
		return new TypedValue[] { new TypedValue( StandardBasicTypes.INTEGER, size ) };
	}

<<<<<<< HEAD
	public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) 
	throws HibernateException {
		return new TypedValue[] {
			new TypedValue( StandardBasicTypes.INTEGER, size )
		};
=======
	@Override
	public String toString() {
		return propertyName + ".size" + op + size;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

}
Solution content
		return new TypedValue[] { new TypedValue( StandardBasicTypes.INTEGER, size ) };
	}

	@Override
	public String toString() {
		return propertyName + ".size" + op + size;
	}

}
File
SizeExpression.java
Developer's decision
Version 2
Kind of conflict
Annotation
Attribute
Method invocation
Method signature
Return statement
Chunk
Conflicting content
import java.util.Properties;
import java.util.Set;

<<<<<<< HEAD
import org.jboss.logging.Logger;

=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
Solution content
import java.util.Properties;
import java.util.Set;

import org.jboss.logging.Logger;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
File
Dialect.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.internal.util.io.StreamCopier;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.spi.TypeContributions;
<<<<<<< HEAD
import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.Index;
import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.persister.entity.Lockable;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ANSICaseFragment;
Solution content
import org.hibernate.internal.util.io.StreamCopier;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.spi.TypeContributions;
import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.relational.ForeignKey;
import org.hibernate.metamodel.spi.relational.Index;
import org.hibernate.metamodel.spi.relational.Sequence;
import org.hibernate.metamodel.spi.relational.Table;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ANSICaseFragment;
File
Dialect.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.sql.CaseFragment;
import org.hibernate.sql.ForUpdateFragment;
import org.hibernate.sql.JoinFragment;
<<<<<<< HEAD
import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter;
import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.internal.StandardIndexExporter;
import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.TemporaryTableExporter;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
=======
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.jboss.logging.Logger;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * Represents a dialect of SQL implemented by a particular RDBMS.  Subclasses implement Hibernate compatibility
Solution content
import org.hibernate.sql.CaseFragment;
import org.hibernate.sql.ForUpdateFragment;
import org.hibernate.sql.JoinFragment;
import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter;
import org.hibernate.tool.schema.internal.StandardForeignKeyExporter;
import org.hibernate.tool.schema.internal.StandardIndexExporter;
import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.TemporaryTableExporter;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.jboss.logging.Logger;

/**
 * Represents a dialect of SQL implemented by a particular RDBMS.  Subclasses implement Hibernate compatibility
File
Dialect.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
 *
 * @author Jim Mlodgenski
 */
<<<<<<< HEAD
public class PostgresPlusDialect extends PostgreSQL82Dialect {

=======
@SuppressWarnings("deprecation")
public class PostgresPlusDialect extends PostgreSQLDialect {
	/**
	 * Constructs a PostgresPlusDialect
	 */
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	public PostgresPlusDialect() {
		super();
Solution content
 *
 * @author Jim Mlodgenski
 */
@SuppressWarnings("deprecation")
public class PostgresPlusDialect extends PostgreSQL82Dialect {
	/**
	 * Constructs a PostgresPlusDialect
	 */
	public PostgresPlusDialect() {
		super();
File
PostgresPlusDialect.java
Developer's decision
Combination
Kind of conflict
Annotation
Class signature
Comment
Chunk
Conflicting content
	 */
	private Map defaults = new HashMap();

<<<<<<< HEAD
	private final Map> weighted = new HashMap>();
	private final Map defaults = new HashMap();
=======
	/**
	 * Holds the weighted mappings for a typeCode.  The nested map is a TreeMap to sort its contents
	 * based on the key (the weighting) to ensure proper iteration ordering during {@link #get(int, long, int, int)}
	 */
	private Map> weighted = new HashMap>();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	/**
	 * get default type name for specified type
Solution content
	 */
	private final Map defaults = new HashMap();
	/**
	 * Holds the weighted mappings for a typeCode.  The nested map is a TreeMap to sort its contents
	 * based on the key (the weighting) to ensure proper iteration ordering during {@link #get(int, long, int, int)}
	 */
	private final Map> weighted = new HashMap>();

	/**
	 * get default type name for specified type
File
TypeNames.java
Developer's decision
Combination
Kind of conflict
Attribute
Comment
Method invocation
Chunk
Conflicting content
	 * @throws MappingException Indicates that no registrations were made for that typeCode
	 */
	public String get(int typeCode, long size, int precision, int scale) throws MappingException {
<<<<<<< HEAD
		Map map = weighted.get( typeCode );
		if ( CollectionHelper.isNotEmpty( map ) ) {
			// iterate entries ordered by capacity to find first fit
			for ( final Long key : map.keySet() ) {
				if ( size <= key ) {
					return replace( map.get( key ), size, precision, scale );
=======
		final Map map = weighted.get( typeCode );
		if ( map != null && map.size() > 0 ) {
			// iterate entries ordered by capacity to find first fit
			for ( Map.Entry entry: map.entrySet() ) {
				if ( size <= entry.getKey() ) {
					return replace( entry.getValue(), size, precision, scale );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
				}
			}
		}
Solution content
	 * @throws MappingException Indicates that no registrations were made for that typeCode
	 */
	public String get(int typeCode, long size, int precision, int scale) throws MappingException {
		final Map map = weighted.get( typeCode );
		if ( map != null && map.size() > 0 ) {
			// iterate entries ordered by capacity to find first fit
			for ( Map.Entry entry: map.entrySet() ) {
				if ( size <= entry.getKey() ) {
					return replace( entry.getValue(), size, precision, scale );
				}
			}
		}
File
TypeNames.java
Developer's decision
Version 2
Kind of conflict
Comment
For statement
If statement
Method invocation
Return statement
Variable
Chunk
Conflicting content
	@Override
	public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey) {
		if ( hasNullable( uniqueKey ) ) {
<<<<<<< HEAD
			// TODO: This borrows from Index's old way of doing things.  This
			// should be using StandardIndexExporter.  However, not all callers
			// have JdbcEnvironment available.  We'll need to refactor a bit...
			String keyName = dialect.qualifyIndexName() ? uniqueKey.getName()
					: StringHelper.unqualify( uniqueKey.getName() );
			StringBuilder buf = new StringBuilder( "create unique index " )
					.append( keyName ).append( " on " )
					.append( uniqueKey.getTable().getQualifiedName( dialect ) )
					.append( " (" );
			boolean first = true;
			for ( Column column : uniqueKey.getColumns() ) {
				if ( first ) {
					first = false;
				}
				else {
					buf.append( ", " );
				}
				buf.append( ( column.getColumnName().getText( dialect ) ) );
			}
			buf.append( ")" );
			return buf.toString();
		} else {
			return super.applyUniquesOnAlter( uniqueKey );
=======
			return Index.buildSqlCreateIndexString(
					dialect,
					uniqueKey.getName(),
					uniqueKey.getTable(),
					uniqueKey.getColumns(),
					true
			);
		}
		else {
			return super.getAlterTableToAddUniqueKeyCommand( uniqueKey );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		}
	}
	
Solution content
	@Override
	public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey) {
		if ( hasNullable( uniqueKey ) ) {
			// TODO: This borrows from Index's old way of doing things.  This
			// should be using StandardIndexExporter.  However, not all callers
			// have JdbcEnvironment available.  We'll need to refactor a bit...
			String keyName = dialect.qualifyIndexName() ? uniqueKey.getName()
					: StringHelper.unqualify( uniqueKey.getName() );
			StringBuilder buf = new StringBuilder( "create unique index " )
					.append( keyName ).append( " on " )
					.append( uniqueKey.getTable().getQualifiedName( dialect ) )
					.append( " (" );
			boolean first = true;
			for ( Column column : uniqueKey.getColumns() ) {
				if ( first ) {
					first = false;
				}
				else {
					buf.append( ", " );
				}
				buf.append( ( column.getColumnName().getText( dialect ) ) );
			}
			buf.append( ")" );
			return buf.toString();
		}
		else {
			return super.getAlterTableToAddUniqueKeyCommand( uniqueKey );
		}
	}
	
File
DB2UniqueDelegate.java
Developer's decision
Combination
Kind of conflict
Comment
For statement
Method invocation
Return statement
Variable
Chunk
Conflicting content
	@Override
	public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey) {
		if ( hasNullable( uniqueKey ) ) {
<<<<<<< HEAD
			// TODO: This borrows from Index's old way of doing things.  This
			// should be using StandardIndexExporter.  However, not all callers
			// have JdbcEnvironment available.  We'll need to refactor a bit...
			return "drop index " + StringHelper.qualify(
					uniqueKey.getTable().getQualifiedName( dialect ),
							uniqueKey.getName() );
		} else {
			return super.dropUniquesOnAlter( uniqueKey );
=======
			return Index.buildSqlDropIndexString( dialect, uniqueKey.getTable(), uniqueKey.getName() );
		}
		else {
			return super.getAlterTableToDropUniqueKeyCommand( uniqueKey );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		}
	}
	
Solution content
	@Override
	public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey) {
		if ( hasNullable( uniqueKey ) ) {
			// TODO: This borrows from Index's old way of doing things.  This
			// should be using StandardIndexExporter.  However, not all callers
			// have JdbcEnvironment available.  We'll need to refactor a bit...
			return "drop index " + StringHelper.qualify(
					uniqueKey.getTable().getQualifiedName( dialect ),
							uniqueKey.getName() );
		}
		else {
			return super.getAlterTableToDropUniqueKeyCommand( uniqueKey );
		}
	}
	
File
DB2UniqueDelegate.java
Developer's decision
Combination
Kind of conflict
Comment
Method invocation
Return statement
Chunk
Conflicting content
		final StringBuilder sb = new StringBuilder( " unique (" );
		final Iterator columnIterator = uniqueKey.getColumns().iterator();
		while ( columnIterator.hasNext() ) {
<<<<<<< HEAD
			Column column = (Column) columnIterator.next();
			sb.append( column.getColumnName().getText( dialect ) );
=======
			final org.hibernate.mapping.Column column = (org.hibernate.mapping.Column) columnIterator.next();
			sb.append( column.getQuotedName( dialect ) );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
			if ( columnIterator.hasNext() ) {
				sb.append( ", " );
			}
Solution content
		final StringBuilder sb = new StringBuilder( " unique (" );
		final Iterator columnIterator = uniqueKey.getColumns().iterator();
		while ( columnIterator.hasNext() ) {
			Column column = (Column) columnIterator.next();
			sb.append( column.getColumnName().getText( dialect ) );
			if ( columnIterator.hasNext() ) {
				sb.append( ", " );
			}
File
DefaultUniqueDelegate.java
Developer's decision
Version 1
Kind of conflict
Cast expression
Method invocation
Variable
Chunk
Conflicting content
 *
 * @author Steve Ebersole
 */
<<<<<<< HEAD
public class ConfigurationServiceImpl implements ConfigurationService {
=======
public class ConfigurationServiceImpl implements ConfigurationService, ServiceRegistryAwareService {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			ConfigurationServiceImpl.class.getName()
	);

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	private final Map settings;

	/**
Solution content
 *
 * @author Steve Ebersole
 */
public class ConfigurationServiceImpl implements ConfigurationService, ServiceRegistryAwareService {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			ConfigurationServiceImpl.class.getName()
	);

	private final Map settings;
	private ServiceRegistryImplementor serviceRegistry;

	/**
File
ConfigurationServiceImpl.java
Developer's decision
Manual
Kind of conflict
Attribute
Class signature
Method invocation
Chunk
Conflicting content
	@Override
	@SuppressWarnings("unchecked")
	public  T getSetting(String name, Class expected, T defaultValue) {
<<<<<<< HEAD
		Object value = settings.get( name );
		return value == null
				? defaultValue
				: (T) value;
	}
=======
		final Object value = settings.get( name );
		final T target = cast( expected, value );
		return target !=null ? target : defaultValue;
	}

	@Override
	@SuppressWarnings("unchecked")
	public  T cast(Class expected, Object candidate){
		if (candidate == null) {
			return null;
		}

		if ( expected.isInstance( candidate ) ) {
			return (T) candidate;
		}

		Class target;
		if ( Class.class.isInstance( candidate ) ) {
			target = Class.class.cast( candidate );
		}
		else {
			try {
				target = serviceRegistry.getService( ClassLoaderService.class ).classForName( candidate.toString() );
			}
			catch ( ClassLoadingException e ) {
				LOG.debugf( "Unable to locate %s implementation class %s", expected.getName(), candidate.toString() );
				target = null;
			}
		}
		if ( target != null ) {
			try {
				return target.newInstance();
			}
			catch ( Exception e ) {
				LOG.debugf(
						"Unable to instantiate %s class %s", expected.getName(),
						target.getName()
				);
			}
		}
		return null;
	}


>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
Solution content
	@Override
	public  T getSetting(String name, Class expected, T defaultValue) {
		final Object value = settings.get( name );
		final T target = cast( expected, value );
		return target !=null ? target : defaultValue;
	}

	@Override
	@SuppressWarnings("unchecked")
	public  T cast(Class expected, Object candidate){
		if (candidate == null) {
			return null;
		}

		if ( expected.isInstance( candidate ) ) {
			return (T) candidate;
		}

		Class target;
		if ( Class.class.isInstance( candidate ) ) {
			target = Class.class.cast( candidate );
		}
		else {
			try {
				target = serviceRegistry.getService( ClassLoaderService.class ).classForName( candidate.toString() );
			}
			catch ( ClassLoadingException e ) {
				LOG.debugf( "Unable to locate %s implementation class %s", expected.getName(), candidate.toString() );
				target = null;
			}
		}
		if ( target != null ) {
			try {
				return target.newInstance();
			}
			catch ( Exception e ) {
				LOG.debugf(
						"Unable to instantiate %s class %s", expected.getName(),
						target.getName()
				);
			}
		}
		return null;
	}


}
File
ConfigurationServiceImpl.java
Developer's decision
Version 2
Kind of conflict
Annotation
Cast expression
Method declaration
Method invocation
Return statement
Variable
Chunk
Conflicting content
 * @author Steve Ebersole
 */
public interface ConfigurationService extends Service {
<<<<<<< HEAD
	public Map getSettings();

	public  T getSetting(String name);
	public  T getSetting(String name, Converter converter);
	public  T getSetting(String name, Class expectedType);
=======
	/**
	 * Access to the complete map of config settings.  The returned map is immutable
	 *
	 * @return The immutable map of config settings.
	 */
	public Map getSettings();

	/**
	 * Get the named setting, using the specified converter.
	 *
	 * @param name The name of the setting to get.
	 * @param converter The converter to apply
	 * @param  The Java type of the conversion
	 *
	 * @return The converted (typed) setting.  May return {@code null} (see {@link #getSetting(String, Class, Object)})
	 */
	public  T getSetting(String name, Converter converter);

	/**
	 * Get the named setting, using the specified converter and default value.
	 *
	 * @param name The name of the setting to get.
	 * @param converter The converter to apply
	 * @param defaultValue If no setting with that name is found, return this default value as the result.
	 * @param  The Java type of the conversion
	 *
	 * @return The converted (typed) setting.  Will be the defaultValue if no such setting was defined.
	 */
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	public  T getSetting(String name, Converter converter, T defaultValue);

	/**
Solution content
/**
 * @author Steve Ebersole
 */
public interface ConfigurationService extends Service {
	/**
	 * Access to the complete map of config settings.  The returned map is immutable
	 *
	 * @return The immutable map of config settings.
	 */
	public Map getSettings();

	public  T getSetting(String name);

	/**
	 * Get the named setting, using the specified converter.
	 *
	 * @param name The name of the setting to get.
	 * @param converter The converter to apply
	 * @param  The Java type of the conversion
	 *
	 * @return The converted (typed) setting.  May return {@code null} (see {@link #getSetting(String, Class, Object)})
	 */
	public  T getSetting(String name, Converter converter);

	/**
	 * Get the named setting, using the specified converter and default value.
	 *
	 * @param name The name of the setting to get.
	 * @param converter The converter to apply
	 * @param defaultValue If no setting with that name is found, return this default value as the result.
	 * @param  The Java type of the conversion
	 *
	 * @return The converted (typed) setting.  Will be the defaultValue if no such setting was defined.
	 */
	public  T getSetting(String name, Converter converter, T defaultValue);
File
ConfigurationService.java
Developer's decision
Combination
Kind of conflict
Comment
Method interface
Chunk
Conflicting content
	 */
	public  T getSetting(String name, Class expected, T defaultValue);

<<<<<<< HEAD
=======
	/**
	 * Cast candidate to the instance of expected type.
	 *
	 * @param expected The type of instance expected to return.
	 * @param candidate The candidate object to be casted.
	 * @param  The java type of the expected return
	 *
	 * @return The instance of expected type or null if this cast fail.
	 *
	 * @deprecated No idea why this is exposed here...
	 */
	@Deprecated
	public  T cast(Class expected, Object candidate);

	/**
	 * Simple conversion contract for converting an untyped object to a specified type.
	 *
	 * @param  The Java type of the converted value
	 */
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	public static interface Converter {
		/**
		 * Convert an untyped Object reference to the Converter's type.
Solution content
	 */
	public  T getSetting(String name, Class expected, T defaultValue);

	/**
	 * Cast candidate to the instance of expected type.
	 *
	 * @param expected The type of instance expected to return.
	 * @param candidate The candidate object to be casted.
	 * @param  The java type of the expected return
	 *
	 * @return The instance of expected type or null if this cast fail.
	 *
	 * @deprecated No idea why this is exposed here...
	 */
	@Deprecated
	public  T cast(Class expected, Object candidate);

	/**
	 * Simple conversion contract for converting an untyped object to a specified type.
	 *
	 * @param  The Java type of the converted value
	 */
	public static interface Converter {
		/**
		 * Convert an untyped Object reference to the Converter's type.
File
ConfigurationService.java
Developer's decision
Version 2
Kind of conflict
Annotation
Comment
Method interface
Chunk
Conflicting content
		return ce;
	}

<<<<<<< HEAD
	/**
	 * Get the collection instance associated with the PluralAttributeKeyBinding
	 */
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public PersistentCollection getCollection(CollectionKey collectionKey) {
		return collectionsByKey.get( collectionKey );
Solution content
		return ce;
	}

	/**
	 * Get the collection instance associated with the PluralAttributeKeyBinding
	 */
	@Override
	public PersistentCollection getCollection(CollectionKey collectionKey) {
		return collectionsByKey.get( collectionKey );
File
StatefulPersistenceContext.java
Developer's decision
Version 1
Kind of conflict
Comment
Chunk
Conflicting content
	}

	private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
<<<<<<< HEAD
		return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() &&
						session.getCacheMode()!=CacheMode.REFRESH ) ||
				( entityEntry.getPersister().hasLazyProperties() &&
						entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
						entityEntry.getPersister().isLazyPropertiesCacheable() );
=======
		return ( session.getFactory().getSettings().isMinimalPutsEnabled()
				&& session.getCacheMode()!=CacheMode.REFRESH )
				|| ( entityEntry.getPersister().hasLazyProperties()
				&& entityEntry.isLoadedWithLazyPropertiesUnfetched()
				&& entityEntry.getPersister().isLazyPropertiesCacheable() );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	/**
Solution content
	}

	private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
		return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() &&
						session.getCacheMode()!=CacheMode.REFRESH ) ||
				( entityEntry.getPersister().hasLazyProperties() &&
						entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
						entityEntry.getPersister().isLazyPropertiesCacheable() );
	}

	/**
File
TwoPhaseLoad.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Return statement
Chunk
Conflicting content
import org.hibernate.HibernateException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.boot.registry.StandardServiceInitiator;
<<<<<<< HEAD
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
=======
import org.hibernate.boot.registry.selector.spi.StrategySelector;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
Solution content
import org.hibernate.HibernateException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
File
ConnectionProviderInitiator.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
			throw new UnknownUnwrapTypeException( unwrapType );
		}
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public void configure(Map configurationValues) {
		LOG.usingHibernateBuiltInConnectionPool();
Solution content
			throw new UnknownUnwrapTypeException( unwrapType );
		}
	}

	@Override
	public void configure(Map configurationValues) {
		LOG.usingHibernateBuiltInConnectionPool();
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
		LOG.usingDriver( driverClassName, url );
		// if debug level is enabled, then log the password, otherwise mask it
<<<<<<< HEAD
		if ( debugEnabled )
=======
		if ( LOG.isDebugEnabled() ) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
			LOG.connectionProperties( connectionProps );
		}
		else {
Solution content
		LOG.usingDriver( driverClassName, url );
		// if debug level is enabled, then log the password, otherwise mask it
		if ( debugEnabled ) {
			LOG.connectionProperties( connectionProps );
		}
		else {
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Manual
Kind of conflict
If statement
Chunk
Conflicting content
			LOG.connectionProperties( ConfigurationHelper.maskOut( connectionProps, "password" ) );
		}
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public void stop() {
		LOG.cleaningUpConnectionPool( url );
Solution content
			LOG.connectionProperties( ConfigurationHelper.maskOut( connectionProps, "password" ) );
		}
	}

	@Override
	public void stop() {
		LOG.cleaningUpConnectionPool( url );
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
		pool.clear();
		stopped = true;
	}
<<<<<<< HEAD
	@Override
	public Connection getConnection() throws SQLException {
		if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
=======

	@Override
	public Connection getConnection() throws SQLException {
		final boolean traceEnabled = LOG.isTraceEnabled();
		if ( traceEnabled ) {
			LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
		}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		// essentially, if we have available connections in the pool, use one...
		synchronized (pool) {
Solution content
		pool.clear();
		stopped = true;
	}

	@Override
	public Connection getConnection() throws SQLException {
		if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );


		// essentially, if we have available connections in the pool, use one...
		synchronized (pool) {
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Version 1
Kind of conflict
Annotation
If statement
Method invocation
Method signature
Variable
Chunk
Conflicting content
		checkedOut.incrementAndGet();
		return conn;
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public void closeConnection(Connection conn) throws SQLException {
		checkedOut.decrementAndGet();
Solution content
		checkedOut.incrementAndGet();
		return conn;
	}

	@Override
	public void closeConnection(Connection conn) throws SQLException {
		checkedOut.decrementAndGet();
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
		}
		super.finalize();
	}
<<<<<<< HEAD
=======

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public boolean supportsAggressiveRelease() {
		return false;
Solution content
		}
		super.finalize();
	}

	@Override
	public boolean supportsAggressiveRelease() {
		return false;
File
DriverManagerConnectionProviderImpl.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
 * @author Steve Ebersole
 */
public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareService, Configurable {
<<<<<<< HEAD
=======
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			JdbcServicesImpl.class.getName()
	);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	private ServiceRegistryImplementor serviceRegistry;
	private JdbcEnvironment jdbcEnvironment;
Solution content
 * @author Steve Ebersole
 */
public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareService, Configurable {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			JdbcServicesImpl.class.getName()
	);

	private ServiceRegistryImplementor serviceRegistry;
	private JdbcEnvironment jdbcEnvironment;
File
JdbcServicesImpl.java
Developer's decision
Version 2
Kind of conflict
Attribute
Method invocation
Chunk
Conflicting content
	public void configure(Map configValues) {
		this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );

<<<<<<< HEAD
		this.connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
=======
		boolean metaSupportsRefCursors = false;
		boolean metaSupportsNamedParams = false;
		boolean metaSupportsScrollable = false;
		boolean metaSupportsGetGeneratedKeys = false;
		boolean metaSupportsBatchUpdates = false;
		boolean metaReportsDDLCausesTxnCommit = false;
		boolean metaReportsDDLInTxnSupported = true;
		String extraKeywordsString = "";
		int sqlStateType = -1;
		boolean lobLocatorUpdateCopy = false;
		String catalogName = null;
		String schemaName = null;
		final LinkedHashSet typeInfoSet = new LinkedHashSet();

		// 'hibernate.temp.use_jdbc_metadata_defaults' is a temporary magic value.
		// The need for it is intended to be alleviated with future development, thus it is
		// not defined as an Environment constant...
		//
		// it is used to control whether we should consult the JDBC metadata to determine
		// certain Settings default values; it is useful to *not* do this when the database
		// may not be available (mainly in tools usage).
		final boolean useJdbcMetadata = ConfigurationHelper.getBoolean( "hibernate.temp.use_jdbc_metadata_defaults", configValues, true );
		if ( useJdbcMetadata ) {
			try {
				final Connection connection = jdbcConnectionAccess.obtainConnection();
				try {
					DatabaseMetaData meta = connection.getMetaData();
					if(LOG.isDebugEnabled()) {
						LOG.debugf( "Database ->\n" + "       name : %s\n" + "    version : %s\n" + "      major : %s\n" + "      minor : %s",
									meta.getDatabaseProductName(),
									meta.getDatabaseProductVersion(),
									meta.getDatabaseMajorVersion(),
									meta.getDatabaseMinorVersion()
						);
						LOG.debugf( "Driver ->\n" + "       name : %s\n" + "    version : %s\n" + "      major : %s\n" + "      minor : %s",
									meta.getDriverName(),
									meta.getDriverVersion(),
									meta.getDriverMajorVersion(),
									meta.getDriverMinorVersion()
						);
						LOG.debugf( "JDBC version : %s.%s", meta.getJDBCMajorVersion(), meta.getJDBCMinorVersion() );
					}

					metaSupportsRefCursors = StandardRefCursorSupport.supportsRefCursors( meta );
					metaSupportsNamedParams = meta.supportsNamedParameters();
					metaSupportsScrollable = meta.supportsResultSetType( ResultSet.TYPE_SCROLL_INSENSITIVE );
					metaSupportsBatchUpdates = meta.supportsBatchUpdates();
					metaReportsDDLCausesTxnCommit = meta.dataDefinitionCausesTransactionCommit();
					metaReportsDDLInTxnSupported = !meta.dataDefinitionIgnoredInTransactions();
					metaSupportsGetGeneratedKeys = meta.supportsGetGeneratedKeys();
					extraKeywordsString = meta.getSQLKeywords();
					sqlStateType = meta.getSQLStateType();
					lobLocatorUpdateCopy = meta.locatorsUpdateCopy();
					typeInfoSet.addAll( TypeInfoExtracter.extractTypeInfo( meta ) );

					dialect = dialectFactory.buildDialect( configValues, connection );

					catalogName = connection.getCatalog();
					final SchemaNameResolver schemaNameResolver = determineExplicitSchemaNameResolver( configValues );
					if ( schemaNameResolver == null ) {
// todo : add dialect method
//						schemaNameResolver = dialect.getSchemaNameResolver();
					}
					if ( schemaNameResolver != null ) {
						schemaName = schemaNameResolver.resolveSchemaName( connection );
					}
					lobCreatorBuilder = new LobCreatorBuilder( configValues, connection );
				}
				catch ( SQLException sqle ) {
					LOG.unableToObtainConnectionMetadata( sqle.getMessage() );
				}
				finally {
					if ( connection != null ) {
						jdbcConnectionAccess.releaseConnection( connection );
					}
				}
			}
			catch ( SQLException sqle ) {
				LOG.unableToObtainConnectionToQueryMetadata( sqle.getMessage() );
				dialect = dialectFactory.buildDialect( configValues, null );
			}
			catch ( UnsupportedOperationException uoe ) {
				// user supplied JDBC connections
				dialect = dialectFactory.buildDialect( configValues, null );
			}
		}
		else {
			dialect = dialectFactory.buildDialect( configValues, null );
		}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
		final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false );
Solution content
	public void configure(Map configValues) {
		this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );

		this.connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
		final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
		final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false );
File
JdbcServicesImpl.java
Developer's decision
Version 1
Kind of conflict
Attribute
Comment
If statement
Method invocation
Variable
Chunk
Conflicting content
		final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
		final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false );
		this.sqlStatementLogger =  new SqlStatementLogger( showSQL, formatSQL );
<<<<<<< HEAD
=======

		this.extractedMetaDataSupport = new ExtractedDatabaseMetaDataImpl(
				metaSupportsRefCursors,
				metaSupportsNamedParams,
				metaSupportsScrollable,
				metaSupportsGetGeneratedKeys,
				metaSupportsBatchUpdates,
				metaReportsDDLInTxnSupported,
				metaReportsDDLCausesTxnCommit,
				parseKeywords( extraKeywordsString ),
				parseSQLStateType( sqlStateType ),
				lobLocatorUpdateCopy,
				schemaName,
				catalogName,
				typeInfoSet
		);

		SQLExceptionConverter sqlExceptionConverter = dialect.buildSQLExceptionConverter();
		if ( sqlExceptionConverter == null ) {
			final StandardSQLExceptionConverter converter = new StandardSQLExceptionConverter();
			sqlExceptionConverter = converter;
			converter.addDelegate( dialect.buildSQLExceptionConversionDelegate() );
			converter.addDelegate( new SQLExceptionTypeDelegate( dialect ) );
			// todo : vary this based on extractedMetaDataSupport.getSqlStateType()
			converter.addDelegate( new SQLStateConversionDelegate( dialect ) );
		}
		this.sqlExceptionHelper = new SqlExceptionHelper( sqlExceptionConverter );
	}

	private JdbcConnectionAccess buildJdbcConnectionAccess(Map configValues) {
		final MultiTenancyStrategy multiTenancyStrategy = MultiTenancyStrategy.determineMultiTenancyStrategy( configValues );

		if ( MultiTenancyStrategy.NONE == multiTenancyStrategy ) {
			connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
			return new ConnectionProviderJdbcConnectionAccess( connectionProvider );
		}
		else {
			connectionProvider = null;
			final MultiTenantConnectionProvider multiTenantConnectionProvider = serviceRegistry.getService( MultiTenantConnectionProvider.class );
			return new MultiTenantConnectionProviderJdbcConnectionAccess( multiTenantConnectionProvider );
		}
	}

	private static class ConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
		private final ConnectionProvider connectionProvider;

		public ConnectionProviderJdbcConnectionAccess(ConnectionProvider connectionProvider) {
			this.connectionProvider = connectionProvider;
		}

		@Override
		public Connection obtainConnection() throws SQLException {
			return connectionProvider.getConnection();
		}

		@Override
		public void releaseConnection(Connection connection) throws SQLException {
			connectionProvider.closeConnection( connection );
		}

		@Override
		public boolean supportsAggressiveRelease() {
			return connectionProvider.supportsAggressiveRelease();
		}
	}

	private static class MultiTenantConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
		private final MultiTenantConnectionProvider connectionProvider;

		public MultiTenantConnectionProviderJdbcConnectionAccess(MultiTenantConnectionProvider connectionProvider) {
			this.connectionProvider = connectionProvider;
		}

		@Override
		public Connection obtainConnection() throws SQLException {
			return connectionProvider.getAnyConnection();
		}

		@Override
		public void releaseConnection(Connection connection) throws SQLException {
			connectionProvider.releaseAnyConnection( connection );
		}

		@Override
		public boolean supportsAggressiveRelease() {
			return connectionProvider.supportsAggressiveRelease();
		}
	}


	/**
	 * A constant naming the setting used to identify the {@link SchemaNameResolver} to use
	 * 

* TODO : add to Environment */ public static final String SCHEMA_NAME_RESOLVER = "hibernate.schema_name_resolver"; private SchemaNameResolver determineExplicitSchemaNameResolver(Map configValues) { final Object setting = configValues.get( SCHEMA_NAME_RESOLVER ); if ( SchemaNameResolver.class.isInstance( setting ) ) { return (SchemaNameResolver) setting; } final String resolverClassName = (String) setting; if ( resolverClassName != null ) { try { final Class resolverClass = ReflectHelper.classForName( resolverClassName, getClass() ); return (SchemaNameResolver) ReflectHelper.getDefaultConstructor( resolverClass ).newInstance(); } catch ( ClassNotFoundException e ) { LOG.unableToLocateConfiguredSchemaNameResolver( resolverClassName, e.toString() ); } catch ( InvocationTargetException e ) { LOG.unableToInstantiateConfiguredSchemaNameResolver( resolverClassName, e.getTargetException().toString() ); } catch ( Exception e ) { LOG.unableToInstantiateConfiguredSchemaNameResolver( resolverClassName, e.toString() ); } } return null; } private Set parseKeywords(String extraKeywordsString) { final Set keywordSet = new HashSet(); keywordSet.addAll( Arrays.asList( extraKeywordsString.split( "," ) ) ); return keywordSet; } private ExtractedDatabaseMetaData.SQLStateType parseSQLStateType(int sqlStateType) { switch ( sqlStateType ) { case DatabaseMetaData.sqlStateSQL99 : { return ExtractedDatabaseMetaData.SQLStateType.SQL99; } case DatabaseMetaData.sqlStateXOpen : { return ExtractedDatabaseMetaData.SQLStateType.XOpen; } default : { return ExtractedDatabaseMetaData.SQLStateType.UNKOWN; } } } private static class ExtractedDatabaseMetaDataImpl implements ExtractedDatabaseMetaData { private final boolean supportsRefCursors; private final boolean supportsNamedParameters; private final boolean supportsScrollableResults; private final boolean supportsGetGeneratedKeys; private final boolean supportsBatchUpdates; private final boolean supportsDataDefinitionInTransaction; private final 
boolean doesDataDefinitionCauseTransactionCommit; private final Set extraKeywords; private final SQLStateType sqlStateType; private final boolean lobLocatorUpdateCopy; private final String connectionSchemaName; private final String connectionCatalogName; private final LinkedHashSet typeInfoSet; private ExtractedDatabaseMetaDataImpl( boolean supportsRefCursors, boolean supportsNamedParameters, boolean supportsScrollableResults, boolean supportsGetGeneratedKeys, boolean supportsBatchUpdates, boolean supportsDataDefinitionInTransaction, boolean doesDataDefinitionCauseTransactionCommit, Set extraKeywords, SQLStateType sqlStateType, boolean lobLocatorUpdateCopy, String connectionSchemaName, String connectionCatalogName, LinkedHashSet typeInfoSet) { this.supportsRefCursors = supportsRefCursors; this.supportsNamedParameters = supportsNamedParameters; this.supportsScrollableResults = supportsScrollableResults; this.supportsGetGeneratedKeys = supportsGetGeneratedKeys; this.supportsBatchUpdates = supportsBatchUpdates; this.supportsDataDefinitionInTransaction = supportsDataDefinitionInTransaction; this.doesDataDefinitionCauseTransactionCommit = doesDataDefinitionCauseTransactionCommit; this.extraKeywords = extraKeywords; this.sqlStateType = sqlStateType; this.lobLocatorUpdateCopy = lobLocatorUpdateCopy; this.connectionSchemaName = connectionSchemaName; this.connectionCatalogName = connectionCatalogName; this.typeInfoSet = typeInfoSet; } @Override public boolean supportsRefCursors() { return supportsRefCursors; } @Override public boolean supportsNamedParameters() { return supportsNamedParameters; } @Override public boolean supportsScrollableResults() { return supportsScrollableResults; } @Override public boolean supportsGetGeneratedKeys() { return supportsGetGeneratedKeys; } public boolean supportsBatchUpdates() { return supportsBatchUpdates; } @Override public boolean supportsDataDefinitionInTransaction() { return supportsDataDefinitionInTransaction; } @Override public 
boolean doesDataDefinitionCauseTransactionCommit() { return doesDataDefinitionCauseTransactionCommit; } @Override public Set getExtraKeywords() { return extraKeywords; } @Override public SQLStateType getSqlStateType() { return sqlStateType; } @Override public boolean doesLobLocatorUpdateCopy() { return lobLocatorUpdateCopy; } @Override public String getConnectionSchemaName() { return connectionSchemaName; } @Override public String getConnectionCatalogName() { return connectionCatalogName; } @Override public LinkedHashSet getTypeInfoSet() { return typeInfoSet; } >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 } @Override

Solution content
		final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
		final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false );
		this.sqlStatementLogger =  new SqlStatementLogger( showSQL, formatSQL );
	}

	@Override
File
JdbcServicesImpl.java
Developer's decision
Version 1
Kind of conflict
Annotation
Attribute
Class declaration
Class signature
Comment
If statement
Method declaration
Method invocation
Variable
Chunk
Conflicting content
	@Override
	public String toString() {
<<<<<<< HEAD
		return "PluralAttributeKeyBinding" +
		       MessageHelper.collectionInfoString( factory.getCollectionPersister(role), key, factory );
=======
		return "CollectionKey"
				+ MessageHelper.collectionInfoString( factory.getCollectionPersister( role ), key, factory );
	}

	@Override
	public boolean equals(Object other) {
		if ( this == other ) {
			return true;
		}
		if ( other == null || getClass() != other.getClass() ) {
			return false;
		}

		final CollectionKey that = (CollectionKey) other;
		return that.role.equals( role )
				&& keyType.isEqual( that.key, key, factory );
	}
	@Override
	public int hashCode() {
		return hashCode;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

Solution content
	@Override
	public String toString() {
		return "CollectionKey"
				+ MessageHelper.collectionInfoString( factory.getCollectionPersister( role ), key, factory );
	}

	@Override
	public boolean equals(Object other) {
		if ( this == other ) {
			return true;
		}
		if ( other == null || getClass() != other.getClass() ) {
			return false;
		}

		final CollectionKey that = (CollectionKey) other;
		return that.role.equals( role )
				&& keyType.isEqual( that.key, key, factory );
	}

	@Override
	public int hashCode() {
		return hashCode;
	}

File
CollectionKey.java
Developer's decision
Version 2
Kind of conflict
Annotation
Attribute
Method declaration
Method invocation
Method signature
Return statement
Chunk
Conflicting content
	 *
	 * @param ois The stream from which to read the entry.
	 * @param session The session being deserialized.
<<<<<<< HEAD
	 * @return The deserialized PluralAttributeKeyBinding
=======
	 *
	 * @return The deserialized CollectionKey
	 *
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	 * @throws IOException
	 * @throws ClassNotFoundException
	 */
Solution content
	 *
	 * @param ois The stream from which to read the entry.
	 * @param session The session being deserialized.
	 *
	 * @return The deserialized CollectionKey
	 *
	 * @throws IOException
	 * @throws ClassNotFoundException
	 */
File
CollectionKey.java
Developer's decision
Version 2
Kind of conflict
Comment
Chunk
Conflicting content
	private boolean shallowQuery;
	private Map tokenReplacements;

<<<<<<< HEAD
	private Map enabledFilters; //TODO:this is only needed during compilation .. can we eliminate the instvar?
=======
	//TODO:this is only needed during compilation .. can we eliminate the instvar?
	private Map enabledFilters;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	private boolean compiled;
	private QueryLoader queryLoader;
Solution content
	private boolean shallowQuery;
	private Map tokenReplacements;

	private Map enabledFilters; //TODO:this is only needed during compilation .. can we eliminate the instvar?

	private boolean compiled;
	private QueryLoader queryLoader;
File
QueryTranslatorImpl.java
Developer's decision
Version 1
Kind of conflict
Attribute
Comment
Chunk
Conflicting content
	 */
	public QueryTranslatorImpl(
			String queryIdentifier,
<<<<<<< HEAD
	        String query,
	        Map enabledFilters,
	        SessionFactoryImplementor factory) {
=======
			String query,
			Map enabledFilters,
			SessionFactoryImplementor factory) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		this.queryIdentifier = queryIdentifier;
		this.hql = query;
		this.compiled = false;
Solution content
	 */
	public QueryTranslatorImpl(
			String queryIdentifier,
	        String query,
	        Map enabledFilters,
	        SessionFactoryImplementor factory) {
		this.queryIdentifier = queryIdentifier;
		this.hql = query;
		this.compiled = false;
File
QueryTranslatorImpl.java
Developer's decision
Version 1
Kind of conflict
Variable
Chunk
Conflicting content
				else {
					throw new QueryException( "Unsupported discriminator type " + type );
				}
<<<<<<< HEAD
				return new TypedValue(
						type,
				        value
				);
			}
		}
		// Otherwise, this is an ordinary value.
		return new TypedValue(
				getTypeUsingProjection( subcriteria, propertyName ),
		        value
		);
=======
				return new TypedValue( type, value );
			}
		}
		// Otherwise, this is an ordinary value.
		return new TypedValue( getTypeUsingProjection( subcriteria, propertyName ), value );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	private PropertyMapping getPropertyMapping(String entityName)
Solution content
				else {
					throw new QueryException( "Unsupported discriminator type " + type );
				}
				return new TypedValue( type, value );
			}
		}
		// Otherwise, this is an ordinary value.
		return new TypedValue( getTypeUsingProjection( subcriteria, propertyName ), value );
	}

	private PropertyMapping getPropertyMapping(String entityName)
File
CriteriaQueryTranslator.java
Developer's decision
Version 1
Kind of conflict
Comment
Method invocation
Return statement
Chunk
Conflicting content
 */
package org.hibernate.type;

<<<<<<< HEAD
import org.hibernate.engine.internal.Cascade;
=======
import java.io.Serializable;

import org.hibernate.engine.internal.CascadePoint;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * Represents directionality of the foreign key constraint
Solution content
 */
package org.hibernate.type;

import org.hibernate.engine.internal.Cascade;
import org.hibernate.engine.internal.CascadePoint;

/**
 * Represents directionality of the foreign key constraint
File
ForeignKeyDirection.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
 *
 * @author Gavin King
 */
<<<<<<< HEAD
public enum ForeignKeyDirection {
	/**
	 * A foreign key from child to parent
	 */
	TO_PARENT {
		@Override
		public boolean cascadeNow(int cascadePoint) {
			return cascadePoint != Cascade.BEFORE_INSERT_AFTER_DELETE;
		}

	},
	/**
	 * A foreign key from parent to child
	 */
	FROM_PARENT {
		@Override
		public boolean cascadeNow(int cascadePoint) {
			return cascadePoint != Cascade.AFTER_INSERT_BEFORE_DELETE;
		}
	};

	/**
	 * Should we cascade at this cascade point?
	 *
	 * @see org.hibernate.engine.internal.Cascade
	 */
	public abstract boolean cascadeNow(int cascadePoint);

}
=======
public abstract class ForeignKeyDirection implements Serializable {
	protected ForeignKeyDirection() {}
	/**
	 * Should we cascade at this cascade point?
	 *
	 * @param cascadePoint The point at which the cascade is being initiated.
	 *
	 * @return {@code true} if cascading should be performed now.
	 *
	 * @see org.hibernate.engine.internal.Cascade
	 */
	public abstract boolean cascadeNow(CascadePoint cascadePoint);

	/**
	 * A foreign key from child to parent
	 */
	public static final ForeignKeyDirection FOREIGN_KEY_TO_PARENT = new ForeignKeyDirection() {
		@Override
		public boolean cascadeNow(CascadePoint cascadePoint) {
			return cascadePoint != CascadePoint.BEFORE_INSERT_AFTER_DELETE;
		}

		@Override
		public String toString() {
			return "toParent";
		}
		
		Object readResolve() {
			return FOREIGN_KEY_TO_PARENT;
		}
	};
	/**
	 * A foreign key from parent to child
	 */
	public static final ForeignKeyDirection FOREIGN_KEY_FROM_PARENT = new ForeignKeyDirection() {
		@Override
		public boolean cascadeNow(CascadePoint cascadePoint) {
			return cascadePoint != CascadePoint.AFTER_INSERT_BEFORE_DELETE;
		}

		@Override
		public String toString() {
			return "fromParent";
		}
		
		Object readResolve() {
			return FOREIGN_KEY_FROM_PARENT;
		}
	};
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
Solution content
 *
 * @author Gavin King
 */
public enum ForeignKeyDirection {
	/**
	 * A foreign key from child to parent
	 */
	TO_PARENT {
		@Override
		public boolean cascadeNow(CascadePoint cascadePoint) {
			return cascadePoint != CascadePoint.BEFORE_INSERT_AFTER_DELETE;
		}

	},
	/**
	 * A foreign key from parent to child
	 */
	FROM_PARENT {
		@Override
		public boolean cascadeNow(CascadePoint cascadePoint) {
			return cascadePoint != CascadePoint.AFTER_INSERT_BEFORE_DELETE;
		}
	};

	/**
	 * Should we cascade at this cascade point?
	 *
	 * @param cascadePoint The point at which the cascade is being initiated.
	 *
	 * @return {@code true} if cascading should be performed now.
	 *
	 * @see org.hibernate.engine.internal.Cascade
	 */
	public abstract boolean cascadeNow(CascadePoint cascadePoint);

}
File
ForeignKeyDirection.java
Developer's decision
Combination
Kind of conflict
Class declaration
Comment
Enum value
Method interface
Chunk
Conflicting content
	@Test
	public void testSameMappingValues() {
<<<<<<< HEAD
		EntityBinding forest = SchemaUtil.getEntityBinding( Forest.class, metadata() );
		EntityBinding forest2 = SchemaUtil.getEntityBinding( Forest2.class, metadata() );
		assertEquals( forest.isDynamicInsert(), forest2.isDynamicInsert() );
		assertEquals( forest.isDynamicUpdate(), forest2.isDynamicUpdate() );
		assertEquals( forest.isSelectBeforeUpdate(), forest2.isSelectBeforeUpdate() );
		// TODO: This needs to use the new metamodel, but the information
		// is not available in EntityBinding.
//		assertEquals( forest.getOptimisticLockMode(), forest2.getOptimisticLockMode() );
		assertEquals( forest.isPolymorphic(), forest2.isPolymorphic() );
=======
		RootClass forest = (RootClass) configuration().getClassMapping( Forest.class.getName() );
		RootClass forest2 = (RootClass) configuration().getClassMapping( Forest2.class.getName() );
		assertEquals( forest.useDynamicInsert(), forest2.useDynamicInsert() );
		assertEquals( forest.useDynamicUpdate(), forest2.useDynamicUpdate() );
		assertEquals( forest.hasSelectBeforeUpdate(), forest2.hasSelectBeforeUpdate() );
		assertEquals( forest.getOptimisticLockStyle(), forest2.getOptimisticLockStyle() );
		assertEquals( forest.isExplicitPolymorphism(), forest2.isExplicitPolymorphism() );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}
}
Solution content
	@Test
	public void testSameMappingValues() {
		EntityBinding forest = SchemaUtil.getEntityBinding( Forest.class, metadata() );
		EntityBinding forest2 = SchemaUtil.getEntityBinding( Forest2.class, metadata() );
		assertEquals( forest.isDynamicInsert(), forest2.isDynamicInsert() );
		assertEquals( forest.isDynamicUpdate(), forest2.isDynamicUpdate() );
		assertEquals( forest.isSelectBeforeUpdate(), forest2.isSelectBeforeUpdate() );
		// TODO: This needs to use the new metamodel, but the information
		// is not available in EntityBinding.
//		assertEquals( forest.getOptimisticLockMode(), forest2.getOptimisticLockMode() );
		assertEquals( forest.isPolymorphic(), forest2.isPolymorphic() );
	}
}
File
NewCustomEntityMappingAnnotationsTest.java
Developer's decision
Version 1
Kind of conflict
Cast expression
Comment
Method invocation
Variable
Chunk
Conflicting content
import org.junit.Assert;
import org.junit.Test;

<<<<<<< HEAD
import org.hibernate.cfg.AvailableSettings;
=======
import org.hibernate.SessionFactory;
import org.hibernate.cfg.AnnotationConfiguration;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.metamodel.MetadataSources;
Solution content
import org.junit.Assert;
import org.junit.Test;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.metamodel.MetadataSources;
File
NullablePrimaryKeyTest.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
	private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
    @Test
	public void testGeneratedSql() {
<<<<<<< HEAD
		Properties properties = new Properties();
		properties.putAll( Environment.getProperties() );
		properties.setProperty( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
		ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties );
		try {
			MetadataSources metadataSource = new MetadataSources(serviceRegistry);
			metadataSource.addAnnotatedClass( Address.class ).addAnnotatedClass( Person.class );
			MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
			metadata.getDatabase().getJdbcEnvironment();

			SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
			SchemaCreator schemaCreator = schemaManagementTool.getSchemaCreator( new HashMap() );
			final List commands = new ArrayList();
			final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
				@Override
				public boolean acceptsImportScriptActions() {
					return false;
				}

				@Override
				public void prepare() {
					commands.clear();
				}

				@Override
				public void accept(String command) {
					commands.add( command );
				}

				@Override
				public void release() {
				}
			};
			schemaCreator.doCreation( metadata.getDatabase(), false, target );
			for ( String s : commands ) {
				log.debug( s );
=======

		ServiceRegistry serviceRegistry = null;
		SessionFactory sf = null;
		try {
			AnnotationConfiguration config = new AnnotationConfiguration();
			config.addAnnotatedClass(Address.class);
			config.addAnnotatedClass(Person.class);
			serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( Environment.getProperties() );
			sf = config.buildSessionFactory( serviceRegistry );
			String[] schema = config
					.generateSchemaCreationScript(new SQLServerDialect());
			for (String s : schema) {
                log.debug(s);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
			}
			String expectedMappingTableSql = "create table personAddress (person_id numeric(19,0) not null, " +
					"address_id numeric(19,0), primary key (person_id))";
Solution content
	private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
    @Test
	public void testGeneratedSql() {
		Properties properties = new Properties();
		properties.putAll( Environment.getProperties() );
		properties.setProperty( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
		ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties );
		try {
			MetadataSources metadataSource = new MetadataSources(serviceRegistry);
			metadataSource.addAnnotatedClass( Address.class ).addAnnotatedClass( Person.class );
			MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
			metadata.getDatabase().getJdbcEnvironment();

			SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
			SchemaCreator schemaCreator = schemaManagementTool.getSchemaCreator( new HashMap() );
			final List commands = new ArrayList();
			final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
				@Override
				public boolean acceptsImportScriptActions() {
					return false;
				}

				@Override
				public void prepare() {
					commands.clear();
				}

				@Override
				public void accept(String command) {
					commands.add( command );
				}

				@Override
				public void release() {
				}
			};
			schemaCreator.doCreation( metadata.getDatabase(), false, target );
			for ( String s : commands ) {
				log.debug( s );
			}
			String expectedMappingTableSql = "create table personAddress (person_id numeric(19,0) not null, " +
					"address_id numeric(19,0), primary key (person_id))";
File
NullablePrimaryKeyTest.java
Developer's decision
Version 1
Kind of conflict
Cast expression
For statement
Method invocation
Try statement
Variable
Chunk
Conflicting content
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.SimpleValue;
<<<<<<< HEAD:hibernate-core/src/test/java/org/hibernate/type/AttributeConverterTest.java
=======
import org.hibernate.type.AbstractStandardBasicType;
import org.hibernate.type.BasicType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;

import org.junit.Test;

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676:hibernate-core/src/test/java/org/hibernate/test/type/AttributeConverterTest.java
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;
Solution content
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.type.AbstractStandardBasicType;
import org.hibernate.type.BasicType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;
import org.hibernate.testing.junit4.BaseUnitTestCase;
File
AttributeConverterTest.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.Settings;
<<<<<<< HEAD
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
=======
import org.hibernate.service.spi.InjectService;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * Abstract implementation of an Ehcache specific RegionFactory.
Solution content
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.service.spi.InjectService;

/**
 * Abstract implementation of an Ehcache specific RegionFactory.
File
AbstractEhcacheRegionFactory.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
 */
abstract class AbstractEhcacheRegionFactory extends AbstractRegionFactory {

<<<<<<< HEAD
    /**
     * The Hibernate system property specifying the location of the ehcache configuration file name.
     * 

* If not set, ehcache.xml will be looked for in the root of the classpath. *

* If set to say ehcache-1.xml, ehcache-1.xml will be looked for in the root of the classpath. */ public static final String NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME = "net.sf.ehcache.configurationResourceName"; private static final EhCacheMessageLogger LOG = Logger.getMessageLogger( EhCacheMessageLogger.class, AbstractEhcacheRegionFactory.class.getName() ); /** * MBean registration helper class instance for Ehcache Hibernate MBeans. */ protected final ProviderMBeanRegistrationHelper mbeanRegistrationHelper = new ProviderMBeanRegistrationHelper(); /** * Ehcache CacheManager that supplied Ehcache instances for this Hibernate RegionFactory. */ protected volatile CacheManager manager; /** * Settings object for the Hibernate persistence unit. */ protected Settings settings; /** * {@link EhcacheAccessStrategyFactory} for creating various access strategies */ protected final EhcacheAccessStrategyFactory accessStrategyFactory = new NonstopAccessStrategyFactory( new EhcacheAccessStrategyFactoryImpl() ); /** * Whether to optimize for minimals puts or minimal gets. *

* Indicates whether when operating in non-strict read/write or read-only mode * Hibernate should optimize the access patterns for minimal puts or minimal gets. * In Ehcache we default to minimal puts since this should have minimal to no * affect on unclustered users, and has great benefit for clustered users. *

* This setting can be overridden by setting the "hibernate.cache.use_minimal_puts" * property in the Hibernate configuration. * * @return true, optimize for minimal puts */ public boolean isMinimalPutsEnabledByDefault() { return true; } /** * {@inheritDoc} */ public long nextTimestamp() { return net.sf.ehcache.util.Timestamper.next(); } /** * {@inheritDoc} */ public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheEntityRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties ); } @Override public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheNaturalIdRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties ); } /** * {@inheritDoc} */ public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheCollectionRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties ); } /** * {@inheritDoc} */ public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheQueryResultsRegion( accessStrategyFactory, getCache( regionName ), properties ); } /** * {@inheritDoc} */ public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheTimestampsRegion( accessStrategyFactory, getCache( regionName ), properties ); } private Ehcache getCache(String name) throws CacheException { try { Ehcache cache = manager.getEhcache( name ); if ( cache == null ) { LOG.unableToFindEhCacheConfiguration( name ); manager.addCache( name ); return true; cache = manager.getEhcache( name ); LOG.debug( "started EHCache region: " + name ); } 
HibernateUtil.validateEhcache( cache ); return cache; } catch ( net.sf.ehcache.CacheException e ) { throw new CacheException( e ); } } /** * Load a resource from the classpath. */ protected URL loadResource(String configurationResourceName) { URL url = getServiceRegistry().getService( ClassLoaderService.class ).locateResource( configurationResourceName ); if ( url == null ) { ClassLoader standardClassloader = ClassLoaderUtil.getStandardClassLoader(); if ( standardClassloader != null ) { url = standardClassloader.getResource( configurationResourceName ); } if ( url == null ) { url = AbstractEhcacheRegionFactory.class.getResource( configurationResourceName ); } } if ( LOG.isDebugEnabled() ) { LOG.debugf( "Creating EhCacheRegionFactory from a specified resource: %s. Resolved to URL: %s", configurationResourceName, url ); } if ( url == null ) { LOG.unableToLoadConfiguration( configurationResourceName ); } return url; } /** * Default access-type used when the configured using JPA 2.0 config. JPA 2.0 allows @Cacheable(true) to be attached to an * entity without any access type or usage qualification. *

* We are conservative here in specifying {@link AccessType#READ_WRITE} so as to follow the mantra of "do no harm". *

* This is a Hibernate 3.5 method. */ public AccessType getDefaultAccessType() { return AccessType.READ_WRITE; } ======= /** * The Hibernate system property specifying the location of the ehcache configuration file name. *

* If not set, ehcache.xml will be looked for in the root of the classpath. *

* If set to say ehcache-1.xml, ehcache-1.xml will be looked for in the root of the classpath. */ public static final String NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME = "net.sf.ehcache.configurationResourceName"; private static final EhCacheMessageLogger LOG = Logger.getMessageLogger( EhCacheMessageLogger.class, AbstractEhcacheRegionFactory.class.getName() ); /** * MBean registration helper class instance for Ehcache Hibernate MBeans. */ protected final ProviderMBeanRegistrationHelper mbeanRegistrationHelper = new ProviderMBeanRegistrationHelper(); /** * Ehcache CacheManager that supplied Ehcache instances for this Hibernate RegionFactory. */ protected volatile CacheManager manager; /** * Settings object for the Hibernate persistence unit. */ protected Settings settings; /** * {@link EhcacheAccessStrategyFactory} for creating various access strategies */ protected final EhcacheAccessStrategyFactory accessStrategyFactory = new NonstopAccessStrategyFactory( new EhcacheAccessStrategyFactoryImpl() ); /** * {@inheritDoc} *

* In Ehcache we default to minimal puts since this should have minimal to no * affect on unclustered users, and has great benefit for clustered users. * * @return true, optimize for minimal puts */ @Override public boolean isMinimalPutsEnabledByDefault() { } @Override public long nextTimestamp() { return net.sf.ehcache.util.Timestamper.next(); } @Override public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheEntityRegion( accessStrategyFactory, getCache( regionName ), settings, metadata, properties ); } @Override public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheNaturalIdRegion( accessStrategyFactory, getCache( regionName ), settings, metadata, properties ); } @Override public CollectionRegion buildCollectionRegion( String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheCollectionRegion( accessStrategyFactory, getCache( regionName ), settings, metadata, properties ); } @Override public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheQueryResultsRegion( accessStrategyFactory, getCache( regionName ), properties ); } @InjectService @SuppressWarnings("UnusedDeclaration") public void setClassLoaderService(ClassLoaderService classLoaderService) { this.classLoaderService = classLoaderService; } private ClassLoaderService classLoaderService; @Override public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheTimestampsRegion( accessStrategyFactory, getCache( regionName ), properties ); } private Ehcache getCache(String name) throws CacheException { try { Ehcache cache = manager.getEhcache( name ); if ( cache == null ) { LOG.unableToFindEhCacheConfiguration( name 
); manager.addCache( name ); cache = manager.getEhcache( name ); LOG.debug( "started EHCache region: " + name ); } HibernateEhcacheUtils.validateEhcache( cache ); return cache; } catch (net.sf.ehcache.CacheException e) { throw new CacheException( e ); } } /** * Load a resource from the classpath. */ protected URL loadResource(String configurationResourceName) { URL url = null; if ( classLoaderService != null ) { url = classLoaderService.locateResource( configurationResourceName ); } if ( url == null ) { final ClassLoader standardClassloader = ClassLoaderUtil.getStandardClassLoader(); if ( standardClassloader != null ) { url = standardClassloader.getResource( configurationResourceName ); } if ( url == null ) { url = AbstractEhcacheRegionFactory.class.getResource( configurationResourceName ); } if ( url == null ) { try { url = new URL( configurationResourceName ); } catch ( MalformedURLException e ) { // ignore } } } if ( LOG.isDebugEnabled() ) { LOG.debugf( "Creating EhCacheRegionFactory from a specified resource: %s. Resolved to URL: %s", configurationResourceName, url ); } if ( url == null ) { LOG.unableToLoadConfiguration( configurationResourceName ); } return url; } /** * Default access-type used when the configured using JPA 2.0 config. JPA 2.0 allows @Cacheable(true) to be attached to an * entity without any access type or usage qualification. *

* We are conservative here in specifying {@link AccessType#READ_WRITE} so as to follow the mantra of "do no harm". *

* This is a Hibernate 3.5 method. */ public AccessType getDefaultAccessType() { return AccessType.READ_WRITE; } >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 }

Solution content
 */
abstract class AbstractEhcacheRegionFactory extends AbstractRegionFactory {

    /**
     * The Hibernate system property specifying the location of the ehcache configuration file name.
     * 

* If not set, ehcache.xml will be looked for in the root of the classpath. *

* If set to say ehcache-1.xml, ehcache-1.xml will be looked for in the root of the classpath. */ public static final String NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME = "net.sf.ehcache.configurationResourceName"; private static final EhCacheMessageLogger LOG = Logger.getMessageLogger( EhCacheMessageLogger.class, AbstractEhcacheRegionFactory.class.getName() ); /** * MBean registration helper class instance for Ehcache Hibernate MBeans. */ protected final ProviderMBeanRegistrationHelper mbeanRegistrationHelper = new ProviderMBeanRegistrationHelper(); /** * Ehcache CacheManager that supplied Ehcache instances for this Hibernate RegionFactory. */ protected volatile CacheManager manager; /** * {@link EhcacheAccessStrategyFactory} for creating various access strategies */ protected final EhcacheAccessStrategyFactory accessStrategyFactory = new NonstopAccessStrategyFactory( new EhcacheAccessStrategyFactoryImpl() ); /** * Whether to optimize for minimals puts or minimal gets. *

* Indicates whether when operating in non-strict read/write or read-only mode * Hibernate should optimize the access patterns for minimal puts or minimal gets. * In Ehcache we default to minimal puts since this should have minimal to no configurationResourceName, * affect on unclustered users, and has great benefit for clustered users. *

* This setting can be overridden by setting the "hibernate.cache.use_minimal_puts" * property in the Hibernate configuration. * * @return true, optimize for minimal puts */ public boolean isMinimalPutsEnabledByDefault() { return true; } private Ehcache getCache(String name) throws CacheException { try { Ehcache cache = manager.getEhcache( name ); if ( cache == null ) { LOG.unableToFindEhCacheConfiguration( name ); manager.addCache( name ); cache = manager.getEhcache( name ); LOG.debug( "started EHCache region: " + name ); } HibernateEhcacheUtils.validateEhcache( cache ); return cache; } catch (net.sf.ehcache.CacheException e) { throw new CacheException( e ); } } /** * Load a resource from the classpath. */ protected URL loadResource(String configurationResourceName) { URL url = null; if ( classLoaderService != null ) { url = classLoaderService.locateResource( configurationResourceName ); } if ( url == null ) { final ClassLoader standardClassloader = ClassLoaderUtil.getStandardClassLoader(); if ( standardClassloader != null ) { url = standardClassloader.getResource( configurationResourceName ); } if ( url == null ) { url = AbstractEhcacheRegionFactory.class.getResource( configurationResourceName ); } if ( url == null ) { try { url = new URL( configurationResourceName ); } catch ( MalformedURLException e ) { // ignore } } } if ( LOG.isDebugEnabled() ) { LOG.debugf( "Creating EhCacheRegionFactory from a specified resource: %s. Resolved to URL: %s", url ); } if ( url == null ) { LOG.unableToLoadConfiguration( configurationResourceName ); } return url; } /** * Default access-type used when the configured using JPA 2.0 config. JPA 2.0 allows @Cacheable(true) to be attached to an * entity without any access type or usage qualification. *

* We are conservative here in specifying {@link AccessType#READ_WRITE} so as to follow the mantra of "do no harm". *

* This is a Hibernate 3.5 method. */ public AccessType getDefaultAccessType() { return AccessType.READ_WRITE; } @Override public long nextTimestamp() { return net.sf.ehcache.util.Timestamper.next(); } @Override public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheEntityRegion( accessStrategyFactory, getCache( regionName ),isMinimalPutsEnabled(), metadata, properties ); } @Override public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheNaturalIdRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties ); } @Override public CollectionRegion buildCollectionRegion( String regionName, Properties properties, CacheDataDescription metadata) throws CacheException { return new EhcacheCollectionRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties ); } @Override public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheQueryResultsRegion( accessStrategyFactory, getCache( regionName ), properties ); } @InjectService @SuppressWarnings("UnusedDeclaration") public void setClassLoaderService(ClassLoaderService classLoaderService) { this.classLoaderService = classLoaderService; } private ClassLoaderService classLoaderService; @Override public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException { return new EhcacheTimestampsRegion( accessStrategyFactory, getCache( regionName ), properties ); } }

File
AbstractEhcacheRegionFactory.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
Comment
Method declaration
Method invocation
Chunk
Conflicting content
import org.jboss.logging.Logger;

import org.hibernate.cache.CacheException;
<<<<<<< HEAD
import org.hibernate.cache.ehcache.internal.util.HibernateUtil;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
=======
import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils;
import org.hibernate.cfg.Settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * A non-singleton EhCacheRegionFactory implementation.
Solution content
import org.jboss.logging.Logger;

import org.hibernate.cache.CacheException;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils;

/**
 * A non-singleton EhCacheRegionFactory implementation.
File
EhCacheRegionFactory.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
<<<<<<< HEAD
			EhCacheRegionFactory.class.getName()
	);
    private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
            EhCacheMessageLogger.class,
            EhCacheRegionFactory.class.getName()
    );

	@Override
	public void start() {
=======
	/**
	 * Creates a non-singleton EhCacheRegionFactory
	 */
	@SuppressWarnings("UnusedDeclaration")
	public EhCacheRegionFactory() {
	}

	/**
	 * Creates a non-singleton EhCacheRegionFactory
	 *
	 * @param prop Not used
	 */
	@SuppressWarnings("UnusedDeclaration")
	public EhCacheRegionFactory(Properties prop) {
		super();
	}

	@Override
	public void start(Settings settings, Properties properties) throws CacheException {
		this.settings = settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		if ( manager != null ) {
			LOG.attemptToRestartAlreadyStartedEhCacheProvider();
			return;
Solution content
			EhCacheRegionFactory.class.getName()
	);


	/**
	 * Creates a non-singleton EhCacheRegionFactory
	 */
	@SuppressWarnings("UnusedDeclaration")
	public EhCacheRegionFactory() {
	}

	/**
	 * Creates a non-singleton EhCacheRegionFactory
	 *
	 * @param prop Not used
	 */
	@SuppressWarnings("UnusedDeclaration")
	public EhCacheRegionFactory(Properties prop) {
		super();
	}

	@Override
	public void start() {
		if ( manager != null ) {
			LOG.attemptToRestartAlreadyStartedEhCacheProvider();
			return;
File
EhCacheRegionFactory.java
Developer's decision
Combination
Kind of conflict
Annotation
Attribute
Comment
Method declaration
Method invocation
Method signature
Variable
Chunk
Conflicting content
		}

		try {
<<<<<<< HEAD
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
					StandardConverters.STRING, null
			);
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				Configuration configuration = ConfigurationFactory.parseConfiguration();
				manager = new CacheManager( configuration );
			}
			else {
				URL url;
				try {
					url = new URL( configurationResourceName );
				}
				catch ( MalformedURLException e ) {
					url = loadResource( configurationResourceName );
				}
				Configuration configuration = HibernateUtil.loadAndCorrectConfiguration( url );
				manager = new CacheManager( configuration );
			}
			Properties properties = new Properties(  );
			properties.putAll( configurationService.getSettings() );
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch ( net.sf.ehcache.CacheException e ) {
=======
			String configurationResourceName = null;
			if ( properties != null ) {
				configurationResourceName = (String) properties.get( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME );
			}
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				final Configuration configuration = ConfigurationFactory.parseConfiguration();
				manager = new CacheManager( configuration );
			}
			else {
				final URL url = loadResource( configurationResourceName );
				final Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
				manager = new CacheManager( configuration );
			}
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch (net.sf.ehcache.CacheException e) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
			if ( e.getMessage().startsWith(
					"Cannot parseConfiguration CacheManager. Attempt to create a new instance of " +
							"CacheManager using the diskStorePath"
Solution content
		}

		try {
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			String configurationResourceName = configurationService.getSetting(
					NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
					StandardConverters.STRING, null
			);
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				Configuration configuration = ConfigurationFactory.parseConfiguration();
				manager = new CacheManager( configuration );
			}
			else {
				URL url;
				try {
					url = new URL( configurationResourceName );
				}
				catch ( MalformedURLException e ) {
					url = loadResource( configurationResourceName );
				}
				Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
				manager = new CacheManager( configuration );
			}
			Properties properties = new Properties();
			properties.putAll( configurationService.getSettings() );
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch ( net.sf.ehcache.CacheException e ) {
			if ( e.getMessage().startsWith(
					"Cannot parseConfiguration CacheManager. Attempt to create a new instance of " +
							"CacheManager using the diskStorePath"
File
EhCacheRegionFactory.java
Developer's decision
Manual
Kind of conflict
Catch clause
If statement
Method invocation
Variable
Chunk
Conflicting content
import org.jboss.logging.Logger;

import org.hibernate.cache.CacheException;
<<<<<<< HEAD
import org.hibernate.cache.ehcache.internal.util.HibernateUtil;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
=======
import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils;
import org.hibernate.cfg.Settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

/**
 * A singleton EhCacheRegionFactory implementation.
Solution content
import org.jboss.logging.Logger;

import org.hibernate.cache.CacheException;
import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;

/**
 * A singleton EhCacheRegionFactory implementation.
File
SingletonEhCacheRegionFactory.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
	}

<<<<<<< HEAD

	private static final AtomicInteger REFERENCE_COUNT = new AtomicInteger();
	@Override
	public void start() {
		try {
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
					StandardConverters.STRING, null
			);
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				manager = CacheManager.create();
				REFERENCE_COUNT.incrementAndGet();
			}
			else {
				URL url;
				try {
					url = new URL( configurationResourceName );
				}
				catch ( MalformedURLException e ) {
					if ( !configurationResourceName.startsWith( "/" ) ) {
						configurationResourceName = "/" + configurationResourceName;
						LOG.debugf(
								"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
								configurationResourceName
						);
					}
					url = loadResource( configurationResourceName );
				}
				Configuration configuration = HibernateUtil.loadAndCorrectConfiguration( url );
				manager = CacheManager.create( configuration );
				REFERENCE_COUNT.incrementAndGet();
			}
			Properties properties = new Properties(  );
			properties.putAll( configurationService.getSettings() );
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch ( net.sf.ehcache.CacheException e ) {
			throw new CacheException( e );
		}
	}

    /**
     * {@inheritDoc}
     */
	@Override
    public void stop() {
        try {
            if ( manager != null ) {
                if ( REFERENCE_COUNT.decrementAndGet() == 0 ) {
                    manager.shutdown();
                }
                manager = null;
            }
        }
        catch ( net.sf.ehcache.CacheException e ) {
            throw new CacheException( e );
        }
    }
=======
	/**
	 * Constructs a SingletonEhCacheRegionFactory
	 */
	@SuppressWarnings("UnusedDeclaration")
	public SingletonEhCacheRegionFactory() {

	/**
	 * Constructs a SingletonEhCacheRegionFactory
	 *
	 * @param prop Not used
	 */
	@SuppressWarnings("UnusedDeclaration")
	public SingletonEhCacheRegionFactory(Properties prop) {
		super();
	}

	@Override
	public void start(Settings settings, Properties properties) throws CacheException {
		this.settings = settings;
		try {
			String configurationResourceName = null;
			if ( properties != null ) {
				configurationResourceName = (String) properties.get( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME );
			}
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				manager = CacheManager.create();
				REFERENCE_COUNT.incrementAndGet();
			}
			else {
				URL url;
				try {
					url = new URL( configurationResourceName );
				}
				catch (MalformedURLException e) {
					if ( !configurationResourceName.startsWith( "/" ) ) {
						configurationResourceName = "/" + configurationResourceName;
						LOG.debugf(
								"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
								configurationResourceName
						);
					}
					url = loadResource( configurationResourceName );
				}
				final Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
				manager = CacheManager.create( configuration );
				REFERENCE_COUNT.incrementAndGet();
			}
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch (net.sf.ehcache.CacheException e) {
			throw new CacheException( e );
		}
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	@Override
	public void stop() {
Solution content
	private static final AtomicInteger REFERENCE_COUNT = new AtomicInteger();

	@Override
	public void start() {
		try {
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
					StandardConverters.STRING, null
			);
			if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
				manager = CacheManager.create();
				REFERENCE_COUNT.incrementAndGet();
			}
			else {
				URL url;
				try {
					url = new URL( configurationResourceName );
				}
				catch ( MalformedURLException e ) {
					if ( !configurationResourceName.startsWith( "/" ) ) {
						configurationResourceName = "/" + configurationResourceName;
						LOG.debugf(
								"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
								configurationResourceName
						);
					}
					url = loadResource( configurationResourceName );
				}
				Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
				manager = CacheManager.create( configuration );
				REFERENCE_COUNT.incrementAndGet();
			}
			Properties properties = new Properties(  );
			properties.putAll( configurationService.getSettings() );
			mbeanRegistrationHelper.registerMBean( manager, properties );
		}
		catch ( net.sf.ehcache.CacheException e ) {
			throw new CacheException( e );
		}
	}

    /**
     * {@inheritDoc}
     */
	@Override
    public void stop() {
        try {
            if ( manager != null ) {
                if ( REFERENCE_COUNT.decrementAndGet() == 0 ) {
                    manager.shutdown();
                }
                manager = null;
            }
        }
        catch ( net.sf.ehcache.CacheException e ) {
            throw new CacheException( e );
        }
    }
	/**
	 * Constructs a SingletonEhCacheRegionFactory
	 */
	@SuppressWarnings("UnusedDeclaration")
	public SingletonEhCacheRegionFactory() {
	}

	/**
	 * Constructs a SingletonEhCacheRegionFactory
	 *
	 * @param prop Not used
	 */
	@SuppressWarnings("UnusedDeclaration")
	public SingletonEhCacheRegionFactory(Properties prop) {
		super();
	}

File
SingletonEhCacheRegionFactory.java
Developer's decision
Manual
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
		super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
	}

<<<<<<< HEAD

    /**
     * Constructs an EhcacheCollectionRegion around the given underlying cache.
     *
     * @param accessStrategyFactory
     */
    public EhcacheCollectionRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                                   CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }

    /**
     * {@inheritDoc}
     */
    public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
        return accessStrategyFactory.createCollectionRegionAccessStrategy( this, accessType );
    }
}
=======
	@Override
	public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
		return getAccessStrategyFactory().createCollectionRegionAccessStrategy( this, accessType );
	}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
Solution content
    public EhcacheCollectionRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                                   CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }

    /**
     * {@inheritDoc}
     */
    public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
        return getAccessStrategyFactory().createCollectionRegionAccessStrategy( this, accessType );
    }
}
File
EhcacheCollectionRegion.java
Developer's decision
Combination
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
		super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
	}

<<<<<<< HEAD

    /**
     * Constructs an EhcacheEntityRegion around the given underlying cache.
     *
     * @param accessStrategyFactory
     */
    public EhcacheEntityRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                               CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }

    /**
     * {@inheritDoc}
     */
    public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
        return accessStrategyFactory.createEntityRegionAccessStrategy( this, accessType );
    }
}
=======
	@Override
	public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
		return getAccessStrategyFactory().createEntityRegionAccessStrategy( this, accessType );
	}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
Solution content
    /**
     * Constructs an EhcacheEntityRegion around the given underlying cache.
     *
	 * @param accessStrategyFactory The factory for building needed EntityRegionAccessStrategy instance
	 * @param underlyingCache The ehcache cache instance
     */
    public EhcacheEntityRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                               CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }

    /**
     * {@inheritDoc}
     */
    public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
        return getAccessStrategyFactory().createEntityRegionAccessStrategy( this, accessType );
    }
}
File
EhcacheEntityRegion.java
Developer's decision
Manual
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
 * @author Alex Snaps
 */
public class EhcacheNaturalIdRegion extends EhcacheTransactionalDataRegion implements NaturalIdRegion {
<<<<<<< HEAD


    /**
     * Constructs an EhcacheNaturalIdRegion around the given underlying cache.
     *
     * @param accessStrategyFactory
     */
    public EhcacheNaturalIdRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                                   CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }
=======
	/**
	 * Constructs an EhcacheNaturalIdRegion around the given underlying cache.
	 *
	 * @param accessStrategyFactory The factory for building needed NaturalIdRegionAccessStrategy instance
	 * @param underlyingCache The ehcache cache instance
	 * @param settings The Hibernate settings
	 * @param metadata Metadata about the data to be cached in this region
	 * @param properties Any additional[ properties
	 */
	public EhcacheNaturalIdRegion(
			EhcacheAccessStrategyFactory accessStrategyFactory,
			Ehcache underlyingCache,
			Settings settings,
			CacheDataDescription metadata,
			Properties properties) {
		super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	@Override
	public NaturalIdRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
Solution content
 * @author Alex Snaps
 */
public class EhcacheNaturalIdRegion extends EhcacheTransactionalDataRegion implements NaturalIdRegion {

    /**
     * Constructs an EhcacheNaturalIdRegion around the given underlying cache.
     *
     * @param accessStrategyFactory
     */
    public EhcacheNaturalIdRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
                                   CacheDataDescription metadata, Properties properties) {
        super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
    }

	@Override
	public NaturalIdRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
File
EhcacheNaturalIdRegion.java
Developer's decision
Version 1
Kind of conflict
Comment
Method declaration
Chunk
Conflicting content
public class EhcacheTransactionalDataRegion extends EhcacheDataRegion implements TransactionalDataRegion {
	private static final int LOCAL_LOCK_PROVIDER_CONCURRENCY = 128;

<<<<<<< HEAD
	protected final boolean isMinimalPutsEnabled;
=======
	private final Settings settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	/**
	 * Metadata associated with the objects stored in the region.
Solution content
public class EhcacheTransactionalDataRegion extends EhcacheDataRegion implements TransactionalDataRegion {
	private static final int LOCAL_LOCK_PROVIDER_CONCURRENCY = 128;

	protected final boolean isMinimalPutsEnabled;

	/**
	 * Metadata associated with the objects stored in the region.
File
EhcacheTransactionalDataRegion.java
Developer's decision
Version 1
Kind of conflict
Attribute
Chunk
Conflicting content
	/**
	 * Construct an transactional Hibernate cache region around the given Ehcache instance.
	 */
<<<<<<< HEAD
	EhcacheTransactionalDataRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, boolean isMinimalPutsEnabled,
								   CacheDataDescription metadata, Properties properties) {
=======
	EhcacheTransactionalDataRegion(
			EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, Settings settings,
			CacheDataDescription metadata, Properties properties) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		super( accessStrategyFactory, cache, properties );
		this.isMinimalPutsEnabled = isMinimalPutsEnabled;
		this.metadata = metadata;
Solution content
	/**
	 * Construct an transactional Hibernate cache region around the given Ehcache instance.
	 */
	EhcacheTransactionalDataRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, boolean isMinimalPutsEnabled,
								   CacheDataDescription metadata, Properties properties) {
		super( accessStrategyFactory, cache, properties );
		this.isMinimalPutsEnabled = isMinimalPutsEnabled;
		this.metadata = metadata;
File
EhcacheTransactionalDataRegion.java
Developer's decision
Version 1
Kind of conflict
Method signature
Chunk
Conflicting content
		}
	}

<<<<<<< HEAD
	public boolean isMinimalPutsEnabled(){
		return isMinimalPutsEnabled;
=======
	/**
	 * Access the Hibernate settings associated with the persistence unit.
	 *
	 * @return settings
	 */
	public Settings getSettings() {
		return settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	@Override
Solution content
		}
		this.locksAreIndependentOfCache = lockProvider instanceof StripedReadWriteLockSync;
	}

	public boolean isMinimalPutsEnabled(){
		return isMinimalPutsEnabled;
	}

	@Override
File
EhcacheTransactionalDataRegion.java
Developer's decision
Version 1
Kind of conflict
Attribute
Comment
Method signature
Return statement
Chunk
Conflicting content
	 * @param region The wrapped region.  Accessible to subclasses via {@link #region()}
	 * @param settings The Hibernate settings.  Accessible to subclasses via {@link #settings()}
	 */
<<<<<<< HEAD
	protected final T region;
=======
	AbstractEhcacheAccessStrategy(T region, Settings settings) {
		this.region = region;
		this.settings = settings;
	}

	/**
	 * The wrapped Hibernate cache region.
	 */
	protected T region() {
		return region;
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	/**
	 * The settings for this persistence unit.
Solution content
	/**
	 * The wrapped Hibernate cache region.
	 */
	protected T region() {
		return region;
	}

	/**
	 * The settings for this persistence unit.
File
AbstractEhcacheAccessStrategy.java
Developer's decision
Combination
Kind of conflict
Attribute
Comment
Method declaration
Chunk
Conflicting content
	/**
	 * The settings for this persistence unit.
	 */
<<<<<<< HEAD
	AbstractEhcacheAccessStrategy(T region) {
		this.region = region;
=======
	protected Settings settings() {
		return settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	/**
Solution content
	/**
	 * The settings for this persistence unit.
	 */
	AbstractEhcacheAccessStrategy(T region) {
		this.region = region;
	}

	/**
File
AbstractEhcacheAccessStrategy.java
Developer's decision
Version 1
Kind of conflict
Attribute
Method signature
Return statement
Chunk
Conflicting content
 * @author Chris Dennis
 * @author Alex Snaps
 */
<<<<<<< HEAD
abstract class AbstractReadWriteEhcacheAccessStrategy
        extends AbstractEhcacheAccessStrategy {

    private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
            EhCacheMessageLogger.class,
            AbstractReadWriteEhcacheAccessStrategy.class.getName()
    );
    private final UUID uuid = UUID.randomUUID();
    private final AtomicLong nextLockId = new AtomicLong();

    private final Comparator versionComparator;

    /**
     * Creates a read/write cache access strategy around the given cache region.
     */
    public AbstractReadWriteEhcacheAccessStrategy(T region) {
        super( region );
        this.versionComparator = region.getCacheDataDescription().getVersionComparator();
    }

    /**
     * Returns null if the item is not readable.  Locked items are not readable, nor are items created
     * after the start of this transaction.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#get(java.lang.Object, long)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#get(java.lang.Object, long)
     */
    public final Object get(Object key, long txTimestamp) throws CacheException {
        readLockIfNeeded( key );
        try {
            Lockable item = (Lockable) region.get( key );

            boolean readable = item != null && item.isReadable( txTimestamp );
            if ( readable ) {
                return item.getValue();
            }
            else {
                return null;
            }
        }
        finally {
            readUnlockIfNeeded( key );
        }
    }

    /**
     * Returns false and fails to put the value if there is an existing un-writeable item mapped to this
     * key.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
     */
    @Override
    public final boolean putFromLoad(Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride)
            throws CacheException {
        region.writeLock( key );
        try {
            Lockable item = (Lockable) region.get( key );
            boolean writeable = item == null || item.isWriteable( txTimestamp, version, versionComparator );
            if ( writeable ) {
                region.put( key, new Item( value, version, region.nextTimestamp() ) );
                return true;
            }
            else {
                return false;
            }
        }
        finally {
            region.writeUnlock( key );
        }
    }

    /**
     * Soft-lock a cache item.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
     */
    public final SoftLock lockItem(Object key, Object version) throws CacheException {
        region.writeLock( key );
        try {
            Lockable item = (Lockable) region.get( key );
            long timeout = region.nextTimestamp() + region.getTimeout();
            final Lock lock = ( item == null ) ? new Lock( timeout, uuid, nextLockId(), version ) : item.lock(
                    timeout,
                    uuid,
                    nextLockId()
            );
            region.put( key, lock );
            return lock;
        }
        finally {
            region.writeUnlock( key );
        }
    }

    /**
     * Soft-unlock a cache item.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
     */
    public final void unlockItem(Object key, SoftLock lock) throws CacheException {
        region.writeLock( key );
        try {
            Lockable item = (Lockable) region.get( key );

            if ( ( item != null ) && item.isUnlockable( lock ) ) {
                decrementLock( key, (Lock) item );
            }
            else {
                handleLockExpiry( key, item );
            }
        }
        finally {
            region.writeUnlock( key );
        }
    }

    private long nextLockId() {
        return nextLockId.getAndIncrement();
    }

    /**
     * Unlock and re-put the given key, lock combination.
     */
    protected void decrementLock(Object key, Lock lock) {
        lock.unlock( region.nextTimestamp() );
        region.put( key, lock );
    }

    /**
     * Handle the timeout of a previous lock mapped to this key
     */
    protected void handleLockExpiry(Object key, Lockable lock) {
        LOG.softLockedCacheExpired( region.getName(), key, lock == null ? "(null)" : lock.toString() );

        long ts = region.nextTimestamp() + region.getTimeout();
        // create new lock that times out immediately
        Lock newLock = new Lock( ts, uuid, nextLockId.getAndIncrement(), null );
        newLock.unlock( ts );
        region.put( key, newLock );
    }

    /**
     * Read lock the entry for the given key if internal cache locks will not provide correct exclusion.
     */
    private void readLockIfNeeded(Object key) {
        if ( region.locksAreIndependentOfCache() ) {
            region.readLock( key );
        }
    }

    /**
     * Read unlock the entry for the given key if internal cache locks will not provide correct exclusion.
     */
    private void readUnlockIfNeeded(Object key) {
        if ( region.locksAreIndependentOfCache() ) {
            region.readUnlock( key );
        }
    }

    /**
     * Interface type implemented by all wrapper objects in the cache.
     */
    protected static interface Lockable {

        /**
         * Returns true if the enclosed value can be read by a transaction started at the given time.
         */
        public boolean isReadable(long txTimestamp);

        /**
         * Returns true if the enclosed value can be replaced with one of the given version by a
         * transaction started at the given time.
         */
        public boolean isWriteable(long txTimestamp, Object version, Comparator versionComparator);

        /**
         * Returns the enclosed value.
         */
        public Object getValue();

        /**
         * Returns true if the given lock can be unlocked using the given SoftLock instance as a handle.
         */
        public boolean isUnlockable(SoftLock lock);

        /**
         * Locks this entry, stamping it with the UUID and lockId given, with the lock timeout occuring at the specified
         * time.  The returned Lock object can be used to unlock the entry in the future.
         */
        public Lock lock(long timeout, UUID uuid, long lockId);
    }

    /**
     * Wrapper type representing unlocked items.
     */
    protected final static class Item implements Serializable, Lockable {

        private static final long serialVersionUID = 1L;
        private final Object value;
        private final Object version;
        private final long timestamp;

        /**
         * Creates an unlocked item wrapping the given value with a version and creation timestamp.
         */
        Item(Object value, Object version, long timestamp) {
            this.value = value;
            this.version = version;
            this.timestamp = timestamp;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isReadable(long txTimestamp) {
            return txTimestamp > timestamp;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
            return version != null && versionComparator.compare( version, newVersion ) < 0;
        }

        /**
         * {@inheritDoc}
         */
        public Object getValue() {
            return value;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isUnlockable(SoftLock lock) {
            return false;
        }

        /**
         * {@inheritDoc}
         */
        public Lock lock(long timeout, UUID uuid, long lockId) {
            return new Lock( timeout, uuid, lockId, version );
        }
    }

    /**
     * Wrapper type representing locked items.
     */
    protected final static class Lock implements Serializable, Lockable, SoftLock {

        private static final long serialVersionUID = 2L;

        private final UUID sourceUuid;
        private final long lockId;
        private final Object version;

        private long timeout;
        private boolean concurrent;
        private int multiplicity = 1;
        private long unlockTimestamp;

        /**
         * Creates a locked item with the given identifiers and object version.
         */
        Lock(long timeout, UUID sourceUuid, long lockId, Object version) {
            this.timeout = timeout;
            this.lockId = lockId;
            this.version = version;
            this.sourceUuid = sourceUuid;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isReadable(long txTimestamp) {
            return false;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
            if ( txTimestamp > timeout ) {
                // if timedout then allow write
                return true;
            }
            if ( multiplicity > 0 ) {
                // if still locked then disallow write
                return false;
            }
            return version == null ? txTimestamp > unlockTimestamp : versionComparator.compare(
                    version,
                    newVersion
            ) < 0;
        }

        /**
         * {@inheritDoc}
         */
        public Object getValue() {
            return null;
        }

        /**
         * {@inheritDoc}
         */
        public boolean isUnlockable(SoftLock lock) {
            return equals( lock );
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean equals(Object o) {
            if ( o == this ) {
                return true;
            }
            else if ( o instanceof Lock ) {
                return ( lockId == ( (Lock) o ).lockId ) && sourceUuid.equals( ( (Lock) o ).sourceUuid );
            }
            else {
                return false;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public int hashCode() {
            int hash = ( sourceUuid != null ? sourceUuid.hashCode() : 0 );
            int temp = (int) lockId;
            for ( int i = 1; i < Long.SIZE / Integer.SIZE; i++ ) {
                temp ^= ( lockId >>> ( i * Integer.SIZE ) );
            }
            return hash + temp;
        }

        /**
         * Returns true if this Lock has been concurrently locked by more than one transaction.
         */
        public boolean wasLockedConcurrently() {
            return concurrent;
        }

        /**
         * {@inheritDoc}
         */
        public Lock lock(long timeout, UUID uuid, long lockId) {
            concurrent = true;
            multiplicity++;
            this.timeout = timeout;
            return this;
        }

        /**
         * Unlocks this Lock, and timestamps the unlock event.
         */
        public void unlock(long timestamp) {
            if ( --multiplicity == 0 ) {
                unlockTimestamp = timestamp;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder( "Lock Source-UUID:" + sourceUuid + " Lock-ID:" + lockId );
            return sb.toString();
        }
    }
=======
abstract class AbstractReadWriteEhcacheAccessStrategy
		extends AbstractEhcacheAccessStrategy {

	private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
			EhCacheMessageLogger.class,
			AbstractReadWriteEhcacheAccessStrategy.class.getName()
	);

	private final UUID uuid = UUID.randomUUID();
	private final AtomicLong nextLockId = new AtomicLong();

	private final Comparator versionComparator;

	/**
	 * Creates a read/write cache access strategy around the given cache region.
	 */
	public AbstractReadWriteEhcacheAccessStrategy(T region, Settings settings) {
		super( region, settings );
		this.versionComparator = region.getCacheDataDescription().getVersionComparator();
	}

	/**
	 * Returns null if the item is not readable.  Locked items are not readable, nor are items created
	 * after the start of this transaction.
	 *
	 * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#get(java.lang.Object, long)
	 * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#get(java.lang.Object, long)
	 */
	public final Object get(Object key, long txTimestamp) throws CacheException {
		readLockIfNeeded( key );
		try {
			final Lockable item = (Lockable) region().get( key );

			final boolean readable = item != null && item.isReadable( txTimestamp );
			if ( readable ) {
				return item.getValue();
			}
			else {
				return null;
			}
		}
		finally {
			readUnlockIfNeeded( key );
		}
	}

	/**
	 * Returns false and fails to put the value if there is an existing un-writeable item mapped to this
	 * key.
	 *
	 * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
	 * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
	 */
	@Override
	public final boolean putFromLoad(
			Object key,
			Object value,
			long txTimestamp,
			Object version,
			boolean minimalPutOverride)
			throws CacheException {
		region().writeLock( key );
		try {
			final Lockable item = (Lockable) region().get( key );
			final boolean writeable = item == null || item.isWriteable( txTimestamp, version, versionComparator );
			if ( writeable ) {
				region().put( key, new Item( value, version, region().nextTimestamp() ) );
				return true;
			}
			else {
				return false;
			}
		}
		finally {
			region().writeUnlock( key );
		}
	}

	/**
	 * Soft-lock a cache item.
	 *
	 * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
	 * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
	 */
	public final SoftLock lockItem(Object key, Object version) throws CacheException {
		region().writeLock( key );
		try {
			final Lockable item = (Lockable) region().get( key );
			final long timeout = region().nextTimestamp() + region().getTimeout();
			final Lock lock = (item == null) ? new Lock( timeout, uuid, nextLockId(), version ) : item.lock(
					timeout,
					uuid,
					nextLockId()
			);
			region().put( key, lock );
			return lock;
		}
		finally {
			region().writeUnlock( key );
		}
	}

	/**
	 * Soft-unlock a cache item.
	 *
	 * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
	 * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
	 */
	public final void unlockItem(Object key, SoftLock lock) throws CacheException {
		region().writeLock( key );
		try {
			final Lockable item = (Lockable) region().get( key );

			if ( (item != null) && item.isUnlockable( lock ) ) {
				decrementLock( key, (Lock) item );
			}
			else {
				handleLockExpiry( key, item );
			}
		}
		finally {
			region().writeUnlock( key );
		}
	}

	private long nextLockId() {
		return nextLockId.getAndIncrement();
	}

	/**
	 * Unlock and re-put the given key, lock combination.
	 */
	protected void decrementLock(Object key, Lock lock) {
		lock.unlock( region().nextTimestamp() );
		region().put( key, lock );
	}

	/**
	 * Handle the timeout of a previous lock mapped to this key
	 */
	protected void handleLockExpiry(Object key, Lockable lock) {
		LOG.softLockedCacheExpired( region().getName(), key, lock == null ? "(null)" : lock.toString() );

		final long ts = region().nextTimestamp() + region().getTimeout();
		// create new lock that times out immediately
		final Lock newLock = new Lock( ts, uuid, nextLockId.getAndIncrement(), null );
		newLock.unlock( ts );
		region().put( key, newLock );
	}

	/**
	 * Read lock the entry for the given key if internal cache locks will not provide correct exclusion.
	 */
	private void readLockIfNeeded(Object key) {
		if ( region().locksAreIndependentOfCache() ) {
			region().readLock( key );
		}
	}

	/**
	 * Read unlock the entry for the given key if internal cache locks will not provide correct exclusion.
	 */
	private void readUnlockIfNeeded(Object key) {
		if ( region().locksAreIndependentOfCache() ) {
			region().readUnlock( key );
		}
	}

	/**
	 * Interface type implemented by all wrapper objects in the cache.
	 */
	protected static interface Lockable {

		/**
		 * Returns true if the enclosed value can be read by a transaction started at the given time.
		 */
		public boolean isReadable(long txTimestamp);

		/**
		 * Returns true if the enclosed value can be replaced with one of the given version by a
		 * transaction started at the given time.
		 */
		public boolean isWriteable(long txTimestamp, Object version, Comparator versionComparator);

		/**
		 * Returns the enclosed value.
		 */
		public Object getValue();

		/**
		 * Returns true if the given lock can be unlocked using the given SoftLock instance as a handle.
		 */
		public boolean isUnlockable(SoftLock lock);

		/**
		 * Locks this entry, stamping it with the UUID and lockId given, with the lock timeout occuring at the specified
		 * time.  The returned Lock object can be used to unlock the entry in the future.
		 */
		public Lock lock(long timeout, UUID uuid, long lockId);
	}

	/**
	 * Wrapper type representing unlocked items.
	 */
	protected static final class Item implements Serializable, Lockable {
		private static final long serialVersionUID = 1L;
		private final Object value;
		private final Object version;
		private final long timestamp;

		/**
		 * Creates an unlocked item wrapping the given value with a version and creation timestamp.
		 */
		Item(Object value, Object version, long timestamp) {
			this.value = value;
			this.version = version;
			this.timestamp = timestamp;
		}

		@Override
		public boolean isReadable(long txTimestamp) {
			return txTimestamp > timestamp;
		}

		@Override
		@SuppressWarnings("unchecked")
		public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
			return version != null && versionComparator.compare( version, newVersion ) < 0;
		}

		@Override
		public Object getValue() {
			return value;
		}

		@Override
		public boolean isUnlockable(SoftLock lock) {
			return false;
		}

		@Override
		public Lock lock(long timeout, UUID uuid, long lockId) {
			return new Lock( timeout, uuid, lockId, version );
		}
	}

	/**
	 * Wrapper type representing locked items.
	 */
	protected static final class Lock implements Serializable, Lockable, SoftLock {
		private static final long serialVersionUID = 2L;

		private final UUID sourceUuid;
		private final long lockId;
		private final Object version;

		private long timeout;
		private boolean concurrent;
		private int multiplicity = 1;
		private long unlockTimestamp;

		/**
		 * Creates a locked item with the given identifiers and object version.
		 */
		Lock(long timeout, UUID sourceUuid, long lockId, Object version) {
			this.timeout = timeout;
			this.lockId = lockId;
			this.version = version;
			this.sourceUuid = sourceUuid;
		}

		@Override
		public boolean isReadable(long txTimestamp) {
			return false;
		}

		@Override
		@SuppressWarnings({"SimplifiableIfStatement", "unchecked"})
		public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
			if ( txTimestamp > timeout ) {
				// if timedout then allow write
				return true;
			}
			if ( multiplicity > 0 ) {
				// if still locked then disallow write
				return false;
			}
			return version == null
					? txTimestamp > unlockTimestamp
					: versionComparator.compare( version, newVersion ) < 0;
		}

		@Override
		public Object getValue() {
			return null;
		}

		@Override
		public boolean isUnlockable(SoftLock lock) {
			return equals( lock );
		}

		@Override
		@SuppressWarnings("SimplifiableIfStatement")
		public boolean equals(Object o) {
			if ( o == this ) {
				return true;
			}
			else if ( o instanceof Lock ) {
				return (lockId == ((Lock) o).lockId) && sourceUuid.equals( ((Lock) o).sourceUuid );
			}
			else {
				return false;
			}
		}

		@Override
		public int hashCode() {
			final int hash = (sourceUuid != null ? sourceUuid.hashCode() : 0);
			int temp = (int) lockId;
			for ( int i = 1; i < Long.SIZE / Integer.SIZE; i++ ) {
				temp ^= (lockId >>> (i * Integer.SIZE));
			}
			return hash + temp;
		}

		/**
		 * Returns true if this Lock has been concurrently locked by more than one transaction.
		 */
		public boolean wasLockedConcurrently() {
			return concurrent;
		}

		@Override
		public Lock lock(long timeout, UUID uuid, long lockId) {
			concurrent = true;
			multiplicity++;
			this.timeout = timeout;
			return this;
		}

		/**
		 * Unlocks this Lock, and timestamps the unlock event.
		 */
		public void unlock(long timestamp) {
			if ( --multiplicity == 0 ) {
				unlockTimestamp = timestamp;
			}
		}

		@Override
		public String toString() {
			return "Lock Source-UUID:" + sourceUuid + " Lock-ID:" + lockId;
		}
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
Solution content
 * @author Chris Dennis
 * @author Alex Snaps
 */
abstract class AbstractReadWriteEhcacheAccessStrategy
        extends AbstractEhcacheAccessStrategy {

    private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
            EhCacheMessageLogger.class,
            AbstractReadWriteEhcacheAccessStrategy.class.getName()
    );
    private final UUID uuid = UUID.randomUUID();
    private final AtomicLong nextLockId = new AtomicLong();

    private final Comparator versionComparator;

    /**
     * Creates a read/write cache access strategy around the given cache region.
     */
    public AbstractReadWriteEhcacheAccessStrategy(T region) {
        super( region );
        this.versionComparator = region.getCacheDataDescription().getVersionComparator();
    }

    /**
     * Returns null if the item is not readable.  Locked items are not readable, nor are items created
    /**
     * Soft-lock a cache item.
     * after the start of this transaction.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#get(java.lang.Object, long)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#get(java.lang.Object, long)
     */
    public final Object get(Object key, long txTimestamp) throws CacheException {
        readLockIfNeeded( key );
        try {
            final Lockable item = (Lockable) region.get( key );

            final boolean readable = item != null && item.isReadable( txTimestamp );
            if ( readable ) {
                return item.getValue();
            }
            else {
                return null;
            }
        }
        finally {
            readUnlockIfNeeded( key );
        }
    }

    /**
     * Returns false and fails to put the value if there is an existing un-writeable item mapped to this
     * key.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#putFromLoad(java.lang.Object, java.lang.Object, long, java.lang.Object, boolean)
     */
    @Override
    public final boolean putFromLoad(Object key, Object value, long txTimestamp, Object version, boolean minimalPutOverride)
            throws CacheException {
        region.writeLock( key );
        try {
            final Lockable item = (Lockable) region.get( key );
            final boolean writeable = item == null || item.isWriteable( txTimestamp, version, versionComparator );
            if ( writeable ) {
                region.put( key, new Item( value, version, region.nextTimestamp() ) );
                return true;
            }
            else {
                return false;
            }
        }
        finally {
            region.writeUnlock( key );
        }
    }

     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#lockItem(java.lang.Object, java.lang.Object)
     */
    public final SoftLock lockItem(Object key, Object version) throws CacheException {
        region.writeLock( key );
        try {
            final Lockable item = (Lockable) region.get( key );
            final long timeout = region.nextTimestamp() + region.getTimeout();
            final Lock lock = ( item == null ) ? new Lock( timeout, uuid, nextLockId(), version ) : item.lock(
                    timeout,
                    uuid,
                    nextLockId()
            );
            region.put( key, lock );
            return lock;
        }
        finally {
            region.writeUnlock( key );
        }
    }

    /**
     * Soft-unlock a cache item previously locked via {@code lockItem}.
     *
     * @see org.hibernate.cache.spi.access.EntityRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
     * @see org.hibernate.cache.spi.access.CollectionRegionAccessStrategy#unlockItem(java.lang.Object, org.hibernate.cache.spi.access.SoftLock)
     */
    public final void unlockItem(Object key, SoftLock lock) throws CacheException {
        region.writeLock( key );
        try {
            final Lockable entry = (Lockable) region.get( key );
            final boolean unlockable = entry != null && entry.isUnlockable( lock );
            if ( unlockable ) {
                decrementLock( key, (Lock) entry );
            }
            else {
                // Entry is missing or held by a different lock: treat the lock as expired/bogus.
                handleLockExpiry( key, entry );
            }
        }
        finally {
            region.writeUnlock( key );
        }
    }

    /**
     * Returns the next monotonically increasing lock id for this strategy instance.
     */
    private long nextLockId() {
        return nextLockId.getAndIncrement();
    }

    /**
     * Unlock and re-put the given key, lock combination.
     *
     * @param key the cache key the lock is mapped under
     * @param lock the lock whose hold count should be decremented
     */
    protected void decrementLock(Object key, Lock lock) {
        // Record the unlock timestamp first, then write the (possibly still multiply-held) lock back.
        lock.unlock( region.nextTimestamp() );
        region.put( key, lock );
    }

    /**
     * Handle the timeout of a previous lock mapped to this key.  Replaces the stale entry with a
     * fresh Lock that is immediately unlocked, keeping the slot blocked until the timeout passes.
     *
     * @param key the cache key whose lock expired
     * @param lock the stale entry found under the key (may be {@code null})
     */
    protected void handleLockExpiry(Object key, Lockable lock) {
        LOG.softLockedCacheExpired( region.getName(), key, lock == null ? "(null)" : lock.toString() );

        final long ts = region.nextTimestamp() + region.getTimeout();
        // create new lock that times out immediately; use the shared id helper for consistency
        // with lockItem() instead of touching the AtomicLong directly.
        final Lock newLock = new Lock( ts, uuid, nextLockId(), null );
        newLock.unlock( ts );
        region.put( key, newLock );
    }

    /**
     * Read lock the entry for the given key if internal cache locks will not provide correct exclusion.
     */
    private void readLockIfNeeded(Object key) {
        if ( !region.locksAreIndependentOfCache() ) {
            // The cache's own locking already provides exclusion; nothing to do.
            return;
        }
        region.readLock( key );
    }

    /**
     * Read unlock the entry for the given key if internal cache locks will not provide correct exclusion.
     */
    private void readUnlockIfNeeded(Object key) {
        if ( !region.locksAreIndependentOfCache() ) {
            // The cache's own locking already provides exclusion; nothing to do.
            return;
        }
        region.readUnlock( key );
    }

    /**
     * Interface type implemented by all wrapper objects in the cache: plain {@code Item}s and
     * soft {@code Lock}s alike.
     */
    protected static interface Lockable {

        /**
         * Returns true if the enclosed value can be read by a transaction started at the given time.
         *
         * @param txTimestamp start timestamp of the reading transaction
         */
        public boolean isReadable(long txTimestamp);

        /**
         * Returns true if the enclosed value can be replaced with one of the given version by a
         * transaction started at the given time.
         *
         * @param txTimestamp start timestamp of the writing transaction
         * @param version candidate replacement version
         * @param versionComparator comparator used to order versions
         */
        public boolean isWriteable(long txTimestamp, Object version, Comparator versionComparator);

        /**
         * Returns the enclosed value.
         */
        public Object getValue();

        /**
         * Returns true if the given lock can be unlocked using the given SoftLock instance as a handle.
         */
        public boolean isUnlockable(SoftLock lock);

        /**
         * Locks this entry, stamping it with the UUID and lockId given, with the lock timeout occuring at the specified
         * time.  The returned Lock object can be used to unlock the entry in the future.
         */
        public Lock lock(long timeout, UUID uuid, long lockId);
    }

    /**
     * Wrapper type representing unlocked items.  Immutable; carries the cached value together with
     * its version and the timestamp at which it was cached.
     */
    protected final static class Item implements Serializable, Lockable {

        private static final long serialVersionUID = 1L;
        private final Object value;
        private final Object version;
        private final long timestamp;

        /**
         * Creates an unlocked item wrapping the given value with a version and creation timestamp.
         */
        Item(Object value, Object version, long timestamp) {
            this.value = value;
            this.version = version;
            this.timestamp = timestamp;
        }

        @Override
        public boolean isReadable(long txTimestamp) {
            // Only transactions begun strictly after this item was cached may read it.
            return txTimestamp > timestamp;
        }

        @Override
        public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
            // Writeable only when a version is present and the incoming version is strictly newer.
            if ( version == null ) {
                return false;
            }
            return versionComparator.compare( version, newVersion ) < 0;
        }

        @Override
        public Object getValue() {
            return value;
        }

        @Override
        public boolean isUnlockable(SoftLock lock) {
            // A plain item is never held by a soft lock.
            return false;
        }

        @Override
        public Lock lock(long timeout, UUID uuid, long lockId) {
            return new Lock( timeout, uuid, lockId, version );
        }
    }

    /**
     * Wrapper type representing locked items.
     *
     * <p>A Lock replaces an {@code Item} in the region while one or more transactions hold a soft
     * lock on the key.  Additional lock requests stack onto the same instance by incrementing
     * {@code multiplicity}; the entry only becomes writeable again once every holder has unlocked,
     * or the timeout has passed.
     */
    protected final static class Lock implements Serializable, Lockable, SoftLock {

        private static final long serialVersionUID = 2L;

        // Identifies the strategy instance that created this lock.
        private final UUID sourceUuid;
        // Per-source id; together with sourceUuid identifies the lock for isUnlockable/equals.
        private final long lockId;
        // Version of the item cached when the lock was taken (may be null).
        private final Object version;

        private long timeout;
        // Set once a second transaction stacks onto this lock (see lock()).
        private boolean concurrent;
        // Number of transactions currently holding this lock.
        private int multiplicity = 1;
        // Timestamp recorded when the last holder released the lock.
        private long unlockTimestamp;

        /**
         * Creates a locked item with the given identifiers and object version.
         */
        Lock(long timeout, UUID sourceUuid, long lockId, Object version) {
            this.timeout = timeout;
            this.lockId = lockId;
            this.version = version;
            this.sourceUuid = sourceUuid;
        }

        @Override
        public boolean isReadable(long txTimestamp) {
            // A locked entry is never readable.
            return false;
        }

        @Override
        public boolean isWriteable(long txTimestamp, Object newVersion, Comparator versionComparator) {
            if ( txTimestamp > timeout ) {
                // if timedout then allow write
                return true;
            }
            if ( multiplicity > 0 ) {
                // if still locked then disallow write
                return false;
            }
            // Fully unlocked: unversioned data relies on the unlock timestamp, versioned data on
            // the version comparison.
            return version == null ? txTimestamp > unlockTimestamp : versionComparator.compare(
                    version,
                    newVersion
            ) < 0;
        }

        @Override
        public Object getValue() {
            // Locks carry no value.
            return null;
        }

        @Override
        public boolean isUnlockable(SoftLock lock) {
            // Unlockable only by the exact lock handle (same sourceUuid + lockId; see equals()).
            return equals( lock );
        }

        @Override
        public boolean equals(Object o) {
            if ( o == this ) {
                return true;
            }
            else if ( o instanceof Lock ) {
                return ( lockId == ( (Lock) o ).lockId ) && sourceUuid.equals( ( (Lock) o ).sourceUuid );
            }
            else {
                return false;
            }
        }

        @Override
        public int hashCode() {
            // Fold the 64-bit lockId into an int by XOR-ing its 32-bit halves, then combine with
            // the source UUID's hash.
            int hash = ( sourceUuid != null ? sourceUuid.hashCode() : 0 );
            int temp = (int) lockId;
            for ( int i = 1; i < Long.SIZE / Integer.SIZE; i++ ) {
                temp ^= ( lockId >>> ( i * Integer.SIZE ) );
            }
            return hash + temp;
        }

        /**
         * Returns true if this Lock has been concurrently locked by more than one transaction.
         */
        public boolean wasLockedConcurrently() {
            return concurrent;
        }

        @Override
        public Lock lock(long timeout, UUID uuid, long lockId) {
            // Stack another holder onto this lock and extend its timeout.
            concurrent = true;
            multiplicity++;
            this.timeout = timeout;
            return this;
        }

        /**
         * Unlocks this Lock, and timestamps the unlock event.
         */
        public void unlock(long timestamp) {
            // Only the last holder's release records the unlock timestamp.
            if ( --multiplicity == 0 ) {
                unlockTimestamp = timestamp;
            }
        }

        @Override
        public String toString() {
			return "Lock Source-UUID:" + sourceUuid + " Lock-ID:" + lockId;
        }
    }
}
File
AbstractReadWriteEhcacheAccessStrategy.java
Developer's decision
Manual
Kind of conflict
Annotation
Attribute
Class declaration
Class signature
Comment
Interface declaration
Method declaration
Method invocation
Chunk
Conflicting content
			EhcacheAccessStrategyFactoryImpl.class.getName()
	);

<<<<<<< HEAD
    /**
     * {@inheritDoc}
     */
    public EntityRegionAccessStrategy createEntityRegionAccessStrategy(EhcacheEntityRegion entityRegion, AccessType accessType) {
        switch ( accessType ) {
            case READ_ONLY:
                if ( entityRegion.getCacheDataDescription().isMutable() ) {
                    LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
                }
                return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion );
            case READ_WRITE:
                return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );

            case NONSTRICT_READ_WRITE:
                return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy(
                        entityRegion
                );

            case TRANSACTIONAL:
                return new TransactionalEhcacheEntityRegionAccessStrategy(
                        entityRegion,
                        entityRegion.getEhcache()
                );
            default:
                throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
=======
	@Override
	public EntityRegionAccessStrategy createEntityRegionAccessStrategy(
			EhcacheEntityRegion entityRegion,
			AccessType accessType) {
		switch ( accessType ) {
			case READ_ONLY:
				if ( entityRegion.getCacheDataDescription().isMutable() ) {
					LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
				}
				return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion, entityRegion.getSettings() );
			case READ_WRITE:
				return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion, entityRegion.getSettings() );

			case NONSTRICT_READ_WRITE:
				return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy(
						entityRegion,
						entityRegion.getSettings()
				);

			case TRANSACTIONAL:
				return new TransactionalEhcacheEntityRegionAccessStrategy(
						entityRegion,
						entityRegion.getEhcache(),
						entityRegion.getSettings()
				);
			default:
				throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		}
Solution content
			EhcacheAccessStrategyFactoryImpl.class.getName()
	);

	@Override
	public EntityRegionAccessStrategy createEntityRegionAccessStrategy(
			EhcacheEntityRegion entityRegion,
			AccessType accessType) {
		switch ( accessType ) {
			case READ_ONLY:
				if ( entityRegion.getCacheDataDescription().isMutable() ) {
					LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
				}
				return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion );
			case READ_WRITE:
				return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );

			case NONSTRICT_READ_WRITE:
				return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );

			case TRANSACTIONAL:
				return new TransactionalEhcacheEntityRegionAccessStrategy(
						entityRegion,
						entityRegion.getEhcache()
				);
			default:
				throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
		}
File
EhcacheAccessStrategyFactoryImpl.java
Developer's decision
Manual
Kind of conflict
Annotation
Case statement
Comment
If statement
Method invocation
Method signature
Return statement
Switch statement
Throw statement
Chunk
Conflicting content
	}

<<<<<<< HEAD
    /**
     * {@inheritDoc}
     */
    public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(EhcacheCollectionRegion collectionRegion,
                                                                               AccessType accessType) {
        switch ( accessType ) {
            case READ_ONLY:
                if ( collectionRegion.getCacheDataDescription().isMutable() ) {
                    LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
                }
                return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case READ_WRITE:
                return new ReadWriteEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case NONSTRICT_READ_WRITE:
                return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case TRANSACTIONAL:
                return new TransactionalEhcacheCollectionRegionAccessStrategy(
                        collectionRegion, collectionRegion.getEhcache()
                );
            default:
                throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
        }
    }
=======
	@Override
	public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(
			EhcacheCollectionRegion collectionRegion,
			AccessType accessType) {
		switch ( accessType ) {
			case READ_ONLY:
				if ( collectionRegion.getCacheDataDescription().isMutable() ) {
					LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
				}
				return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
						collectionRegion,
						collectionRegion.getSettings()
				);
			case READ_WRITE:
				return new ReadWriteEhcacheCollectionRegionAccessStrategy(
						collectionRegion,
						collectionRegion.getSettings()
				);
			case NONSTRICT_READ_WRITE:
				return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
						collectionRegion,
						collectionRegion.getSettings()
				);
			case TRANSACTIONAL:
				return new TransactionalEhcacheCollectionRegionAccessStrategy(
						collectionRegion, collectionRegion.getEhcache(), collectionRegion
						.getSettings()
				);
			default:
				throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
		}
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	@Override
	public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
Solution content
	}

	@Override
    public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(EhcacheCollectionRegion collectionRegion,
                                                                               AccessType accessType) {
        switch ( accessType ) {
            case READ_ONLY:
                if ( collectionRegion.getCacheDataDescription().isMutable() ) {
                    LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
                }
                return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case READ_WRITE:
                return new ReadWriteEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case NONSTRICT_READ_WRITE:
                return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
                        collectionRegion
                );
            case TRANSACTIONAL:
                return new TransactionalEhcacheCollectionRegionAccessStrategy(
                        collectionRegion, collectionRegion.getEhcache()
                );
            default:
                throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
        }
    }

	@Override
	public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
File
EhcacheAccessStrategyFactoryImpl.java
Developer's decision
Combination
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
	public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
			EhcacheNaturalIdRegion naturalIdRegion,
			AccessType accessType) {
<<<<<<< HEAD
        switch ( accessType ) {
        case READ_ONLY:
            if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
                LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
            }
            return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
                    naturalIdRegion
            );
        case READ_WRITE:
            return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
                    naturalIdRegion
            );
        case NONSTRICT_READ_WRITE:
            return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
                    naturalIdRegion
            );
        case TRANSACTIONAL:
            return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
                    naturalIdRegion, naturalIdRegion.getEhcache()
            );
        default:
            throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
    }
=======
		switch ( accessType ) {
			case READ_ONLY:
				if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
					LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
				}
				return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion,
						naturalIdRegion.getSettings()
				);
			case READ_WRITE:
				return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion,
						naturalIdRegion.getSettings()
				);
			case NONSTRICT_READ_WRITE:
				return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion,
						naturalIdRegion.getSettings()
				);
			case TRANSACTIONAL:
				return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion, naturalIdRegion.getEhcache(), naturalIdRegion
						.getSettings()
				);
			default:
				throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
		}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

Solution content
	public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
			EhcacheNaturalIdRegion naturalIdRegion,
			AccessType accessType) {
		switch ( accessType ) {
			case READ_ONLY:
				if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
					LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
				}
				return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion
				);
			case READ_WRITE:
				return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion
				);
			case NONSTRICT_READ_WRITE:
				return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion
				);
			case TRANSACTIONAL:
				return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
						naturalIdRegion, naturalIdRegion.getEhcache()
				);
			default:
				throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
		}
	}

File
EhcacheAccessStrategyFactoryImpl.java
Developer's decision
Version 1
Kind of conflict
Switch statement
Chunk
Conflicting content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
<<<<<<< HEAD
	public TransactionalEhcacheCollectionRegionAccessStrategy(EhcacheCollectionRegion region, Ehcache ehcache) {
		super( region );
=======
	public TransactionalEhcacheCollectionRegionAccessStrategy(
			EhcacheCollectionRegion region,
			Ehcache ehcache,
			Settings settings) {
		super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		this.ehcache = ehcache;
	}
Solution content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
	public TransactionalEhcacheCollectionRegionAccessStrategy(EhcacheCollectionRegion region, Ehcache ehcache) {
		super( region );
		this.ehcache = ehcache;
	}
File
TransactionalEhcacheCollectionRegionAccessStrategy.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Method signature
Chunk
Conflicting content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
<<<<<<< HEAD
	public TransactionalEhcacheEntityRegionAccessStrategy(EhcacheEntityRegion region, Ehcache ehcache) {
		super( region );
=======
	public TransactionalEhcacheEntityRegionAccessStrategy(
			EhcacheEntityRegion region,
			Ehcache ehcache,
			Settings settings) {
		super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		this.ehcache = ehcache;
	}
Solution content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
	public TransactionalEhcacheEntityRegionAccessStrategy(EhcacheEntityRegion region, Ehcache ehcache) {
		super( region );
		this.ehcache = ehcache;
	}
File
TransactionalEhcacheEntityRegionAccessStrategy.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Method signature
Chunk
Conflicting content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
<<<<<<< HEAD
	public TransactionalEhcacheNaturalIdRegionAccessStrategy(EhcacheNaturalIdRegion region, Ehcache ehcache) {
		super( region );
=======
	public TransactionalEhcacheNaturalIdRegionAccessStrategy(
			EhcacheNaturalIdRegion region,
			Ehcache ehcache,
			Settings settings) {
		super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
		this.ehcache = ehcache;
	}
Solution content
	 * @param region the Hibernate region.
	 * @param ehcache the cache.
	 */
	public TransactionalEhcacheNaturalIdRegionAccessStrategy(EhcacheNaturalIdRegion region, Ehcache ehcache) {
		super( region );
		this.ehcache = ehcache;
	}
File
TransactionalEhcacheNaturalIdRegionAccessStrategy.java
Developer's decision
Version 1
Kind of conflict
Method invocation
Method signature
Chunk
Conflicting content
 */
package org.hibernate.jpa;

<<<<<<< HEAD

=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
/**
 * Defines the available HEM settings, both JPA-defined as well as Hibernate-specific
 * 

Solution content
 */
package org.hibernate.jpa;

/**
 * Defines the available HEM settings, both JPA-defined as well as Hibernate-specific
 * 

File
AvailableSettings.java
Developer's decision
Version 1
Kind of conflict
Blank
Chunk
Conflicting content
=======
import java.util.Set;
import java.util.StringTokenizer;

<<<<<<< HEAD
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.CompositeIndex;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;

import org.jboss.logging.Logger;
import javax.persistence.AttributeConverter;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityNotFoundException;
import javax.persistence.PersistenceException;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

import org.hibernate.Interceptor;
import org.hibernate.InvalidMappingException;
Solution content
import java.util.Set;
import java.util.StringTokenizer;

import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.CompositeIndex;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;

import org.jboss.logging.Logger;
import javax.persistence.AttributeConverter;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityNotFoundException;
import javax.persistence.PersistenceException;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;

import org.hibernate.Interceptor;
import org.hibernate.InvalidMappingException;
File
EntityManagerFactoryBuilderImpl.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
<<<<<<< HEAD
import org.hibernate.boot.registry.internal.ConfigLoader;
=======
import org.hibernate.boot.registry.selector.StrategyRegistrationProvider;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.cfg.Configuration;
Solution content
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.ConfigLoader;
import org.hibernate.boot.registry.selector.StrategyRegistrationProvider;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.cfg.Configuration;
File
EntityManagerFactoryBuilderImpl.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.jpa.internal.util.LogHelper;
import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper;
import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider;
<<<<<<< HEAD
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.SessionFactoryBuilder;
import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames;
import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper;
import org.hibernate.metamodel.spi.MetadataImplementor;
=======
import org.hibernate.metamodel.source.annotations.JPADotNames;
import org.hibernate.metamodel.source.annotations.JandexHelper;
import org.hibernate.metamodel.spi.TypeContributor;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.secure.spi.GrantedPermission;
import org.hibernate.secure.spi.JaccService;
Solution content
import org.hibernate.jpa.internal.util.LogHelper;
import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper;
import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider;
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.SessionFactoryBuilder;
import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames;
import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.TypeContributor;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.secure.spi.GrantedPermission;
import org.hibernate.secure.spi.JaccService;
File
EntityManagerFactoryBuilderImpl.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
				)
		);

<<<<<<< HEAD
		appendListeners( listenerRegistry, enversConfiguration );
=======
		if ( enversConfiguration.getEntCfg().hasAuditedEntities() ) {
			listenerRegistry.appendListeners(
					EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl(
					enversConfiguration
			)
			);
			listenerRegistry.appendListeners(
					EventType.POST_INSERT, new EnversPostInsertEventListenerImpl(
					enversConfiguration
			)
			);
			listenerRegistry.appendListeners(
					EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl(
					enversConfiguration
			)
			);
			listenerRegistry.appendListeners(
					EventType.POST_COLLECTION_RECREATE,
					new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration )
			);
			listenerRegistry.appendListeners(
					EventType.PRE_COLLECTION_REMOVE,
					new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration )
			);
			listenerRegistry.appendListeners(
					EventType.PRE_COLLECTION_UPDATE,
					new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration )
			);
		}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	@Override
Solution content
				)
		);

		appendListeners( listenerRegistry, enversConfiguration );
	}

	@Override
File
EnversIntegrator.java
Developer's decision
Version 1
Kind of conflict
If statement
Method invocation
Chunk
Conflicting content
	}

	@Override
<<<<<<< HEAD
	public void integrate( MetadataImplementor metadata,
	                       SessionFactoryImplementor sessionFactory,
	                       SessionFactoryServiceRegistry serviceRegistry ) {
		final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
		final boolean autoRegister = configurationService.getSetting( AUTO_REGISTER, StandardConverters.BOOLEAN, true );
		if ( !autoRegister ) {
			LOG.debug( "Skipping Envers listener auto registration" );
			return;
		}

		EventListenerRegistry listenerRegistry = serviceRegistry.getService( EventListenerRegistry.class );
		listenerRegistry.addDuplicationStrategy( EnversListenerDuplicationStrategy.INSTANCE );

//		final AuditConfiguration enversConfiguration = AuditConfiguration.getFor( configuration, serviceRegistry.getService( ClassLoaderService.class ) );
//
//		appendListeners( listenerRegistry, enversConfiguration );
	}

	private void appendListeners(EventListenerRegistry listenerRegistry, AuditConfiguration enversConfiguration) {
		if (enversConfiguration.getEntCfg().hasAuditedEntities()) {
			listenerRegistry.appendListeners( EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_INSERT, new EnversPostInsertEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_COLLECTION_RECREATE, new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.PRE_COLLECTION_REMOVE, new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.PRE_COLLECTION_UPDATE, new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration ) );
		}
=======
	public void integrate(
			MetadataImplementor metadata,
			SessionFactoryImplementor sessionFactory,
			SessionFactoryServiceRegistry serviceRegistry) {
		// TODO: implement
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}
}
Solution content
	}

	@Override
	public void integrate( MetadataImplementor metadata,
	                       SessionFactoryImplementor sessionFactory,
	                       SessionFactoryServiceRegistry serviceRegistry ) {
		final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
		final boolean autoRegister = configurationService.getSetting( AUTO_REGISTER, StandardConverters.BOOLEAN, true );
		if ( !autoRegister ) {
			LOG.debug( "Skipping Envers listener auto registration" );
			return;
		}

		EventListenerRegistry listenerRegistry = serviceRegistry.getService( EventListenerRegistry.class );
		listenerRegistry.addDuplicationStrategy( EnversListenerDuplicationStrategy.INSTANCE );

//		final AuditConfiguration enversConfiguration = AuditConfiguration.getFor( configuration, serviceRegistry.getService( ClassLoaderService.class ) );
//
//		appendListeners( listenerRegistry, enversConfiguration );
	}

	private void appendListeners(EventListenerRegistry listenerRegistry, AuditConfiguration enversConfiguration) {
		if (enversConfiguration.getEntCfg().hasAuditedEntities()) {
			listenerRegistry.appendListeners( EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_INSERT, new EnversPostInsertEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.POST_COLLECTION_RECREATE, new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.PRE_COLLECTION_REMOVE, new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration ) );
			listenerRegistry.appendListeners( EventType.PRE_COLLECTION_UPDATE, new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration ) );
		}
	}
}
File
EnversIntegrator.java
Developer's decision
Version 1
Kind of conflict
Comment
If statement
Method declaration
Method signature
Chunk
Conflicting content
import org.junit.Test;

package org.hibernate.envers.test.integration.components;

import org.hibernate.Session;
<<<<<<< HEAD
import org.junit.Assert;
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.components.UniquePropsEntity;
Solution content
package org.hibernate.envers.test.integration.components;

import org.hibernate.Session;
import org.junit.Assert;
import org.junit.Test;
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.components.UniquePropsEntity;
File
PropertiesGroupTest.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
<<<<<<< HEAD
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
=======

import org.junit.Assert;
import org.junit.Test;

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.testing.TestForIssue;

/**
Solution content
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.junit.Assert;
import org.junit.Test;
import org.hibernate.testing.TestForIssue;

/**
File
AddDelTest.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
		return new Class[] {StrTestEntity.class, GivenIdStrEntity.class};
	}

<<<<<<< HEAD
    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void initData() {
        // Revision 1
        Session session = openSession();
        session.getTransaction().begin();
        GivenIdStrEntity entity = new GivenIdStrEntity(1, "data");
        session.persist(entity);
        session.getTransaction().commit();
=======
	@Test
	@Priority(10)
	public void initData() {
		// Revision 1
		Session session = openSession();
		session.getTransaction().begin();
		GivenIdStrEntity entity = new GivenIdStrEntity( 1, "data" );
		session.persist( entity );
		session.getTransaction().commit();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		// Revision 2
		session.getTransaction().begin();
Solution content
		return new Class[] {StrTestEntity.class, GivenIdStrEntity.class};
	}

    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void initData() {
        // Revision 1
        Session session = openSession();
        session.getTransaction().begin();
        GivenIdStrEntity entity = new GivenIdStrEntity(1, "data");
        session.persist(entity);
        session.getTransaction().commit();

		// Revision 2
		session.getTransaction().begin();
File
AddDelTest.java
Developer's decision
Version 1
Kind of conflict
Annotation
Comment
Method invocation
Method signature
Variable
Chunk
Conflicting content
		session.close();
	}

<<<<<<< HEAD
    @Test
    @FailureExpectedWithNewMetamodel
    public void testRevisionsCountOfGivenIdStrEntity() {
        // Revision 2 has not changed entity's state.
        Assert.assertEquals(Arrays.asList(1, 3), getAuditReader().getRevisions(GivenIdStrEntity.class, 1));
=======
	@Test
	public void testRevisionsCountOfGivenIdStrEntity() {
		// Revision 2 has not changed entity's state.
		Assert.assertEquals( Arrays.asList( 1, 3 ), getAuditReader().getRevisions( GivenIdStrEntity.class, 1 ) );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		getSession().close();
	}
Solution content
		session.close();
	}

    @Test
    @FailureExpectedWithNewMetamodel
    public void testRevisionsCountOfGivenIdStrEntity() {
        // Revision 2 has not changed entity's state.
        Assert.assertEquals(Arrays.asList(1, 3), getAuditReader().getRevisions(GivenIdStrEntity.class, 1));
		getSession().close();
	}
File
AddDelTest.java
Developer's decision
Version 1
Kind of conflict
Annotation
Comment
Method invocation
Method signature
Chunk
Conflicting content
		getSession().close();
	}

<<<<<<< HEAD
    @Test
    @FailureExpectedWithNewMetamodel
    public void testHistoryOfGivenIdStrEntity() {
        Assert.assertEquals(new GivenIdStrEntity(1, "data"), getAuditReader().find(GivenIdStrEntity.class, 1, 1));
        Assert.assertEquals(new GivenIdStrEntity(1, "modified data"), getAuditReader().find(GivenIdStrEntity.class, 1, 3));
=======
	@Test
	public void testHistoryOfGivenIdStrEntity() {
		Assert.assertEquals( new GivenIdStrEntity( 1, "data" ), getAuditReader().find( GivenIdStrEntity.class, 1, 1 ) );
		Assert.assertEquals(
				new GivenIdStrEntity( 1, "modified data" ), getAuditReader().find(
				GivenIdStrEntity.class,
				1,
				3
		)
		);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		getSession().close();
	}
Solution content
		getSession().close();
	}

    @FailureExpectedWithNewMetamodel
	@Test
	public void testHistoryOfGivenIdStrEntity() {
		Assert.assertEquals( new GivenIdStrEntity( 1, "data" ), getAuditReader().find( GivenIdStrEntity.class, 1, 1 ) );
		Assert.assertEquals(
				new GivenIdStrEntity( 1, "modified data" ), getAuditReader().find(
				GivenIdStrEntity.class,
				1,
				3
		)
		);

		getSession().close();
	}
File
AddDelTest.java
Developer's decision
Combination
Kind of conflict
Annotation
Method invocation
Method signature
Chunk
Conflicting content
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
<<<<<<< HEAD
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
=======

import org.junit.Assert;
import org.junit.Test;

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.testing.TestForIssue;

/**
Solution content
import org.hibernate.envers.test.BaseEnversFunctionalTestCase;
import org.hibernate.envers.test.Priority;
import org.hibernate.envers.test.entities.StrTestEntity;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.junit.Assert;
import org.junit.Test;
import org.hibernate.testing.TestForIssue;

/**
File
QueryingWithProxyObjectTest.java
Developer's decision
Concatenation
Kind of conflict
Import
Chunk
Conflicting content
		return new Class[] {StrTestEntity.class};
	}

<<<<<<< HEAD
    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void initData() {
        // Revision 1
        getSession().getTransaction().begin();
        StrTestEntity ste = new StrTestEntity("data");
        getSession().persist(ste);
        getSession().getTransaction().commit();
        id = ste.getId();
        getSession().close();
    }

    @Test
    @TestForIssue(jiraKey="HHH-4760")
    @SuppressWarnings("unchecked")
    @FailureExpectedWithNewMetamodel
    public void testQueryingWithProxyObject() {
        StrTestEntity originalSte = new StrTestEntity("data", id);
        // Load the proxy instance
        StrTestEntity proxySte = (StrTestEntity) getSession().load(StrTestEntity.class, id);
=======
	@Test
	@Priority(10)
	public void initData() {
		// Revision 1
		getSession().getTransaction().begin();
		StrTestEntity ste = new StrTestEntity( "data" );
		getSession().persist( ste );
		getSession().getTransaction().commit();
		id = ste.getId();
		getSession().close();
	}

	@Test
	@TestForIssue(jiraKey = "HHH-4760")
	@SuppressWarnings("unchecked")
	public void testQueryingWithProxyObject() {
		StrTestEntity originalSte = new StrTestEntity( "data", id );
		// Load the proxy instance
		StrTestEntity proxySte = (StrTestEntity) getSession().load( StrTestEntity.class, id );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		Assert.assertTrue( getAuditReader().isEntityClassAudited( proxySte.getClass() ) );
Solution content
		return new Class[] {StrTestEntity.class};
	}

    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void initData() {
        // Revision 1
        getSession().getTransaction().begin();
        StrTestEntity ste = new StrTestEntity("data");
        getSession().persist(ste);
        getSession().getTransaction().commit();
        id = ste.getId();
        getSession().close();
    }

    @Test
    @TestForIssue(jiraKey="HHH-4760")
    @SuppressWarnings("unchecked")
    @FailureExpectedWithNewMetamodel
    public void testQueryingWithProxyObject() {
        StrTestEntity originalSte = new StrTestEntity("data", id);
        // Load the proxy instance
        StrTestEntity proxySte = (StrTestEntity) getSession().load(StrTestEntity.class, id);

		Assert.assertTrue( getAuditReader().isEntityClassAudited( proxySte.getClass() ) );
File
QueryingWithProxyObjectTest.java
Developer's decision
Version 1
Kind of conflict
Annotation
Cast expression
Comment
Method declaration
Method invocation
Method signature
Variable
Chunk
Conflicting content
		// Disable schema auto generation.
		return false;
	}
<<<<<<< HEAD
    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void testSchemaCreation() {
        // Generate complete schema.
        new EnversSchemaGenerator(configuration()).export().create( true, true );

        // Populate database with test data.
        Session session = getSession();
        session.getTransaction().begin();
        StrTestEntity entity = new StrTestEntity("data");
        session.save(entity);
        session.getTransaction().commit();

        id = entity.getId();
    }

	@Test
    @Priority(9)
    @FailureExpectedWithNewMetamodel
    public void testAuditDataRetrieval() {
        Assert.assertEquals(Arrays.asList(1), getAuditReader().getRevisions(StrTestEntity.class, id));
        Assert.assertEquals(new StrTestEntity("data", id), getAuditReader().find(StrTestEntity.class, id, 1));
    }

    @Test
    @Priority(8)
    public void testSchemaDrop() {
        new EnversSchemaGenerator(configuration()).export().drop( true, true );
    }
=======

	@Test
	@Priority(10)
	public void testSchemaCreation() {
		// Generate complete schema.
		new EnversSchemaGenerator( configuration() ).export().create( true, true );

		// Populate database with test data.
		Session session = getSession();
		session.getTransaction().begin();
		StrTestEntity entity = new StrTestEntity( "data" );
		session.save( entity );
		session.getTransaction().commit();

		id = entity.getId();
	}

	@Test
	@Priority(9)
	public void testAuditDataRetrieval() {
		Assert.assertEquals( Arrays.asList( 1 ), getAuditReader().getRevisions( StrTestEntity.class, id ) );
		Assert.assertEquals( new StrTestEntity( "data", id ), getAuditReader().find( StrTestEntity.class, id, 1 ) );
	}

	@Test
	@Priority(8)
	public void testSchemaDrop() {
		new EnversSchemaGenerator( configuration() ).export().drop( true, true );
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
Solution content
		// Disable schema auto generation.
		return false;
	}
    @Test
    @Priority(10)
    @FailureExpectedWithNewMetamodel
    public void testSchemaCreation() {
        // Generate complete schema.
        new EnversSchemaGenerator(configuration()).export().create( true, true );

        // Populate database with test data.
        Session session = getSession();
        session.getTransaction().begin();
        StrTestEntity entity = new StrTestEntity("data");
        session.save(entity);
        session.getTransaction().commit();

        id = entity.getId();
    }

	@Test
    @Priority(9)
    @FailureExpectedWithNewMetamodel
    public void testAuditDataRetrieval() {
        Assert.assertEquals(Arrays.asList(1), getAuditReader().getRevisions(StrTestEntity.class, id));
        Assert.assertEquals(new StrTestEntity("data", id), getAuditReader().find(StrTestEntity.class, id, 1));
    }

    @Test
    @Priority(8)
    public void testSchemaDrop() {
        new EnversSchemaGenerator(configuration()).export().drop( true, true );
    }
}
File
SchemaExportTest.java
Developer's decision
Version 1
Kind of conflict
Annotation
Method declaration
Chunk
Conflicting content
import java.util.Set;
import java.util.concurrent.TimeUnit;

<<<<<<< HEAD
import org.hibernate.cache.infinispan.timestamp.ClusteredTimestampsRegionImpl;
import org.hibernate.cache.infinispan.util.Caches;
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.infinispan.AdvancedCache;
import org.infinispan.commands.module.ModuleCommandFactory;
import org.infinispan.configuration.cache.CacheMode;
Solution content
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.hibernate.cache.infinispan.timestamp.ClusteredTimestampsRegionImpl;
import org.hibernate.cache.infinispan.util.Caches;
import org.infinispan.AdvancedCache;
import org.infinispan.commands.module.ModuleCommandFactory;
import org.infinispan.configuration.cache.CacheMode;
File
InfinispanRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Import
Chunk
Conflicting content
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
<<<<<<< HEAD
=======
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.infinispan.factories.GlobalComponentRegistry;
import org.infinispan.manager.DefaultCacheManager;
import org.infinispan.manager.EmbeddedCacheManager;
Solution content
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
import org.infinispan.factories.GlobalComponentRegistry;
import org.infinispan.manager.DefaultCacheManager;
import org.infinispan.manager.EmbeddedCacheManager;
File
InfinispanRegionFactory.java
Developer's decision
Version 2
Kind of conflict
Import
Chunk
Conflicting content
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.transaction.TransactionMode;
import org.infinispan.transaction.lookup.GenericTransactionManagerLookup;
<<<<<<< HEAD
import org.infinispan.util.concurrent.IsolationLevel;
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
import org.infinispan.util.FileLookupFactory;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;

import org.hibernate.cache.infinispan.impl.BaseRegion;
import org.hibernate.cache.infinispan.naturalid.NaturalIdRegionImpl;
import org.hibernate.cache.infinispan.util.CacheCommandFactory;
import org.hibernate.cache.spi.AbstractRegionFactory;
import org.hibernate.cache.spi.CacheDataDescription;
=======
import org.infinispan.util.FileLookupFactory;
import org.infinispan.util.concurrent.IsolationLevel;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;

>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import org.hibernate.cache.CacheException;
import org.hibernate.cache.infinispan.collection.CollectionRegionImpl;
import org.hibernate.cache.infinispan.entity.EntityRegionImpl;
Solution content
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.transaction.TransactionMode;
import org.infinispan.transaction.lookup.GenericTransactionManagerLookup;
import org.infinispan.util.concurrent.IsolationLevel;
import org.infinispan.util.FileLookupFactory;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;

import org.hibernate.cache.infinispan.impl.BaseRegion;
import org.hibernate.cache.infinispan.naturalid.NaturalIdRegionImpl;
import org.hibernate.cache.infinispan.util.CacheCommandFactory;
import org.hibernate.cache.spi.AbstractRegionFactory;
import org.hibernate.cache.spi.CacheDataDescription;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.infinispan.collection.CollectionRegionImpl;
import org.hibernate.cache.infinispan.entity.EntityRegionImpl;
File
InfinispanRegionFactory.java
Developer's decision
Combination
Kind of conflict
Import
Chunk
Conflicting content
	@SuppressWarnings("UnusedDeclaration")
	public static final String NATURAL_ID_CACHE_RESOURCE_PROP = PREFIX + NATURAL_ID_KEY + CONFIG_SUFFIX;

<<<<<<< HEAD
   private static final String ENTITY_KEY = "entity";
   
   /**
    * Name of the configuration that should be used for entity caches.
    * 
    * @see #DEF_ENTITY_RESOURCE
    */
   public static final String ENTITY_CACHE_RESOURCE_PROP = PREFIX + ENTITY_KEY + CONFIG_SUFFIX;
   
   private static final String COLLECTION_KEY = "collection";
   
   /**
    * Name of the configuration that should be used for collection caches.
    * No default value, as by default we try to use the same Infinispan cache
    * instance we use for entity caching.
    * 
    * @see #ENTITY_CACHE_RESOURCE_PROP
    * @see #DEF_ENTITY_RESOURCE
    */
   public static final String COLLECTION_CACHE_RESOURCE_PROP = PREFIX + COLLECTION_KEY + CONFIG_SUFFIX;

   private static final String TIMESTAMPS_KEY = "timestamps";

   /**
    * Name of the configuration that should be used for timestamp caches.
    * 
    * @see #DEF_TIMESTAMPS_RESOURCE
    */
   public static final String TIMESTAMPS_CACHE_RESOURCE_PROP = PREFIX + TIMESTAMPS_KEY + CONFIG_SUFFIX;

   private static final String QUERY_KEY = "query";

   /**
    * Name of the configuration that should be used for query caches.
    * 
    * @see #DEF_QUERY_RESOURCE
    */
   public static final String QUERY_CACHE_RESOURCE_PROP = PREFIX + QUERY_KEY + CONFIG_SUFFIX;

   /**
    * Default value for {@link #INFINISPAN_CONFIG_RESOURCE_PROP}. Specifies the "infinispan-configs.xml" file in this package.
    */
   public static final String DEF_INFINISPAN_CONFIG_RESOURCE = "org/hibernate/cache/infinispan/builder/infinispan-configs.xml";

   /**
    * Default value for {@link #ENTITY_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_ENTITY_RESOURCE = "entity";

   /**
    * Default value for {@link #TIMESTAMPS_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_TIMESTAMPS_RESOURCE = "timestamps";

   /**
    * Default value for {@link #QUERY_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_QUERY_RESOURCE = "local-query";

   /**
    * Default value for {@link #INFINISPAN_USE_SYNCHRONIZATION_PROP}.
    */
   public static final boolean DEF_USE_SYNCHRONIZATION = true;

   /**
    * Name of the pending puts cache.
    */
   public static final String PENDING_PUTS_CACHE_NAME = "pending-puts";

   private EmbeddedCacheManager manager;

   private final Map typeOverrides = new HashMap();

   private final Set definedConfigurations = new HashSet();

   private org.infinispan.transaction.lookup.TransactionManagerLookup transactionManagerlookup;

   private List regionNames = new ArrayList();
   
   /** {@inheritDoc} */
   public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException {
      if (log.isDebugEnabled()) log.debug("Building collection cache region [" + regionName + "]");
      AdvancedCache cache = getCache(regionName, COLLECTION_KEY, properties);
      CollectionRegionImpl region = new CollectionRegionImpl(
            cache, regionName, metadata, this);
      startRegion(region, regionName);
      return region;
   }

   /** {@inheritDoc} */
   public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) throws CacheException {
      if (log.isDebugEnabled()) log.debug("Building entity cache region [" + regionName + "]");
      AdvancedCache cache = getCache(regionName, ENTITY_KEY, properties);
      EntityRegionImpl region = new EntityRegionImpl(
            cache, regionName, metadata, this);
      startRegion(region, regionName);
      return region;
   }
=======
	private static final String ENTITY_KEY = "entity";

	/**
	 * Name of the configuration that should be used for entity caches.
	 *
	 * @see #DEF_ENTITY_RESOURCE
	 */
	public static final String ENTITY_CACHE_RESOURCE_PROP = PREFIX + ENTITY_KEY + CONFIG_SUFFIX;

	private static final String COLLECTION_KEY = "collection";

	/**
	 * Name of the configuration that should be used for collection caches.
	 * No default value, as by default we try to use the same Infinispan cache
	 * instance we use for entity caching.
	 *
	 * @see #ENTITY_CACHE_RESOURCE_PROP
	 * @see #DEF_ENTITY_RESOURCE
	 */
	@SuppressWarnings("UnusedDeclaration")
	public static final String COLLECTION_CACHE_RESOURCE_PROP = PREFIX + COLLECTION_KEY + CONFIG_SUFFIX;

	private static final String TIMESTAMPS_KEY = "timestamps";

	/**
	 * Name of the configuration that should be used for timestamp caches.
	 *
	 * @see #DEF_TIMESTAMPS_RESOURCE
	 */
	@SuppressWarnings("UnusedDeclaration")
	public static final String TIMESTAMPS_CACHE_RESOURCE_PROP = PREFIX + TIMESTAMPS_KEY + CONFIG_SUFFIX;

	private static final String QUERY_KEY = "query";

	/**
	 * Name of the configuration that should be used for query caches.
	 *
	 * @see #DEF_QUERY_RESOURCE
	 */
	public static final String QUERY_CACHE_RESOURCE_PROP = PREFIX + QUERY_KEY + CONFIG_SUFFIX;

	/**
	 * Default value for {@link #INFINISPAN_CONFIG_RESOURCE_PROP}. Specifies the "infinispan-configs.xml" file in this package.
	 */
	public static final String DEF_INFINISPAN_CONFIG_RESOURCE = "org/hibernate/cache/infinispan/builder/infinispan-configs.xml";

	/**
	 * Default value for {@link #ENTITY_CACHE_RESOURCE_PROP}.
	 */
	public static final String DEF_ENTITY_RESOURCE = "entity";

	/**
	 * Default value for {@link #TIMESTAMPS_CACHE_RESOURCE_PROP}.
	 */
	public static final String DEF_TIMESTAMPS_RESOURCE = "timestamps";

	/**
	 * Default value for {@link #QUERY_CACHE_RESOURCE_PROP}.
	 */
	public static final String DEF_QUERY_RESOURCE = "local-query";

	/**
	 * Default value for {@link #INFINISPAN_USE_SYNCHRONIZATION_PROP}.
	 */
	public static final boolean DEF_USE_SYNCHRONIZATION = true;

	/**
	 * Name of the pending puts cache.
	 */
	public static final String PENDING_PUTS_CACHE_NAME = "pending-puts";

	private EmbeddedCacheManager manager;

	private final Map typeOverrides = new HashMap();

	private final Set definedConfigurations = new HashSet();

	private org.infinispan.transaction.lookup.TransactionManagerLookup transactionManagerlookup;

	private List regionNames = new ArrayList();

	/**
	 * Create a new instance using the default configuration.
	 */
	public InfinispanRegionFactory() {
	}

	/**
	 * Create a new instance using conifguration properties in props.
	 *
	 * @param props Environmental properties; currently unused.
	 */
	@SuppressWarnings("UnusedParameters")
	public InfinispanRegionFactory(Properties props) {
	}

	@Override
	public CollectionRegion buildCollectionRegion(
			String regionName,
			Properties properties,
			CacheDataDescription metadata) throws CacheException {
		if ( log.isDebugEnabled() ) {
			log.debug( "Building collection cache region [" + regionName + "]" );
		}
		final AdvancedCache cache = getCache( regionName, COLLECTION_KEY, properties );
		final CollectionRegionImpl region = new CollectionRegionImpl( cache, regionName, metadata, this );
		startRegion( region, regionName );
		return region;
	}

	@Override
	public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
		if ( log.isDebugEnabled() ) {
			log.debug( "Building entity cache region [" + regionName + "]" );
		}
		final AdvancedCache cache = getCache( regionName, ENTITY_KEY, properties );
		final EntityRegionImpl region = new EntityRegionImpl( cache, regionName, metadata, this );
		startRegion( region, regionName );
		return region;
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

	@Override
	public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
Solution content
	@SuppressWarnings("UnusedDeclaration")
	public static final String NATURAL_ID_CACHE_RESOURCE_PROP = PREFIX + NATURAL_ID_KEY + CONFIG_SUFFIX;

   private static final String ENTITY_KEY = "entity";

   /**
    * Name of the configuration that should be used for entity caches.
    *
    * @see #DEF_ENTITY_RESOURCE
    */
   public static final String ENTITY_CACHE_RESOURCE_PROP = PREFIX + ENTITY_KEY + CONFIG_SUFFIX;

   private static final String COLLECTION_KEY = "collection";

   /**
    * Name of the configuration that should be used for collection caches.
    * No default value, as by default we try to use the same Infinispan cache
    * instance we use for entity caching.
    *
    * @see #ENTITY_CACHE_RESOURCE_PROP
    * @see #DEF_ENTITY_RESOURCE
    */
   public static final String COLLECTION_CACHE_RESOURCE_PROP = PREFIX + COLLECTION_KEY + CONFIG_SUFFIX;

   private static final String TIMESTAMPS_KEY = "timestamps";

   /**
    * Name of the configuration that should be used for timestamp caches.
    *
    * @see #DEF_TIMESTAMPS_RESOURCE
    */
   public static final String TIMESTAMPS_CACHE_RESOURCE_PROP = PREFIX + TIMESTAMPS_KEY + CONFIG_SUFFIX;

   private static final String QUERY_KEY = "query";

   /**
    * Name of the configuration that should be used for query caches.
    *
    * @see #DEF_QUERY_RESOURCE
    */
   public static final String QUERY_CACHE_RESOURCE_PROP = PREFIX + QUERY_KEY + CONFIG_SUFFIX;

   /**
    * Default value for {@link #INFINISPAN_CONFIG_RESOURCE_PROP}. Specifies the "infinispan-configs.xml" file in this package.
    */
   public static final String DEF_INFINISPAN_CONFIG_RESOURCE = "org/hibernate/cache/infinispan/builder/infinispan-configs.xml";

   /**
    * Default value for {@link #ENTITY_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_ENTITY_RESOURCE = "entity";

   /**
    * Default value for {@link #TIMESTAMPS_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_TIMESTAMPS_RESOURCE = "timestamps";

   /**
    * Default value for {@link #QUERY_CACHE_RESOURCE_PROP}.
    */
   public static final String DEF_QUERY_RESOURCE = "local-query";

   /**
    * Default value for {@link #INFINISPAN_USE_SYNCHRONIZATION_PROP}.
    */
   public static final boolean DEF_USE_SYNCHRONIZATION = true;

   /**
    * Name of the pending puts cache.
    */
   public static final String PENDING_PUTS_CACHE_NAME = "pending-puts";

   private EmbeddedCacheManager manager;

   private final Map typeOverrides = new HashMap();

   private final Set definedConfigurations = new HashSet();

   private org.infinispan.transaction.lookup.TransactionManagerLookup transactionManagerlookup;

   private List regionNames = new ArrayList();

	@Override
	public CollectionRegion buildCollectionRegion(
			String regionName,
			Properties properties,
			CacheDataDescription metadata) throws CacheException {
		if ( log.isDebugEnabled() ) {
			log.debug( "Building collection cache region [" + regionName + "]" );
		}
		final AdvancedCache cache = getCache( regionName, COLLECTION_KEY, properties );
		final CollectionRegionImpl region = new CollectionRegionImpl( cache, regionName, metadata, this );
		startRegion( region, regionName );
		return region;
	}

	@Override
	public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
			throws CacheException {
		if ( log.isDebugEnabled() ) {
			log.debug( "Building entity cache region [" + regionName + "]" );
		}
		final AdvancedCache cache = getCache( regionName, ENTITY_KEY, properties );
		final EntityRegionImpl region = new EntityRegionImpl( cache, regionName, metadata, this );
		startRegion( region, regionName );
		return region;
	}

	/**
	 * Create a new instance using the default configuration.
	 */
	public InfinispanRegionFactory() {
	}

	/**
	 * Create a new instance using conifguration properties in props.
	 *
	 * @param props Environmental properties; currently unused.
	 */
	@SuppressWarnings("UnusedParameters")
	public InfinispanRegionFactory(Properties props) {
	}


	@Override
	public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
File
InfinispanRegionFactory.java
Developer's decision
Combination
Kind of conflict
Annotation
Attribute
Comment
Method declaration
Method invocation
Chunk
Conflicting content
		startRegion( region, regionName );
		return region;
	}
<<<<<<< HEAD
	
   /**
    * {@inheritDoc}
    */
   public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties)
            throws CacheException {
      if (log.isDebugEnabled()) log.debug("Building query results cache region [" + regionName + "]");
      String cacheName = typeOverrides.get(QUERY_KEY).getCacheName();
      // If region name is not default one, lookup a cache for that region name
      if (!regionName.equals("org.hibernate.cache.internal.StandardQueryCache"))
         cacheName = regionName;

      AdvancedCache cache = getCache(cacheName, QUERY_KEY, properties);
      QueryResultsRegionImpl region = new QueryResultsRegionImpl(
            cache, regionName, this);
      startRegion(region, regionName);
      return region;
   }

   /**
    * {@inheritDoc}
    */
   public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties)
            throws CacheException {
      if (log.isDebugEnabled()) log.debug("Building timestamps cache region [" + regionName + "]");
      AdvancedCache cache = getCache(regionName, TIMESTAMPS_KEY, properties);
      TimestampsRegionImpl region = createTimestampsRegion(cache, regionName);
      startRegion(region, regionName);
      return region;
   }

   protected TimestampsRegionImpl createTimestampsRegion(
         AdvancedCache cache, String regionName) {
      if (Caches.isClustered(cache))
         return new ClusteredTimestampsRegionImpl(cache, regionName, this);
      else
         return new TimestampsRegionImpl(cache, regionName, this);
   }

   /**
    * {@inheritDoc}
    */
   public boolean isMinimalPutsEnabledByDefault() {
      return true;
   }

   @Override
   public AccessType getDefaultAccessType() {
      return AccessType.TRANSACTIONAL;
   }

   /**
    * {@inheritDoc}
    */
   public long nextTimestamp() {
      return System.currentTimeMillis() / 100;
   }
   
   public void setCacheManager(EmbeddedCacheManager manager) {
      this.manager = manager;
   }

   public EmbeddedCacheManager getCacheManager() {
      return manager;
   }

	/**
	 * Start the region factory: resolve the transaction manager lookup,
	 * collect configuration overrides from the settings, then build the
	 * cache manager and define the generic data-type cache configurations.
	 *
	 * @throws CacheException if any step of the startup fails
	 */
	@Override
	public void start() {
		log.debug("Starting Infinispan region factory");
		try {
			transactionManagerlookup = createTransactionManagerLookup( getServiceRegistry() );
			// Seed default cache names per generic data type before overrides apply.
			initGenericDataTypeOverrides();
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			Map settings = configurationService.getSettings();
			// Collect per-region/type overrides from hibernate.cache.infinispan.* keys.
			for(Object key : settings.keySet()){
				int prefixLoc;
				if ((prefixLoc = key.toString().indexOf( PREFIX )) != -1) {
					dissectProperty(prefixLoc, key.toString(), settings);
				}
			}
			manager = createCacheManager(settings);
			defineGenericDataTypeCacheConfigurations( settings);
	         definePendingPutsCache();
		} catch (CacheException ce) {
			throw ce;
		} catch (Throwable t) {
			throw new CacheException("Unable to start region factory", t);
		}
	}

	/**
    * {@inheritDoc}
    * <p>
    * Delegates to {@link #start()}; the {@code settings} and
    * {@code properties} arguments are ignored in favour of the service
    * registry's configuration.
    */
   @Override
   public void start(Settings settings, Properties properties) throws CacheException {
     	start();
   }

   /**
    * Register the configuration for the internal pending-puts cache: a
    * local, non-transactional cache with aggressive expiration, used by the
    * put-from-load validator.
    */
   private void definePendingPutsCache() {
      ConfigurationBuilder pendingPutsBuilder = new ConfigurationBuilder();
      // Local only: pending-put tracking never needs replication.
      pendingPutsBuilder.clustering().cacheMode(CacheMode.LOCAL);
      // Non-transactional, but locking is still required because the
      // putFromLoad validator uses conditional operations (putIfAbsent).
      pendingPutsBuilder.transaction().transactionMode(TransactionMode.NON_TRANSACTIONAL);
      // Entries are short-lived, so expire aggressively on idle.
      pendingPutsBuilder.expiration().maxIdle(TimeUnit.SECONDS.toMillis(60));
      pendingPutsBuilder.storeAsBinary().enabled(false);
      pendingPutsBuilder.locking().isolationLevel(IsolationLevel.READ_COMMITTED);
      pendingPutsBuilder.jmxStatistics().disable();

      manager.defineConfiguration(PENDING_PUTS_CACHE_NAME, pendingPutsBuilder.build());
   }

   // Factory hook for the Infinispan TransactionManager lookup; protected so
   // subclasses/tests can substitute their own lookup strategy.
   protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(
            ServiceRegistry sr) {
      return new HibernateTransactionManagerLookup(sr);
   }


	/**
    * {@inheritDoc}
    * <p>
    * Clears region references from the cache command factory, then stops
    * the underlying cache manager.
    */
   @Override
   public void stop() {
      log.debug("Stop region factory");
      stopCacheRegions();
      stopCacheManager();
   }

   // Drop all region references registered with the cache command factory and
   // forget the region names tracked by this factory.
   protected void stopCacheRegions() {
      log.debug("Clear region references");
      getCacheCommandFactory(manager.getCache().getAdvancedCache())
            .clearRegions(regionNames);
      regionNames.clear();
   }

   // Stop the underlying Infinispan cache manager; overridable for managers
   // whose lifecycle is owned elsewhere.
   protected void stopCacheManager() {
      log.debug("Stop cache manager");
      manager.stop();
   }
   
   /**
    * Returns an unmodifiable map containing configured entity/collection type configuration overrides.
    * This method should be used primarily for testing/checking purposes.
    *
    * @return an unmodifiable map.
    */
   public Map getTypeOverrides() {
      return Collections.unmodifiableMap(typeOverrides);
   }
   
   // Names of the cache configurations this factory has defined on the
   // manager so far; exposed read-only, primarily for testing.
   public Set getDefinedConfigurations() {
      return Collections.unmodifiableSet(definedConfigurations);
   }

   /**
    * Create the Infinispan cache manager from the configuration resource
    * named by the {@code hibernate.cache.infinispan.cfg} setting (falling
    * back to the bundled default), applying any global JMX statistics
    * override.
    *
    * @param properties the resolved configuration settings
    * @return a started {@link EmbeddedCacheManager}
    * @throws CacheException if the configuration resource cannot be read
    */
   protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
      try {
         String configLoc = ConfigurationHelper.getString(
               INFINISPAN_CONFIG_RESOURCE_PROP, properties, DEF_INFINISPAN_CONFIG_RESOURCE);
         ClassLoader classLoader = ClassLoaderHelper.getContextClassLoader();
         ParserRegistry parserRegistry = new ParserRegistry(classLoader);
         InputStream is = FileLookupFactory.newInstance().lookupFileStrict(
               configLoc, classLoader);
         ConfigurationBuilderHolder holder;
         // Close the stream once parsed; the previous version leaked it.
         try {
            holder = parserRegistry.parse(is);
         } finally {
            is.close();
         }

         // Override global jmx statistics exposure
         String globalStats = extractProperty(
               INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
         if (globalStats != null)
            holder.getGlobalConfigurationBuilder().globalJmxStatistics()
                  .enabled(Boolean.parseBoolean(globalStats));

         return createCacheManager(holder);
      } catch (IOException e) {
         throw new CacheException("Unable to create default cache manager", e);
      }
   }

   // Factory hook building the manager from a parsed configuration holder;
   // the second argument starts the manager immediately.
   protected EmbeddedCacheManager createCacheManager(
         ConfigurationBuilderHolder holder) {
      return new DefaultCacheManager(holder, true);
   }

   // Track the region name and register the region with the cache command
   // factory so remote cache commands can be routed to it.
   private void startRegion(BaseRegion region, String regionName) {
      regionNames.add(regionName);
      getCacheCommandFactory(region.getCache()).addRegion(regionName, region);
   }

   /**
    * Seed the per-datatype override map with the default cache names used
    * for entities, collections, natural ids, timestamps and query results.
    */
   public void initGenericDataTypeOverrides() {
      TypeOverrides overridesForEntities = new TypeOverrides();
      overridesForEntities.setCacheName(DEF_ENTITY_RESOURCE);
      typeOverrides.put(ENTITY_KEY, overridesForEntities);

      // Collections and natural ids share the entity cache defaults.
      TypeOverrides overridesForCollections = new TypeOverrides();
      overridesForCollections.setCacheName(DEF_ENTITY_RESOURCE);
      typeOverrides.put(COLLECTION_KEY, overridesForCollections);

      TypeOverrides overridesForNaturalIds = new TypeOverrides();
      overridesForNaturalIds.setCacheName(DEF_ENTITY_RESOURCE);
      typeOverrides.put(NATURAL_ID_KEY, overridesForNaturalIds);

      // Timestamps get a specialized override type with extra validation.
      TypeOverrides overridesForTimestamps = new TimestampTypeOverrides();
      overridesForTimestamps.setCacheName(DEF_TIMESTAMPS_RESOURCE);
      typeOverrides.put(TIMESTAMPS_KEY, overridesForTimestamps);

      TypeOverrides overridesForQueries = new TypeOverrides();
      overridesForQueries.setCacheName(DEF_QUERY_RESOURCE);
      typeOverrides.put(QUERY_KEY, overridesForQueries);
   }

   /**
    * Parse a single hibernate.cache.infinispan.* property key and record the
    * corresponding override (cache name, eviction or expiration setting) for
    * the region/type named between the prefix and the recognized suffix.
    *
    * @param prefixLoc index of PREFIX within the key
    * @param key the full property key
    * @param properties settings map to read the value from
    */
   private void dissectProperty(int prefixLoc, String key, Map properties) {
      TypeOverrides cfgOverride;
      int suffixLoc;
      // Each branch both tests for a suffix and captures its position via the
      // embedded assignment; first matching suffix wins.
      if (!key.equals(INFINISPAN_CONFIG_RESOURCE_PROP) && (suffixLoc = key.indexOf(CONFIG_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setCacheName(extractProperty(key, properties));
      } else if ((suffixLoc = key.indexOf(STRATEGY_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setEvictionStrategy(extractProperty(key, properties));
      } else if ((suffixLoc = key.indexOf(WAKE_UP_INTERVAL_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setEvictionWakeUpInterval(Long.parseLong(extractProperty(key, properties)));
      } else if ((suffixLoc = key.indexOf(MAX_ENTRIES_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setEvictionMaxEntries(Integer.parseInt(extractProperty(key, properties)));
      } else if ((suffixLoc = key.indexOf(LIFESPAN_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setExpirationLifespan(Long.parseLong(extractProperty(key, properties)));
      } else if ((suffixLoc = key.indexOf(MAX_IDLE_SUFFIX)) != -1) {
         cfgOverride = getOrCreateConfig(prefixLoc, key, suffixLoc);
         cfgOverride.setExpirationMaxIdle(Long.parseLong(extractProperty(key, properties)));
      }
   }

   /**
    * Read a configuration value for the given key and log it at debug level.
    */
   private String extractProperty(String key, Map properties) {
      final String extracted = ConfigurationHelper.getString( key, properties );
      log.debugf("Configuration override via property %s: %s", key, extracted);
      return extracted;
   }

   /**
    * Look up the override entry for the region/type name embedded in the
    * property key, creating and registering an empty one if absent.
    */
   private TypeOverrides getOrCreateConfig(int prefixLoc, String key, int suffixLoc) {
      final String overrideName = key.substring(prefixLoc + PREFIX.length(), suffixLoc);
      TypeOverrides existing = typeOverrides.get(overrideName);
      if (existing != null) {
         return existing;
      }
      TypeOverrides created = new TypeOverrides();
      typeOverrides.put(overrideName, created);
      return created;
   }

   /**
    * For each generic data type (entity, collection, timestamps, query),
    * build its cache configuration from the base template plus any
    * collected overrides, validate it, and define it on the manager.
    * NOTE: natural-id regions are not in this list — presumably they fall
    * back to region-specific handling in getCache; verify against callers.
    */
   private void defineGenericDataTypeCacheConfigurations(Map properties) {
      String[] defaultGenericDataTypes = new String[]{ENTITY_KEY, COLLECTION_KEY, TIMESTAMPS_KEY, QUERY_KEY};
      for (String type : defaultGenericDataTypes) {
         TypeOverrides override = overrideStatisticsIfPresent(typeOverrides.get(type), properties);
         String cacheName = override.getCacheName();
         ConfigurationBuilder builder = new ConfigurationBuilder();
         // Read base configuration
         applyConfiguration(cacheName, builder);

         // Apply overrides
         override.applyTo(builder);
         // Configure transaction manager
         configureTransactionManager(builder, cacheName, properties);
         // Define configuration, validate and then apply
         Configuration cfg = builder.build();
         override.validateInfinispanConfiguration(cfg);
         manager.defineConfiguration(cacheName, cfg);
         definedConfigurations.add(cacheName);
      }
   }

   /**
    * Obtain (and lazily define) the cache backing the given region. When the
    * region has no configuration defined yet, one is assembled from the
    * region-specific override (when present) or the generic data-type
    * template, transaction manager settings are applied, and the resulting
    * configuration is registered under the region name.
    *
    * @param regionName name of the cache region
    * @param typeKey generic data-type key (entity, collection, query, ...)
    * @param properties configuration settings
    * @return the (possibly wrapped) advanced cache for the region
    */
   private AdvancedCache getCache(String regionName, String typeKey, Properties properties) {
      TypeOverrides regionOverride = typeOverrides.get(regionName);
      if (!definedConfigurations.contains(regionName)) {
         String templateCacheName;
         // (removed unused local 'Configuration regionCacheCfg' left over from
         // an earlier revision)
         ConfigurationBuilder builder = new ConfigurationBuilder();
         if (regionOverride != null) {
            if (log.isDebugEnabled()) log.debug("Cache region specific configuration exists: " + regionOverride);
            String cacheName = regionOverride.getCacheName();
            if (cacheName != null) // Region specific override with a given cache name
               templateCacheName = cacheName;
            else // Region specific override without cache name, so template cache name is generic for data type.
               templateCacheName = typeOverrides.get(typeKey).getCacheName();

            // Read template configuration
            applyConfiguration(templateCacheName, builder);

            regionOverride = overrideStatisticsIfPresent(regionOverride, properties);
            regionOverride.applyTo(builder);

         } else {
            // No region specific overrides, template cache name is generic for data type.
            templateCacheName = typeOverrides.get(typeKey).getCacheName();
            // Read template configuration
            builder.read(manager.getCacheConfiguration(templateCacheName));
            // Apply overrides
            typeOverrides.get(typeKey).applyTo(builder);
         }
         // Configure transaction manager
         configureTransactionManager(builder, templateCacheName, properties);
         // Define configuration
         manager.defineConfiguration(regionName, builder.build());
         definedConfigurations.add(regionName);
      }
      AdvancedCache cache = manager.getCache(regionName).getAdvancedCache();
      if (!cache.getStatus().allowInvocations()) {
         cache.start();
      }
      return createCacheWrapper(cache);
   }

   /**
    * Copy the named cache's configuration into the builder, if one exists.
    */
   private void applyConfiguration(String cacheName, ConfigurationBuilder builder) {
      final Configuration existing = manager.getCacheConfiguration(cacheName);
      if (existing == null) {
         return;
      }
      builder.read(existing);
   }

   /**
    * Locate this module's {@link CacheCommandFactory} among the module
    * command factories registered with the cache's global component
    * registry.
    *
    * @throws CacheException if the factory is not installed (typically a
    *         classloading problem between Infinispan and this provider)
    */
   private CacheCommandFactory getCacheCommandFactory(AdvancedCache cache) {
      final GlobalComponentRegistry registry =
            cache.getComponentRegistry().getGlobalComponentRegistry();

      final Map commandFactories =
            (Map) registry.getComponent("org.infinispan.modules.command.factories");

      for (ModuleCommandFactory candidate : commandFactories.values()) {
         if (candidate instanceof CacheCommandFactory) {
            return (CacheCommandFactory) candidate;
         }
      }

      throw new CacheException("Infinispan custom cache command factory not " +
            "installed (possibly because the classloader where Infinispan " +
            "lives couldn't find the Hibernate Infinispan cache provider)");
   }

   // Identity by default; subclasses may wrap the cache (e.g. to add
   // classloader-aware or validation decorators).
   protected AdvancedCache createCacheWrapper(AdvancedCache cache) {
      return cache;
   }

   private void configureTransactionManager(ConfigurationBuilder builder,
         String cacheName, Map properties) {
      // Get existing configuration to verify whether a tm was configured or not.
      Configuration baseCfg = manager.getCacheConfiguration(cacheName);
      if (baseCfg != null && baseCfg.transaction().transactionMode().isTransactional()) {
         String ispnTmLookupClassName = baseCfg.transaction().transactionManagerLookup().getClass().getName();
         String hbTmLookupClassName = org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup.class.getName();
         if (GenericTransactionManagerLookup.class.getName().equals(ispnTmLookupClassName)) {
            log.debug("Using default Infinispan transaction manager lookup " +
                  "instance (GenericTransactionManagerLookup), overriding it " +
                  "with Hibernate transaction manager lookup");
            builder.transaction().transactionManagerLookup(transactionManagerlookup);
         } else if (ispnTmLookupClassName != null && !ispnTmLookupClassName.equals(hbTmLookupClassName)) {
            log.debug("Infinispan is configured [" + ispnTmLookupClassName + "] with a different transaction manager lookup " +
                            "class than Hibernate [" + hbTmLookupClassName + "]");
         } else {
            // Infinispan TM lookup class null, so apply Hibernate one directly
            builder.transaction().transactionManagerLookup(transactionManagerlookup);
         }

	protected void stopCacheRegions() {
         String useSyncProp = extractProperty(INFINISPAN_USE_SYNCHRONIZATION_PROP, properties);
         boolean useSync = useSyncProp == null ? DEF_USE_SYNCHRONIZATION : Boolean.parseBoolean(useSyncProp);
         builder.transaction().useSynchronization(useSync);
      }
   }

   /**
    * Apply the global statistics property, when set, to the given override
    * and return it.
    */
   private TypeOverrides overrideStatisticsIfPresent(TypeOverrides override, Map properties) {
      final String statsSetting = extractProperty(INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
      if (statsSetting == null) {
         return override;
      }
      override.setExposeStatistics(Boolean.parseBoolean(statsSetting));
      return override;
   }

=======

	protected TimestampsRegionImpl createTimestampsRegion(
			AdvancedCache cache, String regionName) {
		if ( Caches.isClustered( cache ) ) {
			return new ClusteredTimestampsRegionImpl( cache, regionName, this );
		}
		else {
			return new TimestampsRegionImpl( cache, regionName, this );
		}
	}

	@Override
	public boolean isMinimalPutsEnabledByDefault() {
		return true;
	}

	@Override
	public AccessType getDefaultAccessType() {
		return AccessType.TRANSACTIONAL;
	}

	@Override
	public long nextTimestamp() {
		return System.currentTimeMillis() / 100;
	}

	public void setCacheManager(EmbeddedCacheManager manager) {
		this.manager = manager;
	}

	public EmbeddedCacheManager getCacheManager() {
		return manager;
	}

	@Override
	public void start(Settings settings, Properties properties) throws CacheException {
		log.debug( "Starting Infinispan region factory" );
		try {
			transactionManagerlookup = createTransactionManagerLookup( settings, properties );
			manager = createCacheManager( properties );
			initGenericDataTypeOverrides();
			final Enumeration keys = properties.propertyNames();
			while ( keys.hasMoreElements() ) {
				final String key = (String) keys.nextElement();
				int prefixLoc;
				if ( (prefixLoc = key.indexOf( PREFIX )) != -1 ) {
					dissectProperty( prefixLoc, key, properties );
				}
			}
			defineGenericDataTypeCacheConfigurations( properties );
			definePendingPutsCache();
		}
		catch (CacheException ce) {
			throw ce;
		}
		catch (Throwable t) {
			throw new CacheException( "Unable to start region factory", t );
		}
	}

	private void definePendingPutsCache() {
		final ConfigurationBuilder builder = new ConfigurationBuilder();
		// A local, lightweight cache for pending puts, which is
		// non-transactional and has aggressive expiration settings.
		// Locking is still required since the putFromLoad validator
		// code uses conditional operations (i.e. putIfAbsent).
		builder.clustering().cacheMode( CacheMode.LOCAL )
				.transaction().transactionMode( TransactionMode.NON_TRANSACTIONAL )
				.expiration().maxIdle( TimeUnit.SECONDS.toMillis( 60 ) )
				.storeAsBinary().enabled( false )
				.locking().isolationLevel( IsolationLevel.READ_COMMITTED )
				.jmxStatistics().disable();

		manager.defineConfiguration( PENDING_PUTS_CACHE_NAME, builder.build() );
	}

	protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(
			Settings settings, Properties properties) {
		return new HibernateTransactionManagerLookup( settings, properties );
	}

	@Override
	public void stop() {
		log.debug( "Stop region factory" );
		stopCacheRegions();
		stopCacheManager();
	}
		log.debug( "Clear region references" );
		getCacheCommandFactory( manager.getCache().getAdvancedCache() )
				.clearRegions( regionNames );
		regionNames.clear();
	}

	protected void stopCacheManager() {
		log.debug( "Stop cache manager" );
		manager.stop();
	}

	/**
	 * Returns an unmodifiable map containing configured entity/collection type configuration overrides.
	 * This method should be used primarily for testing/checking purpouses.
	 *
	 * @return an unmodifiable map.
	 */
	public Map getTypeOverrides() {
		return Collections.unmodifiableMap( typeOverrides );
	}

	public Set getDefinedConfigurations() {
		return Collections.unmodifiableSet( definedConfigurations );
	}

	protected EmbeddedCacheManager createCacheManager(Properties properties) throws CacheException {
		try {
			final String configLoc = ConfigurationHelper.getString(
					INFINISPAN_CONFIG_RESOURCE_PROP, properties, DEF_INFINISPAN_CONFIG_RESOURCE
			);
			ClassLoader classLoader = ClassLoaderHelper.getContextClassLoader();
			InputStream is;
			try {
				is = FileLookupFactory.newInstance().lookupFileStrict( configLoc, classLoader );
			}
			catch (FileNotFoundException e) {
				// In some environments (ex: OSGi), hibernate-infinispan may not
				// be in the app CL.  It's important to also try this CL.
				classLoader = this.getClass().getClassLoader();
				is = FileLookupFactory.newInstance().lookupFileStrict( configLoc, classLoader );
			}
			final ParserRegistry parserRegistry = new ParserRegistry( classLoader );
			final ConfigurationBuilderHolder holder = parserRegistry.parse( is );

			// Override global jmx statistics exposure
			final String globalStats = extractProperty( INFINISPAN_GLOBAL_STATISTICS_PROP, properties );
			if ( globalStats != null ) {
				holder.getGlobalConfigurationBuilder().globalJmxStatistics()
						.enabled( Boolean.parseBoolean( globalStats ) );
			}

			return createCacheManager( holder );
		}
		catch (IOException e) {
			throw new CacheException( "Unable to create default cache manager", e );
		}
	}

	protected EmbeddedCacheManager createCacheManager(
			ConfigurationBuilderHolder holder) {
		return new DefaultCacheManager( holder, true );
	}

	private void startRegion(BaseRegion region, String regionName) {
		regionNames.add( regionName );
		getCacheCommandFactory( region.getCache() ).addRegion( regionName, region );
	}

	private Map initGenericDataTypeOverrides() {
		final TypeOverrides entityOverrides = new TypeOverrides();
		entityOverrides.setCacheName( DEF_ENTITY_RESOURCE );
		typeOverrides.put( ENTITY_KEY, entityOverrides );
		final TypeOverrides collectionOverrides = new TypeOverrides();
		collectionOverrides.setCacheName( DEF_ENTITY_RESOURCE );
		typeOverrides.put( COLLECTION_KEY, collectionOverrides );
		final TypeOverrides naturalIdOverrides = new TypeOverrides();
		naturalIdOverrides.setCacheName( DEF_ENTITY_RESOURCE );
		typeOverrides.put( NATURAL_ID_KEY, naturalIdOverrides );
		final TypeOverrides timestampOverrides = new TimestampTypeOverrides();
		timestampOverrides.setCacheName( DEF_TIMESTAMPS_RESOURCE );
		typeOverrides.put( TIMESTAMPS_KEY, timestampOverrides );
		final TypeOverrides queryOverrides = new TypeOverrides();
		queryOverrides.setCacheName( DEF_QUERY_RESOURCE );
		typeOverrides.put( QUERY_KEY, queryOverrides );
		return typeOverrides;
	}

	private void dissectProperty(int prefixLoc, String key, Properties properties) {
		final TypeOverrides cfgOverride;
		int suffixLoc;
		if ( !key.equals( INFINISPAN_CONFIG_RESOURCE_PROP ) && (suffixLoc = key.indexOf( CONFIG_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setCacheName( extractProperty( key, properties ) );
		}
		else if ( (suffixLoc = key.indexOf( STRATEGY_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setEvictionStrategy( extractProperty( key, properties ) );
		}
		else if ( (suffixLoc = key.indexOf( WAKE_UP_INTERVAL_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setEvictionWakeUpInterval( Long.parseLong( extractProperty( key, properties ) ) );
		}
		else if ( (suffixLoc = key.indexOf( MAX_ENTRIES_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setEvictionMaxEntries( Integer.parseInt( extractProperty( key, properties ) ) );
		}
		else if ( (suffixLoc = key.indexOf( LIFESPAN_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setExpirationLifespan( Long.parseLong( extractProperty( key, properties ) ) );
		}
		else if ( (suffixLoc = key.indexOf( MAX_IDLE_SUFFIX )) != -1 ) {
			cfgOverride = getOrCreateConfig( prefixLoc, key, suffixLoc );
			cfgOverride.setExpirationMaxIdle( Long.parseLong( extractProperty( key, properties ) ) );
		}
	}

	private String extractProperty(String key, Properties properties) {
		final String value = ConfigurationHelper.extractPropertyValue( key, properties );
		log.debugf( "Configuration override via property %s: %s", key, value );
		return value;
	}

	private TypeOverrides getOrCreateConfig(int prefixLoc, String key, int suffixLoc) {
		final String name = key.substring( prefixLoc + PREFIX.length(), suffixLoc );
		TypeOverrides cfgOverride = typeOverrides.get( name );
		if ( cfgOverride == null ) {
			cfgOverride = new TypeOverrides();
			typeOverrides.put( name, cfgOverride );
		}
		return cfgOverride;
	}

	private void defineGenericDataTypeCacheConfigurations(Properties properties) {
		final String[] defaultGenericDataTypes = new String[] {ENTITY_KEY, COLLECTION_KEY, TIMESTAMPS_KEY, QUERY_KEY};
		for ( String type : defaultGenericDataTypes ) {
			final TypeOverrides override = overrideStatisticsIfPresent( typeOverrides.get( type ), properties );
			final String cacheName = override.getCacheName();
			final ConfigurationBuilder builder = new ConfigurationBuilder();
			// Read base configuration
			applyConfiguration( cacheName, builder );

			// Apply overrides
			override.applyTo( builder );
			// Configure transaction manager
			configureTransactionManager( builder, cacheName, properties );
			// Define configuration, validate and then apply
			final Configuration cfg = builder.build();
			override.validateInfinispanConfiguration( cfg );
			manager.defineConfiguration( cacheName, cfg );
			definedConfigurations.add( cacheName );
		}
	}

	private AdvancedCache getCache(String regionName, String typeKey, Properties properties) {
		TypeOverrides regionOverride = typeOverrides.get( regionName );
		if ( !definedConfigurations.contains( regionName ) ) {
			final String templateCacheName;
			final ConfigurationBuilder builder = new ConfigurationBuilder();
			if ( regionOverride != null ) {
				if ( log.isDebugEnabled() ) {
					log.debug( "Cache region specific configuration exists: " + regionOverride );
				}
				final String cacheName = regionOverride.getCacheName();
				if ( cacheName != null ) {
					// Region specific override with a given cache name
					templateCacheName = cacheName;
				}
				else {
					// Region specific override without cache name, so template cache name is generic for data type.
					templateCacheName = typeOverrides.get( typeKey ).getCacheName();
				}

				// Read template configuration
				applyConfiguration( templateCacheName, builder );

				regionOverride = overrideStatisticsIfPresent( regionOverride, properties );
				regionOverride.applyTo( builder );

			}
			else {
				// No region specific overrides, template cache name is generic for data type.
				templateCacheName = typeOverrides.get( typeKey ).getCacheName();
				// Read template configuration
				builder.read( manager.getCacheConfiguration( templateCacheName ) );
				// Apply overrides
				typeOverrides.get( typeKey ).applyTo( builder );
			}
			// Configure transaction manager
			configureTransactionManager( builder, templateCacheName, properties );
			// Define configuration
			manager.defineConfiguration( regionName, builder.build() );
			definedConfigurations.add( regionName );
		}
		final AdvancedCache cache = manager.getCache( regionName ).getAdvancedCache();
		if ( !cache.getStatus().allowInvocations() ) {
			cache.start();
		}
		return createCacheWrapper( cache );
	}

	private void applyConfiguration(String cacheName, ConfigurationBuilder builder) {
		final Configuration cfg = manager.getCacheConfiguration( cacheName );
		if ( cfg != null ) {
			builder.read( cfg );
		}
	}

	private CacheCommandFactory getCacheCommandFactory(AdvancedCache cache) {
		final GlobalComponentRegistry globalCr = cache.getComponentRegistry().getGlobalComponentRegistry();

		final Map factories =
				(Map) globalCr.getComponent( "org.infinispan.modules.command.factories" );

		for ( ModuleCommandFactory factory : factories.values() ) {
			if ( factory instanceof CacheCommandFactory ) {
				return (CacheCommandFactory) factory;
			}
		}

		throw new CacheException(
				"Infinispan custom cache command factory not " +
						"installed (possibly because the classloader where Infinispan " +
						"lives couldn't find the Hibernate Infinispan cache provider)"
		);
	}

	protected AdvancedCache createCacheWrapper(AdvancedCache cache) {
		return cache;
	}

	private void configureTransactionManager(
			ConfigurationBuilder builder,
			String cacheName,
			Properties properties) {
		// Get existing configuration to verify whether a tm was configured or not.
		final Configuration baseCfg = manager.getCacheConfiguration( cacheName );
		if ( baseCfg != null && baseCfg.transaction().transactionMode().isTransactional() ) {
			final String ispnTmLookupClassName = baseCfg.transaction().transactionManagerLookup().getClass().getName();
			final String hbTmLookupClassName = org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup.class.getName();
			if ( GenericTransactionManagerLookup.class.getName().equals( ispnTmLookupClassName ) ) {
				log.debug(
						"Using default Infinispan transaction manager lookup " +
								"instance (GenericTransactionManagerLookup), overriding it " +
								"with Hibernate transaction manager lookup"
				);
				builder.transaction().transactionManagerLookup( transactionManagerlookup );
			}
			else if ( ispnTmLookupClassName != null && !ispnTmLookupClassName.equals( hbTmLookupClassName ) ) {
				log.debug(
						"Infinispan is configured [" + ispnTmLookupClassName + "] with a different transaction manager lookup " +
								"class than Hibernate [" + hbTmLookupClassName + "]"
				);
			}
			else {
				// Infinispan TM lookup class null, so apply Hibernate one directly
				builder.transaction().transactionManagerLookup( transactionManagerlookup );
			}

			final String useSyncProp = extractProperty( INFINISPAN_USE_SYNCHRONIZATION_PROP, properties );
			final boolean useSync = useSyncProp == null ? DEF_USE_SYNCHRONIZATION : Boolean.parseBoolean( useSyncProp );
			builder.transaction().useSynchronization( useSync );
		}
	}

	private TypeOverrides overrideStatisticsIfPresent(TypeOverrides override, Properties properties) {
		final String globalStats = extractProperty( INFINISPAN_GLOBAL_STATISTICS_PROP, properties );
		if ( globalStats != null ) {
			override.setExposeStatistics( Boolean.parseBoolean( globalStats ) );
		}
		return override;
	}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
Solution content
   }

   @Override
		startRegion( region, regionName );
		return region;
	}

   protected TimestampsRegionImpl createTimestampsRegion(
         AdvancedCache cache, String regionName) {
      if (Caches.isClustered(cache))
         return new ClusteredTimestampsRegionImpl(cache, regionName, this);
      else
         return new TimestampsRegionImpl(cache, regionName, this);
   }

   @Override
   public boolean isMinimalPutsEnabledByDefault() {
      return true;
   public AccessType getDefaultAccessType() {
      return AccessType.TRANSACTIONAL;
   }

	@Override
   public long nextTimestamp() {
      return System.currentTimeMillis() / 100;
   }

   public void setCacheManager(EmbeddedCacheManager manager) {
      this.manager = manager;
   }

   public EmbeddedCacheManager getCacheManager() {
      return manager;
   }

	@Override
	public void start() {
		log.debug("Starting Infinispan region factory");
		try {
			transactionManagerlookup = createTransactionManagerLookup( getServiceRegistry() );
			initGenericDataTypeOverrides();
			ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
			Map settings = configurationService.getSettings();
			for(Object key : settings.keySet()){
				int prefixLoc;
				if ((prefixLoc = key.toString().indexOf( PREFIX )) != -1) {
					dissectProperty(prefixLoc, key.toString(), settings);
				}
			}
			manager = createCacheManager(settings);
			defineGenericDataTypeCacheConfigurations( settings);
	         definePendingPutsCache();
		} catch (CacheException ce) {
			throw ce;
		} catch (Throwable t) {
			throw new CacheException("Unable to start region factory", t);
		}
	}

   private void definePendingPutsCache() {
      ConfigurationBuilder builder = new ConfigurationBuilder();
      // A local, lightweight cache for pending puts, which is
      // non-transactional and has aggressive expiration settings.
      // Locking is still required since the putFromLoad validator
      // code uses conditional operations (i.e. putIfAbsent).
      }
      builder.clustering().cacheMode(CacheMode.LOCAL)
         .transaction().transactionMode(TransactionMode.NON_TRANSACTIONAL)
         .expiration().maxIdle(TimeUnit.SECONDS.toMillis(60))
         .storeAsBinary().enabled(false)
         .locking().isolationLevel(IsolationLevel.READ_COMMITTED)
         .jmxStatistics().disable();

      manager.defineConfiguration(PENDING_PUTS_CACHE_NAME, builder.build());
   }

   protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(
            ServiceRegistry sr) {
      return new HibernateTransactionManagerLookup(sr);
   }


   @Override
   public void stop() {
      log.debug("Stop region factory");
      stopCacheRegions();
      stopCacheManager();
   }

   protected void stopCacheRegions() {
      log.debug("Clear region references");
      getCacheCommandFactory(manager.getCache().getAdvancedCache())
            .clearRegions(regionNames);
      regionNames.clear();
   }

   protected void stopCacheManager() {
      log.debug("Stop cache manager");
      manager.stop();
   }

   /**
    * Returns an unmodifiable map containing configured entity/collection type configuration overrides.
    * This method should be used primarily for testing/checking purpouses.
    *
    * @return an unmodifiable map.
    */
   public Map getTypeOverrides() {
      return Collections.unmodifiableMap(typeOverrides);
   }

   public Set getDefinedConfigurations() {
      return Collections.unmodifiableSet(definedConfigurations);
   }

   /**
    * Creates the {@link EmbeddedCacheManager} that backs all cache regions by
    * parsing the Infinispan configuration resource named by the
    * {@code hibernate.cache.infinispan.cfg} property (falling back to the
    * bundled default resource), then applying any global JMX statistics
    * override before delegating actual construction to
    * {@link #createCacheManager(ConfigurationBuilderHolder)}.
    *
    * @param properties configuration properties to read settings from
    * @return a started cache manager built from the parsed configuration
    * @throws CacheException if the configuration resource cannot be located or read
    */
   protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
      try {
         String configLoc = ConfigurationHelper.getString(
               INFINISPAN_CONFIG_RESOURCE_PROP, properties, DEF_INFINISPAN_CONFIG_RESOURCE);
         ClassLoader classLoader = ClassLoaderHelper.getContextClassLoader();
         InputStream is = FileLookupFactory.newInstance().lookupFileStrict(
               configLoc, classLoader);
         ParserRegistry parserRegistry = new ParserRegistry(classLoader);
         ConfigurationBuilderHolder holder;
         try {
            holder = parserRegistry.parse(is);
         } finally {
            // Close the configuration stream ourselves: the parser is not
            // guaranteed to close it, and leaving it open leaks a file handle.
            is.close();
         }

         // Override global jmx statistics exposure
         String globalStats = extractProperty(
               INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
         if (globalStats != null) {
            holder.getGlobalConfigurationBuilder().globalJmxStatistics()
                  .enabled(Boolean.parseBoolean(globalStats));
         }

         return createCacheManager(holder);
      } catch (IOException e) {
         throw new CacheException("Unable to create default cache manager", e);
      }
   }

   /**
    * Builds and starts a {@link DefaultCacheManager} from the parsed
    * configuration holder. Subclasses may override to customize how the cache
    * manager is instantiated.
    *
    * @param holder parsed Infinispan configuration
    * @return a started cache manager
    */
   protected EmbeddedCacheManager createCacheManager(
         ConfigurationBuilderHolder holder) {
      return new DefaultCacheManager(holder, true);
   }

   // Records the region name for later cleanup and registers the region with
   // the cache command factory so custom cache commands can be routed to it.
   private void startRegion(BaseRegion region, String regionName) {
      regionNames.add(regionName);
      getCacheCommandFactory(region.getCache()).addRegion(regionName, region);
   }

   /**
    * Seeds the override map with the built-in defaults for each generic data
    * type (entity, collection, natural-id, timestamps, query), pointing each
    * override at its default cache configuration name.
    */
   public void initGenericDataTypeOverrides() {
      registerDefaultOverride(ENTITY_KEY, new TypeOverrides(), DEF_ENTITY_RESOURCE);
      registerDefaultOverride(COLLECTION_KEY, new TypeOverrides(), DEF_ENTITY_RESOURCE);
      registerDefaultOverride(NATURAL_ID_KEY, new TypeOverrides(), DEF_ENTITY_RESOURCE);
      registerDefaultOverride(TIMESTAMPS_KEY, new TimestampTypeOverrides(), DEF_TIMESTAMPS_RESOURCE);
      registerDefaultOverride(QUERY_KEY, new TypeOverrides(), DEF_QUERY_RESOURCE);
   }

   // Points the given override at its default cache name and records it under
   // the generic data type key.
   private void registerDefaultOverride(String typeKey, TypeOverrides override, String cacheName) {
      override.setCacheName(cacheName);
      typeOverrides.put(typeKey, override);
   }

   /**
    * Parses one {@code hibernate.cache.infinispan.*} property key and applies
    * the setting it encodes to the matching region/type override. Keys are
    * recognized by suffix: cache config name, eviction strategy, eviction
    * wake-up interval, eviction max entries, expiration lifespan and
    * expiration max idle. Unrecognized keys are ignored.
    */
   private void dissectProperty(int prefixLoc, String key, Map properties) {
      int suffixLoc = key.indexOf(CONFIG_SUFFIX);
      if (suffixLoc != -1 && !key.equals(INFINISPAN_CONFIG_RESOURCE_PROP)) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setCacheName(extractProperty(key, properties));
         return;
      }
      suffixLoc = key.indexOf(STRATEGY_SUFFIX);
      if (suffixLoc != -1) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setEvictionStrategy(extractProperty(key, properties));
         return;
      }
      suffixLoc = key.indexOf(WAKE_UP_INTERVAL_SUFFIX);
      if (suffixLoc != -1) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setEvictionWakeUpInterval(Long.parseLong(extractProperty(key, properties)));
         return;
      }
      suffixLoc = key.indexOf(MAX_ENTRIES_SUFFIX);
      if (suffixLoc != -1) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setEvictionMaxEntries(Integer.parseInt(extractProperty(key, properties)));
         return;
      }
      suffixLoc = key.indexOf(LIFESPAN_SUFFIX);
      if (suffixLoc != -1) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setExpirationLifespan(Long.parseLong(extractProperty(key, properties)));
         return;
      }
      suffixLoc = key.indexOf(MAX_IDLE_SUFFIX);
      if (suffixLoc != -1) {
         getOrCreateConfig(prefixLoc, key, suffixLoc)
               .setExpirationMaxIdle(Long.parseLong(extractProperty(key, properties)));
      }
   }

   // Reads a single configuration property and logs the override at debug
   // level; returns the raw value (may be null when the property is unset).
   private String extractProperty(String key, Map properties) {
      final String propertyValue = ConfigurationHelper.getString( key, properties );
      log.debugf("Configuration override via property %s: %s", key, propertyValue);
      return propertyValue;
   }

   // Resolves the region name embedded in the property key (between the common
   // prefix and the recognized suffix) and returns its override, creating and
   // registering a fresh one on first use.
   private TypeOverrides getOrCreateConfig(int prefixLoc, String key, int suffixLoc) {
      final String regionName = key.substring(prefixLoc + PREFIX.length(), suffixLoc);
      TypeOverrides override = typeOverrides.get(regionName);
      if (override != null) {
         return override;
      }
      override = new TypeOverrides();
      typeOverrides.put(regionName, override);
      return override;
   }

   /**
    * Defines a validated cache configuration on the cache manager for each
    * generic data type (entity, collection, timestamps, query), layering
    * type-level overrides and transaction manager settings on top of the base
    * configuration declared for that type's cache name.
    */
   private void defineGenericDataTypeCacheConfigurations(Map properties) {
      final String[] genericTypeKeys = new String[]{ENTITY_KEY, COLLECTION_KEY, TIMESTAMPS_KEY, QUERY_KEY};
      for (String typeKey : genericTypeKeys) {
         final TypeOverrides typeOverride = overrideStatisticsIfPresent(typeOverrides.get(typeKey), properties);
         final String cacheName = typeOverride.getCacheName();
         final ConfigurationBuilder configBuilder = new ConfigurationBuilder();
         // Start from the base configuration declared under this cache name, if any.
         applyConfiguration(cacheName, configBuilder);
         // Layer the type-specific overrides on top of the base settings.
         typeOverride.applyTo(configBuilder);
         // Wire in the Hibernate transaction manager lookup where appropriate.
         configureTransactionManager(configBuilder, cacheName, properties);
         // Build, validate, then register the final configuration.
         final Configuration cacheConfig = configBuilder.build();
         typeOverride.validateInfinispanConfiguration(cacheConfig);
         manager.defineConfiguration(cacheName, cacheConfig);
         definedConfigurations.add(cacheName);
      }
   }

   /**
    * Returns the advanced cache backing the given region, lazily defining a
    * region-specific configuration the first time a region name is seen. The
    * configuration is derived from a template cache: either the one named by a
    * region-specific override, or the generic cache for the data type keyed by
    * {@code typeKey}.
    *
    * @param regionName name of the cache region being started
    * @param typeKey generic data type key (e.g. entity/collection/timestamps/query)
    * @param properties configuration properties (statistics and tx settings)
    * @return the started (and possibly wrapped) advanced cache for the region
    */
   private AdvancedCache getCache(String regionName, String typeKey, Properties properties) {
      TypeOverrides regionOverride = typeOverrides.get(regionName);
      if (!definedConfigurations.contains(regionName)) {
         String templateCacheName;
         // NOTE(review): regionCacheCfg is never used in this method — candidate for removal.
         Configuration regionCacheCfg;
         ConfigurationBuilder builder = new ConfigurationBuilder();
         if (regionOverride != null) {
            if (log.isDebugEnabled()) log.debug("Cache region specific configuration exists: " + regionOverride);
            String cacheName = regionOverride.getCacheName();
            if (cacheName != null) // Region specific override with a given cache name
               templateCacheName = cacheName;
            else // Region specific override without cache name, so template cache name is generic for data type.

               templateCacheName = typeOverrides.get(typeKey).getCacheName();

            // Read template configuration
            applyConfiguration(templateCacheName, builder);

            regionOverride = overrideStatisticsIfPresent(regionOverride, properties);
            regionOverride.applyTo(builder);

         } else {
            // No region specific overrides, template cache name is generic for data type.
            templateCacheName = typeOverrides.get(typeKey).getCacheName();
            // Read template configuration
            // NOTE(review): unlike applyConfiguration(), this does not null-check the
            // template configuration — presumably the generic type caches are always
            // defined by this point; verify against defineGenericDataTypeCacheConfigurations.
            builder.read(manager.getCacheConfiguration(templateCacheName));
            // Apply overrides
            typeOverrides.get(typeKey).applyTo(builder);
         }
         // Configure transaction manager
         configureTransactionManager(builder, templateCacheName, properties);
         // Define configuration
         manager.defineConfiguration(regionName, builder.build());
         definedConfigurations.add(regionName);
      }
      // Ensure the cache is running before handing it out.
      AdvancedCache cache = manager.getCache(regionName).getAdvancedCache();
      if (!cache.getStatus().allowInvocations()) {
         cache.start();
      }
      return createCacheWrapper(cache);
   }

   // Copies the configuration registered under the given cache name into the
   // builder; a missing configuration is silently skipped.
   private void applyConfiguration(String cacheName, ConfigurationBuilder builder) {
      final Configuration template = manager.getCacheConfiguration(cacheName);
      if (template != null) {
         builder.read(template);
      }
   }

   private CacheCommandFactory getCacheCommandFactory(AdvancedCache cache) {
      GlobalComponentRegistry globalCr = cache.getComponentRegistry()
            .getGlobalComponentRegistry();

      Map factories =
         (Map) globalCr
               .getComponent("org.infinispan.modules.command.factories");

      for (ModuleCommandFactory factory : factories.values()) {
         if (factory instanceof CacheCommandFactory)
            return (CacheCommandFactory) factory;
      throw new CacheException("Infinispan custom cache command factory not " +
            "installed (possibly because the classloader where Infinispan " +
            "lives couldn't find the Hibernate Infinispan cache provider)");
   }

   /**
    * Hook for subclasses to wrap the started cache (e.g. with a decorator).
    * The default implementation returns the cache unchanged.
    */
   protected AdvancedCache createCacheWrapper(AdvancedCache cache) {
      return cache;
   }

   /**
    * For transactional caches, installs Hibernate's transaction manager lookup
    * on the builder unless the user already configured a custom (non-default,
    * non-Hibernate) lookup in the Infinispan configuration, and applies the
    * synchronization-vs-XA setting from the properties.
    */
   private void configureTransactionManager(ConfigurationBuilder builder,
         String cacheName, Map properties) {
      // Get existing configuration to verify whether a tm was configured or not.
      Configuration baseCfg = manager.getCacheConfiguration(cacheName);
      if (baseCfg != null && baseCfg.transaction().transactionMode().isTransactional()) {
         String ispnTmLookupClassName = baseCfg.transaction().transactionManagerLookup().getClass().getName();
         String hbTmLookupClassName = org.hibernate.cache.infinispan.tm.HibernateTransactionManagerLookup.class.getName();
         if (GenericTransactionManagerLookup.class.getName().equals(ispnTmLookupClassName)) {
            log.debug("Using default Infinispan transaction manager lookup " +
                  "instance (GenericTransactionManagerLookup), overriding it " +
                  "with Hibernate transaction manager lookup");
            builder.transaction().transactionManagerLookup(transactionManagerlookup);
         } else if (ispnTmLookupClassName != null && !ispnTmLookupClassName.equals(hbTmLookupClassName)) {
            // A custom, non-Hibernate lookup was configured: respect it and only log.
            // NOTE(review): ispnTmLookupClassName comes from getClass().getName() and
            // can never be null, so the null check here is dead — verify and simplify.
            log.debug("Infinispan is configured [" + ispnTmLookupClassName + "] with a different transaction manager lookup " +
                            "class than Hibernate [" + hbTmLookupClassName + "]");
         } else {
            // Reached when Infinispan is already configured with the Hibernate lookup
            // class name: apply our own lookup instance directly.
            builder.transaction().transactionManagerLookup(transactionManagerlookup);
         }

         // Decide between Synchronization-based enlistment and full XA participation.
         String useSyncProp = extractProperty(INFINISPAN_USE_SYNCHRONIZATION_PROP, properties);
         boolean useSync = useSyncProp == null ? DEF_USE_SYNCHRONIZATION : Boolean.parseBoolean(useSyncProp);
         builder.transaction().useSynchronization(useSync);
      }
   }

   // Applies the global statistics property, when set, to the given override;
   // returns the same instance so callers can chain.
   private TypeOverrides overrideStatisticsIfPresent(TypeOverrides override, Map properties) {
      final String globalStatsProp = extractProperty(INFINISPAN_GLOBAL_STATISTICS_PROP, properties);
      if (globalStatsProp == null) {
         return override;
      }
      override.setExposeStatistics(Boolean.parseBoolean(globalStatsProp));
      return override;
   }
}
File
InfinispanRegionFactory.java
Developer's decision
Combination
Kind of conflict
Annotation
Comment
Method declaration
Chunk
Conflicting content
 */
package org.hibernate.cache.infinispan;

<<<<<<< HEAD
import java.util.Map;

import org.hibernate.cache.CacheException;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jndi.spi.JndiService;
import org.infinispan.manager.EmbeddedCacheManager;
import java.util.Properties;

=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
Solution content
 */
package org.hibernate.cache.infinispan;

import java.util.Map;

import org.hibernate.cache.CacheException;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jndi.spi.JndiService;
import org.infinispan.manager.EmbeddedCacheManager;


/**
 * A {@link org.hibernate.cache.spi.RegionFactory} for Infinispan-backed cache
 * regions that finds its cache manager in JNDI rather than creating one itself.
 *
 * @author Galder Zamarreño
 * @since 3.5
 */
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
   /**
    * Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
    * There is no default value -- the user must specify the property.
    */
   public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
	@Override
	protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
		String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
				CACHE_MANAGER_RESOURCE_PROP,
				StandardConverters.STRING
		);
		if ( name == null ) {
			throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
		}
		JndiService jndiService = getServiceRegistry().getService( JndiService.class );
		return (EmbeddedCacheManager) jndiService.locate( name );
	}

	@Override
	public void stop() {
		// Do not attempt to stop a cache manager because it wasn't created by this region factory.
	}
}
File
JndiInfinispanRegionFactory.java
Developer's decision
Manual
Kind of conflict
Import
Chunk
Conflicting content
 * @since 3.5
 */
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
<<<<<<< HEAD
   /**
    * Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
    * There is no default value -- the user must specify the property.
    */
   public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
	@Override
	protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
		String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
				CACHE_MANAGER_RESOURCE_PROP,
				StandardConverters.STRING
		);
		if ( name == null ) {
			throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
		}
		JndiService jndiService = getServiceRegistry().getService( JndiService.class );
		return (EmbeddedCacheManager) jndiService.locate( name );
=======

	private static final Log log = LogFactory.getLog( JndiInfinispanRegionFactory.class );

	/**
	 * Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
	 * There is no default value -- the user must specify the property.
	 */
	public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";

	/**
	 * Constructs a JndiInfinispanRegionFactory
	 */
	@SuppressWarnings("UnusedDeclaration")
	public JndiInfinispanRegionFactory() {
		super();
	}

	/**
	 * Constructs a JndiInfinispanRegionFactory
	 *
	 * @param props Any properties to apply (not used).
	 */
	@SuppressWarnings("UnusedDeclaration")
	public JndiInfinispanRegionFactory(Properties props) {
		super( props );
	}

	@Override
	protected EmbeddedCacheManager createCacheManager(Properties properties) throws CacheException {
		final String name = ConfigurationHelper.getString( CACHE_MANAGER_RESOURCE_PROP, properties, null );
		if ( name == null ) {
			throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
		}
		return locateCacheManager( name, JndiHelper.extractJndiProperties( properties ) );
	}

	private EmbeddedCacheManager locateCacheManager(String jndiNamespace, Properties jndiProperties) {
		Context ctx = null;
		try {
			ctx = new InitialContext( jndiProperties );
			return (EmbeddedCacheManager) ctx.lookup( jndiNamespace );
		}
		catch (NamingException ne) {
			final String msg = "Unable to retrieve CacheManager from JNDI [" + jndiNamespace + "]";
			log.info( msg, ne );
			throw new CacheException( msg );
		}
		finally {
			if ( ctx != null ) {
				try {
					ctx.close();
				}
				catch (NamingException ne) {
					log.info( "Unable to release initial context", ne );
				}
			}
		}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	}

	@Override
Solution content
 * @since 3.5
 */
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
   /**
    * Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
    * There is no default value -- the user must specify the property.
    */
   public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
	@Override
	protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
		String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
				CACHE_MANAGER_RESOURCE_PROP,
				StandardConverters.STRING
		);
		if ( name == null ) {
			throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
		}
		JndiService jndiService = getServiceRegistry().getService( JndiService.class );
		return (EmbeddedCacheManager) jndiService.locate( name );
	}

	@Override
File
JndiInfinispanRegionFactory.java
Developer's decision
Version 1
Kind of conflict
Annotation
Attribute
Cast expression
Comment
If statement
Method declaration
Method invocation
Method signature
Return statement
Try statement
Variable
Chunk
Conflicting content
		this.jtaPlatform = settings != null ? settings.getJtaPlatform() : null;
	}

<<<<<<< HEAD
	public HibernateTransactionManagerLookup(ServiceRegistry serviceRegistry) {
		if ( serviceRegistry != null ) {
			jtaPlatform = serviceRegistry.getService( JtaPlatform.class );
		}
		else {
			jtaPlatform = null;
		}
	}

=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
	@Override
	public TransactionManager getTransactionManager() throws Exception {
		return jtaPlatform == null ? null : jtaPlatform.retrieveTransactionManager();
Solution content
		this.jtaPlatform = settings != null ? settings.getJtaPlatform() : null;
	}

	public HibernateTransactionManagerLookup(ServiceRegistry serviceRegistry) {
		if ( serviceRegistry != null ) {
			jtaPlatform = serviceRegistry.getService( JtaPlatform.class );
		}
		else {
			jtaPlatform = null;
		}
	}

	@Override
	public TransactionManager getTransactionManager() throws Exception {
		return jtaPlatform == null ? null : jtaPlatform.retrieveTransactionManager();
File
HibernateTransactionManagerLookup.java
Developer's decision
Version 1
Kind of conflict
Method declaration
Chunk
Conflicting content
		ctx.unbind( jndiName );
	}

<<<<<<< HEAD
	private SessionFactory buildSessionFactory() {
		// Extra options located in src/test/resources/hibernate.properties
		Configuration cfg = new Configuration();
		cfg.setProperty( Environment.DIALECT, "HSQL" );
		cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
		cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
		cfg.setProperty( Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory" );
		cfg.setProperty( Environment.TRANSACTION_STRATEGY, "jta" );
		cfg.setProperty( Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta" );
		cfg.setProperty( Environment.RELEASE_CONNECTIONS, "auto" );
		cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" );
		cfg.setProperty( Environment.USE_QUERY_CACHE, "true" );
		cfg.setProperty(
				Environment.CACHE_REGION_FACTORY,
				"org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory"
		);

		Properties envProps = Environment.getProperties();
		envProps.put( AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform() );
		envProps.putAll( cfg.getProperties() );
		serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( envProps );
=======
   private SessionFactory buildSessionFactory() {
      // Extra options located in src/test/resources/hibernate.properties
      Configuration cfg = new Configuration();
      cfg.setProperty( Environment.DIALECT, "HSQL" );
      cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
      cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
      cfg.setProperty(Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory");
      cfg.setProperty(Environment.TRANSACTION_STRATEGY, "jta");
      cfg.setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta");
      cfg.setProperty(Environment.RELEASE_CONNECTIONS, "auto");
      cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true");
      cfg.setProperty(Environment.USE_QUERY_CACHE, "true");

      Properties envProps = Environment.getProperties();
      envProps.put(AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform());
      envProps.setProperty(Environment.CACHE_REGION_FACTORY,
              "org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory");
      serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry(envProps);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676

		String[] mappings = new String[] { "org/hibernate/test/cache/infinispan/functional/Item.hbm.xml" };
		for ( String mapping : mappings ) {
Solution content
		ctx.unbind( jndiName );
	}

   private SessionFactory buildSessionFactory() {
      // Extra options located in src/test/resources/hibernate.properties
      Configuration cfg = new Configuration();
      cfg.setProperty( Environment.DIALECT, "HSQL" );
      cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
      cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
      cfg.setProperty(Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory");
      cfg.setProperty(Environment.TRANSACTION_STRATEGY, "jta");
      cfg.setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta");
      cfg.setProperty(Environment.RELEASE_CONNECTIONS, "auto");
      cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true");
      cfg.setProperty(Environment.USE_QUERY_CACHE, "true");

      Properties envProps = Environment.getProperties();
	   envProps.putAll( cfg.getProperties() );
      envProps.put(AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform());
      envProps.setProperty(Environment.CACHE_REGION_FACTORY,
              "org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory");

      serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry(envProps);

		String[] mappings = new String[] { "org/hibernate/test/cache/infinispan/functional/Item.hbm.xml" };
		for ( String mapping : mappings ) {
File
JBossStandaloneJtaExampleTest.java
Developer's decision
Combination
Kind of conflict
Attribute
Comment
Method invocation
Method signature
Variable