| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.mapping.Collection; import org.hibernate.mapping.PersistentClass; <<<<<<< HEAD import org.hibernate.metamodel.spi.binding.EntityBinding; import org.hibernate.metamodel.spi.binding.PluralAttributeBinding; import org.hibernate.persister.entity.EntityPersister; ======= import org.hibernate.metamodel.binding.EntityBinding; import org.hibernate.metamodel.binding.PluralAttributeBinding; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.type.VersionType; /** |
| Solution content |
|---|
import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.mapping.Collection; import org.hibernate.mapping.PersistentClass; import org.hibernate.metamodel.spi.binding.EntityBinding; import org.hibernate.metamodel.spi.binding.PluralAttributeBinding; import org.hibernate.type.VersionType; /** |
| File |
|---|
| CacheDataDescriptionImpl.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
private static Comparator getVersionComparator(EntityBinding model ) {
if ( model.isVersioned() ) {
<<<<<<< HEAD
versionComparator = (
( VersionType ) model.getHierarchyDetails()
.getEntityVersion()
.getVersioningAttributeBinding()
.getHibernateTypeDescriptor()
.getResolvedTypeMapping()
).getComparator();
=======
final VersionType versionType = (VersionType) model.getHierarchyDetails()
.getVersioningAttributeBinding()
.getHibernateTypeDescriptor()
.getResolvedTypeMapping();
return versionType.getComparator();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
return null; |
| Solution content |
|---|
private static Comparator getVersionComparator(EntityBinding model ) {
if ( model.isVersioned() ) {
return (
( VersionType ) model.getHierarchyDetails()
.getEntityVersion()
.getVersioningAttributeBinding()
.getHibernateTypeDescriptor()
.getResolvedTypeMapping()
).getComparator();
}
return null; |
| File |
|---|
| CacheDataDescriptionImpl.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Attribute |
| Cast expression |
| Method invocation |
| Return statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
*
* @author Steve Ebersole
*/
<<<<<<< HEAD
public class NoCachingRegionFactory extends AbstractRegionFactory {
public static NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();
@Override
public void start() {
=======
public class NoCachingRegionFactory implements RegionFactory {
/**
* Singleton access
*/
public static final NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();
/**
* Constructs a NoCachingRegionFactory. Although access should generally use {@link #INSTANCE}
*/
public NoCachingRegionFactory() {
}
@Override
public void start(Settings settings, Properties properties) throws CacheException {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
@Override |
| Solution content |
|---|
*
* @author Steve Ebersole
*/
public class NoCachingRegionFactory extends AbstractRegionFactory {
/**
* Singleton access
*/
public static final NoCachingRegionFactory INSTANCE = new NoCachingRegionFactory();
/**
* Constructs a NoCachingRegionFactory. Although access should generally use {@link #INSTANCE}
*/
public NoCachingRegionFactory() {
}
@Override
public void start() {
}
@Override |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Class signature |
| Comment |
| Method declaration |
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
@Override
public void stop() {
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public boolean isMinimalPutsEnabledByDefault() {
return false; |
| Solution content |
|---|
@Override
public void stop() {
}
@Override
public boolean isMinimalPutsEnabledByDefault() {
return false; |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
public boolean isMinimalPutsEnabledByDefault() {
return false;
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public AccessType getDefaultAccessType() {
return null; |
| Solution content |
|---|
public boolean isMinimalPutsEnabledByDefault() {
return false;
}
@Override
public AccessType getDefaultAccessType() {
return null; |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
public AccessType getDefaultAccessType() {
return null;
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public long nextTimestamp() {
return System.currentTimeMillis() / 100; |
| Solution content |
|---|
public AccessType getDefaultAccessType() {
return null;
}
@Override
public long nextTimestamp() {
return System.currentTimeMillis() / 100; |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
public long nextTimestamp() {
return System.currentTimeMillis() / 100;
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| Solution content |
|---|
public long nextTimestamp() {
return System.currentTimeMillis() / 100;
}
@Override
public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| Solution content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
@Override
public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| Solution content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
@Override
public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException { |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException(); |
| Solution content |
|---|
throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
@Override
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException(); |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException(); |
| Solution content |
|---|
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException();
}
@Override
public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
throw new NoCacheRegionFactoryAvailableException(); |
| File |
|---|
| NoCachingRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
*/
package org.hibernate.cache.internal;
<<<<<<< HEAD
import org.jboss.logging.Logger;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceInitiator;
=======
import java.util.Map;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.jboss.logging.Logger;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
/**
* Initiator for the {@link RegionFactory} service. |
| Solution content |
|---|
*/
package org.hibernate.cache.internal;
import org.jboss.logging.Logger;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import java.util.Map;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.internal.util.config.ConfigurationHelper;
/**
* Initiator for the {@link RegionFactory} service. |
| File |
|---|
| RegionFactoryInitiator.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
* @author Hardy Ferentschik * @author Brett Meyer */ <<<<<<< HEAD public class RegionFactoryInitiator implements SessionFactoryServiceInitiator |
| Solution content |
|---|
* @author Hardy Ferentschik * @author Brett Meyer */ public class RegionFactoryInitiator implements StandardServiceInitiator |
| File |
|---|
| RegionFactoryInitiator.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Attribute |
| Class signature |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
/** * Singleton access */ <<<<<<< HEAD public static final String IMPL_NAME = AvailableSettings.CACHE_REGION_FACTORY; ======= public static final RegionFactoryInitiator INSTANCE = new RegionFactoryInitiator(); >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 @Override public Class |
| Solution content |
|---|
/** * Singleton access */ public static final RegionFactoryInitiator INSTANCE = new RegionFactoryInitiator(); @Override public Class |
| File |
|---|
| RegionFactoryInitiator.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Attribute |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
@Override
}
@Override
<<<<<<< HEAD
public RegionFactory initiateService(SessionFactoryImplementor sessionFactory, Configuration configuration, ServiceRegistryImplementor registry) {
return initiateService(sessionFactory, registry);
}
public RegionFactory initiateService(SessionFactoryImplementor sessionFactory, MetadataImplementor metadata, ServiceRegistryImplementor registry) {
return initiateService(sessionFactory, registry);
}
private RegionFactory initiateService(SessionFactoryImplementor sessionFactory, ServiceRegistryImplementor registry){
boolean isCacheEnabled = isCacheEnabled( registry );
if ( !isCacheEnabled ) {
LOG.debugf(
"Second level cache has been disabled, so using % as cache region factory",
NoCachingRegionFactory.class.getName()
);
return NoCachingRegionFactory.INSTANCE;
}
final Object setting = registry.getService( ConfigurationService.class ).getSettings().get( IMPL_NAME );
return registry.getService( StrategySelector.class ).resolveDefaultableStrategy(
RegionFactory.class,
setting,
NoCachingRegionFactory.INSTANCE
);
}
private static boolean isCacheEnabled(ServiceRegistryImplementor serviceRegistry) {
final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
final boolean useSecondLevelCache = configurationService.getSetting(
AvailableSettings.USE_SECOND_LEVEL_CACHE,
StandardConverters.BOOLEAN,
true
);
final boolean useQueryCache = configurationService.getSetting(
AvailableSettings.USE_QUERY_CACHE,
StandardConverters.BOOLEAN,
false
);
return useSecondLevelCache || useQueryCache;
=======
@SuppressWarnings({ "unchecked" })
public RegionFactory initiateService(Map configurationValues, ServiceRegistryImplementor registry) {
Properties p = new Properties();
if (configurationValues != null) {
p.putAll( configurationValues );
}
boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE,
configurationValues, true );
boolean useQueryCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_QUERY_CACHE, configurationValues );
RegionFactory regionFactory = NoCachingRegionFactory.INSTANCE;
// The cache provider is needed when we either have second-level cache enabled
// or query cache enabled. Note that useSecondLevelCache is enabled by default
final String setting = ConfigurationHelper.getString( AvailableSettings.CACHE_REGION_FACTORY,
configurationValues, null );
if ( ( useSecondLevelCache || useQueryCache ) && setting != null ) {
try {
Class extends RegionFactory> regionFactoryClass = registry.getService( StrategySelector.class )
.selectStrategyImplementor( RegionFactory.class, setting );
try {
regionFactory = regionFactoryClass.getConstructor( Properties.class ).newInstance( p );
}
catch ( NoSuchMethodException e ) {
// no constructor accepting Properties found, try no arg constructor
LOG.debugf(
"%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
regionFactoryClass.getSimpleName() );
regionFactory = regionFactoryClass.getConstructor().newInstance();
}
}
catch ( Exception e ) {
throw new HibernateException( "could not instantiate RegionFactory [" + setting + "]", e );
}
}
LOG.debugf( "Cache region factory : %s", regionFactory.getClass().getName() );
return regionFactory;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
/** |
| Solution content |
|---|
// LOG.debugf(
}
@Override
@SuppressWarnings({ "unchecked" })
public RegionFactory initiateService(Map configurationValues, ServiceRegistryImplementor registry) {
Properties p = new Properties();
if (configurationValues != null) {
p.putAll( configurationValues );
}
boolean useSecondLevelCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_SECOND_LEVEL_CACHE,
configurationValues, true );
boolean useQueryCache = ConfigurationHelper.getBoolean( AvailableSettings.USE_QUERY_CACHE, configurationValues );
RegionFactory regionFactory = NoCachingRegionFactory.INSTANCE;
// The cache provider is needed when we either have second-level cache enabled
// or query cache enabled. Note that useSecondLevelCache is enabled by default
final Object setting = configurationValues.get( AvailableSettings.CACHE_REGION_FACTORY ) ;
// ConfigurationHelper.get( AvailableSettings.CACHE_REGION_FACTORY,
// configurationValues, null );
if ( ( useSecondLevelCache || useQueryCache ) && setting != null ) {
try {
regionFactory = registry.getService( StrategySelector.class )
.resolveStrategy( RegionFactory.class, setting );
// try {
// regionFactory = regionFactoryClass.getConstructor( Properties.class ).newInstance( p );
// }
// catch ( NoSuchMethodException e ) {
// // no constructor accepting Properties found, try no arg constructor
// "%s did not provide constructor accepting java.util.Properties; attempting no-arg constructor.",
// regionFactoryClass.getSimpleName() );
// regionFactory = regionFactoryClass.getConstructor().newInstance();
// }
}
catch ( Exception e ) {
throw new HibernateException( "could not instantiate RegionFactory [" + setting + "]", e );
}
}
LOG.debugf( "Cache region factory : %s", regionFactory.getClass().getName() );
return regionFactory;
}
/** |
| File |
|---|
| RegionFactoryInitiator.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Comment |
| If statement |
| Method declaration |
| Method invocation |
| Method signature |
| Return statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
private QueryResultsRegion cacheRegion;
private UpdateTimestampsCache updateTimestampsCache;
<<<<<<< HEAD
public StandardQueryCache(SessionFactoryImplementor sessionFactoryImplementor, UpdateTimestampsCache updateTimestampsCache, String regionName) {
if ( regionName == null ) {
regionName = StandardQueryCache.class.getName();
}
String prefix = sessionFactoryImplementor.getServiceRegistry()
.getService( ConfigurationService.class )
.getSetting(
AvailableSettings.CACHE_REGION_PREFIX, StandardConverters.STRING, null
);
=======
/**
* Constructs a StandardQueryCache instance
*
* @param settings The SessionFactory settings.
* @param props Any properties
* @param updateTimestampsCache The update-timestamps cache to use.
* @param regionName The base query cache region name
*/
public StandardQueryCache(
final Settings settings,
final Properties props,
final UpdateTimestampsCache updateTimestampsCache,
final String regionName) {
String regionNameToUse = regionName;
if ( regionNameToUse == null ) {
regionNameToUse = StandardQueryCache.class.getName();
}
final String prefix = settings.getCacheRegionPrefix();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( prefix != null ) {
regionNameToUse = prefix + '.' + regionNameToUse;
} |
| Solution content |
|---|
private QueryResultsRegion cacheRegion;
private UpdateTimestampsCache updateTimestampsCache;
/**
* Constructs a StandardQueryCache instance
*
* @param sessionFactoryImplementor The SessionFactory.
* @param updateTimestampsCache The update-timestamps cache to use.
* @param regionName The base query cache region name
*/
public StandardQueryCache(SessionFactoryImplementor sessionFactoryImplementor, UpdateTimestampsCache updateTimestampsCache, String regionName) {
String regionNameToUse = regionName;
if ( regionNameToUse == null ) {
regionNameToUse = StandardQueryCache.class.getName();
}
String prefix = sessionFactoryImplementor.getServiceRegistry()
.getService( ConfigurationService.class )
.getSetting(
AvailableSettings.CACHE_REGION_PREFIX, StandardConverters.STRING, null
);
if ( prefix != null ) {
regionNameToUse = prefix + '.' + regionNameToUse;
} |
| File |
|---|
| StandardQueryCache.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Comment |
| If statement |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
}
}
LOG.startingQueryCache( regionNameToUse );
<<<<<<< HEAD
this.cacheRegion = sessionFactoryImplementor.getServiceRegistry()
.getService( RegionFactory.class )
.buildQueryResultsRegion( regionName, sessionFactoryImplementor.getProperties() );
this.updateTimestampsCache = updateTimestampsCache;
}
=======
this.cacheRegion = settings.getRegionFactory().buildQueryResultsRegion( regionNameToUse, props );
this.updateTimestampsCache = updateTimestampsCache;
@Override
public QueryResultsRegion getRegion() {
return cacheRegion;
}
@Override
public void destroy() {
try {
cacheRegion.destroy();
}
catch ( Exception e ) {
LOG.unableToDestroyQueryCache( cacheRegion.getName(), e.getMessage() );
}
}
@Override
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
public void clear() throws CacheException {
cacheRegion.evictAll();
} |
| Solution content |
|---|
}
LOG.startingQueryCache( regionNameToUse );
this.cacheRegion = sessionFactoryImplementor.getServiceRegistry()
.getService( RegionFactory.class )
.buildQueryResultsRegion( regionNameToUse, sessionFactoryImplementor.getProperties() );
this.updateTimestampsCache = updateTimestampsCache;
}
@Override
public QueryResultsRegion getRegion() {
return cacheRegion;
}
@Override
public void destroy() {
try {
cacheRegion.destroy();
}
catch ( Exception e ) {
LOG.unableToDestroyQueryCache( cacheRegion.getName(), e.getMessage() );
}
}
@Override
public void clear() throws CacheException {
cacheRegion.evictAll();
} |
| File |
|---|
| StandardQueryCache.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Method declaration |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
cacheRegion.evictAll();
}
<<<<<<< HEAD
=======
@Override
@SuppressWarnings({ "unchecked" })
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
public boolean put(
final QueryKey key,
final Type[] returnTypes, |
| Solution content |
|---|
cacheRegion.evictAll();
}
@Override
@SuppressWarnings({ "unchecked" })
public boolean put(
final QueryKey key,
final Type[] returnTypes, |
| File |
|---|
| StandardQueryCache.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Chunk |
|---|
| Conflicting content |
|---|
*/
public class StandardQueryCacheFactory implements QueryCacheFactory {
@Override
<<<<<<< HEAD
public QueryCache getQueryCache(String regionName, UpdateTimestampsCache updateTimestampsCache, SessionFactoryImplementor sessionFactoryImplementor)
throws HibernateException {
return new StandardQueryCache( sessionFactoryImplementor, updateTimestampsCache, regionName );
=======
public QueryCache getQueryCache(
final String regionName,
final UpdateTimestampsCache updateTimestampsCache,
final Settings settings,
final Properties props) throws HibernateException {
return new StandardQueryCache(settings, props, updateTimestampsCache, regionName);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
} |
| Solution content |
|---|
*/
public class StandardQueryCacheFactory implements QueryCacheFactory {
@Override
public QueryCache getQueryCache(String regionName, UpdateTimestampsCache updateTimestampsCache, SessionFactoryImplementor sessionFactoryImplementor)
throws HibernateException {
return new StandardQueryCache( sessionFactoryImplementor, updateTimestampsCache, regionName );
}
} |
| File |
|---|
| StandardQueryCacheFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Method signature |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
public static final StructuredMapCacheEntry INSTANCE = new StructuredMapCacheEntry(); @Override <<<<<<< HEAD public Map |
| Solution content |
|---|
public static final StructuredMapCacheEntry INSTANCE = new StructuredMapCacheEntry(); @Override public Map |
| File |
|---|
| StructuredMapCacheEntry.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Cast expression |
| For statement |
| Method invocation |
| Method signature |
| Variable |
| While statement |
| Chunk |
|---|
| Conflicting content |
|---|
settings.setQueryCacheFactory( createQueryCacheFactory( properties, serviceRegistry ) );
}
<<<<<<< HEAD
String prefix = properties.getProperty( Environment.CACHE_REGION_PREFIX );
=======
settings.setRegionFactory( serviceRegistry.getService( RegionFactory.class ) );
boolean useMinimalPuts = ConfigurationHelper.getBoolean(
AvailableSettings.USE_MINIMAL_PUTS, properties, settings.getRegionFactory().isMinimalPutsEnabledByDefault()
);
if ( debugEnabled ) {
LOG.debugf( "Optimize cache for minimal puts: %s", enabledDisabled(useMinimalPuts) );
}
settings.setMinimalPutsEnabled( useMinimalPuts );
String prefix = properties.getProperty( AvailableSettings.CACHE_REGION_PREFIX );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( StringHelper.isEmpty(prefix) ) {
prefix=null;
} |
| Solution content |
|---|
settings.setQueryCacheFactory( createQueryCacheFactory( properties, serviceRegistry ) );
}
String prefix = properties.getProperty( Environment.CACHE_REGION_PREFIX );
if ( StringHelper.isEmpty(prefix) ) {
prefix=null;
} |
| File |
|---|
| SettingsFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| If statement |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
throw new HibernateException( "could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, e );
}
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
//todo remove this once we move to new metamodel
public static RegionFactory createRegionFactory(Properties properties, boolean cachingEnabled) {
// todo : REMOVE! THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away |
| Solution content |
|---|
throw new HibernateException( "could not instantiate QueryCacheFactory: " + queryCacheFactoryClassName, e );
}
}
//todo remove this once we move to new metamodel
public static RegionFactory createRegionFactory(Properties properties, boolean cachingEnabled) {
// todo : REMOVE! THIS IS TOTALLY A TEMPORARY HACK FOR org.hibernate.cfg.AnnotationBinder which will be going away |
| File |
|---|
| SettingsFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
// iterate over the *old* list
for ( Object old : oldElements ) {
if ( !currentSaving.contains( old ) ) {
<<<<<<< HEAD
Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
=======
final Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( !currentIds.contains( new TypedValue( idType, oldId ) ) ) {
res.add( old );
} |
| Solution content |
|---|
// iterate over the *old* list
for ( Object old : oldElements ) {
if ( !currentSaving.contains( old ) ) {
final Serializable oldId = ForeignKeys.getEntityIdentifierIfNotUnsaved( entityName, old, session );
if ( !currentIds.contains( new TypedValue( idType, oldId ) ) ) {
res.add( old );
} |
| File |
|---|
| AbstractPersistentCollection.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
*/ package org.hibernate.context.internal; <<<<<<< HEAD import java.util.Map; import java.util.concurrent.ConcurrentHashMap; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import javax.transaction.Synchronization; import javax.transaction.Transaction; import javax.transaction.TransactionManager; |
| Solution content |
|---|
*/ package org.hibernate.context.internal; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import javax.transaction.Synchronization; import javax.transaction.Transaction; import javax.transaction.TransactionManager; |
| File |
|---|
| JTASessionContext.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
} value = string; } <<<<<<< HEAD list.add( new TypedValue(type, value) ); ======= list.add( new TypedValue( type, value ) ); >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 } } |
| Solution content |
|---|
} value = string; } list.add( new TypedValue( type, value ) ); } } |
| File |
|---|
| Example.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
final ArrayList |
| Solution content |
|---|
final ArrayList |
| File |
|---|
| InExpression.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Array access |
| Cast expression |
| For statement |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
}
}
else {
<<<<<<< HEAD
for ( int j=0; j |
| Solution content |
|---|
}
}
else {
for ( Object value : values ) {
list.add( new TypedValue( type, value ) );
}
}
|
| File |
|---|
| InExpression.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| For statement |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
public String toString() {
return sql;
}
<<<<<<< HEAD
protected SQLCriterion(String sql, Object[] values, Type[] types) {
this.sql = sql;
typedValues = new TypedValue[values.length];
for ( int i=0; i |
| Solution content |
|---|
public String toString() {
return sql;
}
} |
| File |
|---|
| SQLCriterion.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
super( op, quantifier, dc );
this.value = value;
}
<<<<<<< HEAD
public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
throws HibernateException {
TypedValue[] superTv = super.getTypedValues(criteria, criteriaQuery);
TypedValue[] result = new TypedValue[superTv.length+1];
System.arraycopy(superTv, 0, result, 1, superTv.length);
=======
@Override
public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
final TypedValue[] subQueryTypedValues = super.getTypedValues( criteria, criteriaQuery );
final TypedValue[] result = new TypedValue[subQueryTypedValues.length+1];
System.arraycopy( subQueryTypedValues, 0, result, 1, subQueryTypedValues.length );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
result[0] = new TypedValue( getTypes()[0], value );
return result;
} |
| Solution content |
|---|
super( op, quantifier, dc );
this.value = value;
}
@Override
public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
final TypedValue[] subQueryTypedValues = super.getTypedValues( criteria, criteriaQuery );
final TypedValue[] result = new TypedValue[subQueryTypedValues.length+1];
System.arraycopy( subQueryTypedValues, 0, result, 1, subQueryTypedValues.length );
result[0] = new TypedValue( getTypes()[0], value );
return result;
} |
| File |
|---|
| SimpleSubqueryExpression.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
return new TypedValue[] { new TypedValue( StandardBasicTypes.INTEGER, size ) };
}
<<<<<<< HEAD
public TypedValue[] getTypedValues(Criteria criteria, CriteriaQuery criteriaQuery)
throws HibernateException {
return new TypedValue[] {
new TypedValue( StandardBasicTypes.INTEGER, size )
};
=======
@Override
public String toString() {
return propertyName + ".size" + op + size;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
} |
| Solution content |
|---|
return new TypedValue[] { new TypedValue( StandardBasicTypes.INTEGER, size ) };
}
@Override
public String toString() {
return propertyName + ".size" + op + size;
}
} |
| File |
|---|
| SizeExpression.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Method invocation |
| Method signature |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
import java.util.Properties; import java.util.Set; <<<<<<< HEAD import org.jboss.logging.Logger; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.LockOptions; |
| Solution content |
|---|
import java.util.Properties; import java.util.Set; import org.jboss.logging.Logger; import org.hibernate.HibernateException; import org.hibernate.LockMode; import org.hibernate.LockOptions; |
| File |
|---|
| Dialect.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.internal.util.io.StreamCopier; import org.hibernate.mapping.Column; import org.hibernate.metamodel.spi.TypeContributions; <<<<<<< HEAD import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject; import org.hibernate.metamodel.spi.relational.ForeignKey; import org.hibernate.metamodel.spi.relational.Index; import org.hibernate.metamodel.spi.relational.Sequence; import org.hibernate.metamodel.spi.relational.Table; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.persister.entity.Lockable; import org.hibernate.service.ServiceRegistry; import org.hibernate.sql.ANSICaseFragment; |
| Solution content |
|---|
import org.hibernate.internal.util.io.StreamCopier; import org.hibernate.mapping.Column; import org.hibernate.metamodel.spi.TypeContributions; import org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject; import org.hibernate.metamodel.spi.relational.ForeignKey; import org.hibernate.metamodel.spi.relational.Index; import org.hibernate.metamodel.spi.relational.Sequence; import org.hibernate.metamodel.spi.relational.Table; import org.hibernate.persister.entity.Lockable; import org.hibernate.service.ServiceRegistry; import org.hibernate.sql.ANSICaseFragment; |
| File |
|---|
| Dialect.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.sql.CaseFragment; import org.hibernate.sql.ForUpdateFragment; import org.hibernate.sql.JoinFragment; <<<<<<< HEAD import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter; import org.hibernate.tool.schema.internal.StandardForeignKeyExporter; import org.hibernate.tool.schema.internal.StandardIndexExporter; import org.hibernate.tool.schema.internal.StandardSequenceExporter; import org.hibernate.tool.schema.internal.StandardTableExporter; import org.hibernate.tool.schema.internal.TemporaryTableExporter; import org.hibernate.tool.schema.spi.Exporter; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; ======= import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; import org.jboss.logging.Logger; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * Represents a dialect of SQL implemented by a particular RDBMS. Subclasses implement Hibernate compatibility |
| Solution content |
|---|
import org.hibernate.sql.CaseFragment; import org.hibernate.sql.ForUpdateFragment; import org.hibernate.sql.JoinFragment; import org.hibernate.tool.schema.internal.StandardAuxiliaryDatabaseObjectExporter; import org.hibernate.tool.schema.internal.StandardForeignKeyExporter; import org.hibernate.tool.schema.internal.StandardIndexExporter; import org.hibernate.tool.schema.internal.StandardSequenceExporter; import org.hibernate.tool.schema.internal.StandardTableExporter; import org.hibernate.tool.schema.internal.TemporaryTableExporter; import org.hibernate.tool.schema.spi.Exporter; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; import org.jboss.logging.Logger; /** * Represents a dialect of SQL implemented by a particular RDBMS. Subclasses implement Hibernate compatibility |
| File |
|---|
| Dialect.java |
| Developer's decision |
|---|
| Concatenation |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
*
* @author Jim Mlodgenski
*/
<<<<<<< HEAD
public class PostgresPlusDialect extends PostgreSQL82Dialect {
=======
@SuppressWarnings("deprecation")
public class PostgresPlusDialect extends PostgreSQLDialect {
/**
* Constructs a PostgresPlusDialect
*/
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
public PostgresPlusDialect() {
super();
|
| Solution content |
|---|
*
* @author Jim Mlodgenski
*/
@SuppressWarnings("deprecation")
public class PostgresPlusDialect extends PostgreSQL82Dialect {
/**
* Constructs a PostgresPlusDialect
*/
public PostgresPlusDialect() {
super();
|
| File |
|---|
| PostgresPlusDialect.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Class signature |
| Comment |
| Chunk |
|---|
| Conflicting content |
|---|
*/ private Map |
| Solution content |
|---|
*/ private final Map |
| File |
|---|
| TypeNames.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
* @throws MappingException Indicates that no registrations were made for that typeCode
*/
public String get(int typeCode, long size, int precision, int scale) throws MappingException {
<<<<<<< HEAD
Map |
| Solution content |
|---|
* @throws MappingException Indicates that no registrations were made for that typeCode
*/
public String get(int typeCode, long size, int precision, int scale) throws MappingException {
final Map |
| File |
|---|
| TypeNames.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Comment |
| For statement |
| If statement |
| Method invocation |
| Return statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
@Override
public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey) {
if ( hasNullable( uniqueKey ) ) {
<<<<<<< HEAD
// TODO: This borrows from Index's old way of doing things. This
// should be using StandardIndexExporter. However, not all callers
// have JdbcEnvironment available. We'll need to refactor a bit...
String keyName = dialect.qualifyIndexName() ? uniqueKey.getName()
: StringHelper.unqualify( uniqueKey.getName() );
StringBuilder buf = new StringBuilder( "create unique index " )
.append( keyName ).append( " on " )
.append( uniqueKey.getTable().getQualifiedName( dialect ) )
.append( " (" );
boolean first = true;
for ( Column column : uniqueKey.getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append( ", " );
}
buf.append( ( column.getColumnName().getText( dialect ) ) );
}
buf.append( ")" );
return buf.toString();
} else {
return super.applyUniquesOnAlter( uniqueKey );
=======
return Index.buildSqlCreateIndexString(
dialect,
uniqueKey.getName(),
uniqueKey.getTable(),
uniqueKey.getColumns(),
true
);
}
else {
return super.getAlterTableToAddUniqueKeyCommand( uniqueKey );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
}
|
| Solution content |
|---|
@Override
public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey) {
if ( hasNullable( uniqueKey ) ) {
// TODO: This borrows from Index's old way of doing things. This
// should be using StandardIndexExporter. However, not all callers
// have JdbcEnvironment available. We'll need to refactor a bit...
String keyName = dialect.qualifyIndexName() ? uniqueKey.getName()
: StringHelper.unqualify( uniqueKey.getName() );
StringBuilder buf = new StringBuilder( "create unique index " )
.append( keyName ).append( " on " )
.append( uniqueKey.getTable().getQualifiedName( dialect ) )
.append( " (" );
boolean first = true;
for ( Column column : uniqueKey.getColumns() ) {
if ( first ) {
first = false;
}
else {
buf.append( ", " );
}
buf.append( ( column.getColumnName().getText( dialect ) ) );
}
buf.append( ")" );
return buf.toString();
}
else {
return super.getAlterTableToAddUniqueKeyCommand( uniqueKey );
}
}
|
| File |
|---|
| DB2UniqueDelegate.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Comment |
| For statement |
| Method invocation |
| Return statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
@Override
public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey) {
if ( hasNullable( uniqueKey ) ) {
<<<<<<< HEAD
// TODO: This borrows from Index's old way of doing things. This
// should be using StandardIndexExporter. However, not all callers
// have JdbcEnvironment available. We'll need to refactor a bit...
return "drop index " + StringHelper.qualify(
uniqueKey.getTable().getQualifiedName( dialect ),
uniqueKey.getName() );
} else {
return super.dropUniquesOnAlter( uniqueKey );
=======
return Index.buildSqlDropIndexString( dialect, uniqueKey.getTable(), uniqueKey.getName() );
}
else {
return super.getAlterTableToDropUniqueKeyCommand( uniqueKey );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
}
|
| Solution content |
|---|
}
}
@Override
public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey) {
if ( hasNullable( uniqueKey ) ) {
// TODO: This borrows from Index's old way of doing things. This
// should be using StandardIndexExporter. However, not all callers
// have JdbcEnvironment available. We'll need to refactor a bit...
return "drop index " + StringHelper.qualify(
uniqueKey.getTable().getQualifiedName( dialect ),
uniqueKey.getName() );
}
else {
return super.getAlterTableToDropUniqueKeyCommand( uniqueKey );
|
| File |
|---|
| DB2UniqueDelegate.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Comment |
| Method invocation |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
final StringBuilder sb = new StringBuilder( " unique (" );
final Iterator columnIterator = uniqueKey.getColumns().iterator();
while ( columnIterator.hasNext() ) {
<<<<<<< HEAD
Column column = (Column) columnIterator.next();
sb.append( column.getColumnName().getText( dialect ) );
=======
final org.hibernate.mapping.Column column = (org.hibernate.mapping.Column) columnIterator.next();
sb.append( column.getQuotedName( dialect ) );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( columnIterator.hasNext() ) {
sb.append( ", " );
} |
| Solution content |
|---|
final StringBuilder sb = new StringBuilder( " unique (" );
final Iterator columnIterator = uniqueKey.getColumns().iterator();
while ( columnIterator.hasNext() ) {
Column column = (Column) columnIterator.next();
sb.append( column.getColumnName().getText( dialect ) );
if ( columnIterator.hasNext() ) {
sb.append( ", " );
} |
| File |
|---|
| DefaultUniqueDelegate.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Cast expression |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
*
* @author Steve Ebersole
*/
<<<<<<< HEAD
public class ConfigurationServiceImpl implements ConfigurationService {
=======
public class ConfigurationServiceImpl implements ConfigurationService, ServiceRegistryAwareService {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
ConfigurationServiceImpl.class.getName()
);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
private final Map settings;
/** |
| Solution content |
|---|
*
* @author Steve Ebersole
*/
public class ConfigurationServiceImpl implements ConfigurationService, ServiceRegistryAwareService {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
ConfigurationServiceImpl.class.getName()
);
private final Map settings;
private ServiceRegistryImplementor serviceRegistry;
/** |
| File |
|---|
| ConfigurationServiceImpl.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Attribute |
| Class signature |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
}
@Override
@SuppressWarnings("unchecked")
public |
| Solution content |
|---|
@Override public |
| File |
|---|
| ConfigurationServiceImpl.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Cast expression |
| Method declaration |
| Method invocation |
| Return statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
* @author Steve Ebersole
*/
public interface ConfigurationService extends Service {
<<<<<<< HEAD
public Map<?,?> getSettings();
public |
| Solution content |
|---|
/**
* @author Steve Ebersole
*/
public interface ConfigurationService extends Service {
/**
* Access to the complete map of config settings. The returned map is immutable
*
* @return The immutable map of config settings.
*/
public Map<?,?> getSettings();
public |
| File |
|---|
| ConfigurationService.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Comment |
| Method interface |
| Chunk |
|---|
| Conflicting content |
|---|
*/ public |
| Solution content |
|---|
*/ public |
| File |
|---|
| ConfigurationService.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method interface |
| Chunk |
|---|
| Conflicting content |
|---|
return ce;
}
<<<<<<< HEAD
/**
* Get the collection instance associated with the PluralAttributeKeyBinding
*/
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public PersistentCollection getCollection(CollectionKey collectionKey) {
return collectionsByKey.get( collectionKey ); |
| Solution content |
|---|
return ce;
}
/**
* Get the collection instance associated with the PluralAttributeKeyBinding
*/
@Override
public PersistentCollection getCollection(CollectionKey collectionKey) {
return collectionsByKey.get( collectionKey ); |
| File |
|---|
| StatefulPersistenceContext.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Comment |
| Chunk |
|---|
| Conflicting content |
|---|
}
private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
<<<<<<< HEAD
return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() &&
session.getCacheMode()!=CacheMode.REFRESH ) ||
( entityEntry.getPersister().hasLazyProperties() &&
entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
entityEntry.getPersister().isLazyPropertiesCacheable() );
=======
return ( session.getFactory().getSettings().isMinimalPutsEnabled()
&& session.getCacheMode()!=CacheMode.REFRESH )
|| ( entityEntry.getPersister().hasLazyProperties()
&& entityEntry.isLoadedWithLazyPropertiesUnfetched()
&& entityEntry.getPersister().isLazyPropertiesCacheable() );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
/** |
| Solution content |
|---|
}
private static boolean useMinimalPuts(SessionImplementor session, EntityEntry entityEntry) {
return ( session.getFactory().getServiceRegistry().getService( RegionFactory.class ).isMinimalPutsEnabled() &&
session.getCacheMode()!=CacheMode.REFRESH ) ||
( entityEntry.getPersister().hasLazyProperties() &&
entityEntry.isLoadedWithLazyPropertiesUnfetched() &&
entityEntry.getPersister().isLazyPropertiesCacheable() );
}
/**
|
| File |
|---|
| TwoPhaseLoad.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.HibernateException; import org.hibernate.MultiTenancyStrategy; import org.hibernate.boot.registry.StandardServiceInitiator; <<<<<<< HEAD import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; ======= import org.hibernate.boot.registry.selector.spi.StrategySelector; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.Environment; import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; |
| Solution content |
|---|
import org.hibernate.HibernateException; import org.hibernate.MultiTenancyStrategy; import org.hibernate.boot.registry.StandardServiceInitiator; import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.Environment; import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; |
| File |
|---|
| ConnectionProviderInitiator.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
throw new UnknownUnwrapTypeException( unwrapType );
}
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public void configure(Map configurationValues) {
LOG.usingHibernateBuiltInConnectionPool(); |
| Solution content |
|---|
throw new UnknownUnwrapTypeException( unwrapType );
}
}
@Override
public void configure(Map configurationValues) {
LOG.usingHibernateBuiltInConnectionPool(); |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
LOG.usingDriver( driverClassName, url );
// if debug level is enabled, then log the password, otherwise mask it
<<<<<<< HEAD
if ( debugEnabled )
=======
if ( LOG.isDebugEnabled() ) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
LOG.connectionProperties( connectionProps );
}
else { |
| Solution content |
|---|
LOG.usingDriver( driverClassName, url );
// if debug level is enabled, then log the password, otherwise mask it
if ( debugEnabled ) {
LOG.connectionProperties( connectionProps );
}
else { |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| If statement |
| Chunk |
|---|
| Conflicting content |
|---|
LOG.connectionProperties( ConfigurationHelper.maskOut( connectionProps, "password" ) );
}
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public void stop() {
LOG.cleaningUpConnectionPool( url ); |
| Solution content |
|---|
LOG.connectionProperties( ConfigurationHelper.maskOut( connectionProps, "password" ) );
}
}
@Override
public void stop() {
LOG.cleaningUpConnectionPool( url ); |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
pool.clear();
stopped = true;
}
<<<<<<< HEAD
@Override
public Connection getConnection() throws SQLException {
if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
=======
@Override
public Connection getConnection() throws SQLException {
final boolean traceEnabled = LOG.isTraceEnabled();
if ( traceEnabled ) {
LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
// essentially, if we have available connections in the pool, use one...
synchronized (pool) { |
| Solution content |
|---|
pool.clear();
stopped = true;
}
@Override
public Connection getConnection() throws SQLException {
if ( traceEnabled ) LOG.tracev( "Total checked-out connections: {0}", checkedOut.intValue() );
// essentially, if we have available connections in the pool, use one...
synchronized (pool) { |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| If statement |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
checkedOut.incrementAndGet();
return conn;
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public void closeConnection(Connection conn) throws SQLException {
checkedOut.decrementAndGet(); |
| Solution content |
|---|
checkedOut.incrementAndGet();
return conn;
}
@Override
public void closeConnection(Connection conn) throws SQLException {
checkedOut.decrementAndGet(); |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
}
super.finalize();
}
<<<<<<< HEAD
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public boolean supportsAggressiveRelease() {
return false; |
| Solution content |
|---|
}
super.finalize();
}
@Override
public boolean supportsAggressiveRelease() {
return false; |
| File |
|---|
| DriverManagerConnectionProviderImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
* @author Steve Ebersole
*/
public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareService, Configurable {
<<<<<<< HEAD
=======
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
JdbcServicesImpl.class.getName()
);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
private ServiceRegistryImplementor serviceRegistry;
private JdbcEnvironment jdbcEnvironment;
|
| Solution content |
|---|
* @author Steve Ebersole
*/
public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareService, Configurable {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
JdbcServicesImpl.class.getName()
);
private ServiceRegistryImplementor serviceRegistry;
private JdbcEnvironment jdbcEnvironment;
|
| File |
|---|
| JdbcServicesImpl.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Attribute |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
try {
public void configure(Map configValues) {
this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
<<<<<<< HEAD
this.connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
=======
boolean metaSupportsRefCursors = false;
boolean metaSupportsNamedParams = false;
boolean metaSupportsScrollable = false;
boolean metaSupportsGetGeneratedKeys = false;
boolean metaSupportsBatchUpdates = false;
boolean metaReportsDDLCausesTxnCommit = false;
boolean metaReportsDDLInTxnSupported = true;
String extraKeywordsString = "";
int sqlStateType = -1;
boolean lobLocatorUpdateCopy = false;
String catalogName = null;
String schemaName = null;
final LinkedHashSet |
| Solution content |
|---|
public void configure(Map configValues) {
this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
this.connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false ); |
| File |
|---|
| JdbcServicesImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| If statement |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false );
final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false );
this.sqlStatementLogger = new SqlStatementLogger( showSQL, formatSQL );
<<<<<<< HEAD
=======
this.extractedMetaDataSupport = new ExtractedDatabaseMetaDataImpl(
metaSupportsRefCursors,
metaSupportsNamedParams,
metaSupportsScrollable,
metaSupportsGetGeneratedKeys,
metaSupportsBatchUpdates,
metaReportsDDLInTxnSupported,
metaReportsDDLCausesTxnCommit,
parseKeywords( extraKeywordsString ),
parseSQLStateType( sqlStateType ),
lobLocatorUpdateCopy,
schemaName,
catalogName,
typeInfoSet
);
SQLExceptionConverter sqlExceptionConverter = dialect.buildSQLExceptionConverter();
if ( sqlExceptionConverter == null ) {
final StandardSQLExceptionConverter converter = new StandardSQLExceptionConverter();
sqlExceptionConverter = converter;
converter.addDelegate( dialect.buildSQLExceptionConversionDelegate() );
converter.addDelegate( new SQLExceptionTypeDelegate( dialect ) );
// todo : vary this based on extractedMetaDataSupport.getSqlStateType()
converter.addDelegate( new SQLStateConversionDelegate( dialect ) );
}
this.sqlExceptionHelper = new SqlExceptionHelper( sqlExceptionConverter );
}
private JdbcConnectionAccess buildJdbcConnectionAccess(Map configValues) {
final MultiTenancyStrategy multiTenancyStrategy = MultiTenancyStrategy.determineMultiTenancyStrategy( configValues );
if ( MultiTenancyStrategy.NONE == multiTenancyStrategy ) {
connectionProvider = serviceRegistry.getService( ConnectionProvider.class );
return new ConnectionProviderJdbcConnectionAccess( connectionProvider );
}
else {
connectionProvider = null;
final MultiTenantConnectionProvider multiTenantConnectionProvider = serviceRegistry.getService( MultiTenantConnectionProvider.class );
return new MultiTenantConnectionProviderJdbcConnectionAccess( multiTenantConnectionProvider );
}
}
private static class ConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
private final ConnectionProvider connectionProvider;
public ConnectionProviderJdbcConnectionAccess(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
}
@Override
public Connection obtainConnection() throws SQLException {
return connectionProvider.getConnection();
}
@Override
public void releaseConnection(Connection connection) throws SQLException {
connectionProvider.closeConnection( connection );
}
@Override
public boolean supportsAggressiveRelease() {
return connectionProvider.supportsAggressiveRelease();
}
}
private static class MultiTenantConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
private final MultiTenantConnectionProvider connectionProvider;
public MultiTenantConnectionProviderJdbcConnectionAccess(MultiTenantConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
}
@Override
public Connection obtainConnection() throws SQLException {
return connectionProvider.getAnyConnection();
}
@Override
public void releaseConnection(Connection connection) throws SQLException {
connectionProvider.releaseAnyConnection( connection );
}
@Override
public boolean supportsAggressiveRelease() {
return connectionProvider.supportsAggressiveRelease();
}
}
/**
* A constant naming the setting used to identify the {@link SchemaNameResolver} to use
*
* TODO : add to Environment
*/
public static final String SCHEMA_NAME_RESOLVER = "hibernate.schema_name_resolver";
private SchemaNameResolver determineExplicitSchemaNameResolver(Map configValues) {
final Object setting = configValues.get( SCHEMA_NAME_RESOLVER );
if ( SchemaNameResolver.class.isInstance( setting ) ) {
return (SchemaNameResolver) setting;
}
final String resolverClassName = (String) setting;
if ( resolverClassName != null ) {
try {
final Class resolverClass = ReflectHelper.classForName( resolverClassName, getClass() );
return (SchemaNameResolver) ReflectHelper.getDefaultConstructor( resolverClass ).newInstance();
}
catch ( ClassNotFoundException e ) {
LOG.unableToLocateConfiguredSchemaNameResolver( resolverClassName, e.toString() );
}
catch ( InvocationTargetException e ) {
LOG.unableToInstantiateConfiguredSchemaNameResolver( resolverClassName, e.getTargetException().toString() );
}
catch ( Exception e ) {
LOG.unableToInstantiateConfiguredSchemaNameResolver( resolverClassName, e.toString() );
}
}
return null;
}
private Set |
| Solution content |
|---|
final boolean showSQL = ConfigurationHelper.getBoolean( Environment.SHOW_SQL, configValues, false ); final boolean formatSQL = ConfigurationHelper.getBoolean( Environment.FORMAT_SQL, configValues, false ); this.sqlStatementLogger = new SqlStatementLogger( showSQL, formatSQL ); } @Override |
| File |
|---|
| JdbcServicesImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Class declaration |
| Class signature |
| Comment |
| If statement |
| Method declaration |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
@Override
public String toString() {
<<<<<<< HEAD
return "PluralAttributeKeyBinding" +
MessageHelper.collectionInfoString( factory.getCollectionPersister(role), key, factory );
=======
return "CollectionKey"
+ MessageHelper.collectionInfoString( factory.getCollectionPersister( role ), key, factory );
}
@Override
public boolean equals(Object other) {
if ( this == other ) {
return true;
}
if ( other == null || getClass() != other.getClass() ) {
return false;
}
final CollectionKey that = (CollectionKey) other;
return that.role.equals( role )
&& keyType.isEqual( that.key, key, factory );
}
@Override
public int hashCode() {
return hashCode;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
|
| Solution content |
|---|
@Override
public String toString() {
return "CollectionKey"
+ MessageHelper.collectionInfoString( factory.getCollectionPersister( role ), key, factory );
}
@Override
public boolean equals(Object other) {
if ( this == other ) {
return true;
}
if ( other == null || getClass() != other.getClass() ) {
return false;
}
final CollectionKey that = (CollectionKey) other;
return that.role.equals( role )
&& keyType.isEqual( that.key, key, factory );
}
@Override
public int hashCode() {
return hashCode;
}
|
| File |
|---|
| CollectionKey.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Method declaration |
| Method invocation |
| Method signature |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
* * @param ois The stream from which to read the entry. * @param session The session being deserialized. <<<<<<< HEAD * @return The deserialized PluralAttributeKeyBinding ======= * * @return The deserialized CollectionKey * >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 * @throws IOException * @throws ClassNotFoundException */ |
| Solution content |
|---|
* * @param ois The stream from which to read the entry. * @param session The session being deserialized. * * @return The deserialized CollectionKey * * @throws IOException * @throws ClassNotFoundException */ |
| File |
|---|
| CollectionKey.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Comment |
| Chunk |
|---|
| Conflicting content |
|---|
private boolean shallowQuery; private Map tokenReplacements; <<<<<<< HEAD private Map |
| Solution content |
|---|
private boolean shallowQuery; private Map tokenReplacements; private Map |
| File |
|---|
| QueryTranslatorImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| Chunk |
|---|
| Conflicting content |
|---|
*/ public QueryTranslatorImpl( String queryIdentifier, <<<<<<< HEAD String query, Map |
| Solution content |
|---|
*/ public QueryTranslatorImpl( String queryIdentifier, String query, Map |
| File |
|---|
| QueryTranslatorImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
else {
throw new QueryException( "Unsupported discriminator type " + type );
}
<<<<<<< HEAD
return new TypedValue(
type,
value
);
}
}
// Otherwise, this is an ordinary value.
return new TypedValue(
getTypeUsingProjection( subcriteria, propertyName ),
value
);
=======
return new TypedValue( type, value );
}
}
// Otherwise, this is an ordinary value.
return new TypedValue( getTypeUsingProjection( subcriteria, propertyName ), value );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
private PropertyMapping getPropertyMapping(String entityName) |
| Solution content |
|---|
else {
throw new QueryException( "Unsupported discriminator type " + type );
}
return new TypedValue( type, value );
}
}
// Otherwise, this is an ordinary value.
return new TypedValue( getTypeUsingProjection( subcriteria, propertyName ), value );
}
private PropertyMapping getPropertyMapping(String entityName) |
| File |
|---|
| CriteriaQueryTranslator.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Comment |
| Method invocation |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
*/ package org.hibernate.type; <<<<<<< HEAD import org.hibernate.engine.internal.Cascade; ======= import java.io.Serializable; import org.hibernate.engine.internal.CascadePoint; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * Represents directionality of the foreign key constraint |
| Solution content |
|---|
*/ package org.hibernate.type; import org.hibernate.engine.internal.Cascade; import org.hibernate.engine.internal.CascadePoint; /** * Represents directionality of the foreign key constraint |
| File |
|---|
| ForeignKeyDirection.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
*
* @author Gavin King
*/
<<<<<<< HEAD
public enum ForeignKeyDirection {
/**
* A foreign key from child to parent
*/
TO_PARENT {
@Override
public boolean cascadeNow(int cascadePoint) {
return cascadePoint != Cascade.BEFORE_INSERT_AFTER_DELETE;
}
},
/**
* A foreign key from parent to child
*/
FROM_PARENT {
@Override
public boolean cascadeNow(int cascadePoint) {
return cascadePoint != Cascade.AFTER_INSERT_BEFORE_DELETE;
}
};
/**
* Should we cascade at this cascade point?
*
* @see org.hibernate.engine.internal.Cascade
*/
public abstract boolean cascadeNow(int cascadePoint);
}
=======
public abstract class ForeignKeyDirection implements Serializable {
protected ForeignKeyDirection() {}
/**
* Should we cascade at this cascade point?
*
* @param cascadePoint The point at which the cascade is being initiated.
*
* @return {@code true} if cascading should be performed now.
*
* @see org.hibernate.engine.internal.Cascade
*/
public abstract boolean cascadeNow(CascadePoint cascadePoint);
/**
* A foreign key from child to parent
*/
public static final ForeignKeyDirection FOREIGN_KEY_TO_PARENT = new ForeignKeyDirection() {
@Override
public boolean cascadeNow(CascadePoint cascadePoint) {
return cascadePoint != CascadePoint.BEFORE_INSERT_AFTER_DELETE;
}
@Override
public String toString() {
return "toParent";
}
Object readResolve() {
return FOREIGN_KEY_TO_PARENT;
}
};
/**
* A foreign key from parent to child
*/
public static final ForeignKeyDirection FOREIGN_KEY_FROM_PARENT = new ForeignKeyDirection() {
@Override
public boolean cascadeNow(CascadePoint cascadePoint) {
return cascadePoint != CascadePoint.AFTER_INSERT_BEFORE_DELETE;
}
@Override
public String toString() {
return "fromParent";
}
Object readResolve() {
return FOREIGN_KEY_FROM_PARENT;
}
};
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 |
| Solution content |
|---|
*
* @author Gavin King
*/
public enum ForeignKeyDirection {
/**
* A foreign key from child to parent
*/
TO_PARENT {
@Override
public boolean cascadeNow(CascadePoint cascadePoint) {
return cascadePoint != CascadePoint.BEFORE_INSERT_AFTER_DELETE;
}
},
/**
* A foreign key from parent to child
*/
FROM_PARENT {
@Override
public boolean cascadeNow(CascadePoint cascadePoint) {
return cascadePoint != CascadePoint.AFTER_INSERT_BEFORE_DELETE;
}
};
/**
* Should we cascade at this cascade point?
*
* @param cascadePoint The point at which the cascade is being initiated.
*
* @return {@code true} if cascading should be performed now.
*
* @see org.hibernate.engine.internal.Cascade
*/
public abstract boolean cascadeNow(CascadePoint cascadePoint);
}
|
| File |
|---|
| ForeignKeyDirection.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Class declaration |
| Comment |
| Enum value |
| Method interface |
| Chunk |
|---|
| Conflicting content |
|---|
@Test
public void testSameMappingValues() {
<<<<<<< HEAD
EntityBinding forest = SchemaUtil.getEntityBinding( Forest.class, metadata() );
EntityBinding forest2 = SchemaUtil.getEntityBinding( Forest2.class, metadata() );
assertEquals( forest.isDynamicInsert(), forest2.isDynamicInsert() );
assertEquals( forest.isDynamicUpdate(), forest2.isDynamicUpdate() );
assertEquals( forest.isSelectBeforeUpdate(), forest2.isSelectBeforeUpdate() );
// TODO: This needs to use the new metamodel, but the information
// is not available in EntityBinding.
// assertEquals( forest.getOptimisticLockMode(), forest2.getOptimisticLockMode() );
assertEquals( forest.isPolymorphic(), forest2.isPolymorphic() );
=======
RootClass forest = (RootClass) configuration().getClassMapping( Forest.class.getName() );
RootClass forest2 = (RootClass) configuration().getClassMapping( Forest2.class.getName() );
assertEquals( forest.useDynamicInsert(), forest2.useDynamicInsert() );
assertEquals( forest.useDynamicUpdate(), forest2.useDynamicUpdate() );
assertEquals( forest.hasSelectBeforeUpdate(), forest2.hasSelectBeforeUpdate() );
assertEquals( forest.getOptimisticLockStyle(), forest2.getOptimisticLockStyle() );
assertEquals( forest.isExplicitPolymorphism(), forest2.isExplicitPolymorphism() );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
} |
| Solution content |
|---|
@Test
public void testSameMappingValues() {
EntityBinding forest = SchemaUtil.getEntityBinding( Forest.class, metadata() );
EntityBinding forest2 = SchemaUtil.getEntityBinding( Forest2.class, metadata() );
assertEquals( forest.isDynamicInsert(), forest2.isDynamicInsert() );
assertEquals( forest.isDynamicUpdate(), forest2.isDynamicUpdate() );
assertEquals( forest.isSelectBeforeUpdate(), forest2.isSelectBeforeUpdate() );
// TODO: This needs to use the new metamodel, but the information
// is not available in EntityBinding.
// assertEquals( forest.getOptimisticLockMode(), forest2.getOptimisticLockMode() );
assertEquals( forest.isPolymorphic(), forest2.isPolymorphic() );
}
} |
| File |
|---|
| NewCustomEntityMappingAnnotationsTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Cast expression |
| Comment |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
import org.junit.Assert; import org.junit.Test; <<<<<<< HEAD import org.hibernate.cfg.AvailableSettings; ======= import org.hibernate.SessionFactory; import org.hibernate.cfg.AnnotationConfiguration; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.cfg.Environment; import org.hibernate.dialect.SQLServerDialect; import org.hibernate.metamodel.MetadataSources; |
| Solution content |
|---|
import org.junit.Assert; import org.junit.Test; import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.Environment; import org.hibernate.dialect.SQLServerDialect; import org.hibernate.metamodel.MetadataSources; |
| File |
|---|
| NullablePrimaryKeyTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
@Test
public void testGeneratedSql() {
<<<<<<< HEAD
Properties properties = new Properties();
properties.putAll( Environment.getProperties() );
properties.setProperty( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties );
try {
MetadataSources metadataSource = new MetadataSources(serviceRegistry);
metadataSource.addAnnotatedClass( Address.class ).addAnnotatedClass( Person.class );
MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
metadata.getDatabase().getJdbcEnvironment();
SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
SchemaCreator schemaCreator = schemaManagementTool.getSchemaCreator( new HashMap() );
final List |
| Solution content |
|---|
private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
@Test
public void testGeneratedSql() {
Properties properties = new Properties();
properties.putAll( Environment.getProperties() );
properties.setProperty( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties );
try {
MetadataSources metadataSource = new MetadataSources(serviceRegistry);
metadataSource.addAnnotatedClass( Address.class ).addAnnotatedClass( Person.class );
MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
metadata.getDatabase().getJdbcEnvironment();
SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
SchemaCreator schemaCreator = schemaManagementTool.getSchemaCreator( new HashMap() );
final List |
| File |
|---|
| NullablePrimaryKeyTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Cast expression |
| For statement |
| Method invocation |
| Try statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.Property; import org.hibernate.mapping.SimpleValue; <<<<<<< HEAD:hibernate-core/src/test/java/org/hibernate/type/AttributeConverterTest.java ======= import org.hibernate.type.AbstractStandardBasicType; import org.hibernate.type.BasicType; import org.hibernate.type.Type; import org.hibernate.type.descriptor.java.StringTypeDescriptor; import org.junit.Test; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676:hibernate-core/src/test/java/org/hibernate/test/type/AttributeConverterTest.java import org.hibernate.testing.junit4.BaseUnitTestCase; import org.hibernate.type.descriptor.java.StringTypeDescriptor; |
| Solution content |
|---|
import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.Property; import org.hibernate.mapping.SimpleValue; import org.hibernate.type.AbstractStandardBasicType; import org.hibernate.type.BasicType; import org.hibernate.type.Type; import org.hibernate.type.descriptor.java.StringTypeDescriptor; import org.hibernate.testing.junit4.BaseUnitTestCase; |
| File |
|---|
| AttributeConverterTest.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.cache.spi.TimestampsRegion; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.cfg.Settings; <<<<<<< HEAD import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; ======= import org.hibernate.service.spi.InjectService; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * Abstract implementation of an Ehcache specific RegionFactory. |
| Solution content |
|---|
import org.hibernate.cache.spi.TimestampsRegion; import org.hibernate.cache.spi.access.AccessType; import org.hibernate.service.spi.InjectService; /** * Abstract implementation of an Ehcache specific RegionFactory. |
| File |
|---|
| AbstractEhcacheRegionFactory.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
*/
abstract class AbstractEhcacheRegionFactory extends AbstractRegionFactory {
<<<<<<< HEAD
/**
* The Hibernate system property specifying the location of the ehcache configuration file name.
*
* If not set, ehcache.xml will be looked for in the root of the classpath.
*
* If set to say ehcache-1.xml, ehcache-1.xml will be looked for in the root of the classpath.
*/
public static final String NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME = "net.sf.ehcache.configurationResourceName";
private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
EhCacheMessageLogger.class,
AbstractEhcacheRegionFactory.class.getName()
);
/**
* MBean registration helper class instance for Ehcache Hibernate MBeans.
*/
protected final ProviderMBeanRegistrationHelper mbeanRegistrationHelper = new ProviderMBeanRegistrationHelper();
/**
* Ehcache CacheManager that supplied Ehcache instances for this Hibernate RegionFactory.
*/
protected volatile CacheManager manager;
/**
* Settings object for the Hibernate persistence unit.
*/
protected Settings settings;
/**
* {@link EhcacheAccessStrategyFactory} for creating various access strategies
*/
protected final EhcacheAccessStrategyFactory accessStrategyFactory =
new NonstopAccessStrategyFactory( new EhcacheAccessStrategyFactoryImpl() );
/**
* Whether to optimize for minimals puts or minimal gets.
*
* Indicates whether when operating in non-strict read/write or read-only mode
* Hibernate should optimize the access patterns for minimal puts or minimal gets.
* In Ehcache we default to minimal puts since this should have minimal to no
* affect on unclustered users, and has great benefit for clustered users.
*
* This setting can be overridden by setting the "hibernate.cache.use_minimal_puts"
* property in the Hibernate configuration.
*
* @return true, optimize for minimal puts
*/
public boolean isMinimalPutsEnabledByDefault() {
return true;
}
/**
* {@inheritDoc}
*/
public long nextTimestamp() {
return net.sf.ehcache.util.Timestamper.next();
}
/**
* {@inheritDoc}
*/
public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException {
return new EhcacheEntityRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties );
}
@Override
public NaturalIdRegion buildNaturalIdRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException {
return new EhcacheNaturalIdRegion( accessStrategyFactory, getCache( regionName ), isMinimalPutsEnabled(), metadata, properties );
}
/**
* {@inheritDoc}
*/
public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
throws CacheException {
return new EhcacheCollectionRegion(
accessStrategyFactory,
getCache( regionName ),
isMinimalPutsEnabled(),
metadata,
properties
);
}
/**
* {@inheritDoc}
*/
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
return new EhcacheQueryResultsRegion( accessStrategyFactory, getCache( regionName ), properties );
}
/**
* {@inheritDoc}
*/
public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
return new EhcacheTimestampsRegion( accessStrategyFactory, getCache( regionName ), properties );
}
private Ehcache getCache(String name) throws CacheException {
try {
Ehcache cache = manager.getEhcache( name );
if ( cache == null ) {
LOG.unableToFindEhCacheConfiguration( name );
manager.addCache( name );
return true;
cache = manager.getEhcache( name );
LOG.debug( "started EHCache region: " + name );
}
HibernateUtil.validateEhcache( cache );
return cache;
}
catch ( net.sf.ehcache.CacheException e ) {
throw new CacheException( e );
}
}
/**
* Load a resource from the classpath.
*/
protected URL loadResource(String configurationResourceName) {
URL url = getServiceRegistry().getService( ClassLoaderService.class ).locateResource( configurationResourceName );
if ( url == null ) {
ClassLoader standardClassloader = ClassLoaderUtil.getStandardClassLoader();
if ( standardClassloader != null ) {
url = standardClassloader.getResource( configurationResourceName );
}
if ( url == null ) {
url = AbstractEhcacheRegionFactory.class.getResource( configurationResourceName );
}
}
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Creating EhCacheRegionFactory from a specified resource: %s. Resolved to URL: %s",
configurationResourceName,
url
);
}
if ( url == null ) {
LOG.unableToLoadConfiguration( configurationResourceName );
}
return url;
}
/**
* Default access-type used when the configured using JPA 2.0 config. JPA 2.0 allows |
| Solution content |
|---|
*/
abstract class AbstractEhcacheRegionFactory extends AbstractRegionFactory {
/**
* The Hibernate system property specifying the location of the ehcache configuration file name.
*
* If not set, ehcache.xml will be looked for in the root of the classpath.
*
* If set to say ehcache-1.xml, ehcache-1.xml will be looked for in the root of the classpath.
*/
public static final String NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME = "net.sf.ehcache.configurationResourceName";
private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
EhCacheMessageLogger.class,
AbstractEhcacheRegionFactory.class.getName()
);
/**
* MBean registration helper class instance for Ehcache Hibernate MBeans.
*/
protected final ProviderMBeanRegistrationHelper mbeanRegistrationHelper = new ProviderMBeanRegistrationHelper();
/**
* Ehcache CacheManager that supplied Ehcache instances for this Hibernate RegionFactory.
*/
protected volatile CacheManager manager;
/**
* {@link EhcacheAccessStrategyFactory} for creating various access strategies
*/
protected final EhcacheAccessStrategyFactory accessStrategyFactory =
new NonstopAccessStrategyFactory( new EhcacheAccessStrategyFactoryImpl() );
/**
* Whether to optimize for minimals puts or minimal gets.
*
* Indicates whether when operating in non-strict read/write or read-only mode
* Hibernate should optimize the access patterns for minimal puts or minimal gets.
* In Ehcache we default to minimal puts since this should have minimal to no
configurationResourceName,
* affect on unclustered users, and has great benefit for clustered users.
*
* This setting can be overridden by setting the "hibernate.cache.use_minimal_puts"
* property in the Hibernate configuration.
*
* @return true, optimize for minimal puts
*/
public boolean isMinimalPutsEnabledByDefault() {
return true;
}
private Ehcache getCache(String name) throws CacheException {
try {
Ehcache cache = manager.getEhcache( name );
if ( cache == null ) {
LOG.unableToFindEhCacheConfiguration( name );
manager.addCache( name );
cache = manager.getEhcache( name );
LOG.debug( "started EHCache region: " + name );
}
HibernateEhcacheUtils.validateEhcache( cache );
return cache;
}
catch (net.sf.ehcache.CacheException e) {
throw new CacheException( e );
}
}
/**
* Load a resource from the classpath.
*/
protected URL loadResource(String configurationResourceName) {
URL url = null;
if ( classLoaderService != null ) {
url = classLoaderService.locateResource( configurationResourceName );
}
if ( url == null ) {
final ClassLoader standardClassloader = ClassLoaderUtil.getStandardClassLoader();
if ( standardClassloader != null ) {
url = standardClassloader.getResource( configurationResourceName );
}
if ( url == null ) {
url = AbstractEhcacheRegionFactory.class.getResource( configurationResourceName );
}
if ( url == null ) {
try {
url = new URL( configurationResourceName );
}
catch ( MalformedURLException e ) {
// ignore
}
}
}
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"Creating EhCacheRegionFactory from a specified resource: %s. Resolved to URL: %s",
url
);
}
if ( url == null ) {
LOG.unableToLoadConfiguration( configurationResourceName );
}
return url;
}
/**
* Default access-type used when the configured using JPA 2.0 config. JPA 2.0 allows |
| File |
|---|
| AbstractEhcacheRegionFactory.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Comment |
| Method declaration |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
import org.jboss.logging.Logger; import org.hibernate.cache.CacheException; <<<<<<< HEAD import org.hibernate.cache.ehcache.internal.util.HibernateUtil; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; ======= import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils; import org.hibernate.cfg.Settings; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * A non-singleton EhCacheRegionFactory implementation. |
| Solution content |
|---|
import org.jboss.logging.Logger; import org.hibernate.cache.CacheException; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils; /** * A non-singleton EhCacheRegionFactory implementation. |
| File |
|---|
| EhCacheRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
<<<<<<< HEAD
EhCacheRegionFactory.class.getName()
);
private static final EhCacheMessageLogger LOG = Logger.getMessageLogger(
EhCacheMessageLogger.class,
EhCacheRegionFactory.class.getName()
);
@Override
public void start() {
=======
/**
* Creates a non-singleton EhCacheRegionFactory
*/
@SuppressWarnings("UnusedDeclaration")
public EhCacheRegionFactory() {
}
/**
* Creates a non-singleton EhCacheRegionFactory
*
* @param prop Not used
*/
@SuppressWarnings("UnusedDeclaration")
public EhCacheRegionFactory(Properties prop) {
super();
}
@Override
public void start(Settings settings, Properties properties) throws CacheException {
this.settings = settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( manager != null ) {
LOG.attemptToRestartAlreadyStartedEhCacheProvider();
return; |
| Solution content |
|---|
EhCacheRegionFactory.class.getName()
);
/**
* Creates a non-singleton EhCacheRegionFactory
*/
@SuppressWarnings("UnusedDeclaration")
public EhCacheRegionFactory() {
}
/**
* Creates a non-singleton EhCacheRegionFactory
*
* @param prop Not used
*/
@SuppressWarnings("UnusedDeclaration")
public EhCacheRegionFactory(Properties prop) {
super();
}
@Override
public void start() {
if ( manager != null ) {
LOG.attemptToRestartAlreadyStartedEhCacheProvider();
return; |
| File |
|---|
| EhCacheRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Comment |
| Method declaration |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
}
try {
<<<<<<< HEAD
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
StandardConverters.STRING, null
);
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
Configuration configuration = ConfigurationFactory.parseConfiguration();
manager = new CacheManager( configuration );
}
else {
URL url;
try {
url = new URL( configurationResourceName );
}
catch ( MalformedURLException e ) {
url = loadResource( configurationResourceName );
}
Configuration configuration = HibernateUtil.loadAndCorrectConfiguration( url );
manager = new CacheManager( configuration );
}
Properties properties = new Properties( );
properties.putAll( configurationService.getSettings() );
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch ( net.sf.ehcache.CacheException e ) {
=======
String configurationResourceName = null;
if ( properties != null ) {
configurationResourceName = (String) properties.get( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME );
}
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
final Configuration configuration = ConfigurationFactory.parseConfiguration();
manager = new CacheManager( configuration );
}
else {
final URL url = loadResource( configurationResourceName );
final Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
manager = new CacheManager( configuration );
}
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch (net.sf.ehcache.CacheException e) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
if ( e.getMessage().startsWith(
"Cannot parseConfiguration CacheManager. Attempt to create a new instance of " +
"CacheManager using the diskStorePath" |
| Solution content |
|---|
}
try {
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
String configurationResourceName = configurationService.getSetting(
NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
StandardConverters.STRING, null
);
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
Configuration configuration = ConfigurationFactory.parseConfiguration();
manager = new CacheManager( configuration );
}
else {
URL url;
try {
url = new URL( configurationResourceName );
}
catch ( MalformedURLException e ) {
url = loadResource( configurationResourceName );
}
Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
manager = new CacheManager( configuration );
}
Properties properties = new Properties();
properties.putAll( configurationService.getSettings() );
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch ( net.sf.ehcache.CacheException e ) {
if ( e.getMessage().startsWith(
"Cannot parseConfiguration CacheManager. Attempt to create a new instance of " +
"CacheManager using the diskStorePath" |
| File |
|---|
| EhCacheRegionFactory.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Catch clause |
| If statement |
| Method invocation |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
import org.jboss.logging.Logger; import org.hibernate.cache.CacheException; <<<<<<< HEAD import org.hibernate.cache.ehcache.internal.util.HibernateUtil; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; ======= import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils; import org.hibernate.cfg.Settings; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * A singleton EhCacheRegionFactory implementation. |
| Solution content |
|---|
import org.jboss.logging.Logger; import org.hibernate.cache.CacheException; import org.hibernate.cache.ehcache.internal.util.HibernateEhcacheUtils; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; /** * A singleton EhCacheRegionFactory implementation. |
| File |
|---|
| SingletonEhCacheRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
}
<<<<<<< HEAD
private static final AtomicInteger REFERENCE_COUNT = new AtomicInteger();
@Override
public void start() {
try {
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
StandardConverters.STRING, null
);
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
manager = CacheManager.create();
REFERENCE_COUNT.incrementAndGet();
}
else {
URL url;
try {
url = new URL( configurationResourceName );
}
catch ( MalformedURLException e ) {
if ( !configurationResourceName.startsWith( "/" ) ) {
configurationResourceName = "/" + configurationResourceName;
LOG.debugf(
"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
configurationResourceName
);
}
url = loadResource( configurationResourceName );
}
Configuration configuration = HibernateUtil.loadAndCorrectConfiguration( url );
manager = CacheManager.create( configuration );
REFERENCE_COUNT.incrementAndGet();
}
Properties properties = new Properties( );
properties.putAll( configurationService.getSettings() );
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch ( net.sf.ehcache.CacheException e ) {
throw new CacheException( e );
}
}
/**
* {@inheritDoc}
*/
@Override
public void stop() {
try {
if ( manager != null ) {
if ( REFERENCE_COUNT.decrementAndGet() == 0 ) {
manager.shutdown();
}
manager = null;
}
}
catch ( net.sf.ehcache.CacheException e ) {
throw new CacheException( e );
}
}
=======
/**
* Constructs a SingletonEhCacheRegionFactory
*/
@SuppressWarnings("UnusedDeclaration")
public SingletonEhCacheRegionFactory() {
/**
* Constructs a SingletonEhCacheRegionFactory
*
* @param prop Not used
*/
@SuppressWarnings("UnusedDeclaration")
public SingletonEhCacheRegionFactory(Properties prop) {
super();
}
@Override
public void start(Settings settings, Properties properties) throws CacheException {
this.settings = settings;
try {
String configurationResourceName = null;
if ( properties != null ) {
configurationResourceName = (String) properties.get( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME );
}
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
manager = CacheManager.create();
REFERENCE_COUNT.incrementAndGet();
}
else {
URL url;
try {
url = new URL( configurationResourceName );
}
catch (MalformedURLException e) {
if ( !configurationResourceName.startsWith( "/" ) ) {
configurationResourceName = "/" + configurationResourceName;
LOG.debugf(
"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
configurationResourceName
);
}
url = loadResource( configurationResourceName );
}
final Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
manager = CacheManager.create( configuration );
REFERENCE_COUNT.incrementAndGet();
}
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch (net.sf.ehcache.CacheException e) {
throw new CacheException( e );
}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public void stop() { |
| Solution content |
|---|
private static final AtomicInteger REFERENCE_COUNT = new AtomicInteger();
@Override
public void start() {
try {
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
String configurationResourceName = configurationService.getSetting( NET_SF_EHCACHE_CONFIGURATION_RESOURCE_NAME,
StandardConverters.STRING, null
);
if ( configurationResourceName == null || configurationResourceName.length() == 0 ) {
manager = CacheManager.create();
REFERENCE_COUNT.incrementAndGet();
}
else {
URL url;
try {
url = new URL( configurationResourceName );
}
catch ( MalformedURLException e ) {
if ( !configurationResourceName.startsWith( "/" ) ) {
configurationResourceName = "/" + configurationResourceName;
LOG.debugf(
"prepending / to %s. It should be placed in the root of the classpath rather than in a package.",
configurationResourceName
);
}
url = loadResource( configurationResourceName );
}
Configuration configuration = HibernateEhcacheUtils.loadAndCorrectConfiguration( url );
manager = CacheManager.create( configuration );
REFERENCE_COUNT.incrementAndGet();
}
Properties properties = new Properties( );
properties.putAll( configurationService.getSettings() );
mbeanRegistrationHelper.registerMBean( manager, properties );
}
catch ( net.sf.ehcache.CacheException e ) {
throw new CacheException( e );
}
}
/**
* {@inheritDoc}
*/
@Override
public void stop() {
try {
if ( manager != null ) {
if ( REFERENCE_COUNT.decrementAndGet() == 0 ) {
manager.shutdown();
}
manager = null;
}
}
catch ( net.sf.ehcache.CacheException e ) {
throw new CacheException( e );
}
}
/**
* Constructs a SingletonEhCacheRegionFactory
*/
@SuppressWarnings("UnusedDeclaration")
public SingletonEhCacheRegionFactory() {
}
/**
* Constructs a SingletonEhCacheRegionFactory
*
* @param prop Not used
*/
@SuppressWarnings("UnusedDeclaration")
public SingletonEhCacheRegionFactory(Properties prop) {
super();
}
|
| File |
|---|
| SingletonEhCacheRegionFactory.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
}
<<<<<<< HEAD
/**
* Constructs an EhcacheCollectionRegion around the given underlying cache.
*
* @param accessStrategyFactory
*/
public EhcacheCollectionRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
/**
* {@inheritDoc}
*/
public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return accessStrategyFactory.createCollectionRegionAccessStrategy( this, accessType );
}
}
=======
@Override
public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return getAccessStrategyFactory().createCollectionRegionAccessStrategy( this, accessType );
}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 |
| Solution content |
|---|
public EhcacheCollectionRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
/**
* {@inheritDoc}
*/
public CollectionRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return getAccessStrategyFactory().createCollectionRegionAccessStrategy( this, accessType );
}
}
|
| File |
|---|
| EhcacheCollectionRegion.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
}
<<<<<<< HEAD
/**
* Constructs an EhcacheEntityRegion around the given underlying cache.
*
* @param accessStrategyFactory
*/
public EhcacheEntityRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
/**
* {@inheritDoc}
*/
public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return accessStrategyFactory.createEntityRegionAccessStrategy( this, accessType );
}
}
=======
@Override
public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return getAccessStrategyFactory().createEntityRegionAccessStrategy( this, accessType );
}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 |
| Solution content |
|---|
/**
* Constructs an EhcacheEntityRegion around the given underlying cache.
*
* @param accessStrategyFactory The factory for building needed EntityRegionAccessStrategy instance
* @param underlyingCache The ehcache cache instance
*/
public EhcacheEntityRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
/**
* {@inheritDoc}
*/
public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
return getAccessStrategyFactory().createEntityRegionAccessStrategy( this, accessType );
}
}
|
| File |
|---|
| EhcacheEntityRegion.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
* @author Alex Snaps
*/
public class EhcacheNaturalIdRegion extends EhcacheTransactionalDataRegion implements NaturalIdRegion {
<<<<<<< HEAD
/**
* Constructs an EhcacheNaturalIdRegion around the given underlying cache.
*
* @param accessStrategyFactory
*/
public EhcacheNaturalIdRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
=======
/**
* Constructs an EhcacheNaturalIdRegion around the given underlying cache.
*
* @param accessStrategyFactory The factory for building needed NaturalIdRegionAccessStrategy instance
* @param underlyingCache The ehcache cache instance
* @param settings The Hibernate settings
* @param metadata Metadata about the data to be cached in this region
* @param properties Any additional[ properties
*/
public EhcacheNaturalIdRegion(
EhcacheAccessStrategyFactory accessStrategyFactory,
Ehcache underlyingCache,
Settings settings,
CacheDataDescription metadata,
Properties properties) {
super( accessStrategyFactory, underlyingCache, settings, metadata, properties );
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public NaturalIdRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException { |
| Solution content |
|---|
* @author Alex Snaps
*/
public class EhcacheNaturalIdRegion extends EhcacheTransactionalDataRegion implements NaturalIdRegion {
/**
* Constructs an EhcacheNaturalIdRegion around the given underlying cache.
*
* @param accessStrategyFactory
*/
public EhcacheNaturalIdRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache underlyingCache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, underlyingCache, isMinimalPutsEnabled, metadata, properties );
}
@Override
public NaturalIdRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException { |
| File |
|---|
| EhcacheNaturalIdRegion.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
public class EhcacheTransactionalDataRegion extends EhcacheDataRegion implements TransactionalDataRegion {
private static final int LOCAL_LOCK_PROVIDER_CONCURRENCY = 128;
<<<<<<< HEAD
protected final boolean isMinimalPutsEnabled;
=======
private final Settings settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
/**
* Metadata associated with the objects stored in the region. |
| Solution content |
|---|
public class EhcacheTransactionalDataRegion extends EhcacheDataRegion implements TransactionalDataRegion {
private static final int LOCAL_LOCK_PROVIDER_CONCURRENCY = 128;
protected final boolean isMinimalPutsEnabled;
/**
* Metadata associated with the objects stored in the region. |
| File |
|---|
| EhcacheTransactionalDataRegion.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Attribute |
| Chunk |
|---|
| Conflicting content |
|---|
/**
* Construct an transactional Hibernate cache region around the given Ehcache instance.
*/
<<<<<<< HEAD
EhcacheTransactionalDataRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
=======
EhcacheTransactionalDataRegion(
EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, Settings settings,
CacheDataDescription metadata, Properties properties) {
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
super( accessStrategyFactory, cache, properties );
this.isMinimalPutsEnabled = isMinimalPutsEnabled;
this.metadata = metadata; |
| Solution content |
|---|
/**
* Construct an transactional Hibernate cache region around the given Ehcache instance.
*/
EhcacheTransactionalDataRegion(EhcacheAccessStrategyFactory accessStrategyFactory, Ehcache cache, boolean isMinimalPutsEnabled,
CacheDataDescription metadata, Properties properties) {
super( accessStrategyFactory, cache, properties );
this.isMinimalPutsEnabled = isMinimalPutsEnabled;
this.metadata = metadata; |
| File |
|---|
| EhcacheTransactionalDataRegion.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
}
}
<<<<<<< HEAD
public boolean isMinimalPutsEnabled(){
return isMinimalPutsEnabled;
=======
/**
* Access the Hibernate settings associated with the persistence unit.
*
* @return settings
*/
public Settings getSettings() {
return settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
@Override |
| Solution content |
|---|
}
this.locksAreIndependentOfCache = lockProvider instanceof StripedReadWriteLockSync;
}
public boolean isMinimalPutsEnabled(){
return isMinimalPutsEnabled;
}
@Override |
| File |
|---|
| EhcacheTransactionalDataRegion.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| Method signature |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
* @param region The wrapped region. Accessible to subclasses via {@link #region()}
* @param settings The Hibernate settings. Accessible to subclasses via {@link #settings()}
*/
<<<<<<< HEAD
protected final T region;
=======
AbstractEhcacheAccessStrategy(T region, Settings settings) {
this.region = region;
this.settings = settings;
}
/**
* The wrapped Hibernate cache region.
*/
protected T region() {
return region;
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
/**
* The settings for this persistence unit. |
| Solution content |
|---|
/**
* The wrapped Hibernate cache region.
*/
protected T region() {
return region;
}
/**
* The settings for this persistence unit. |
| File |
|---|
| AbstractEhcacheAccessStrategy.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
/**
* The settings for this persistence unit.
*/
<<<<<<< HEAD
AbstractEhcacheAccessStrategy(T region) {
this.region = region;
=======
protected Settings settings() {
return settings;
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
/** |
| Solution content |
|---|
/**
* The settings for this persistence unit.
*/
AbstractEhcacheAccessStrategy(T region) {
this.region = region;
}
/** |
| File |
|---|
| AbstractEhcacheAccessStrategy.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Attribute |
| Method signature |
| Return statement |
| Chunk |
|---|
| Conflicting content |
|---|
* @author Chris Dennis * @author Alex Snaps */ <<<<<<< HEAD abstract class AbstractReadWriteEhcacheAccessStrategy |
| Solution content |
|---|
* @author Chris Dennis * @author Alex Snaps */ abstract class AbstractReadWriteEhcacheAccessStrategy |
| File |
|---|
| AbstractReadWriteEhcacheAccessStrategy.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Class declaration |
| Class signature |
| Comment |
| Interface declaration |
| Method declaration |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
EhcacheAccessStrategyFactoryImpl.class.getName()
);
<<<<<<< HEAD
/**
* {@inheritDoc}
*/
public EntityRegionAccessStrategy createEntityRegionAccessStrategy(EhcacheEntityRegion entityRegion, AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( entityRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
}
return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion );
case READ_WRITE:
return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy(
entityRegion
);
case TRANSACTIONAL:
return new TransactionalEhcacheEntityRegionAccessStrategy(
entityRegion,
entityRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
=======
@Override
public EntityRegionAccessStrategy createEntityRegionAccessStrategy(
EhcacheEntityRegion entityRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( entityRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
}
return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion, entityRegion.getSettings() );
case READ_WRITE:
return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion, entityRegion.getSettings() );
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy(
entityRegion,
entityRegion.getSettings()
);
case TRANSACTIONAL:
return new TransactionalEhcacheEntityRegionAccessStrategy(
entityRegion,
entityRegion.getEhcache(),
entityRegion.getSettings()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
|
| Solution content |
|---|
EhcacheAccessStrategyFactoryImpl.class.getName()
);
@Override
public EntityRegionAccessStrategy createEntityRegionAccessStrategy(
EhcacheEntityRegion entityRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( entityRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( entityRegion.getName() );
}
return new ReadOnlyEhcacheEntityRegionAccessStrategy( entityRegion );
case READ_WRITE:
return new ReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheEntityRegionAccessStrategy( entityRegion );
case TRANSACTIONAL:
return new TransactionalEhcacheEntityRegionAccessStrategy(
entityRegion,
entityRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
|
| File |
|---|
| EhcacheAccessStrategyFactoryImpl.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Annotation |
| Case statement |
| Comment |
| If statement |
| Method invocation |
| Method signature |
| Return statement |
| Switch statement |
| Throw statement |
| Chunk |
|---|
| Conflicting content |
|---|
}
<<<<<<< HEAD
/**
* {@inheritDoc}
*/
public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(EhcacheCollectionRegion collectionRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( collectionRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
}
return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case READ_WRITE:
return new ReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case TRANSACTIONAL:
return new TransactionalEhcacheCollectionRegionAccessStrategy(
collectionRegion, collectionRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
}
=======
@Override
public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(
EhcacheCollectionRegion collectionRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( collectionRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
}
return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
collectionRegion,
collectionRegion.getSettings()
);
case READ_WRITE:
return new ReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion,
collectionRegion.getSettings()
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion,
collectionRegion.getSettings()
);
case TRANSACTIONAL:
return new TransactionalEhcacheCollectionRegionAccessStrategy(
collectionRegion, collectionRegion.getEhcache(), collectionRegion
.getSettings()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy( |
| Solution content |
|---|
}
@Override
public CollectionRegionAccessStrategy createCollectionRegionAccessStrategy(EhcacheCollectionRegion collectionRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( collectionRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( collectionRegion.getName() );
}
return new ReadOnlyEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case READ_WRITE:
return new ReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheCollectionRegionAccessStrategy(
collectionRegion
);
case TRANSACTIONAL:
return new TransactionalEhcacheCollectionRegionAccessStrategy(
collectionRegion, collectionRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
}
@Override
public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy( |
| File |
|---|
| EhcacheAccessStrategyFactoryImpl.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
EhcacheNaturalIdRegion naturalIdRegion,
AccessType accessType) {
<<<<<<< HEAD
switch ( accessType ) {
case READ_ONLY:
if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
}
return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case READ_WRITE:
return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case TRANSACTIONAL:
return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion, naturalIdRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
=======
switch ( accessType ) {
case READ_ONLY:
if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
}
return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion,
naturalIdRegion.getSettings()
);
case READ_WRITE:
return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion,
naturalIdRegion.getSettings()
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion,
naturalIdRegion.getSettings()
);
case TRANSACTIONAL:
return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion, naturalIdRegion.getEhcache(), naturalIdRegion
.getSettings()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
|
| Solution content |
|---|
public NaturalIdRegionAccessStrategy createNaturalIdRegionAccessStrategy(
EhcacheNaturalIdRegion naturalIdRegion,
AccessType accessType) {
switch ( accessType ) {
case READ_ONLY:
if ( naturalIdRegion.getCacheDataDescription().isMutable() ) {
LOG.readOnlyCacheConfiguredForMutableEntity( naturalIdRegion.getName() );
}
return new ReadOnlyEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case READ_WRITE:
return new ReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case NONSTRICT_READ_WRITE:
return new NonStrictReadWriteEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion
);
case TRANSACTIONAL:
return new TransactionalEhcacheNaturalIdRegionAccessStrategy(
naturalIdRegion, naturalIdRegion.getEhcache()
);
default:
throw new IllegalArgumentException( "unrecognized access strategy type [" + accessType + "]" );
}
}
|
| File |
|---|
| EhcacheAccessStrategyFactoryImpl.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Switch statement |
| Chunk |
|---|
| Conflicting content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
<<<<<<< HEAD
public TransactionalEhcacheCollectionRegionAccessStrategy(EhcacheCollectionRegion region, Ehcache ehcache) {
super( region );
=======
public TransactionalEhcacheCollectionRegionAccessStrategy(
EhcacheCollectionRegion region,
Ehcache ehcache,
Settings settings) {
super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
this.ehcache = ehcache;
}
|
| Solution content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
public TransactionalEhcacheCollectionRegionAccessStrategy(EhcacheCollectionRegion region, Ehcache ehcache) {
super( region );
this.ehcache = ehcache;
}
|
| File |
|---|
| TransactionalEhcacheCollectionRegionAccessStrategy.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
<<<<<<< HEAD
public TransactionalEhcacheEntityRegionAccessStrategy(EhcacheEntityRegion region, Ehcache ehcache) {
super( region );
=======
public TransactionalEhcacheEntityRegionAccessStrategy(
EhcacheEntityRegion region,
Ehcache ehcache,
Settings settings) {
super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
this.ehcache = ehcache;
}
|
| Solution content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
public TransactionalEhcacheEntityRegionAccessStrategy(EhcacheEntityRegion region, Ehcache ehcache) {
super( region );
this.ehcache = ehcache;
}
|
| File |
|---|
| TransactionalEhcacheEntityRegionAccessStrategy.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
<<<<<<< HEAD
public TransactionalEhcacheNaturalIdRegionAccessStrategy(EhcacheNaturalIdRegion region, Ehcache ehcache) {
super( region );
=======
public TransactionalEhcacheNaturalIdRegionAccessStrategy(
EhcacheNaturalIdRegion region,
Ehcache ehcache,
Settings settings) {
super( region, settings );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
this.ehcache = ehcache;
}
|
| Solution content |
|---|
* @param region the Hibernate region.
* @param ehcache the cache.
*/
public TransactionalEhcacheNaturalIdRegionAccessStrategy(EhcacheNaturalIdRegion region, Ehcache ehcache) {
super( region );
this.ehcache = ehcache;
}
|
| File |
|---|
| TransactionalEhcacheNaturalIdRegionAccessStrategy.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
*/ package org.hibernate.jpa; <<<<<<< HEAD ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 /** * Defines the available HEM settings, both JPA-defined as well as Hibernate-specific * |
| Solution content |
|---|
*/ package org.hibernate.jpa; /** * Defines the available HEM settings, both JPA-defined as well as Hibernate-specific * |
| File |
|---|
| AvailableSettings.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Blank |
| Chunk |
|---|
| Conflicting content |
|---|
======= import java.util.Set; import java.util.StringTokenizer; <<<<<<< HEAD import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.ClassInfo; import org.jboss.jandex.CompositeIndex; import org.jboss.jandex.DotName; import org.jboss.jandex.Index; import org.jboss.jandex.IndexView; import org.jboss.jandex.Indexer; import org.jboss.logging.Logger; import javax.persistence.AttributeConverter; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityNotFoundException; import javax.persistence.PersistenceException; import javax.persistence.spi.PersistenceUnitTransactionType; import javax.sql.DataSource; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.Interceptor; import org.hibernate.InvalidMappingException; |
| Solution content |
|---|
import java.util.Set; import java.util.StringTokenizer; import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.ClassInfo; import org.jboss.jandex.CompositeIndex; import org.jboss.jandex.DotName; import org.jboss.jandex.Index; import org.jboss.jandex.IndexView; import org.jboss.jandex.Indexer; import org.jboss.logging.Logger; import javax.persistence.AttributeConverter; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityNotFoundException; import javax.persistence.PersistenceException; import javax.persistence.spi.PersistenceUnitTransactionType; import javax.sql.DataSource; import org.hibernate.Interceptor; import org.hibernate.InvalidMappingException; |
| File |
|---|
| EntityManagerFactoryBuilderImpl.java |
| Developer's decision |
|---|
| Concatenation |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; <<<<<<< HEAD import org.hibernate.boot.registry.internal.ConfigLoader; ======= import org.hibernate.boot.registry.selector.StrategyRegistrationProvider; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.boot.spi.CacheRegionDefinition; import org.hibernate.cfg.Configuration; |
| Solution content |
|---|
import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.internal.ConfigLoader; import org.hibernate.boot.registry.selector.StrategyRegistrationProvider; import org.hibernate.boot.registry.selector.spi.StrategySelector; import org.hibernate.boot.spi.CacheRegionDefinition; import org.hibernate.cfg.Configuration; |
| File |
|---|
| EntityManagerFactoryBuilderImpl.java |
| Developer's decision |
|---|
| Concatenation |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.jpa.internal.util.LogHelper; import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper; import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider; <<<<<<< HEAD import org.hibernate.metamodel.MetadataBuilder; import org.hibernate.metamodel.SessionFactoryBuilder; import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames; import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper; import org.hibernate.metamodel.spi.MetadataImplementor; ======= import org.hibernate.metamodel.source.annotations.JPADotNames; import org.hibernate.metamodel.source.annotations.JandexHelper; import org.hibernate.metamodel.spi.TypeContributor; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.proxy.EntityNotFoundDelegate; import org.hibernate.secure.spi.GrantedPermission; import org.hibernate.secure.spi.JaccService; |
| Solution content |
|---|
import org.hibernate.jpa.internal.util.LogHelper; import org.hibernate.jpa.internal.util.PersistenceUnitTransactionTypeHelper; import org.hibernate.jpa.spi.IdentifierGeneratorStrategyProvider; import org.hibernate.metamodel.MetadataBuilder; import org.hibernate.metamodel.SessionFactoryBuilder; import org.hibernate.metamodel.internal.source.annotations.util.JPADotNames; import org.hibernate.metamodel.internal.source.annotations.util.JandexHelper; import org.hibernate.metamodel.spi.MetadataImplementor; import org.hibernate.metamodel.spi.TypeContributor; import org.hibernate.proxy.EntityNotFoundDelegate; import org.hibernate.secure.spi.GrantedPermission; import org.hibernate.secure.spi.JaccService; |
| File |
|---|
| EntityManagerFactoryBuilderImpl.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
)
);
<<<<<<< HEAD
appendListeners( listenerRegistry, enversConfiguration );
=======
if ( enversConfiguration.getEntCfg().hasAuditedEntities() ) {
listenerRegistry.appendListeners(
EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl(
enversConfiguration
)
);
listenerRegistry.appendListeners(
EventType.POST_INSERT, new EnversPostInsertEventListenerImpl(
enversConfiguration
)
);
listenerRegistry.appendListeners(
EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl(
enversConfiguration
)
);
listenerRegistry.appendListeners(
EventType.POST_COLLECTION_RECREATE,
new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration )
);
listenerRegistry.appendListeners(
EventType.PRE_COLLECTION_REMOVE,
new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration )
);
listenerRegistry.appendListeners(
EventType.PRE_COLLECTION_UPDATE,
new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration )
);
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
@Override |
| Solution content |
|---|
) ); appendListeners( listenerRegistry, enversConfiguration ); } @Override |
| File |
|---|
| EnversIntegrator.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| If statement |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
}
@Override
<<<<<<< HEAD
public void integrate( MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry ) {
final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
final boolean autoRegister = configurationService.getSetting( AUTO_REGISTER, StandardConverters.BOOLEAN, true );
if ( !autoRegister ) {
LOG.debug( "Skipping Envers listener auto registration" );
return;
}
EventListenerRegistry listenerRegistry = serviceRegistry.getService( EventListenerRegistry.class );
listenerRegistry.addDuplicationStrategy( EnversListenerDuplicationStrategy.INSTANCE );
// final AuditConfiguration enversConfiguration = AuditConfiguration.getFor( configuration, serviceRegistry.getService( ClassLoaderService.class ) );
//
// appendListeners( listenerRegistry, enversConfiguration );
}
private void appendListeners(EventListenerRegistry listenerRegistry, AuditConfiguration enversConfiguration) {
if (enversConfiguration.getEntCfg().hasAuditedEntities()) {
listenerRegistry.appendListeners( EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_INSERT, new EnversPostInsertEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_COLLECTION_RECREATE, new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.PRE_COLLECTION_REMOVE, new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.PRE_COLLECTION_UPDATE, new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration ) );
}
=======
public void integrate(
MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry) {
// TODO: implement
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
} |
| Solution content |
|---|
}
@Override
public void integrate( MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry ) {
final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
final boolean autoRegister = configurationService.getSetting( AUTO_REGISTER, StandardConverters.BOOLEAN, true );
if ( !autoRegister ) {
LOG.debug( "Skipping Envers listener auto registration" );
return;
}
EventListenerRegistry listenerRegistry = serviceRegistry.getService( EventListenerRegistry.class );
listenerRegistry.addDuplicationStrategy( EnversListenerDuplicationStrategy.INSTANCE );
// final AuditConfiguration enversConfiguration = AuditConfiguration.getFor( configuration, serviceRegistry.getService( ClassLoaderService.class ) );
//
// appendListeners( listenerRegistry, enversConfiguration );
}
private void appendListeners(EventListenerRegistry listenerRegistry, AuditConfiguration enversConfiguration) {
if (enversConfiguration.getEntCfg().hasAuditedEntities()) {
listenerRegistry.appendListeners( EventType.POST_DELETE, new EnversPostDeleteEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_INSERT, new EnversPostInsertEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_UPDATE, new EnversPostUpdateEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.POST_COLLECTION_RECREATE, new EnversPostCollectionRecreateEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.PRE_COLLECTION_REMOVE, new EnversPreCollectionRemoveEventListenerImpl( enversConfiguration ) );
listenerRegistry.appendListeners( EventType.PRE_COLLECTION_UPDATE, new EnversPreCollectionUpdateEventListenerImpl( enversConfiguration ) );
}
}
} |
| File |
|---|
| EnversIntegrator.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Comment |
| If statement |
| Method declaration |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
import org.junit.Test; package org.hibernate.envers.test.integration.components; import org.hibernate.Session; <<<<<<< HEAD import org.junit.Assert; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.components.UniquePropsEntity; |
| Solution content |
|---|
package org.hibernate.envers.test.integration.components; import org.hibernate.Session; import org.junit.Assert; import org.junit.Test; import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.components.UniquePropsEntity; |
| File |
|---|
| PropertiesGroupTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.StrTestEntity; <<<<<<< HEAD import org.hibernate.testing.FailureExpectedWithNewMetamodel; ======= import org.junit.Assert; import org.junit.Test; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.testing.TestForIssue; /** |
| Solution content |
|---|
import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.StrTestEntity; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.junit.Assert; import org.junit.Test; import org.hibernate.testing.TestForIssue; /** |
| File |
|---|
| AddDelTest.java |
| Developer's decision |
|---|
| Concatenation |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
return new Class[] {StrTestEntity.class, GivenIdStrEntity.class};
}
<<<<<<< HEAD
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void initData() {
// Revision 1
Session session = openSession();
session.getTransaction().begin();
GivenIdStrEntity entity = new GivenIdStrEntity(1, "data");
session.persist(entity);
session.getTransaction().commit();
=======
@Test
@Priority(10)
public void initData() {
// Revision 1
Session session = openSession();
session.getTransaction().begin();
GivenIdStrEntity entity = new GivenIdStrEntity( 1, "data" );
session.persist( entity );
session.getTransaction().commit();
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
// Revision 2
session.getTransaction().begin(); |
| Solution content |
|---|
return new Class[] {StrTestEntity.class, GivenIdStrEntity.class};
}
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void initData() {
// Revision 1
Session session = openSession();
session.getTransaction().begin();
GivenIdStrEntity entity = new GivenIdStrEntity(1, "data");
session.persist(entity);
session.getTransaction().commit();
// Revision 2
session.getTransaction().begin(); |
| File |
|---|
| AddDelTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
session.close();
}
<<<<<<< HEAD
@Test
@FailureExpectedWithNewMetamodel
public void testRevisionsCountOfGivenIdStrEntity() {
// Revision 2 has not changed entity's state.
Assert.assertEquals(Arrays.asList(1, 3), getAuditReader().getRevisions(GivenIdStrEntity.class, 1));
=======
@Test
public void testRevisionsCountOfGivenIdStrEntity() {
// Revision 2 has not changed entity's state.
Assert.assertEquals( Arrays.asList( 1, 3 ), getAuditReader().getRevisions( GivenIdStrEntity.class, 1 ) );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
getSession().close();
} |
| Solution content |
|---|
session.close();
}
@Test
@FailureExpectedWithNewMetamodel
public void testRevisionsCountOfGivenIdStrEntity() {
// Revision 2 has not changed entity's state.
Assert.assertEquals(Arrays.asList(1, 3), getAuditReader().getRevisions(GivenIdStrEntity.class, 1));
getSession().close();
} |
| File |
|---|
| AddDelTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
getSession().close();
}
<<<<<<< HEAD
@Test
@FailureExpectedWithNewMetamodel
public void testHistoryOfGivenIdStrEntity() {
Assert.assertEquals(new GivenIdStrEntity(1, "data"), getAuditReader().find(GivenIdStrEntity.class, 1, 1));
Assert.assertEquals(new GivenIdStrEntity(1, "modified data"), getAuditReader().find(GivenIdStrEntity.class, 1, 3));
=======
@Test
public void testHistoryOfGivenIdStrEntity() {
Assert.assertEquals( new GivenIdStrEntity( 1, "data" ), getAuditReader().find( GivenIdStrEntity.class, 1, 1 ) );
Assert.assertEquals(
new GivenIdStrEntity( 1, "modified data" ), getAuditReader().find(
GivenIdStrEntity.class,
1,
3
)
);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
getSession().close();
} |
| Solution content |
|---|
getSession().close();
}
@FailureExpectedWithNewMetamodel
@Test
public void testHistoryOfGivenIdStrEntity() {
Assert.assertEquals( new GivenIdStrEntity( 1, "data" ), getAuditReader().find( GivenIdStrEntity.class, 1, 1 ) );
Assert.assertEquals(
new GivenIdStrEntity( 1, "modified data" ), getAuditReader().find(
GivenIdStrEntity.class,
1,
3
)
);
getSession().close();
} |
| File |
|---|
| AddDelTest.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Method invocation |
| Method signature |
| Chunk |
|---|
| Conflicting content |
|---|
import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.StrTestEntity; <<<<<<< HEAD import org.hibernate.testing.FailureExpectedWithNewMetamodel; ======= import org.junit.Assert; import org.junit.Test; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.testing.TestForIssue; /** |
| Solution content |
|---|
import org.hibernate.envers.test.BaseEnversFunctionalTestCase; import org.hibernate.envers.test.Priority; import org.hibernate.envers.test.entities.StrTestEntity; import org.hibernate.testing.FailureExpectedWithNewMetamodel; import org.junit.Assert; import org.junit.Test; import org.hibernate.testing.TestForIssue; /** |
| File |
|---|
| QueryingWithProxyObjectTest.java |
| Developer's decision |
|---|
| Concatenation |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
return new Class[] {StrTestEntity.class};
}
<<<<<<< HEAD
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void initData() {
// Revision 1
getSession().getTransaction().begin();
StrTestEntity ste = new StrTestEntity("data");
getSession().persist(ste);
getSession().getTransaction().commit();
id = ste.getId();
getSession().close();
}
@Test
@TestForIssue(jiraKey="HHH-4760")
@SuppressWarnings("unchecked")
@FailureExpectedWithNewMetamodel
public void testQueryingWithProxyObject() {
StrTestEntity originalSte = new StrTestEntity("data", id);
// Load the proxy instance
StrTestEntity proxySte = (StrTestEntity) getSession().load(StrTestEntity.class, id);
=======
@Test
@Priority(10)
public void initData() {
// Revision 1
getSession().getTransaction().begin();
StrTestEntity ste = new StrTestEntity( "data" );
getSession().persist( ste );
getSession().getTransaction().commit();
id = ste.getId();
getSession().close();
}
@Test
@TestForIssue(jiraKey = "HHH-4760")
@SuppressWarnings("unchecked")
public void testQueryingWithProxyObject() {
StrTestEntity originalSte = new StrTestEntity( "data", id );
// Load the proxy instance
StrTestEntity proxySte = (StrTestEntity) getSession().load( StrTestEntity.class, id );
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
Assert.assertTrue( getAuditReader().isEntityClassAudited( proxySte.getClass() ) );
|
| Solution content |
|---|
return new Class[] {StrTestEntity.class};
}
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void initData() {
// Revision 1
getSession().getTransaction().begin();
StrTestEntity ste = new StrTestEntity("data");
getSession().persist(ste);
getSession().getTransaction().commit();
id = ste.getId();
getSession().close();
}
@Test
@TestForIssue(jiraKey="HHH-4760")
@SuppressWarnings("unchecked")
@FailureExpectedWithNewMetamodel
public void testQueryingWithProxyObject() {
StrTestEntity originalSte = new StrTestEntity("data", id);
// Load the proxy instance
StrTestEntity proxySte = (StrTestEntity) getSession().load(StrTestEntity.class, id);
Assert.assertTrue( getAuditReader().isEntityClassAudited( proxySte.getClass() ) );
|
| File |
|---|
| QueryingWithProxyObjectTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Cast expression |
| Comment |
| Method declaration |
| Method invocation |
| Method signature |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
// Disable schema auto generation.
return false;
}
<<<<<<< HEAD
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void testSchemaCreation() {
// Generate complete schema.
new EnversSchemaGenerator(configuration()).export().create( true, true );
// Populate database with test data.
Session session = getSession();
session.getTransaction().begin();
StrTestEntity entity = new StrTestEntity("data");
session.save(entity);
session.getTransaction().commit();
id = entity.getId();
}
@Test
@Priority(9)
@FailureExpectedWithNewMetamodel
public void testAuditDataRetrieval() {
Assert.assertEquals(Arrays.asList(1), getAuditReader().getRevisions(StrTestEntity.class, id));
Assert.assertEquals(new StrTestEntity("data", id), getAuditReader().find(StrTestEntity.class, id, 1));
}
@Test
@Priority(8)
public void testSchemaDrop() {
new EnversSchemaGenerator(configuration()).export().drop( true, true );
}
=======
@Test
@Priority(10)
public void testSchemaCreation() {
// Generate complete schema.
new EnversSchemaGenerator( configuration() ).export().create( true, true );
// Populate database with test data.
Session session = getSession();
session.getTransaction().begin();
StrTestEntity entity = new StrTestEntity( "data" );
session.save( entity );
session.getTransaction().commit();
id = entity.getId();
}
@Test
@Priority(9)
public void testAuditDataRetrieval() {
Assert.assertEquals( Arrays.asList( 1 ), getAuditReader().getRevisions( StrTestEntity.class, id ) );
Assert.assertEquals( new StrTestEntity( "data", id ), getAuditReader().find( StrTestEntity.class, id, 1 ) );
}
@Test
@Priority(8)
public void testSchemaDrop() {
new EnversSchemaGenerator( configuration() ).export().drop( true, true );
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
} |
| Solution content |
|---|
// Disable schema auto generation.
return false;
}
@Test
@Priority(10)
@FailureExpectedWithNewMetamodel
public void testSchemaCreation() {
// Generate complete schema.
new EnversSchemaGenerator(configuration()).export().create( true, true );
// Populate database with test data.
Session session = getSession();
session.getTransaction().begin();
StrTestEntity entity = new StrTestEntity("data");
session.save(entity);
session.getTransaction().commit();
id = entity.getId();
}
@Test
@Priority(9)
@FailureExpectedWithNewMetamodel
public void testAuditDataRetrieval() {
Assert.assertEquals(Arrays.asList(1), getAuditReader().getRevisions(StrTestEntity.class, id));
Assert.assertEquals(new StrTestEntity("data", id), getAuditReader().find(StrTestEntity.class, id, 1));
}
@Test
@Priority(8)
public void testSchemaDrop() {
new EnversSchemaGenerator(configuration()).export().drop( true, true );
}
} |
| File |
|---|
| SchemaExportTest.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
import java.util.Set; import java.util.concurrent.TimeUnit; <<<<<<< HEAD import org.hibernate.cache.infinispan.timestamp.ClusteredTimestampsRegionImpl; import org.hibernate.cache.infinispan.util.Caches; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.infinispan.AdvancedCache; import org.infinispan.commands.module.ModuleCommandFactory; import org.infinispan.configuration.cache.CacheMode; |
| Solution content |
|---|
import java.util.Set; import java.util.concurrent.TimeUnit; import org.hibernate.cache.infinispan.timestamp.ClusteredTimestampsRegionImpl; import org.hibernate.cache.infinispan.util.Caches; import org.infinispan.AdvancedCache; import org.infinispan.commands.module.ModuleCommandFactory; import org.infinispan.configuration.cache.CacheMode; |
| File |
|---|
| InfinispanRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.infinispan.configuration.cache.CacheMode; import org.infinispan.configuration.cache.Configuration; import org.infinispan.configuration.cache.ConfigurationBuilder; <<<<<<< HEAD ======= import org.infinispan.configuration.parsing.ConfigurationBuilderHolder; import org.infinispan.configuration.parsing.ParserRegistry; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.infinispan.factories.GlobalComponentRegistry; import org.infinispan.manager.DefaultCacheManager; import org.infinispan.manager.EmbeddedCacheManager; |
| Solution content |
|---|
import org.infinispan.configuration.cache.CacheMode; import org.infinispan.configuration.cache.Configuration; import org.infinispan.configuration.cache.ConfigurationBuilder; import org.infinispan.configuration.parsing.ConfigurationBuilderHolder; import org.infinispan.configuration.parsing.ParserRegistry; import org.infinispan.factories.GlobalComponentRegistry; import org.infinispan.manager.DefaultCacheManager; import org.infinispan.manager.EmbeddedCacheManager; |
| File |
|---|
| InfinispanRegionFactory.java |
| Developer's decision |
|---|
| Version 2 |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
import org.infinispan.manager.EmbeddedCacheManager; import org.infinispan.transaction.TransactionMode; import org.infinispan.transaction.lookup.GenericTransactionManagerLookup; <<<<<<< HEAD import org.infinispan.util.concurrent.IsolationLevel; import org.infinispan.configuration.parsing.ConfigurationBuilderHolder; import org.infinispan.configuration.parsing.ParserRegistry; import org.infinispan.util.FileLookupFactory; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; import org.hibernate.cache.infinispan.impl.BaseRegion; import org.hibernate.cache.infinispan.naturalid.NaturalIdRegionImpl; import org.hibernate.cache.infinispan.util.CacheCommandFactory; import org.hibernate.cache.spi.AbstractRegionFactory; import org.hibernate.cache.spi.CacheDataDescription; ======= import org.infinispan.util.FileLookupFactory; import org.infinispan.util.concurrent.IsolationLevel; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; import org.hibernate.cache.infinispan.entity.EntityRegionImpl; |
| Solution content |
|---|
import org.infinispan.manager.EmbeddedCacheManager; import org.infinispan.transaction.TransactionMode; import org.infinispan.transaction.lookup.GenericTransactionManagerLookup; import org.infinispan.util.concurrent.IsolationLevel; import org.infinispan.util.FileLookupFactory; import org.infinispan.util.logging.Log; import org.infinispan.util.logging.LogFactory; import org.hibernate.cache.infinispan.impl.BaseRegion; import org.hibernate.cache.infinispan.naturalid.NaturalIdRegionImpl; import org.hibernate.cache.infinispan.util.CacheCommandFactory; import org.hibernate.cache.spi.AbstractRegionFactory; import org.hibernate.cache.spi.CacheDataDescription; import org.hibernate.cache.CacheException; import org.hibernate.cache.infinispan.collection.CollectionRegionImpl; import org.hibernate.cache.infinispan.entity.EntityRegionImpl; |
| File |
|---|
| InfinispanRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
@SuppressWarnings("UnusedDeclaration")
public static final String NATURAL_ID_CACHE_RESOURCE_PROP = PREFIX + NATURAL_ID_KEY + CONFIG_SUFFIX;
<<<<<<< HEAD
private static final String ENTITY_KEY = "entity";
/**
* Name of the configuration that should be used for entity caches.
*
* @see #DEF_ENTITY_RESOURCE
*/
public static final String ENTITY_CACHE_RESOURCE_PROP = PREFIX + ENTITY_KEY + CONFIG_SUFFIX;
private static final String COLLECTION_KEY = "collection";
/**
* Name of the configuration that should be used for collection caches.
* No default value, as by default we try to use the same Infinispan cache
* instance we use for entity caching.
*
* @see #ENTITY_CACHE_RESOURCE_PROP
* @see #DEF_ENTITY_RESOURCE
*/
public static final String COLLECTION_CACHE_RESOURCE_PROP = PREFIX + COLLECTION_KEY + CONFIG_SUFFIX;
private static final String TIMESTAMPS_KEY = "timestamps";
/**
* Name of the configuration that should be used for timestamp caches.
*
* @see #DEF_TIMESTAMPS_RESOURCE
*/
public static final String TIMESTAMPS_CACHE_RESOURCE_PROP = PREFIX + TIMESTAMPS_KEY + CONFIG_SUFFIX;
private static final String QUERY_KEY = "query";
/**
* Name of the configuration that should be used for query caches.
*
* @see #DEF_QUERY_RESOURCE
*/
public static final String QUERY_CACHE_RESOURCE_PROP = PREFIX + QUERY_KEY + CONFIG_SUFFIX;
/**
* Default value for {@link #INFINISPAN_CONFIG_RESOURCE_PROP}. Specifies the "infinispan-configs.xml" file in this package.
*/
public static final String DEF_INFINISPAN_CONFIG_RESOURCE = "org/hibernate/cache/infinispan/builder/infinispan-configs.xml";
/**
* Default value for {@link #ENTITY_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_ENTITY_RESOURCE = "entity";
/**
* Default value for {@link #TIMESTAMPS_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_TIMESTAMPS_RESOURCE = "timestamps";
/**
* Default value for {@link #QUERY_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_QUERY_RESOURCE = "local-query";
/**
* Default value for {@link #INFINISPAN_USE_SYNCHRONIZATION_PROP}.
*/
public static final boolean DEF_USE_SYNCHRONIZATION = true;
/**
* Name of the pending puts cache.
*/
public static final String PENDING_PUTS_CACHE_NAME = "pending-puts";
private EmbeddedCacheManager manager;
private final Map |
| Solution content |
|---|
@SuppressWarnings("UnusedDeclaration")
public static final String NATURAL_ID_CACHE_RESOURCE_PROP = PREFIX + NATURAL_ID_KEY + CONFIG_SUFFIX;
private static final String ENTITY_KEY = "entity";
/**
* Name of the configuration that should be used for entity caches.
*
* @see #DEF_ENTITY_RESOURCE
*/
public static final String ENTITY_CACHE_RESOURCE_PROP = PREFIX + ENTITY_KEY + CONFIG_SUFFIX;
private static final String COLLECTION_KEY = "collection";
/**
* Name of the configuration that should be used for collection caches.
* No default value, as by default we try to use the same Infinispan cache
* instance we use for entity caching.
*
* @see #ENTITY_CACHE_RESOURCE_PROP
* @see #DEF_ENTITY_RESOURCE
*/
public static final String COLLECTION_CACHE_RESOURCE_PROP = PREFIX + COLLECTION_KEY + CONFIG_SUFFIX;
private static final String TIMESTAMPS_KEY = "timestamps";
/**
* Name of the configuration that should be used for timestamp caches.
*
* @see #DEF_TIMESTAMPS_RESOURCE
*/
public static final String TIMESTAMPS_CACHE_RESOURCE_PROP = PREFIX + TIMESTAMPS_KEY + CONFIG_SUFFIX;
private static final String QUERY_KEY = "query";
/**
* Name of the configuration that should be used for query caches.
*
* @see #DEF_QUERY_RESOURCE
*/
public static final String QUERY_CACHE_RESOURCE_PROP = PREFIX + QUERY_KEY + CONFIG_SUFFIX;
/**
* Default value for {@link #INFINISPAN_CONFIG_RESOURCE_PROP}. Specifies the "infinispan-configs.xml" file in this package.
*/
public static final String DEF_INFINISPAN_CONFIG_RESOURCE = "org/hibernate/cache/infinispan/builder/infinispan-configs.xml";
/**
* Default value for {@link #ENTITY_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_ENTITY_RESOURCE = "entity";
/**
* Default value for {@link #TIMESTAMPS_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_TIMESTAMPS_RESOURCE = "timestamps";
/**
* Default value for {@link #QUERY_CACHE_RESOURCE_PROP}.
*/
public static final String DEF_QUERY_RESOURCE = "local-query";
/**
* Default value for {@link #INFINISPAN_USE_SYNCHRONIZATION_PROP}.
*/
public static final boolean DEF_USE_SYNCHRONIZATION = true;
/**
* Name of the pending puts cache.
*/
public static final String PENDING_PUTS_CACHE_NAME = "pending-puts";
private EmbeddedCacheManager manager;
private final Map |
| File |
|---|
| InfinispanRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Comment |
| Method declaration |
| Method invocation |
| Chunk |
|---|
| Conflicting content |
|---|
startRegion( region, regionName );
return region;
}
<<<<<<< HEAD
/**
* {@inheritDoc}
*/
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties)
throws CacheException {
if (log.isDebugEnabled()) log.debug("Building query results cache region [" + regionName + "]");
String cacheName = typeOverrides.get(QUERY_KEY).getCacheName();
// If region name is not default one, lookup a cache for that region name
if (!regionName.equals("org.hibernate.cache.internal.StandardQueryCache"))
cacheName = regionName;
AdvancedCache cache = getCache(cacheName, QUERY_KEY, properties);
QueryResultsRegionImpl region = new QueryResultsRegionImpl(
cache, regionName, this);
startRegion(region, regionName);
return region;
}
/**
* {@inheritDoc}
*/
public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties)
throws CacheException {
if (log.isDebugEnabled()) log.debug("Building timestamps cache region [" + regionName + "]");
AdvancedCache cache = getCache(regionName, TIMESTAMPS_KEY, properties);
TimestampsRegionImpl region = createTimestampsRegion(cache, regionName);
startRegion(region, regionName);
return region;
}
protected TimestampsRegionImpl createTimestampsRegion(
AdvancedCache cache, String regionName) {
if (Caches.isClustered(cache))
return new ClusteredTimestampsRegionImpl(cache, regionName, this);
else
return new TimestampsRegionImpl(cache, regionName, this);
}
/**
* {@inheritDoc}
*/
public boolean isMinimalPutsEnabledByDefault() {
return true;
}
@Override
public AccessType getDefaultAccessType() {
return AccessType.TRANSACTIONAL;
}
/**
* {@inheritDoc}
*/
public long nextTimestamp() {
return System.currentTimeMillis() / 100;
}
public void setCacheManager(EmbeddedCacheManager manager) {
this.manager = manager;
}
public EmbeddedCacheManager getCacheManager() {
return manager;
}
@Override
public void start() {
log.debug("Starting Infinispan region factory");
try {
transactionManagerlookup = createTransactionManagerLookup( getServiceRegistry() );
initGenericDataTypeOverrides();
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
Map settings = configurationService.getSettings();
for(Object key : settings.keySet()){
int prefixLoc;
if ((prefixLoc = key.toString().indexOf( PREFIX )) != -1) {
dissectProperty(prefixLoc, key.toString(), settings);
}
}
manager = createCacheManager(settings);
defineGenericDataTypeCacheConfigurations( settings);
definePendingPutsCache();
} catch (CacheException ce) {
throw ce;
} catch (Throwable t) {
throw new CacheException("Unable to start region factory", t);
}
}
/**
* {@inheritDoc}
*/
@Override
public void start(Settings settings, Properties properties) throws CacheException {
start();
}
private void definePendingPutsCache() {
ConfigurationBuilder builder = new ConfigurationBuilder();
// A local, lightweight cache for pending puts, which is
// non-transactional and has aggressive expiration settings.
// Locking is still required since the putFromLoad validator
// code uses conditional operations (i.e. putIfAbsent).
builder.clustering().cacheMode(CacheMode.LOCAL)
.transaction().transactionMode(TransactionMode.NON_TRANSACTIONAL)
.expiration().maxIdle(TimeUnit.SECONDS.toMillis(60))
.storeAsBinary().enabled(false)
.locking().isolationLevel(IsolationLevel.READ_COMMITTED)
.jmxStatistics().disable();
manager.defineConfiguration(PENDING_PUTS_CACHE_NAME, builder.build());
}
protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(
ServiceRegistry sr) {
return new HibernateTransactionManagerLookup(sr);
}
/**
* {@inheritDoc}
*/
@Override
public void stop() {
log.debug("Stop region factory");
stopCacheRegions();
stopCacheManager();
}
protected void stopCacheRegions() {
log.debug("Clear region references");
getCacheCommandFactory(manager.getCache().getAdvancedCache())
.clearRegions(regionNames);
regionNames.clear();
}
protected void stopCacheManager() {
log.debug("Stop cache manager");
manager.stop();
}
/**
* Returns an unmodifiable map containing configured entity/collection type configuration overrides.
* This method should be used primarily for testing/checking purpouses.
*
* @return an unmodifiable map.
*/
public Map |
| Solution content |
|---|
}
@Override
startRegion( region, regionName );
return region;
}
protected TimestampsRegionImpl createTimestampsRegion(
AdvancedCache cache, String regionName) {
if (Caches.isClustered(cache))
return new ClusteredTimestampsRegionImpl(cache, regionName, this);
else
return new TimestampsRegionImpl(cache, regionName, this);
}
@Override
public boolean isMinimalPutsEnabledByDefault() {
return true;
public AccessType getDefaultAccessType() {
return AccessType.TRANSACTIONAL;
}
@Override
public long nextTimestamp() {
return System.currentTimeMillis() / 100;
}
public void setCacheManager(EmbeddedCacheManager manager) {
this.manager = manager;
}
public EmbeddedCacheManager getCacheManager() {
return manager;
}
@Override
public void start() {
log.debug("Starting Infinispan region factory");
try {
transactionManagerlookup = createTransactionManagerLookup( getServiceRegistry() );
initGenericDataTypeOverrides();
ConfigurationService configurationService = getServiceRegistry().getService( ConfigurationService.class );
Map settings = configurationService.getSettings();
for(Object key : settings.keySet()){
int prefixLoc;
if ((prefixLoc = key.toString().indexOf( PREFIX )) != -1) {
dissectProperty(prefixLoc, key.toString(), settings);
}
}
manager = createCacheManager(settings);
defineGenericDataTypeCacheConfigurations( settings);
definePendingPutsCache();
} catch (CacheException ce) {
throw ce;
} catch (Throwable t) {
throw new CacheException("Unable to start region factory", t);
}
}
private void definePendingPutsCache() {
ConfigurationBuilder builder = new ConfigurationBuilder();
// A local, lightweight cache for pending puts, which is
// non-transactional and has aggressive expiration settings.
// Locking is still required since the putFromLoad validator
// code uses conditional operations (i.e. putIfAbsent).
}
builder.clustering().cacheMode(CacheMode.LOCAL)
.transaction().transactionMode(TransactionMode.NON_TRANSACTIONAL)
.expiration().maxIdle(TimeUnit.SECONDS.toMillis(60))
.storeAsBinary().enabled(false)
.locking().isolationLevel(IsolationLevel.READ_COMMITTED)
.jmxStatistics().disable();
manager.defineConfiguration(PENDING_PUTS_CACHE_NAME, builder.build());
}
protected org.infinispan.transaction.lookup.TransactionManagerLookup createTransactionManagerLookup(
ServiceRegistry sr) {
return new HibernateTransactionManagerLookup(sr);
}
@Override
public void stop() {
log.debug("Stop region factory");
stopCacheRegions();
stopCacheManager();
}
protected void stopCacheRegions() {
log.debug("Clear region references");
getCacheCommandFactory(manager.getCache().getAdvancedCache())
.clearRegions(regionNames);
regionNames.clear();
}
protected void stopCacheManager() {
log.debug("Stop cache manager");
manager.stop();
}
/**
* Returns an unmodifiable map containing configured entity/collection type configuration overrides.
* This method should be used primarily for testing/checking purpouses.
*
* @return an unmodifiable map.
*/
public Map |
| File |
|---|
| InfinispanRegionFactory.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Annotation |
| Comment |
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
*/ package org.hibernate.cache.infinispan; <<<<<<< HEAD import java.util.Map; import org.hibernate.cache.CacheException; import org.hibernate.engine.config.spi.ConfigurationService; import org.hibernate.engine.config.spi.StandardConverters; import org.hibernate.engine.jndi.spi.JndiService; import org.infinispan.manager.EmbeddedCacheManager; import java.util.Properties; ======= >>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676 import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; |
| Solution content |
|---|
*/
package org.hibernate.cache.infinispan;
import java.util.Map;
import org.hibernate.cache.CacheException;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jndi.spi.JndiService;
import org.infinispan.manager.EmbeddedCacheManager;
/**
* A {@link org.hibernate.cache.spi.RegionFactory} for Infinispan-backed cache
* regions that finds its cache manager in JNDI rather than creating one itself.
*
* @author Galder Zamarreño
* @since 3.5
*/
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
/**
* Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
* There is no default value -- the user must specify the property.
*/
public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
@Override
protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
CACHE_MANAGER_RESOURCE_PROP,
StandardConverters.STRING
);
if ( name == null ) {
throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
}
JndiService jndiService = getServiceRegistry().getService( JndiService.class );
return (EmbeddedCacheManager) jndiService.locate( name );
}
@Override
public void stop() {
// Do not attempt to stop a cache manager because it wasn't created by this region factory.
}
} |
| File |
|---|
| JndiInfinispanRegionFactory.java |
| Developer's decision |
|---|
| Manual |
| Kind of conflict |
|---|
| Import |
| Chunk |
|---|
| Conflicting content |
|---|
* @since 3.5
*/
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
<<<<<<< HEAD
/**
* Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
* There is no default value -- the user must specify the property.
*/
public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
@Override
protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
CACHE_MANAGER_RESOURCE_PROP,
StandardConverters.STRING
);
if ( name == null ) {
throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
}
JndiService jndiService = getServiceRegistry().getService( JndiService.class );
return (EmbeddedCacheManager) jndiService.locate( name );
=======
private static final Log log = LogFactory.getLog( JndiInfinispanRegionFactory.class );
/**
* Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
* There is no default value -- the user must specify the property.
*/
public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
/**
* Constructs a JndiInfinispanRegionFactory
*/
@SuppressWarnings("UnusedDeclaration")
public JndiInfinispanRegionFactory() {
super();
}
/**
* Constructs a JndiInfinispanRegionFactory
*
* @param props Any properties to apply (not used).
*/
@SuppressWarnings("UnusedDeclaration")
public JndiInfinispanRegionFactory(Properties props) {
super( props );
}
@Override
protected EmbeddedCacheManager createCacheManager(Properties properties) throws CacheException {
final String name = ConfigurationHelper.getString( CACHE_MANAGER_RESOURCE_PROP, properties, null );
if ( name == null ) {
throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
}
return locateCacheManager( name, JndiHelper.extractJndiProperties( properties ) );
}
private EmbeddedCacheManager locateCacheManager(String jndiNamespace, Properties jndiProperties) {
Context ctx = null;
try {
ctx = new InitialContext( jndiProperties );
return (EmbeddedCacheManager) ctx.lookup( jndiNamespace );
}
catch (NamingException ne) {
final String msg = "Unable to retrieve CacheManager from JNDI [" + jndiNamespace + "]";
log.info( msg, ne );
throw new CacheException( msg );
}
finally {
if ( ctx != null ) {
try {
ctx.close();
}
catch (NamingException ne) {
log.info( "Unable to release initial context", ne );
}
}
}
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
}
@Override |
| Solution content |
|---|
* @since 3.5
*/
public class JndiInfinispanRegionFactory extends InfinispanRegionFactory {
/**
* Specifies the JNDI name under which the {@link EmbeddedCacheManager} to use is bound.
* There is no default value -- the user must specify the property.
*/
public static final String CACHE_MANAGER_RESOURCE_PROP = "hibernate.cache.infinispan.cachemanager";
@Override
protected EmbeddedCacheManager createCacheManager(Map properties) throws CacheException {
String name = getServiceRegistry().getService( ConfigurationService.class ).getSetting(
CACHE_MANAGER_RESOURCE_PROP,
StandardConverters.STRING
);
if ( name == null ) {
throw new CacheException( "Configuration property " + CACHE_MANAGER_RESOURCE_PROP + " not set" );
}
JndiService jndiService = getServiceRegistry().getService( JndiService.class );
return (EmbeddedCacheManager) jndiService.locate( name );
}
@Override |
| File |
|---|
| JndiInfinispanRegionFactory.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Annotation |
| Attribute |
| Cast expression |
| Comment |
| If statement |
| Method declaration |
| Method invocation |
| Method signature |
| Return statement |
| Try statement |
| Variable |
| Chunk |
|---|
| Conflicting content |
|---|
this.jtaPlatform = settings != null ? settings.getJtaPlatform() : null;
}
<<<<<<< HEAD
public HibernateTransactionManagerLookup(ServiceRegistry serviceRegistry) {
if ( serviceRegistry != null ) {
jtaPlatform = serviceRegistry.getService( JtaPlatform.class );
}
else {
jtaPlatform = null;
}
}
=======
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
@Override
public TransactionManager getTransactionManager() throws Exception {
return jtaPlatform == null ? null : jtaPlatform.retrieveTransactionManager(); |
| Solution content |
|---|
this.jtaPlatform = settings != null ? settings.getJtaPlatform() : null;
}
public HibernateTransactionManagerLookup(ServiceRegistry serviceRegistry) {
if ( serviceRegistry != null ) {
jtaPlatform = serviceRegistry.getService( JtaPlatform.class );
}
else {
jtaPlatform = null;
}
}
@Override
public TransactionManager getTransactionManager() throws Exception {
return jtaPlatform == null ? null : jtaPlatform.retrieveTransactionManager(); |
| File |
|---|
| HibernateTransactionManagerLookup.java |
| Developer's decision |
|---|
| Version 1 |
| Kind of conflict |
|---|
| Method declaration |
| Chunk |
|---|
| Conflicting content |
|---|
ctx.unbind( jndiName );
}
<<<<<<< HEAD
private SessionFactory buildSessionFactory() {
// Extra options located in src/test/resources/hibernate.properties
Configuration cfg = new Configuration();
cfg.setProperty( Environment.DIALECT, "HSQL" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
cfg.setProperty( Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory" );
cfg.setProperty( Environment.TRANSACTION_STRATEGY, "jta" );
cfg.setProperty( Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta" );
cfg.setProperty( Environment.RELEASE_CONNECTIONS, "auto" );
cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" );
cfg.setProperty( Environment.USE_QUERY_CACHE, "true" );
cfg.setProperty(
Environment.CACHE_REGION_FACTORY,
"org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory"
);
Properties envProps = Environment.getProperties();
envProps.put( AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform() );
envProps.putAll( cfg.getProperties() );
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( envProps );
=======
private SessionFactory buildSessionFactory() {
// Extra options located in src/test/resources/hibernate.properties
Configuration cfg = new Configuration();
cfg.setProperty( Environment.DIALECT, "HSQL" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
cfg.setProperty(Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory");
cfg.setProperty(Environment.TRANSACTION_STRATEGY, "jta");
cfg.setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta");
cfg.setProperty(Environment.RELEASE_CONNECTIONS, "auto");
cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true");
cfg.setProperty(Environment.USE_QUERY_CACHE, "true");
Properties envProps = Environment.getProperties();
envProps.put(AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform());
envProps.setProperty(Environment.CACHE_REGION_FACTORY,
"org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory");
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry(envProps);
>>>>>>> f40f814b001d20d6b33dd5c7b8b85bbf10968676
String[] mappings = new String[] { "org/hibernate/test/cache/infinispan/functional/Item.hbm.xml" };
for ( String mapping : mappings ) { |
| Solution content |
|---|
ctx.unbind( jndiName );
}
private SessionFactory buildSessionFactory() {
// Extra options located in src/test/resources/hibernate.properties
Configuration cfg = new Configuration();
cfg.setProperty( Environment.DIALECT, "HSQL" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.setProperty( Environment.CONNECTION_PROVIDER, JtaAwareConnectionProviderImpl.class.getName() );
cfg.setProperty(Environment.JNDI_CLASS, "org.jnp.interfaces.NamingContextFactory");
cfg.setProperty(Environment.TRANSACTION_STRATEGY, "jta");
cfg.setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "jta");
cfg.setProperty(Environment.RELEASE_CONNECTIONS, "auto");
cfg.setProperty(Environment.USE_SECOND_LEVEL_CACHE, "true");
cfg.setProperty(Environment.USE_QUERY_CACHE, "true");
Properties envProps = Environment.getProperties();
envProps.putAll( cfg.getProperties() );
envProps.put(AvailableSettings.JTA_PLATFORM, new JBossStandAloneJtaPlatform());
envProps.setProperty(Environment.CACHE_REGION_FACTORY,
"org.hibernate.test.cache.infinispan.functional.SingleNodeTestCase$TestInfinispanRegionFactory");
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry(envProps);
String[] mappings = new String[] { "org/hibernate/test/cache/infinispan/functional/Item.hbm.xml" };
for ( String mapping : mappings ) { |
| File |
|---|
| JBossStandaloneJtaExampleTest.java |
| Developer's decision |
|---|
| Combination |
| Kind of conflict |
|---|
| Attribute |
| Comment |
| Method invocation |
| Method signature |
| Variable |