/**
 * Instantiate a new <tt>SessionFactory</tt>, using the properties and
 * mappings in this configuration. The <tt>SessionFactory</tt> will be
 * immutable, so changes made to the <tt>Configuration</tt> after
 * building the <tt>SessionFactory</tt> will not affect it.
 *
 * @return a new factory for <tt>Session</tt>s
 * @see org.hibernate.SessionFactory
 */
public SessionFactory buildSessionFactory() throws HibernateException {
	log.debug( "Preparing to build session factory with filters : " + filterDefinitions );
	secondPassCompile();
	validate();
	Environment.verifyProperties( properties );
	// Resolve placeholders on a copy so this Configuration's own properties stay untouched.
	Properties resolved = new Properties();
	resolved.putAll( properties );
	PropertiesHelper.resolvePlaceHolders( resolved );
	Settings settings = buildSettings( resolved );
	return new SessionFactoryImpl( this, mapping, settings, getInitializedEventListeners() );
}
/**
 * Configures this generator: resolves the value table/column names from the
 * parameters (qualifying the table with catalog/schema when it is not already
 * dot-qualified) and pre-builds the select and update SQL strings.
 */
public void configure(Type type, Properties params, Dialect dialect) {
	tableName = PropertiesHelper.getString( TABLE, params, DEFAULT_TABLE_NAME );
	columnName = PropertiesHelper.getString( COLUMN, params, DEFAULT_COLUMN_NAME );
	// A '.' in the configured name means it is already qualified.
	if ( tableName.indexOf( '.' ) < 0 ) {
		tableName = Table.qualify( params.getProperty( CATALOG ), params.getProperty( SCHEMA ), tableName );
	}
	query = "select " + columnName
			+ " from " + dialect.appendLockHint( LockMode.UPGRADE, tableName )
			+ dialect.getForUpdateString();
	update = "update " + tableName
			+ " set " + columnName + " = ? where " + columnName + " = ?";
}
/** * Builds a new {@link Cache} instance, and gets it's properties from the OSCache {@link Config} * which reads the properties file (<code>oscache.properties</code>) from the classpath. * If the file cannot be found or loaded, an the defaults are used. * * @param region * @param properties * @return * @throws CacheException */ public Cache buildCache(String region, Properties properties) throws CacheException { int refreshPeriod = PropertiesHelper.getInt( StringHelper.qualify(region, OSCACHE_REFRESH_PERIOD), OSCACHE_PROPERTIES, CacheEntry.INDEFINITE_EXPIRY ); String cron = OSCACHE_PROPERTIES.getProperty( StringHelper.qualify(region, OSCACHE_CRON) ); // construct the cache final OSCache cache = new OSCache(refreshPeriod, cron, region); Integer capacity = PropertiesHelper.getInteger( StringHelper.qualify(region, OSCACHE_CAPACITY), OSCACHE_PROPERTIES ); if ( capacity!=null ) cache.setCacheCapacity( capacity.intValue() ); return cache; }
/**
 * Resolves the cache configuration file path, preferring the legacy property
 * name and falling back to the current one when the legacy value is absent or blank.
 */
public static String getConfigFilePath(Properties props) {
	String legacyPath = PropertiesHelper.getString( CacheEnvironment.CONFIG_FILE_PATH_LEGACY, props, null );
	return StringHelper.isEmpty( legacyPath )
			? PropertiesHelper.getString( CacheEnvironment.CONFIG_FILE_PATH, props, null )
			: legacyPath;
}
/**
 * Reads the lock timeout (milliseconds) from the properties; a malformed or
 * negative value falls back to {@code MAXIMUM_LOCK_TIMEOUT}.
 */
public static int getLockTimeoutInMillis(Properties props) {
	int millis = -1;
	try {
		millis = PropertiesHelper.getInt( LOCK_TIMEOUT, props, -1 );
	}
	catch (Exception e) {
		// Malformed value: record at the lowest level and use the default below.
		Logger.getLogger( CacheEnvironment.class ).finest( e );
	}
	return millis < 0 ? MAXIMUM_LOCK_TIMEOUT : millis;
}
/**
 * Instantiates the configured {@link QueryCacheFactory} implementation,
 * defaulting to the standard Hibernate factory.
 *
 * @throws HibernateException if the class cannot be loaded or instantiated
 */
protected QueryCacheFactory createQueryCacheFactory(Properties properties) {
	final String implName = PropertiesHelper.getString(
			Environment.QUERY_CACHE_FACTORY,
			properties,
			"org.hibernate.cache.StandardQueryCacheFactory"
	);
	log.info( "Query cache factory: " + implName );
	try {
		return (QueryCacheFactory) ReflectHelper.classForName( implName ).newInstance();
	}
	catch (Exception e) {
		throw new HibernateException( "could not instantiate QueryCacheFactory: " + implName, e );
	}
}
/**
 * Instantiates the configured {@link CacheProvider} implementation,
 * defaulting to {@code DEF_CACHE_PROVIDER}.
 *
 * @throws HibernateException if the class cannot be loaded or instantiated
 */
protected CacheProvider createCacheProvider(Properties properties) {
	final String implName = PropertiesHelper.getString(
			Environment.CACHE_PROVIDER,
			properties,
			DEF_CACHE_PROVIDER
	);
	log.info( "Cache provider: " + implName );
	try {
		return (CacheProvider) ReflectHelper.classForName( implName ).newInstance();
	}
	catch (Exception e) {
		throw new HibernateException( "could not instantiate CacheProvider: " + implName, e );
	}
}
/**
 * Instantiates the configured {@link QueryTranslatorFactory} implementation,
 * defaulting to the AST-based HQL translator.
 *
 * @throws HibernateException if the class cannot be loaded or instantiated
 */
protected QueryTranslatorFactory createQueryTranslatorFactory(Properties properties) {
	final String implName = PropertiesHelper.getString(
			Environment.QUERY_TRANSLATOR,
			properties,
			"org.hibernate.hql.ast.ASTQueryTranslatorFactory"
	);
	log.info( "Query translator: " + implName );
	try {
		return (QueryTranslatorFactory) ReflectHelper.classForName( implName ).newInstance();
	}
	catch (Exception e) {
		throw new HibernateException( "could not instantiate QueryTranslatorFactory: " + implName, e );
	}
}
/**
 * Create a schema exporter for the given Configuration, with the given
 * database connection properties.
 *
 * @deprecated properties may be specified via the Configuration object
 */
public SchemaExport(Configuration cfg, Properties properties) throws HibernateException {
	dialect = Dialect.getDialect( properties );
	// Layer the caller's settings over the dialect defaults.
	Properties merged = new Properties();
	merged.putAll( dialect.getDefaultProperties() );
	merged.putAll( properties );
	connectionHelper = new ManagedProviderConnectionHelper( merged );

	dropSQL = cfg.generateDropSchemaScript( dialect );
	createSQL = cfg.generateSchemaCreationScript( dialect );
	format = PropertiesHelper.getBoolean( Environment.FORMAT_SQL, merged );
}
public void configure(Type type, Properties params, Dialect dialect) throws MappingException { identifierType = type; boolean forceTableUse = PropertiesHelper.getBoolean( FORCE_TBL_PARAM, params, false ); String sequenceName = PropertiesHelper.getString( SEQUENCE_PARAM, params, DEF_SEQUENCE_NAME ); if ( sequenceName.indexOf( '.' ) < 0 ) { String schemaName = params.getProperty( SCHEMA ); String catalogName = params.getProperty( CATALOG ); sequenceName = Table.qualify( catalogName, schemaName, sequenceName ); } int initialValue = PropertiesHelper.getInt( INITIAL_PARAM, params, DEFAULT_INITIAL_VALUE ); int incrementSize = PropertiesHelper.getInt( INCREMENT_PARAM, params, DEFAULT_INCREMENT_SIZE ); String valueColumnName = PropertiesHelper.getString( VALUE_COLUMN_PARAM, params, DEF_VALUE_COLUMN ); String defOptStrategy = incrementSize <= 1 ? OptimizerFactory.NONE : OptimizerFactory.POOL; String optimizationStrategy = PropertiesHelper.getString( OPT_PARAM, params, defOptStrategy ); if ( OptimizerFactory.NONE.equals( optimizationStrategy ) && incrementSize > 1 ) { log.warn( "config specified explicit optimizer of [" + OptimizerFactory.NONE + "], but [" + INCREMENT_PARAM + "=" + incrementSize + "; honoring optimizer setting" ); incrementSize = 1; } if ( dialect.supportsSequences() && !forceTableUse ) { if ( OptimizerFactory.POOL.equals( optimizationStrategy ) && !dialect.supportsPooledSequences() ) { // TODO : may even be better to fall back to a pooled table strategy here so that the db stored values remain consistent... optimizationStrategy = OptimizerFactory.HILO; } databaseStructure = new SequenceStructure( dialect, sequenceName, initialValue, incrementSize ); } else { databaseStructure = new TableStructure( dialect, sequenceName, valueColumnName, initialValue, incrementSize ); } optimizer = OptimizerFactory.buildOptimizer( optimizationStrategy, identifierType.getReturnedClass(), incrementSize ); databaseStructure.prepare( optimizer ); }
/**
 * Configures this table-backed generator: resolves the table/segment/value
 * column names and sizing parameters, then pre-builds the select (with a
 * pessimistic lock applied via the dialect), update, and insert SQL strings.
 *
 * @throws MappingException per the {@code Configurable} contract
 */
public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
	tableName = PropertiesHelper.getString( TABLE_PARAM, params, DEF_TABLE );
	// A '.' in the configured name means it is already qualified; otherwise qualify it.
	if ( tableName.indexOf( '.' ) < 0 ) {
		String schemaName = params.getProperty( SCHEMA );
		String catalogName = params.getProperty( CATALOG );
		tableName = Table.qualify( catalogName, schemaName, tableName );
	}
	segmentColumnName = PropertiesHelper.getString( SEGMENT_COLUMN_PARAM, params, DEF_SEGMENT_COLUMN );
	segmentValue = params.getProperty( SEGMENT_VALUE_PARAM );
	if ( StringHelper.isEmpty( segmentValue ) ) {
		// No segment value was configured; fall back to the default segment.
		log.debug( "explicit segment value for id generator [" + tableName + '.' + segmentColumnName + "] suggested; using default [" + DEF_SEGMENT_VALUE + "]" );
		segmentValue = DEF_SEGMENT_VALUE;
	}
	segmentValueLength = PropertiesHelper.getInt( SEGMENT_LENGTH_PARAM, params, DEF_SEGMENT_LENGTH );
	valueColumnName = PropertiesHelper.getString( VALUE_COLUMN_PARAM, params, DEF_VALUE_COLUMN );
	initialValue = PropertiesHelper.getInt( INITIAL_PARAM, params, DEFAULT_INITIAL_VALUE );
	incrementSize = PropertiesHelper.getInt( INCREMENT_PARAM, params, DEFAULT_INCREMENT_SIZE );
	identifierType = type;

	// Build the select with the "tbl" alias locked in UPGRADE mode via the dialect.
	String query = "select " + valueColumnName + " from " + tableName + " tbl" + " where tbl." + segmentColumnName + "=?";
	HashMap lockMap = new HashMap();
	lockMap.put( "tbl", LockMode.UPGRADE );
	this.query = dialect.applyLocksToSql( query, lockMap, CollectionHelper.EMPTY_MAP );
	update = "update " + tableName + " set " + valueColumnName + "=? " + " where " + valueColumnName + "=? and " + segmentColumnName + "=?";
	insert = "insert into " + tableName + " (" + segmentColumnName + ", " + valueColumnName + ") " + " values (?,?)";

	// Default to a pooled optimizer only when increments larger than one are requested.
	String defOptStrategy = incrementSize <= 1 ? OptimizerFactory.NONE : OptimizerFactory.POOL;
	String optimizationStrategy = PropertiesHelper.getString( OPT_PARAM, params, defOptStrategy );
	optimizer = OptimizerFactory.buildOptimizer( optimizationStrategy, identifierType.getReturnedClass(), incrementSize );
}
/**
 * Configures this sequence generator: resolves the sequence name (qualifying
 * with catalog/schema when it is not already dot-qualified) and caches the
 * dialect-specific next-value SQL.
 *
 * @throws MappingException per the {@code Configurable} contract
 */
public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
	sequenceName = PropertiesHelper.getString( SEQUENCE, params, "hibernate_sequence" );
	parameters = params.getProperty( PARAMETERS );
	if ( sequenceName.indexOf( '.' ) < 0 ) {
		String schemaName = params.getProperty( SCHEMA );
		String catalogName = params.getProperty( CATALOG );
		sequenceName = Table.qualify( catalogName, schemaName, sequenceName );
	}
	this.identifierType = type;
	sql = dialect.getSequenceNextValString( sequenceName );
}
/**
 * Verifies placeholder resolution in properties plus the typed accessors
 * (String/boolean/int/Integer) for existing, missing, and defaulted keys,
 * including partial in-string replacement.
 */
public void testPlaceholderReplacement() {
	PropertiesHelper.resolvePlaceHolders( props );

	// String accessor: missing keys honor the default; resolved keys ignore it.
	String stringVal = PropertiesHelper.getString( "my.nonexistent.prop", props, "did.not.exist" );
	assertEquals( "did.not.exist", stringVal );
	stringVal = PropertiesHelper.getString( "my.nonexistent.prop", props, null );
	assertNull( stringVal );
	stringVal = PropertiesHelper.getString( "my.string.prop", props, "na" );
	assertEquals( "replacement did not occur", "string", stringVal );
	stringVal = PropertiesHelper.getString( "my.string.prop", props, "did.not.exist" );
	assertEquals( "replacement did not occur", "string", stringVal );

	// Boolean accessor: missing keys honor the default (false when none given).
	boolean boolVal = PropertiesHelper.getBoolean( "my.nonexistent.prop", props );
	assertFalse( "non-exists as boolean", boolVal );
	boolVal = PropertiesHelper.getBoolean( "my.nonexistent.prop", props, false );
	assertFalse( "non-exists as boolean", boolVal );
	boolVal = PropertiesHelper.getBoolean( "my.nonexistent.prop", props, true );
	assertTrue( "non-exists as boolean", boolVal );
	boolVal = PropertiesHelper.getBoolean( "my.boolean.prop", props );
	assertTrue( "boolean replacement did not occur", boolVal );
	boolVal = PropertiesHelper.getBoolean( "my.boolean.prop", props, false );
	assertTrue( "boolean replacement did not occur", boolVal );

	// int accessor.
	int intVal = PropertiesHelper.getInt( "my.nonexistent.prop", props, -1 );
	assertEquals( -1, intVal );
	intVal = PropertiesHelper.getInt( "my.int.prop", props, 100 );
	assertEquals( 1, intVal );

	// Integer accessor: null when absent.
	Integer integerVal = PropertiesHelper.getInteger( "my.nonexistent.prop", props );
	assertNull( integerVal );
	integerVal = PropertiesHelper.getInteger( "my.integer.prop", props );
	assertEquals( integerVal, new Integer( 1 ) );

	// Partial replacement inside larger values.
	stringVal = props.getProperty( "partial.prop1" );
	assertEquals( "partial replacement (ends)", "tmp/middle/dir/tmp.txt", stringVal );
	stringVal = props.getProperty( "partial.prop2" );
	assertEquals( "partial replacement (midst)", "basedir/tmp/myfile.txt", stringVal );
}
/**
 * Reads the lock timeout (seconds) from the properties, falling back to
 * {@code MAXIMUM_LOCK_TIMEOUT} when the value is missing or malformed.
 */
public static int getLockTimeoutInSeconds(Properties props) {
	try {
		return PropertiesHelper.getInt( LOCK_TIMEOUT, props, MAXIMUM_LOCK_TIMEOUT );
	}
	catch (Exception ignored) {
		// Malformed property value: deliberately fall through to the maximum timeout.
	}
	return MAXIMUM_LOCK_TIMEOUT;
}
/**
 * Creates (or recreates) a Hazelcast native-client instance from the Hibernate
 * environment properties and stores it in the {@code client} field.
 *
 * @return the newly created {@link HazelcastInstance} client
 * @throws CacheException if the mandatory address/group/password properties are missing
 * @throws NullPointerException if the environment properties were never set
 */
public HazelcastInstance loadInstance() throws CacheException {
	if (props == null) {
		throw new NullPointerException("Hibernate environment properties is null!");
	}
	// An already-running client is shut down and replaced rather than reused.
	if (client != null && client.getLifecycleService().isRunning()) {
		logger.log(Level.WARNING, "Current HazelcastClient is already active! Shutting it down...");
		unloadInstance();
	}
	String address = PropertiesHelper.getString(CacheEnvironment.NATIVE_CLIENT_ADDRESS, props, null);
	if (address == null) {
		// Legacy fallback: only the first host from the deprecated hosts list is used.
		String[] hosts = PropertiesHelper.toStringArray(CacheEnvironment.NATIVE_CLIENT_HOSTS, ",", props);
		if (hosts != null && hosts.length > 0) {
			address = hosts[0];
			logger.log(Level.WARNING, "Hibernate property '" + CacheEnvironment.NATIVE_CLIENT_HOSTS + "' " + "is deprecated, use '" + CacheEnvironment.NATIVE_CLIENT_ADDRESS + "' instead!");
		}
	}
	String group = PropertiesHelper.getString(CacheEnvironment.NATIVE_CLIENT_GROUP, props, null);
	String pass = PropertiesHelper.getString(CacheEnvironment.NATIVE_CLIENT_PASSWORD, props, null);
	// All three settings are mandatory; fail fast with a descriptive message otherwise.
	if (address == null || group == null || pass == null) {
		throw new CacheException("Configuration properties " + CacheEnvironment.NATIVE_CLIENT_ADDRESS + ", " + CacheEnvironment.NATIVE_CLIENT_GROUP + " and " + CacheEnvironment.NATIVE_CLIENT_PASSWORD + " are mandatory to use native client!");
	}
	ClientConfig clientConfig = new ClientConfig();
	clientConfig.setGroupConfig(new GroupConfig(group, pass)).addAddress(address);
	clientConfig.setUpdateAutomatic(true);
	// Store and return the new client in one expression.
	return (client = HazelcastClient.newHazelcastClient(clientConfig));
}
/**
 * Configures the built-in connection pool: reads pool size, autocommit and
 * isolation settings, loads the JDBC driver class (trying the context class
 * loader first, then Hibernate's reflection helper), and captures the
 * connection URL and properties.
 *
 * @throws HibernateException if the driver cannot be loaded or no URL is configured
 */
@Override public void configure(Properties props) throws HibernateException {
	String driverClass = props.getProperty(Environment.DRIVER);
	poolSize = PropertiesHelper.getInt(Environment.POOL_SIZE, props, 20); //default pool size 20
	log.info("Using Hibernate built-in connection pool (not for production use!)");
	log.info("Hibernate connection pool size: " + poolSize);
	autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT, props);
	log.info("autocommit mode: " + autocommit);
	isolation = PropertiesHelper.getInteger(Environment.ISOLATION, props);
	if (isolation!=null) log.info( "JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) );
	if (driverClass==null) {
		// A missing driver class is tolerated here; the DriverManager may still resolve the URL.
		log.warn("no JDBC Driver class was specified by property " + Environment.DRIVER);
	}
	else {
		try {
			// trying via forName() first to be as close to DriverManager's semantics
			// NOTE for JSS: we use the context class loader because it will be able to locate the database drivers
			// already loaded in our plug-ins or projects
			driver = (Driver) Class.forName(driverClass, true, Thread.currentThread().getContextClassLoader()).newInstance();
		}
		catch (Exception e) {
			try {
				// Fallback: Hibernate's own class-loading strategy.
				driver = (Driver) ReflectHelper.classForName(driverClass).newInstance();
			}
			catch (Exception e1) {
				log.error(e1.getMessage());
				throw new HibernateException(e1);
			}
		}
	}
	url = props.getProperty( Environment.URL );
	if ( url == null ) {
		String msg = "JDBC URL was not specified by property " + Environment.URL;
		log.error( msg );
		throw new HibernateException( msg );
	}
	connectionProps = ConnectionProviderFactory.getConnectionProperties( props );
	log.info( "using driver: " + driverClass + " at URL: " + url );
	// if debug level is enabled, then log the password, otherwise mask it
	if ( log.isDebugEnabled() ) {
		log.info( "connection properties: " + connectionProps );
	}
	else if ( log.isInfoEnabled() ) {
		log.info( "connection properties: " + PropertiesHelper.maskOut(connectionProps, "password") );
	}
}
/**
 * Returns the configured Hazelcast instance name, or {@code null} when not set.
 */
public static String getInstanceName(Properties props) {
	return PropertiesHelper.getString( HAZELCAST_INSTANCE_NAME, props, null );
}
/**
 * Returns whether the Hazelcast native client should be used; defaults to {@code false}.
 */
public static boolean isNativeClient(Properties props) {
	return PropertiesHelper.getBoolean( CacheEnvironment.USE_NATIVE_CLIENT, props, false );
}
/**
 * Returns whether Hazelcast should be shut down when the session factory stops;
 * falls back to the caller-supplied default when the property is absent.
 */
public static boolean shutdownOnStop(Properties props, boolean defaultValue) {
	return PropertiesHelper.getBoolean( CacheEnvironment.SHUTDOWN_ON_STOP, props, defaultValue );
}
/**
 * Returns whether explicit version checking is enabled; defaults to {@code false}.
 */
public static boolean isExplicitVersionCheckEnabled(Properties props) {
	return PropertiesHelper.getBoolean( CacheEnvironment.EXPLICIT_VERSION_CHECK, props, false );
}
/**
 * Create an object-oriented view of the configuration properties
 */
public Settings buildSettings() throws HibernateException {
	// Resolve placeholders on a clone so this Configuration's properties stay untouched.
	Properties resolved = (Properties) properties.clone();
	PropertiesHelper.resolvePlaceHolders( resolved );
	return settingsFactory.buildSettings( resolved );
}
/**
 * Builds the configured {@link BytecodeProvider}, defaulting to "cglib",
 * by delegating to the name-based factory overload.
 */
public static BytecodeProvider buildBytecodeProvider(Properties properties) {
	final String providerName = PropertiesHelper.getString( Environment.BYTECODE_PROVIDER, properties, "cglib" );
	log.info( "Bytecode provider name : " + providerName );
	return buildBytecodeProvider( providerName );
}
/**
 * Configures this hi/lo table generator: resolves table/column names and key
 * sizing, then pre-builds the select/update/insert SQL for the per-key row.
 *
 * NOTE(review): keyValue is embedded in the SQL via string concatenation; it
 * comes from mapping configuration, not user input, but verify it is never
 * attacker-controlled before reusing this pattern.
 *
 * @throws MappingException per the {@code Configurable} contract
 */
public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
	tableName = PropertiesHelper.getString(ID_TABLE, params, DEFAULT_TABLE);
	pkColumnName = PropertiesHelper.getString(PK_COLUMN_NAME, params, DEFAULT_PK_COLUMN);
	valueColumnName = PropertiesHelper.getString(VALUE_COLUMN_NAME, params, DEFAULT_VALUE_COLUMN);
	String schemaName = params.getProperty(SCHEMA);
	String catalogName = params.getProperty(CATALOG);
	keySize = PropertiesHelper.getInt(PK_LENGTH_NAME, params, DEFAULT_PK_LENGTH);
	// The key value defaults to the entity's table name when not explicitly configured.
	String keyValue = PropertiesHelper.getString(PK_VALUE_NAME, params, params.getProperty(TABLE) );
	// A '.' in the configured name means it is already qualified; otherwise qualify it.
	if ( tableName.indexOf( '.' )<0 ) {
		tableName = Table.qualify( catalogName, schemaName, tableName );
	}
	// Select the current hi value for this key, with a pessimistic lock hint.
	query = "select " + valueColumnName + " from " + dialect.appendLockHint(LockMode.UPGRADE, tableName) + " where " + pkColumnName + " = '" + keyValue + "'" + dialect.getForUpdateString();
	// Optimistic update: only succeeds when the stored value still matches the one read.
	update = "update " + tableName + " set " + valueColumnName + " = ? where " + valueColumnName + " = ? and " + pkColumnName + " = '" + keyValue + "'";
	insert = "insert into " + tableName + "(" + pkColumnName + ", " + valueColumnName + ") " + "values('"+ keyValue +"', ?)";
	//hilo config
	maxLo = PropertiesHelper.getInt(MAX_LO, params, Short.MAX_VALUE);
	lo = maxLo + 1; // so we "clock over" on the first invocation
	returnClass = type.getReturnedClass();
}
public void configure(Type type, Properties params, Dialect d) throws MappingException { super.configure(type, params, d); maxLo = PropertiesHelper.getInt(MAX_LO, params, 9); lo = maxLo + 1; // so we "clock over" on the first invocation returnClass = type.getReturnedClass(); }
/**
 * Reads the optional "separator" parameter; defaults to the empty string.
 */
public void configure(Type type, Properties params, Dialect d) {
	sep = PropertiesHelper.getString( "separator", params, "" );
}
public void configure(Type type, Properties params, Dialect d) { super.configure(type, params, d); maxLo = PropertiesHelper.getInt(MAX_LO, params, Short.MAX_VALUE); lo = maxLo + 1; // so we "clock over" on the first invocation returnClass = type.getReturnedClass(); }
/**
 * Returns whether this node should join as a lite member; defaults to {@code false}.
 */
public static boolean isLiteMember(Properties props) {
	return PropertiesHelper.getBoolean( CacheEnvironment.USE_LITE_MEMBER, props, false );
}
/**
 * Returns whether the deprecated super-client mode is enabled; defaults to {@code false}.
 *
 * @deprecated use {@code isLiteMember(Properties)} instead
 */
@Deprecated public static boolean isSuperClient(Properties props) {
	return PropertiesHelper.getBoolean( CacheEnvironment.USE_SUPER_CLIENT, props, false );
}