@Override
public void loadState(final Graph graph, final Configuration config) {
    configuration = new BaseConfiguration();
    if (config != null) {
        ConfigurationUtils.copy(config, configuration);
    }
    intermediateBatchSize = configuration.getLong(INTERMEDIATE_BATCH_SIZE_CFG_KEY, 0L);
    elementComputeKeys.add(VertexComputeKey.of(DEFAULT_BULK_LOADER_VERTEX_ID, true));
    bulkLoader = createBulkLoader();
}
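For reference, a minimal self-contained sketch (not taken from any of the snippets here; the property keys are invented and Commons Configuration 1.x is assumed) of what ConfigurationUtils.copy does in these loadState() methods: every property of the source is written into the target with setProperty, so keys already present in the target are replaced.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationUtils;

public class CopyDemo {
    public static void main(String[] args) {
        // hypothetical source configuration, standing in for the config passed to loadState()
        Configuration source = new BaseConfiguration();
        source.addProperty("loader.batchSize", 1000L);
        source.addProperty("loader.className", "ExampleLoader");

        // fresh target, like the BaseConfiguration created at the top of loadState()
        Configuration target = new BaseConfiguration();
        target.addProperty("loader.batchSize", 1L); // replaced by copy()

        // copy every property from source into target; existing keys are overwritten
        ConfigurationUtils.copy(source, target);

        System.out.println(target.getLong("loader.batchSize"));   // 1000
        System.out.println(target.getString("loader.className")); // ExampleLoader
    }
}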
@Override
public void storeState(final Configuration config) {
    VertexProgram.super.storeState(config);
    if (configuration != null) {
        ConfigurationUtils.copy(configuration, config);
    }
}
/**
 * Loads the main application properties file, given by {@link #ECDB_APP_PROPERTIES_FILE}.
 * Note that core-api properties take precedence over application properties;
 * ideally there shouldn't be any property name collisions in the first place.
 */
protected void initConfig() {
    // need to call getConfiguration before adding our config so
    // that core-api config gets initialised and logged first.
    CompositeConfiguration config = Eurocarb.getConfiguration();

    log.info("adding eurocarbdb application configuration: " + ECDB_APP_PROPERTIES_FILE);
    try {
        config.addConfiguration(new PropertiesConfiguration(ECDB_APP_PROPERTIES_FILE));
    } catch (ConfigurationException ex) {
        throw new RuntimeException(ex);
    }

    if (log.isInfoEnabled()) {
        log.info("Configured eurocarb-application properties:\n"
            + CR + ConfigurationUtils.toString(config));
    }
}
static void initConfig() {
    // if ( config == null ) config = new CompositeConfiguration();

    try {
        // log.info("adding configuration: " + EUROCARB_OVERRIDES_CONF);
        // config.addConfiguration(new PropertiesConfiguration(EUROCARB_OVERRIDES_CONF));
        // log.info("configured properties for core-api: \n"
        //     + ConfigurationUtils.toString(config));

        log.info("adding core-api configuration: " + EUROCARB_CONF);
        config.addConfiguration(new PropertiesConfiguration(EUROCARB_CONF));
    } catch (ConfigurationException ex) {
        throw new RuntimeException(ex);
    }

    if (log.isInfoEnabled()) {
        log.info(CR
            + repeat('=', 20) + " configured eurocarb core-api properties " + repeat('=', 20)
            + CR + ConfigurationUtils.toString(config)
            + CR + repeat('=', 80));
    }
}
@Override
public String toString() {
    StringWriter dump = new StringWriter();
    PrintWriter printWriter = new PrintWriter(dump);
    ConfigurationUtils.dump(properties, printWriter);
    printWriter.flush();
    return "CatalogConfiguration{id=" + id + ", prefix=" + prefix + ", properties:" + dump + '}';
}
public static GryoPool getGryoPool() {
    if (!INITIALIZED) {
        final Configuration configuration = SystemUtil.getSystemPropertiesConfiguration("tinkerpop", true);
        HadoopGraph.LOGGER.warn("The " + HadoopPools.class.getSimpleName()
            + " has not been initialized, using system properties configuration: "
            + ConfigurationUtils.toString(configuration));
        initialize(configuration);
    }
    return GRYO_POOL;
}
@SuppressWarnings("unchecked") @Override public VertexProgramQ create(final Graph graph) { if (graph != null) { ConfigurationUtils.append(graph.configuration().subset(VERTEX_PROGRAM_Q_CFG_PREFIX), configuration); } return (VertexProgramQ) VertexProgram.createVertexProgram(graph, configuration); }
@Override
public void loadState(final Graph graph, final Configuration config) {
    configuration = new BaseConfiguration();
    if (config != null) {
        ConfigurationUtils.copy(config, configuration);
    }
    propertyKey = configuration.getString(PROPERTY_CFG_KEY);
    traverserRequirements = configuration.getBoolean(USE_TRAVERSER_REQUIREMENTS_CFG_KEY, true)
        ? Collections.singleton(TraverserRequirement.PATH)
        : Collections.emptySet();
    elementComputeKeys.add(VertexComputeKey.of(propertyKey, false));
}
private void addExternalConfig(ConcurrentCompositeConfiguration finalConfig,
        String configName, String configFolder) throws ConfigurationException {
    if (configFolder != null) {
        configFolder += File.separator + FilenameUtils.getFullPath(configName);
    } else {
        configFolder = FilenameUtils.getFullPath(configName);
    }
    configName = FilenameUtils.getName(configName);
    URL url = ConfigurationUtils.locate(configFolder, configName);
    if (url != null) {
        PropertiesConfiguration externalFolderPropConfig = new PropertiesConfiguration(url);
        finalConfig.addConfiguration(externalFolderPropConfig);
    }
}
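A hedged sketch of the ConfigurationUtils.locate() call that the method above depends on (the folder and file names are placeholders): locate() searches the file system and the classpath for the named resource and returns a URL, or null if nothing matches, and the returned URL can be fed straight into a PropertiesConfiguration.

import java.net.URL;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.ConfigurationUtils;
import org.apache.commons.configuration.PropertiesConfiguration;

public class LocateDemo {
    public static void main(String[] args) throws ConfigurationException {
        // base path and file name are placeholders for this sketch
        URL url = ConfigurationUtils.locate("conf", "sample.properties");
        if (url == null) {
            System.out.println("sample.properties not found on disk or on the classpath");
            return;
        }
        // a located URL can be loaded directly
        PropertiesConfiguration props = new PropertiesConfiguration(url);
        System.out.println(ConfigurationUtils.toString(props));
    }
}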
/**
 * Returns the configuration data. If the configuration data is null, the method first
 * attempts to create it from the JVM's system properties and environment variables.<br>
 *
 * @return test configuration data
 */
public synchronized Configuration getData() {
    if (data != null) {
        return data;
    }
    final HierarchicalConfiguration cfg = new HierarchicalConfiguration();
    cfg.copy(new EnvironmentConfiguration()); // copy the environment variables
    cfg.copy(new SystemConfiguration());      // copy the JVM system properties
    log.debug("Configuration data from Env:\n" + ConfigurationUtils.toString(cfg));
    return data = prepareData(cfg);
}
/**
 * Uses the configuration data passed in as an argument and does the following: <br>
 * 1. Looks for the config.properties file on the classpath and loads it if present.<br>
 * 2. Looks for config.path in the CLI params. If present, loads it and overwrites any
 * existing properties.<br>
 * 3. Overwrites existing data with whatever was specified via the CLI. <br>
 *
 * @param testData test configuration data
 * @return processed test configuration data
 */
private synchronized Configuration prepareData(Configuration testData) {
    Configuration resultData = null;

    // step 1: config.properties on the classpath
    URL cfgFile = this.getClass().getResource(DD_CONFIG_FILE_NAME);
    if (cfgFile != null) {
        log.info("Loading Configuration File: {}", cfgFile);
        resultData = getConfigFileData(cfgFile.getFile());
    } else {
        log.warn("Config file not found! " + DD_CONFIG_FILE_NAME);
    }
    if (resultData != null) {
        log.debug("Loaded data from " + DD_CONFIG_FILE_NAME + " on classpath");
    }

    // step 2: config file specified on the CLI
    if (testData.containsKey(TESTINPUT_CONFIG_PATH)) {
        String filePath = testData.getString(TESTINPUT_CONFIG_PATH);
        if (checkFilePath(filePath)) {
            Configuration tmpData = getConfigFileData(filePath);
            resultData = overrideConfigProperties(resultData, tmpData);
            log.debug("Loaded data from config file '{}'", filePath);
        }
    }

    // step 3: override with properties specified via command-line arguments
    log.debug("Overriding using properties specified via commandline arguments");
    resultData = overrideConfigProperties(resultData, testData);

    if (resultData == null) {
        String error = "Configuration data can not be null. Please specify test "
            + "configuration information via config file on classpath or filesystem or via cli";
        log.error(error);
        throw new DDException(error);
    }
    log.debug("DDConfig: {}", ConfigurationUtils.toString(resultData));
    return resultData;
}
/**
 * Gets the configuration based on the specified property file.<br>
 *
 * @param filePath file containing configuration information
 * @return the Configuration if it could be loaded, otherwise null
 */
private static Configuration getConfigFileData(String filePath) {
    PropertiesConfiguration flatConfig = new PropertiesConfiguration();
    try {
        flatConfig.load(filePath);
        return ConfigurationUtils.convertToHierarchical(flatConfig);
    } catch (ConfigurationException e) {
        log.warn("Failed to load configuration from File {}", filePath, e);
        return null;
    }
}
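For context, a minimal sketch (keys built in memory rather than loaded from a file) of the flat-to-hierarchical conversion used above: convertToHierarchical() turns the dotted keys of a flat configuration into nested nodes, so the result can be queried through the hierarchical API.

import org.apache.commons.configuration.ConfigurationUtils;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class ConvertDemo {
    public static void main(String[] args) {
        // flat properties built in memory instead of calling load(filePath)
        PropertiesConfiguration flat = new PropertiesConfiguration();
        flat.addProperty("db.host", "localhost");
        flat.addProperty("db.port", 5432);

        // dotted keys become nested nodes in the resulting hierarchical configuration
        HierarchicalConfiguration tree = ConfigurationUtils.convertToHierarchical(flat);
        System.out.println(tree.getString("db.host")); // localhost
        System.out.println(tree.getInt("db.port"));    // 5432
    }
}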
/**
 * Segregates the data based on namespace.<br>
 *
 * @param dataFromFile HierarchicalConfiguration data loaded from file
 * @param clazz the test class
 * @return map of namespace/test class name to test data
 * @throws Exception
 */
private static Map<String, HierarchicalConfiguration> segregate(
        HierarchicalConfiguration dataFromFile, Class<?> clazz) throws Exception {
    Map<String, HierarchicalConfiguration> dataMap = new HashMap<String, HierarchicalConfiguration>();
    log.debug("Data : " + ConfigurationUtils.toString(dataFromFile));

    List<HierarchicalConfiguration> allNameSpaces = dataFromFile.configurationsAt(TAG_NAMESPACE);
    log.debug(allNameSpaces.size() + " namespaces given.");

    for (int i = 0; i < allNameSpaces.size(); i++) {
        HierarchicalConfiguration aNameSpaceData = allNameSpaces.get(i);
        String[] classNames = aNameSpaceData.getStringArray(TAG_CLASS_NAME);
        // test data in the same namespace is added to the dataMap using the test class name as key
        for (String testClassName : classNames) {
            log.debug(" ** Namespace: " + i + " > " + testClassName);
            if (!dataMap.containsKey(testClassName)) {
                dataMap.put(testClassName, aNameSpaceData);
            } else {
                append(dataMap.get(testClassName), aNameSpaceData);
            }
        }
    }
    log.debug("Data keys: " + dataMap.keySet());
    log.debug("Load size: " + dataMap.size());
    return dataMap;
}
public GraknSparkComputer(final HadoopGraph hadoopGraph) {
    super(hadoopGraph);
    this.sparkConfiguration = new HadoopConfiguration();
    ConfigurationUtils.copy(this.hadoopGraph.configuration(), this.sparkConfiguration);
}
@SuppressWarnings("unchecked") @Override public BulkLoaderVertexProgram create(final Graph graph) { ConfigurationUtils.append(graph.configuration().subset(BULK_LOADER_VERTEX_PROGRAM_CFG_PREFIX), configuration); return (BulkLoaderVertexProgram) VertexProgram.createVertexProgram(graph, configuration); }
public SparkGraphComputer(final HadoopGraph hadoopGraph) {
    super(hadoopGraph);
    this.sparkConfiguration = new HadoopConfiguration();
    ConfigurationUtils.copy(this.hadoopGraph.configuration(), this.sparkConfiguration);
}
private static void dumpConfig(String description, Configuration config) {
    System.out.println(description);
    ConfigurationUtils.dump(config, System.out);
    System.out.println("\n");
}
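A standalone sketch (property names invented) of what the dump helper above prints: ConfigurationUtils.dump() writes one key=value line per property of the configuration to the given stream.

import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationUtils;

public class DumpDemo {
    public static void main(String[] args) {
        Configuration config = new BaseConfiguration();
        config.addProperty("app.name", "demo");
        config.addProperty("app.threads", 8);

        // prints each property as key=value, one per line, to System.out
        ConfigurationUtils.dump(config, System.out);
        System.out.println();
    }
}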
/**
 * Converts the connection, dataset and task objects into an Iguana Configuration.
 *
 * @param connections
 * @param datasets
 * @param tasks
 * @return the Iguana Configuration
 */
public static Configuration createIguanConfig(List<Connection> connections,
        List<Dataset> datasets, List<Task> tasks) {
    // create the config object
    PropertiesConfiguration conf = new PropertiesConfiguration();
    int index = 0;
    List<String> objects = new LinkedList<String>();

    // add all connections to it
    for (Connection con : connections) {
        // set properties for each connection
        String conID = "connection" + index;
        // set name, endpoint and update endpoint, user and password
        conf.addProperty(conID + ".name", con.getName());
        conf.addProperty(conID + ".service", con.getService());
        conf.addProperty(conID + ".update.service", con.getService());
        conf.addProperty(conID + ".user", con.getUser());
        conf.addProperty(conID + ".password", con.getPwd());
        objects.add(conID);
        index++;
    }
    // set the connections to use
    conf.addProperty(COMMON.CONFIG_CONNECTIONS, objects.toArray());

    index = 0;
    objects = new LinkedList<String>();
    // add all datasets
    for (Dataset dataset : datasets) {
        // set the ID and name for each dataset
        String datasetID = "dataset" + index;
        conf.addProperty(datasetID + ".name", dataset.getName());
        // set the DataGenerator class name and constructor arguments
        if (dataset.getDatasetGeneratorClassName() != null
                && !dataset.getDatasetGeneratorClassName().isEmpty()) {
            conf.addProperty(datasetID + ".dg.class", dataset.getDatasetGeneratorClassName());
            conf.addProperty(datasetID + ".constructorArgs", dataset.getConstructorArgs());
        }
        objects.add(datasetID);
        index++;
    }
    // set the datasets to use
    conf.addProperty(COMMON.CONFIG_DATASETS, objects.toArray());

    index = 0;
    objects = new LinkedList<String>();
    // set all tasks
    for (Task task : tasks) {
        // set properties for each task
        String taskID = "task" + index;
        // set the class name of the task
        conf.addProperty(taskID + ".class", task.getClassName());
        // append the task's sub-configuration (constructor arguments)
        ConfigurationUtils.append(task.getSubConfiguration(taskID), conf);
        objects.add(taskID);
        index++;
    }
    // set the tasks to use
    conf.addProperty(COMMON.CONFIG_TASKS, objects.toArray());

    // return the config
    return conf;
}
@Override
public String toString() {
    return ConfigurationUtils.toString(internalConfig);
}