/**
 * Resolves the {@link LoggerConfig} registered under {@code name}, creating and
 * registering a new one when no exact match exists. The new config clones the
 * level, additivity, include-location flag, appender refs and properties of the
 * nearest ancestor config returned by {@code Configuration.getLoggerConfig}.
 *
 * @param name the logger name to resolve
 * @return the exact-match LoggerConfig for {@code name}
 */
public static LoggerConfig getOrCreateLoggerConfig(String name) {
    final LoggerContext context = (LoggerContext) LogManager.getContext(false);
    final Configuration config = context.getConfiguration();
    LoggerConfig logConfig = config.getLoggerConfig(name);
    // getLoggerConfig() falls back to an ancestor config; only clone + register
    // when the result is not an exact match for the requested name.
    if (!logConfig.getName().equals(name)) {
        final List<AppenderRef> refs = logConfig.getAppenderRefs();
        final Map<Property, Boolean> propMap = logConfig.getProperties();
        final Set<Property> propSet = propMap == null ? null : propMap.keySet();
        logConfig = LoggerConfig.createLogger(
                String.valueOf(logConfig.isAdditive()),
                logConfig.getLevel(),
                name,
                String.valueOf(logConfig.isIncludeLocation()),
                refs == null ? null : refs.toArray(new AppenderRef[refs.size()]),
                propSet == null ? null : propSet.toArray(new Property[propSet.size()]),
                config,
                null);
        config.addLogger(name, logConfig);
        // Push the new config out to live loggers.
        context.updateLoggers();
    }
    return logConfig;
}
/**
 * Resolves the {@link LoggerConfig} for {@code name}, creating one (with the
 * supplied additivity) when absent, and optionally forcing the additivity flag
 * on an already-existing config.
 *
 * @param name            logger name to resolve
 * @param additive        additivity used when a new config must be created
 * @param forceAdditivity when {@code true}, overwrite additivity on an existing config as well
 * @return the exact-match LoggerConfig
 */
public static LoggerConfig getOrCreateLoggerConfig(String name, boolean additive, boolean forceAdditivity) {
    final LoggerContext context = (LoggerContext) LogManager.getContext(false);
    final Configuration config = context.getConfiguration();
    LoggerConfig logConfig = config.getLoggerConfig(name);
    boolean dirty = false;
    // getLoggerConfig() may return an ancestor; clone it under the exact name.
    if (!logConfig.getName().equals(name)) {
        final List<AppenderRef> refs = logConfig.getAppenderRefs();
        final Map<Property, Boolean> propMap = logConfig.getProperties();
        final Set<Property> propSet = propMap == null ? null : propMap.keySet();
        logConfig = LoggerConfig.createLogger(
                String.valueOf(additive),
                logConfig.getLevel(),
                name,
                String.valueOf(logConfig.isIncludeLocation()),
                refs == null ? null : refs.toArray(new AppenderRef[refs.size()]),
                propSet == null ? null : propSet.toArray(new Property[propSet.size()]),
                config,
                null);
        config.addLogger(name, logConfig);
        dirty = true;
    }
    // Force the additivity flag on a pre-existing config if requested.
    if (forceAdditivity && logConfig.isAdditive() != additive) {
        logConfig.setAdditive(additive);
        dirty = true;
    }
    if (dirty) {
        context.updateLoggers();
    }
    return logConfig;
}
/**
 * Builds a Kafka manager for the appender: stores routing/alerting metadata
 * and assembles the producer configuration.
 *
 * @param loggerContext owning logger context
 * @param name          manager name
 * @param topic         Kafka topic log events are sent to
 * @param zkServers     ZooKeeper server list — presumably for broker discovery, TODO confirm
 * @param mail          mail address — NOTE(review): exact semantics not visible in this snippet
 * @param rpc           rpc endpoint — NOTE(review): exact semantics not visible in this snippet
 * @param app           application name (also stored as {@code orginApp})
 * @param host          host name, supplied externally for containerized deployments
 * @param properties    extra producer properties from the XML configuration
 */
public KafkaManager(final LoggerContext loggerContext, final String name, final String topic, final String zkServers,
        final String mail, final String rpc, final String app, final String host, final Property[] properties) {
    super(loggerContext, name);
    this.topic = topic;
    this.zkServers = zkServers;
    this.mail = mail;
    this.rpc = rpc;
    this.app = app;
    this.orginApp = app;
    this.host = host;
    // checkAndSetConfig presumably only sets the key when not already configured — TODO confirm.
    this.checkAndSetConfig(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    this.checkAndSetConfig(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    // Partitioner: custom KeyModPartitioner so records with the same key land in the same partition.
    this.checkAndSetConfig(ProducerConfig.PARTITIONER_CLASS_CONFIG, KeyModPartitioner.class.getName());
    // Parameters supplied in the XML configuration.
    for (final Property property : properties) {
        this.config.put(property.getName(), property.getValue());
    }
    // Containerized deployments must obtain the host from outside; encode app/host into the client id.
    this.config.put(ProducerConfig.CLIENT_ID_CONFIG,
            this.app + Constants.MIDDLE_LINE + this.host + Constants.MIDDLE_LINE + "log4j2");
}
private static Map<String, String> createMap(final List<Property> properties) { final Map<String, String> contextMap = ThreadContext.getImmutableContext(); if (contextMap == null && (properties == null || properties.size() == 0)) { return null; } if (properties == null || properties.size() == 0) { return contextMap; // contextMap is not null } final Map<String, String> map = new HashMap<String, String>(contextMap); for (final Property prop : properties) { if (!map.containsKey(prop.getName())) { map.put(prop.getName(), prop.getValue()); } } return Collections.unmodifiableMap(map); }
/**
 * Renders the configured properties as {@code " {name=value, name=value}"}.
 * Uses char appends for the single-character separators, consistent with the
 * sibling implementation of this method elsewhere in the codebase.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append(" {");
    boolean first = true;
    for (final Map.Entry<Property, Boolean> entry : properties.entrySet()) {
        if (!first) {
            sb.append(", ");
        }
        final Property prop = entry.getKey();
        // Single-char append avoids a needless String argument.
        sb.append(prop.getName()).append('=').append(prop.getValue());
        first = false;
    }
    sb.append('}');
    return sb.toString();
}
/**
 * Plugin factory for an async logger config. The logger name is fixed to
 * {@code LogManager.ROOT_LOGGER_NAME} regardless of configuration — presumably
 * this factory belongs to the root-logger plugin class; TODO confirm.
 *
 * @param additivity      "true"/"false"; defaults to true when unparsable
 * @param levelName       level name; invalid or null values fall back to ERROR
 * @param includeLocation "true" if location info should be passed downstream
 * @param refs            appender references
 * @param properties      properties to attach to the logger
 * @param config          the owning configuration
 * @param filter          an optional filter
 * @return a new AsyncLoggerConfig for the root logger
 */
@PluginFactory
public static LoggerConfig createLogger(
        @PluginAttribute("additivity") final String additivity,
        @PluginAttribute("level") final String levelName,
        @PluginAttribute("includeLocation") final String includeLocation,
        @PluginElement("AppenderRef") final AppenderRef[] refs,
        @PluginElement("Properties") final Property[] properties,
        @PluginConfiguration final Configuration config,
        @PluginElement("Filters") final Filter filter) {
    final List<AppenderRef> appenderRefs = Arrays.asList(refs);
    Level level;
    try {
        // Level.toLevel already defaults to ERROR for unknown names; the catch is belt-and-braces.
        level = Level.toLevel(levelName, Level.ERROR);
    } catch (final Exception ex) {
        LOGGER.error("Invalid Log level specified: {}. Defaulting to Error", levelName);
        level = Level.ERROR;
    }
    final boolean additive = Booleans.parseBoolean(additivity, true);
    return new AsyncLoggerConfig(LogManager.ROOT_LOGGER_NAME, appenderRefs, filter, level, additive,
            properties, config, includeLocation(includeLocation));
}
/** * Merges the contents of the specified map into the contextMap, after * replacing any variables in the property values with the * StrSubstitutor-supplied actual values. * * @param properties configured properties * @param strSubstitutor used to lookup values of variables in properties */ public void mergePropertiesIntoContextMap( final Map<Property, Boolean> properties, final StrSubstitutor strSubstitutor) { if (properties == null) { return; // nothing to do } final Map<String, String> map = (contextMap == null) ? new HashMap<String, String>() : new HashMap<String, String>(contextMap); for (final Map.Entry<Property, Boolean> entry : properties.entrySet()) { final Property prop = entry.getKey(); if (map.containsKey(prop.getName())) { continue; // contextMap overrides config properties } final String value = entry.getValue() ? strSubstitutor.replace(prop .getValue()) : prop.getValue(); map.put(prop.getName(), value); } contextMap = map; }
@Test public void testH2Properties() throws SQLException { final Property[] properties = new Property[] { // @formatter:off Property.createProperty("username", JdbcH2TestHelper.USER_NAME), Property.createProperty("password", JdbcH2TestHelper.PASSWORD), // @formatter:on }; // @formatter:off final PoolingDriverConnectionSource source = PoolingDriverConnectionSource.newPoolingDriverConnectionSourceBuilder() .setConnectionString(JdbcH2TestHelper.CONNECTION_STRING) .setProperties(properties) .build(); // @formatter:on try (Connection conn = source.getConnection()) { Assert.assertFalse(conn.isClosed()); } }
@Test public void testH2PropertiesAndPoolName() throws SQLException { final Property[] properties = new Property[] { // @formatter:off Property.createProperty("username", JdbcH2TestHelper.USER_NAME), Property.createProperty("password", JdbcH2TestHelper.PASSWORD), // @formatter:on }; // @formatter:off final PoolingDriverConnectionSource source = PoolingDriverConnectionSource.newPoolingDriverConnectionSourceBuilder() .setConnectionString(JdbcH2TestHelper.CONNECTION_STRING) .setProperties(properties) .setPoolName("MyPoolName") .build(); // @formatter:on try (Connection conn = source.getConnection()) { Assert.assertFalse(conn.isClosed()); } }
/**
 * Builds a Kafka manager with byte-array serializers and unbatched sends.
 * User-supplied properties are applied after the defaults, so they override
 * the serializer and batch settings above.
 *
 * @param loggerContext owning logger context
 * @param name          manager name
 * @param topic         Kafka topic; must not be null
 * @param syncSend      whether sends block until acknowledged
 * @param properties    extra producer properties; may be {@code null} (the
 *                      plugin element is optional) — previously this threw NPE
 * @param key           record key for produced messages; may be null
 */
public KafkaManager(final LoggerContext loggerContext, final String name, final String topic, final boolean syncSend,
        final Property[] properties, final String key) {
    super(loggerContext, name);
    this.topic = Objects.requireNonNull(topic, "topic");
    this.syncSend = syncSend;
    config.setProperty("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    config.setProperty("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    config.setProperty("batch.size", "0");
    // Fix: tolerate a missing <Properties> element instead of throwing NPE.
    if (properties != null) {
        for (final Property property : properties) {
            config.setProperty(property.getName(), property.getValue());
        }
    }
    this.key = key;
    // timeout.ms may have been overridden by the user properties just applied.
    this.timeoutMillis = Integer.parseInt(config.getProperty("timeout.ms", DEFAULT_TIMEOUT_MILLIS));
}
/**
 * Creates a KafkaAppender — deprecated factory retained for backward
 * compatibility; presumably superseded by a builder-based factory, TODO confirm.
 * Always creates the manager in synchronous-send mode ({@code true}).
 *
 * @param layout        layout used to format events; required
 * @param filter        optional filter
 * @param name          appender name
 * @param ignoreExceptions whether append errors are swallowed
 * @param topic         Kafka topic
 * @param properties    extra producer properties
 * @param configuration owning configuration
 * @param key           record key for produced messages
 * @return the appender, or {@code null} when no layout was supplied
 */
@Deprecated
public static KafkaAppender createAppender(
        final Layout<? extends Serializable> layout,
        final Filter filter,
        final String name,
        final boolean ignoreExceptions,
        final String topic,
        final Property[] properties,
        final Configuration configuration,
        final String key) {
    if (layout == null) {
        // Errors are logged rather than thrown; the plugin system expects null on failure.
        AbstractLifeCycle.LOGGER.error("No layout provided for KafkaAppender");
        return null;
    }
    final KafkaManager kafkaManager =
            new KafkaManager(configuration.getLoggerContext(), name, topic, true, properties, key);
    return new KafkaAppender(name, layout, filter, ignoreExceptions, kafkaManager);
}
/**
 * If there are no configuration properties, this injector will return the thread context's internal data
 * structure. Otherwise the configuration properties are combined with the thread context key-value pairs into the
 * specified reusable StringMap.
 *
 * @param props list of configuration properties, may be {@code null}
 * @param ignore a {@code StringMap} instance from the log event (deliberately unused, see below)
 * @return a {@code StringMap} combining configuration properties with thread context data
 */
@Override
public StringMap injectContextData(final List<Property> props, final StringMap ignore) {
    // If there are no configuration properties we want to just return the ThreadContext's StringMap:
    // it is a copy-on-write data structure so we are sure ThreadContext changes will not affect our copy.
    final StringMap immutableCopy = ThreadContext.getThreadContextMap().getReadOnlyContextData();
    if (props == null || props.isEmpty()) {
        return immutableCopy; // this will replace the LogEvent's context data with the returned instance
    }
    // However, if the list of Properties is non-empty we need to combine the properties and the ThreadContext
    // data. Note that we cannot reuse the specified StringMap: some Loggers may have properties defined
    // and others not, so the LogEvent's context data may have been replaced with an immutable copy from
    // the ThreadContext - this will throw an UnsupportedOperationException if we try to modify it.
    // Pre-size for both sources; properties are copied first, then overwritten by context data,
    // so thread-context values take precedence over configured properties.
    final StringMap result = ContextDataFactory.createContextData(props.size() + immutableCopy.size());
    copyProperties(props, result);
    result.putAll(immutableCopy);
    return result;
}
/**
 * Renders the configured properties as {@code " {name=value, name=value}"}.
 */
@Override
public String toString() {
    final StringBuilder buffer = new StringBuilder(" {");
    String separator = "";
    for (final Map.Entry<Property, Boolean> entry : properties.entrySet()) {
        final Property property = entry.getKey();
        buffer.append(separator).append(property.getName()).append('=').append(property.getValue());
        separator = ", ";
    }
    return buffer.append('}').toString();
}
/**
 * Manager for HTTP(S) connections used by the HTTP appender.
 * Validates that the URL scheme is http or https, and that an SSL
 * configuration is only supplied together with an https URL.
 *
 * @param configuration        owning configuration
 * @param loggerContext        owning logger context
 * @param name                 manager name
 * @param url                  target URL; scheme must be http or https
 * @param method               HTTP method; must not be null
 * @param connectTimeoutMillis connect timeout in milliseconds
 * @param readTimeoutMillis    read timeout in milliseconds
 * @param headers              extra request headers; null is normalized to an empty array
 * @param sslConfiguration     optional TLS settings; only valid with https
 * @param verifyHostname       whether to verify the server hostname on TLS connections
 * @throws ConfigurationException if the scheme is not http/https, or SSL config is given for http
 */
public HttpURLConnectionManager(final Configuration configuration, final LoggerContext loggerContext,
        final String name, final URL url, final String method, final int connectTimeoutMillis,
        final int readTimeoutMillis, final Property[] headers, final SslConfiguration sslConfiguration,
        final boolean verifyHostname) {
    super(configuration, loggerContext, name);
    this.url = url;
    if (!(url.getProtocol().equalsIgnoreCase("http") || url.getProtocol().equalsIgnoreCase("https"))) {
        throw new ConfigurationException("URL must have scheme http or https");
    }
    this.isHttps = this.url.getProtocol().equalsIgnoreCase("https");
    this.method = Objects.requireNonNull(method, "method");
    this.connectTimeoutMillis = connectTimeoutMillis;
    this.readTimeoutMillis = readTimeoutMillis;
    // Normalize null to an empty array so later loops need no null check.
    this.headers = headers != null ? headers : new Property[0];
    this.sslConfiguration = sslConfiguration;
    if (this.sslConfiguration != null && !isHttps) {
        throw new ConfigurationException("SSL configuration can only be specified with URL scheme https");
    }
    this.verifyHostname = verifyHostname;
}
/**
 * Plugin factory for an async logger config. The logger name is fixed to
 * {@code LogManager.ROOT_LOGGER_NAME} — presumably the root-logger plugin
 * variant; TODO confirm. Note this variant uses the "Filter" element name
 * (singular) for its filter plugin element.
 *
 * @param additivity      "true"/"false"; defaults to true when unparsable
 * @param levelName       level name; invalid or null values fall back to ERROR
 * @param includeLocation "true" if location info should be passed downstream
 * @param refs            appender references
 * @param properties      properties to attach to the logger
 * @param config          the owning configuration
 * @param filter          an optional filter
 * @return a new AsyncLoggerConfig for the root logger
 */
@PluginFactory
public static LoggerConfig createLogger(
        @PluginAttribute("additivity") final String additivity,
        @PluginAttribute("level") final String levelName,
        @PluginAttribute("includeLocation") final String includeLocation,
        @PluginElement("AppenderRef") final AppenderRef[] refs,
        @PluginElement("Properties") final Property[] properties,
        @PluginConfiguration final Configuration config,
        @PluginElement("Filter") final Filter filter) {
    final List<AppenderRef> appenderRefs = Arrays.asList(refs);
    Level level;
    try {
        // Level.toLevel already defaults to ERROR for unknown names; the catch is belt-and-braces.
        level = Level.toLevel(levelName, Level.ERROR);
    } catch (final Exception ex) {
        LOGGER.error("Invalid Log level specified: {}. Defaulting to Error", levelName);
        level = Level.ERROR;
    }
    final boolean additive = Booleans.parseBoolean(additivity, true);
    return new AsyncLoggerConfig(LogManager.ROOT_LOGGER_NAME, appenderRefs, filter, level, additive,
            properties, config, AsyncLoggerConfig.includeLocation(includeLocation));
}
/**
 * Verifies that user-supplied headers are sent with the POST request and that
 * a {@code ${java:runtime}} lookup in a header value is resolved before sending.
 */
@Test
public void testAppendCustomHeader() throws Exception {
    // Stub the target endpoint so any POST to it succeeds.
    wireMockRule.stubFor(post(urlEqualTo("/test/log4j/"))
            .willReturn(SUCCESS_RESPONSE));
    final Appender appender = HttpAppender.newBuilder()
            .withName("Http")
            .withLayout(JsonLayout.createDefaultLayout())
            .setConfiguration(ctx.getConfiguration())
            .setUrl(new URL("http://localhost:" + wireMockRule.port() + "/test/log4j/"))
            .setHeaders(new Property[] {
                    // One literal header and one containing a lookup expression.
                    Property.createProperty("X-Test", "header value"),
                    Property.createProperty("X-Runtime", "${java:runtime}")
            })
            .build();
    appender.append(createLogEvent());
    // The lookup must have been substituted at send time (X-Runtime matches the
    // live JavaLookup value), and the JSON body must carry the logged message.
    wireMockRule.verify(postRequestedFor(urlEqualTo("/test/log4j/"))
            .withHeader("Host", containing("localhost"))
            .withHeader("X-Test", equalTo("header value"))
            .withHeader("X-Runtime", equalTo(JAVA_LOOKUP.getRuntime()))
            .withHeader("Content-Type", containing("application/json"))
            .withRequestBody(containing("\"message\" : \"" + LOG_MESSAGE + "\"")));
}
@Test public void testH2Properties() throws SQLException { Property[] properties = new Property[] { // @formatter:off Property.createProperty("username", JdbcH2TestHelper.USER_NAME), Property.createProperty("password", JdbcH2TestHelper.PASSWORD), // @formatter:on }; // @formatter:off DriverManagerConnectionSource source = DriverManagerConnectionSource.newBuilder() .setConnectionString(JdbcH2TestHelper.CONNECTION_STRING) .setProperties(properties) .build(); // @formatter:on try (Connection conn = source.getConnection()) { Assert.assertFalse(conn.isClosed()); } }
/**
 * Ad-hoc diagnostic main: prints the enabled state (debug/error/info) of four
 * pre-declared loggers, then dumps the current LoggerConfig name and the
 * configured appenders. Exits the JVM unconditionally when done.
 */
public static void main(String [] args){
    try{
        System.out.println(logger1.isDebugEnabled());
        System.out.println(logger1.isErrorEnabled());
        System.out.println(logger1.isInfoEnabled());
        System.out.println(logger2.isDebugEnabled());
        System.out.println(logger2.isErrorEnabled());
        System.out.println(logger2.isInfoEnabled());
        System.out.println(logger3.isDebugEnabled());
        System.out.println(logger3.isErrorEnabled());
        System.out.println(logger3.isInfoEnabled());
        System.out.println(logger4.isDebugEnabled());
        System.out.println(logger4.isErrorEnabled());
        System.out.println(logger4.isInfoEnabled());
        // NOTE(review): this context reference is obtained but never used — confirm intended.
        org.apache.logging.log4j.spi.LoggerContext context=LogManager.getContext();
        Logger logger = (Logger) LogManager.getLogger();
        LoggerConfig config=logger.get();
        // NOTE(review): properties is fetched but never read — confirm intended.
        Map<Property, Boolean> properties=config.getProperties();
        System.out.println(config.getName());
        LoggerContext ctx=LoggerContext.getContext();
        Object appenders=ctx.getConfiguration().getAppenders();
        System.out.println(appenders.toString());
    }catch(Exception e){
        // Broad catch is acceptable for a throwaway diagnostic main.
        e.printStackTrace();
    }finally{
        // Force the JVM down even if async logging threads are still running.
        System.exit(0);
    }
}
/**
 * Plugin factory for the KafkaAppender variant that wires in ZooKeeper,
 * mail and rpc alerting metadata. All attributes except layout, filter and
 * properties are required; the host is taken from {@code SysUtil.host}.
 *
 * @param layout        event layout
 * @param filter        optional filter
 * @param name          appender name (required)
 * @param topic         Kafka topic (required)
 * @param zkServers     ZooKeeper server list (required)
 * @param mail          alert mail address (required) — semantics defined by KafkaManager
 * @param rpc           rpc endpoint (required) — semantics defined by KafkaManager
 * @param app           application name (required)
 * @param properties    extra producer properties
 * @param configuration owning configuration
 * @return a new KafkaAppender backed by a freshly created KafkaManager
 */
@PluginFactory
public static KafkaAppender createAppender(
        @PluginElement("Layout") final Layout<? extends Serializable> layout,
        @PluginElement("Filter") final Filter filter,
        @Required(message = "No name provided for KafkaAppender") @PluginAttribute("name") final String name,
        @Required(message = "No topic provided for KafkaAppender") @PluginAttribute("topic") final String topic,
        @Required(message = "No zkServers provided for KafkaAppender") @PluginAttribute("zkServers") final String zkServers,
        @Required(message = "No mail provided for KafkaAppender") @PluginAttribute("mail") final String mail,
        @Required(message = "No rpc provided for KafkaAppender") @PluginAttribute("rpc") final String rpc,
        @Required(message = "No app provided for KafkaAppender") @PluginAttribute("app") final String app,
        @PluginElement("Properties") final Property[] properties,
        @PluginConfiguration final Configuration configuration) {
    final KafkaManager kafkaManager = new KafkaManager(configuration.getLoggerContext(), name, topic,
            zkServers, mail, rpc, app, SysUtil.host, properties);
    return new KafkaAppender(name, layout, filter, kafkaManager);
}
/**
 * Plugin factory for a KafkaAppender. Builds the producer configuration from
 * the XML properties, forces String serializers for key and value (overriding
 * any user setting), and only creates a live KafkaProducer when the appender
 * is enabled. Falls back to a serialized layout when none is configured.
 */
@PluginFactory
public static KafkaAppender createAppender(@PluginAttribute("name") final String name,
        @PluginElement("Filter") final Filter filter,
        @PluginAttribute("ignoreExceptions") final String ignore,
        @PluginAttribute("topic") final String topic,
        @PluginAttribute("enable") String enable,
        @PluginAttribute("syncsend") String syncsend,
        @PluginElement("Layout") Layout<? extends Serializable> layout,
        @PluginElement("Properties") final Property[] properties) {
    final boolean ignoreExceptions = Booleans.parseBoolean(ignore, true);
    final boolean enableKafka = Booleans.parseBoolean(enable, true);
    final boolean sync = Booleans.parseBoolean(syncsend, false);
    final Map<String, Object> producerProps = new HashMap<String, Object>();
    for (final Property property : properties) {
        producerProps.put(property.getName(), property.getValue());
    }
    // String serializers are mandatory for this appender; they override user values.
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
            "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
            "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = null;
    if (enableKafka) {
        producer = new KafkaProducer<String, String>(producerProps);
    }
    if (layout == null) {
        layout = SerializedLayout.createLayout();
    }
    return new KafkaAppender(name, filter, layout, ignoreExceptions, producer, topic, sync);
}
/**
 * Creates an instance of {@link LoggerConfig} which may be
 * {@link AsyncLoggerConfig} if asynchronous loggers are used.
 *
 * @param configuration the owner configuration
 * @param asyncLoggers  whether to build an async logger config
 * @param loggerName    the name of the logger
 * @param level         the {@link Level} of the logger
 * @param additivity    if additivity is enabled for the logger
 * @return an instance of {@link LoggerConfig} or {@link AsyncLoggerConfig};
 *         in both cases with no appender refs, no properties and no filter
 */
protected static LoggerConfig createLoggerConfig(final Configuration configuration, boolean asyncLoggers,
        final String loggerName, final Level level, final boolean additivity) {
    final Filter filter = null;
    if (asyncLoggers) {
        // XXX Obscure static factory methods.
        // AsyncLoggerConfig's constructor is protected, so an anonymous subclass
        // is used to instantiate it directly; includeLocation is hard-wired off.
        return new AsyncLoggerConfig(loggerName, Collections.<AppenderRef>emptyList(), filter, level,
                additivity, new Property[0], configuration, false) {
            private static final long serialVersionUID = 1L;
        };
    } else {
        // The sync path goes through the public plugin factory; includeLocation
        // is null, leaving the LoggerConfig default in effect.
        return LoggerConfig.createLogger(String.valueOf(additivity), level, loggerName, null,
                new AppenderRef[0], new Property[0], configuration, filter);
    }
}
/**
 * Immutable holder for the parameters needed to build a Flume manager.
 *
 * @param name       the name of the Appender
 * @param agents     the Flume agents to deliver to
 * @param properties the Flume configuration properties
 * @param batchSize  the number of events to include in a batch
 * @param dataDir    the directory where Flume should write to
 */
public FactoryData(final String name, final Agent[] agents, final Property[] properties,
        final int batchSize, final String dataDir) {
    this.name = name;
    this.agents = agents;
    this.properties = properties;
    this.batchSize = batchSize;
    this.dataDir = dataDir;
}
/**
 * Returns a FlumePersistentManager.
 * @param name The name of the manager.
 * @param agents The agents to use.
 * @param properties Properties to pass to the Manager.
 * @param batchSize The number of events to include in a batch.
 * @param retries The number of times to retry connecting before giving up.
 * @param connectionTimeout The amount of time to wait to establish a connection.
 * @param requestTimeout The amount of time to wait for a response to a request.
 * @param delay Amount of time to delay before delivering a batch.
 * @param lockTimeoutRetryCount The number of times to retry after a lock timeout.
 * @param dataDir The location of the Berkeley database.
 * @return A FlumePersistentManager.
 */
public static FlumePersistentManager getManager(final String name, final Agent[] agents,
        final Property[] properties, int batchSize, final int retries, final int connectionTimeout,
        final int requestTimeout, final int delay, final int lockTimeoutRetryCount, final String dataDir) {
    if (agents == null || agents.length == 0) {
        throw new IllegalArgumentException("At least one agent is required");
    }
    if (batchSize <= 0) {
        batchSize = 1;
    }
    // The default data dir is only used for the manager key; see note below.
    final String dataDirectory = Strings.isEmpty(dataDir) ? DEFAULT_DATA_DIR : dataDir;
    // Manager key: "FlumePersistent[host:port,...] dataDirectory".
    final StringBuilder sb = new StringBuilder("FlumePersistent[");
    boolean first = true;
    for (final Agent agent : agents) {
        if (!first) {
            sb.append(',');
        }
        sb.append(agent.getHost()).append(':').append(agent.getPort());
        first = false;
    }
    sb.append(']');
    sb.append(' ').append(dataDirectory);
    // NOTE(review): the raw dataDir (not dataDirectory with the default applied) is
    // passed to FactoryData, matching historical behavior; the default is presumably
    // re-applied downstream — confirm before changing.
    return getManager(sb.toString(), factory,
            new FactoryData(name, agents, batchSize, retries, connectionTimeout, requestTimeout,
                    delay, lockTimeoutRetryCount, dataDir, properties));
}
/**
 * Constructor.
 * @param name The name of the Appender.
 * @param agents The agents.
 * @param batchSize The number of events to include in a batch.
 * @param retries The number of times to retry a failed connection.
 * @param connectionTimeout The connection timeout — units presumably milliseconds, TODO confirm.
 * @param requestTimeout The request timeout — units presumably milliseconds, TODO confirm.
 * @param delay The delay before delivering a batch.
 * @param lockTimeoutRetryCount The number of times to retry after a lock timeout.
 * @param dataDir The directory for data.
 * @param properties The Flume configuration properties.
 */
public FactoryData(final String name, final Agent[] agents, final int batchSize, final int retries,
        final int connectionTimeout, final int requestTimeout, final int delay,
        final int lockTimeoutRetryCount, final String dataDir, final Property[] properties) {
    this.name = name;
    this.agents = agents;
    this.batchSize = batchSize;
    this.dataDir = dataDir;
    this.retries = retries;
    this.connectionTimeout = connectionTimeout;
    this.requestTimeout = requestTimeout;
    this.delay = delay;
    this.lockTimeoutRetryCount = lockTimeoutRetryCount;
    this.properties = properties;
}
/**
 * Create the Properties component.
 * @param properties An array of Property elements.
 * @param config The Configuration.
 * @return An Interpolator that includes the configuration properties.
 */
@PluginFactory
public static StrLookup configureSubstitutor(@PluginElement("Properties") final Property[] properties,
        @PluginConfiguration final Configuration config) {
    if (properties == null) {
        // NOTE(review): with no <Properties> element, the configuration's own
        // properties are NOT included here, contrary to the @return description —
        // confirm whether this is intended.
        return new Interpolator(null);
    }
    // Start from the configuration's properties; explicit <Property> entries
    // override same-named configuration properties.
    final Map<String, String> map = new HashMap<String, String>(config.getProperties());
    for (final Property prop : properties) {
        map.put(prop.getName(), prop.getValue());
    }
    return new Interpolator(new MapLookup(map));
}
/**
 * Stores the configured properties, precomputing for each one whether its
 * value contains a "${" variable reference and therefore needs substitution
 * at rewrite time.
 */
private PropertiesRewritePolicy(final Configuration config, final List<Property> props) {
    this.config = config;
    this.properties = new HashMap<Property, Boolean>(props.size());
    for (final Property property : props) {
        // Flag is true when the value holds a lookup expression.
        properties.put(property, Boolean.valueOf(property.getValue().contains("${")));
    }
}
/**
 * Rewrite the event: adds or overwrites context-map entries with the
 * configured properties, substituting ${...} variables for those properties
 * flagged at construction time.
 * @param source a logging event that may be returned or
 * used to create a new logging event.
 * @return The LogEvent after rewriting.
 */
@Override
public LogEvent rewrite(final LogEvent source) {
    // Mutable copy of the event's context map; configured properties override
    // same-named context entries.
    final Map<String, String> props = new HashMap<String, String>(source.getContextMap());
    for (final Map.Entry<Property, Boolean> entry : properties.entrySet()) {
        final Property prop = entry.getKey();
        // The Boolean flag (precomputed in the constructor) marks values containing "${".
        props.put(prop.getName(), entry.getValue().booleanValue() ?
                config.getStrSubstitutor().replace(prop.getValue()) : prop.getValue());
    }
    // Re-create the event with the rewritten context map; every other field is copied verbatim.
    return new Log4jLogEvent(source.getLoggerName(), source.getMarker(), source.getFQCN(),
            source.getLevel(), source.getMessage(), source.getThrown(), props,
            source.getContextStack(), source.getThreadName(), source.getSource(), source.getMillis());
}
/**
 * The factory method to create the PropertiesRewritePolicy.
 * @param config The Configuration.
 * @param props key/value pairs for the new keys and values.
 * @return The PropertiesRewritePolicy, or {@code null} when no properties were given.
 */
@PluginFactory
public static PropertiesRewritePolicy createPolicy(@PluginConfiguration final Configuration config,
        @PluginElement("Properties") final Property[] props) {
    if (props == null || props.length == 0) {
        // Plugin convention: log and return null rather than throw.
        LOGGER.error("Properties must be specified for the PropertiesRewritePolicy");
        return null;
    }
    final List<Property> properties = Arrays.asList(props);
    return new PropertiesRewritePolicy(config, properties);
}
/**
 * Constructs an async logger config; all state is delegated to the
 * {@code LoggerConfig} superclass.
 *
 * @param name            logger name
 * @param appenders       appender references
 * @param filter          optional filter
 * @param level           logger level
 * @param additive        whether events also flow to ancestor loggers
 * @param properties      properties attached to the logger
 * @param config          owning configuration
 * @param includeLocation whether location information is captured
 */
protected AsyncLoggerConfig(final String name,
        final List<AppenderRef> appenders, final Filter filter,
        final Level level, final boolean additive,
        final Property[] properties, final Configuration config,
        final boolean includeLocation) {
    super(name, appenders, filter, level, additive, properties, config,
            includeLocation);
}
/**
 * Factory method to create a LoggerConfig.
 *
 * @param additivity True if additive, false otherwise.
 * @param levelName The Level to be associated with the Logger.
 * @param loggerName The name of the Logger; required — returns null when absent.
 * @param includeLocation "true" if location should be passed downstream
 * @param refs An array of Appender names.
 * @param properties Properties to pass to the Logger.
 * @param config The Configuration.
 * @param filter A Filter.
 * @return A new LoggerConfig, or {@code null} if no logger name was supplied.
 */
@PluginFactory
public static LoggerConfig createLogger(
        @PluginAttribute("additivity") final String additivity,
        @PluginAttribute("level") final String levelName,
        @PluginAttribute("name") final String loggerName,
        @PluginAttribute("includeLocation") final String includeLocation,
        @PluginElement("AppenderRef") final AppenderRef[] refs,
        @PluginElement("Properties") final Property[] properties,
        @PluginConfiguration final Configuration config,
        @PluginElement("Filters") final Filter filter) {
    if (loggerName == null) {
        LOGGER.error("Loggers cannot be configured without a name");
        return null;
    }
    final List<AppenderRef> appenderRefs = Arrays.asList(refs);
    Level level;
    try {
        // Level.toLevel already defaults to ERROR for unknown names; the catch is belt-and-braces.
        level = Level.toLevel(levelName, Level.ERROR);
    } catch (final Exception ex) {
        LOGGER.error(
                "Invalid Log level specified: {}. Defaulting to Error",
                levelName);
        level = Level.ERROR;
    }
    // The configuration element name "root" maps to the empty-string root logger name.
    final String name = loggerName.equals("root") ? "" : loggerName;
    final boolean additive = Booleans.parseBoolean(additivity, true);
    return new AsyncLoggerConfig(name, appenderRefs, filter, level, additive,
            properties, config, includeLocation(includeLocation));
}
/**
 * This method is called by the EventHandler that processes the
 * RingBufferLogEvent in a separate thread.
 *
 * @param event the event to log
 */
public void actualAsyncLog(final RingBufferLogEvent event) {
    // On the background thread: merge the logger config's properties into the
    // event's context map (with variable substitution), then route the event
    // through the normal configuration path.
    final Map<Property, Boolean> properties = config.loggerConfig.getProperties();
    event.mergePropertiesIntoContextMap(properties, config.config.getStrSubstitutor());
    config.logEvent(event);
}
static Map<String, String> createMap(final List<Property> properties) { final Map<String, String> contextMap = ThreadContext.getImmutableContext(); if (properties == null || properties.isEmpty()) { return contextMap; // may be ThreadContext.EMPTY_MAP but not null } final Map<String, String> map = new HashMap<>(contextMap); for (final Property prop : properties) { if (!map.containsKey(prop.getName())) { map.put(prop.getName(), prop.getValue()); } } return Collections.unmodifiableMap(map); }
/**
 * Creates a pooling connection source: the actual connection string handed to
 * the superclass is {@code URL_PREFIX + poolName} (the pool's synthetic URL),
 * while the real {@code connectionString} is used to set up the pooled driver.
 *
 * @param driverClassName  JDBC driver class to load
 * @param connectionString real JDBC URL for the underlying connections
 * @param userName         database user name
 * @param password         database password
 * @param properties       extra connection properties
 * @param poolName         name under which the pool is registered
 * @throws SQLException if the pooling driver cannot be set up
 */
public PoolingDriverConnectionSource(final String driverClassName, final String connectionString,
        final char[] userName, final char[] password, final Property[] properties,
        final String poolName) throws SQLException {
    super(driverClassName, connectionString, URL_PREFIX + poolName, userName, password, properties);
    this.poolName = poolName;
    setupDriver(connectionString);
}
/**
 * Returns a FlumePersistentManager.
 * @param name The name of the manager.
 * @param agents The agents to use.
 * @param properties Properties to pass to the Manager.
 * @param batchSize The number of events to include in a batch.
 * @param retries The number of times to retry connecting before giving up.
 * @param connectionTimeout The amount of time to wait to establish a connection.
 * @param requestTimeout The amount of time to wait for a response to a request.
 * @param delayMillis Amount of time to delay before delivering a batch.
 * @param lockTimeoutRetryCount The number of times to retry after a lock timeout.
 * @param dataDir The location of the Berkeley database.
 * @return A FlumePersistentManager.
 */
public static FlumePersistentManager getManager(final String name, final Agent[] agents,
        final Property[] properties, int batchSize, final int retries, final int connectionTimeout,
        final int requestTimeout, final int delayMillis, final int lockTimeoutRetryCount,
        final String dataDir) {
    if (agents == null || agents.length == 0) {
        throw new IllegalArgumentException("At least one agent is required");
    }
    if (batchSize <= 0) {
        batchSize = 1;
    }
    // The default data dir is applied only for the manager-key string here;
    // the raw dataDir is forwarded to FactoryData below.
    final String dataDirectory = Strings.isEmpty(dataDir) ? DEFAULT_DATA_DIR : dataDir;
    // Manager key: "FlumePersistent[host:port,...] dataDirectory".
    final StringBuilder sb = new StringBuilder("FlumePersistent[");
    boolean first = true;
    for (final Agent agent : agents) {
        if (!first) {
            sb.append(',');
        }
        sb.append(agent.getHost()).append(':').append(agent.getPort());
        first = false;
    }
    sb.append(']');
    sb.append(' ').append(dataDirectory);
    return getManager(sb.toString(), factory, new FactoryData(name, agents, batchSize, retries,
            connectionTimeout, requestTimeout, delayMillis, lockTimeoutRetryCount, dataDir, properties));
}
/**
 * Constructor.
 * @param name The name of the Appender.
 * @param agents The agents.
 * @param batchSize The number of events to include in a batch.
 * @param retries The number of times to retry a failed connection.
 * @param connectionTimeout The connection timeout — units presumably milliseconds, TODO confirm.
 * @param requestTimeout The request timeout — units presumably milliseconds, TODO confirm.
 * @param delayMillis The delay in milliseconds before delivering a batch.
 * @param lockTimeoutRetryCount The number of times to retry after a lock timeout.
 * @param dataDir The directory for data.
 * @param properties The Flume configuration properties.
 */
public FactoryData(final String name, final Agent[] agents, final int batchSize, final int retries,
        final int connectionTimeout, final int requestTimeout, final int delayMillis,
        final int lockTimeoutRetryCount, final String dataDir, final Property[] properties) {
    this.name = name;
    this.agents = agents;
    this.batchSize = batchSize;
    this.dataDir = dataDir;
    this.retries = retries;
    this.connectionTimeout = connectionTimeout;
    this.requestTimeout = requestTimeout;
    this.delayMillis = delayMillis;
    this.lockTimeoutRetryCount = lockTimeoutRetryCount;
    this.properties = properties;
}
/** * Constructor. * @param loggerName The name of the Logger. * @param marker The Marker or null. * @param loggerFQCN The fully qualified class name of the caller. * @param level The logging Level. * @param message The Message. * @param properties the properties to be merged with ThreadContext key-value pairs into the event's ReadOnlyStringMap. * @param t A Throwable or null. */ // This constructor is called from LogEventFactories. public Log4jLogEvent(final String loggerName, final Marker marker, final String loggerFQCN, final Level level, final Message message, final List<Property> properties, final Throwable t) { this(loggerName, marker, loggerFQCN, level, message, t, null, createContextData(properties), ThreadContext.getDepth() == 0 ? null : ThreadContext.cloneStack(), // mutable copy 0, // thread id null, // thread name 0, // thread priority null, // StackTraceElement source CLOCK, // nanoClock.nanoTime()); }
/**
 * Puts key-value pairs from both the specified list of properties as well as the thread context into the
 * specified reusable StringMap.
 *
 * @param props list of configuration properties, may be {@code null}
 * @param ignore a {@code StringMap} instance from the log event (deliberately unused, see below)
 * @return a frozen {@code StringMap} combining configuration properties with thread context data
 */
@Override
public StringMap injectContextData(final List<Property> props, final StringMap ignore) {
    final Map<String, String> copy = ThreadContext.getImmutableContext();
    // The DefaultThreadContextMap stores context data in a Map<String, String>.
    // This is a copy-on-write data structure so we are sure ThreadContext changes will not affect our copy.
    // If there are no configuration properties returning a thin wrapper around the copy
    // is faster than copying the elements into the LogEvent's reusable StringMap.
    if (props == null || props.isEmpty()) {
        // this will replace the LogEvent's context data with the returned instance.
        // NOTE: must mark as frozen or downstream components may attempt to modify (UnsupportedOperationEx)
        return copy.isEmpty() ? ContextDataFactory.emptyFrozenContextData() : frozenStringMap(copy);
    }
    // If the list of Properties is non-empty we need to combine the properties and the ThreadContext
    // data. Note that we cannot reuse the specified StringMap: some Loggers may have properties defined
    // and others not, so the LogEvent's context data may have been replaced with an immutable copy from
    // the ThreadContext - this will throw an UnsupportedOperationException if we try to modify it.
    final StringMap result = new JdkMapAdapterStringMap(new HashMap<>(copy));
    // Indexed loop over props; only add properties whose key the context does not
    // already supply (context values take precedence).
    for (int i = 0; i < props.size(); i++) {
        final Property prop = props.get(i);
        if (!copy.containsKey(prop.getName())) {
            result.putValue(prop.getName(), prop.getValue());
        }
    }
    result.freeze();
    return result;
}
/**
 * Puts key-value pairs from both the specified list of properties as well as the thread context into the
 * specified reusable StringMap.
 *
 * @param props list of configuration properties, may be {@code null}
 * @param reusable a {@code StringMap} instance that may be reused to avoid creating temporary objects
 * @return a {@code StringMap} combining configuration properties with thread context data
 */
@Override
public StringMap injectContextData(final List<Property> props, final StringMap reusable) {
    // When the ThreadContext is garbage-free, we must copy its key-value pairs into the specified reusable
    // StringMap. We cannot return the ThreadContext's internal data structure because it may be modified later
    // and such modifications should not be reflected in the log event.
    // Properties are copied first and the thread context second, so context
    // values overwrite same-named configured properties.
    copyProperties(props, reusable);
    final ReadOnlyStringMap immutableCopy = ThreadContext.getThreadContextMap().getReadOnlyContextData();
    reusable.putAll(immutableCopy);
    return reusable;
}
/**
 * Copies each configured property's name/value pair into the given StringMap.
 * A {@code null} property list is treated as empty.
 *
 * @param properties list of configuration properties, may be {@code null}
 * @param result the {@code StringMap} to add the key-values to; must be non-{@code null}
 */
public static void copyProperties(final List<Property> properties, final StringMap result) {
    if (properties == null) {
        return;
    }
    // Indexed loop, avoiding Iterator allocation, as in the original.
    for (int i = 0, size = properties.size(); i < size; i++) {
        final Property property = properties.get(i);
        result.putValue(property.getName(), property.getValue());
    }
}