@Override
protected void doStart() {
    logs = new AWSLogsClient(getCredentials(), getClientConfiguration());
    logs.setRegion(RegionUtils.getRegion(region));

    if (!skipCreate) {
        if (!logGroupExists(logGroup)) {
            createLogGroup(logGroup);
        }
        if (!logStreamExists(logGroup, logStream)) {
            createLogStream(logGroup, logStream);
        }
    }

    queue = new LinkedBlockingQueue<>(internalQueueSize);

    worker = new Worker<>(this);
    worker.setName(format("%s-worker", getName()));
    worker.setDaemon(true);
    worker.start();
}
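// Illustrative alternative, not taken from the snippet above: recent AWS SDK for Java v1
// releases deprecate the AWSLogsClient constructors and setRegion() in favour of
// com.amazonaws.services.logs.AWSLogsClientBuilder. This sketch assumes getCredentials()
// returns AWSCredentials; if it already returns an AWSCredentialsProvider, pass it to
// withCredentials() directly.
AWSLogs logs = AWSLogsClientBuilder.standard()
        .withCredentials(new AWSStaticCredentialsProvider(getCredentials()))
        .withClientConfiguration(getClientConfiguration())
        .withRegion(region)
        .build();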
AWSLogsStub(String logGroupName, String logStreamName, String logRegion) {
    this.logGroupName = logGroupName;
    this.logStreamName = logStreamName;

    AWSLogs awsLogs = new AWSLogsClient();
    if (logRegion != null) {
        awsLogs.setRegion(RegionUtils.getRegion(logRegion));
    }
    this.awsLogs = awsLogs;
}
public CloudWatchMonitor(AWSLogsClient client) {
    this.logsClient = client;
    if (!Validation.checkCloudWatchMonitorConfig(logsClient)) {
        latestLogs = Arrays.asList(failedConfigurationLogsMessage);
        return;
    }
    lastPollTime = 0L;
}
void setAwsLogsClient(AWSLogsClient awsLogsClient) {
    this.awsLogsClient = awsLogsClient;
}
@Test(timeout = 5000)
public void testBasic() throws InterruptedException {
    CloudWatchAppender appender = new CloudWatchAppender();
    AWSLogsClient awsLogClient = createMock(AWSLogsClient.class);
    appender.setAwsLogsClient(awsLogClient);

    appender.setMaxBatchSize(1);
    appender.setRegion("region");
    final String logGroup = "pfqoejpfqe";
    appender.setLogGroup(logGroup);
    final String logStream = "pffqjfqjpoqoejpfqe";
    appender.setLogStream(logStream);
    PatternLayout layout = new PatternLayout();
    layout.setContext(new LoggerContext());
    layout.setPattern("[%thread] %level %logger{20} - %msg%n%xThrowable");
    layout.start();
    appender.setLayout(layout);

    LoggingEvent event = new LoggingEvent();
    event.setTimeStamp(System.currentTimeMillis());
    String loggerName = "name";
    event.setLoggerName(loggerName);
    Level level = Level.DEBUG;
    event.setLevel(level);
    String message = "fjpewjfpewjfpewjfepowf";
    event.setMessage(message);

    String threadName = Thread.currentThread().getName();
    final String fullMessage = "[" + threadName + "] " + level + " " + loggerName + " - " + message + "\n";

    final PutLogEventsResult result = new PutLogEventsResult();
    String sequence = "ewopjfewfj";
    result.setNextSequenceToken(sequence);
    expect(awsLogClient.putLogEvents(isA(PutLogEventsRequest.class))).andAnswer(new IAnswer<PutLogEventsResult>() {
        @Override
        public PutLogEventsResult answer() {
            PutLogEventsRequest request = (PutLogEventsRequest) getCurrentArguments()[0];
            assertEquals(logGroup, request.getLogGroupName());
            assertEquals(logStream, request.getLogStreamName());
            List<InputLogEvent> events = request.getLogEvents();
            assertEquals(1, events.size());
            assertEquals(fullMessage, events.get(0).getMessage());
            return result;
        }
    }).times(2);
    awsLogClient.shutdown();

    // =====================================

    replay(awsLogClient);
    appender.start();
    // for coverage
    appender.start();
    appender.append(event);
    Thread.sleep(10);
    appender.append(event);
    while (appender.getEventsWrittenCount() < 2) {
        Thread.sleep(10);
    }
    appender.stop();
    verify(awsLogClient);
}
@Ignore
@Test
public void testMetricFilters() {
    List<String> lines;
    PrintStream original = System.out;
    try {
        final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        System.setOut(new PrintStream(outputStream, true));

        Logger logger = LoggerFactory.getLogger("TEST-LOGGER");
        logger.info("TEST-MESSAGE");

        TestMetricSet metricSet = new TestMetricSet();
        MetricRegistry registry = new MetricRegistry();
        registry.registerAll(metricSet);

        metricSet.testCounter.inc();
        metricSet.testMeter.mark(1L);
        metricSet.testHistogram.update(1L);
        metricSet.testTimer.update(1L, TimeUnit.MINUTES);

        Slf4jReporter reporter = Slf4jReporter.forRegistry(registry)
                .markWith(MarkerFactory.getMarker("METRIC"))
                .outputTo(logger)
                .build();
        reporter.report();

        lines = Arrays.asList(outputStream.toString().split("\\n"));
    } finally {
        System.setOut(original);
    }
    assertNotNull(lines);

    AWSLogsClient client = new AWSLogsClient();

    // TODO: Loop for each kind of metric
    String metricFilterPattern = String.format(COMPLETE_FILTER_PATTERN_MAP.get("COUNTER"), "test.namespace/testCounter");
    TestMetricFilterRequest request = new TestMetricFilterRequest()
            .withFilterPattern(metricFilterPattern)
            .withLogEventMessages(lines);
    TestMetricFilterResult result = client.testMetricFilter(request);

    MetricFilterMatchRecord matchRecord = result.getMatches().get(0);
    assertEquals("test.namespace/testCounter", matchRecord.getExtractedValues().get("$name"));
    assertEquals("1", matchRecord.getExtractedValues().get("$count"));

    MetricFilterMatchRecord matchRecord2 = result.getMatches().get(1);
    assertEquals("test.namespace/testGauge", matchRecord2.getExtractedValues().get("$name"));
    assertEquals("42", matchRecord2.getExtractedValues().get("$value"));
}
public static boolean checkCloudWatchMonitorConfig(AWSLogsClient client) {
    return client != null;
}
public AWSLogsClient getCloudWatchLogsClient() throws InvalidInputException {
    AWSLogsClient client = new AWSLogsClient(awsCredentialsProvider, getClientConfiguration());
    client.setEndpoint("https://logs." + region + ".amazonaws.com");
    return client;
}
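// Illustrative alternative, not part of the method above: setEndpoint() is deprecated in
// recent SDK releases; the builder's endpoint configuration
// (com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration) expresses the same
// endpoint and also carries the signing region.
AWSLogs client = AWSLogsClientBuilder.standard()
        .withCredentials(awsCredentialsProvider)
        .withClientConfiguration(getClientConfiguration())
        .withEndpointConfiguration(
                new AwsClientBuilder.EndpointConfiguration("https://logs." + region + ".amazonaws.com", region))
        .build();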
private CloudWatchAppender(final String name,
                           final String awsLogGroupName,
                           final String awsLogStreamName,
                           final String awsLogStreamFlushPeriodInSeconds,
                           final Layout<Serializable> layout) {
    super(name, null, layout == null ? PatternLayout.createDefaultLayout() : layout, false);

    // figure out the flush period
    int flushPeriod = AWS_LOG_STREAM_FLUSH_PERIOD_IN_SECONDS;
    if (awsLogStreamFlushPeriodInSeconds != null) {
        try {
            flushPeriod = Integer.parseInt(awsLogStreamFlushPeriodInSeconds);
        } catch (NumberFormatException nfe) {
            debug("Bad awsLogStreamFlushPeriodInSeconds (" + awsLogStreamFlushPeriodInSeconds
                    + "), defaulting to: " + AWS_LOG_STREAM_FLUSH_PERIOD_IN_SECONDS + "s");
        }
    } else {
        debug("No awsLogStreamFlushPeriodInSeconds specified, defaulted to "
                + AWS_LOG_STREAM_FLUSH_PERIOD_IN_SECONDS + "s");
    }
    flushPeriodMillis = flushPeriod * 1000;

    try {
        awsLogsClient = new AWSLogsClient(); // this should pull the credentials automatically from the environment

        // set the group name
        this.logGroupName = awsLogGroupName;

        // determine the stream name (prefix) and suffix it with the timestamp to ensure uniqueness
        String logStreamNamePrefix = awsLogStreamName;
        if (logStreamNamePrefix == null) {
            logStreamNamePrefix = ENV_LOG_STREAM_NAME;
        }
        if (logStreamNamePrefix == null) {
            logStreamNamePrefix = AWS_INSTANCE_ID;
        }

        String finalLogStreamName;
        do {
            finalLogStreamName = logStreamNamePrefix + " " + getTimeNow();
            this.sequenceTokenCache = createLogGroupAndLogStreamIfNeeded(logGroupName, finalLogStreamName);
        } while (this.sequenceTokenCache != null);
        logStreamName = finalLogStreamName;
    } catch (Exception e) {
        e.printStackTrace();
    }
}
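// Illustrative sketch, not taken from the appender above: once a sequence token is cached,
// a flush typically hands it to PutLogEvents and stores the returned token for the next
// call. The method name flushEvents() and the events parameter are hypothetical.
private void flushEvents(List<InputLogEvent> events) {
    PutLogEventsRequest request = new PutLogEventsRequest()
            .withLogGroupName(logGroupName)
            .withLogStreamName(logStreamName)
            .withLogEvents(events)
            .withSequenceToken(sequenceTokenCache);
    PutLogEventsResult result = awsLogsClient.putLogEvents(request);
    // CloudWatch Logs expects the token returned by the previous successful call on the next one
    sequenceTokenCache = result.getNextSequenceToken();
}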
@Override
protected void createAWSClient() {
    client = tryClientFactory(clientFactoryMethod, AWSLogs.class, true);
    if ((client == null) && (clientEndpoint == null)) {
        client = tryClientFactory("com.amazonaws.services.logs.AWSLogsClientBuilder.defaultClient", AWSLogs.class, false);
    }
    if (client == null) {
        LogLog.debug(getClass().getSimpleName() + ": creating service client via constructor");
        client = tryConfigureEndpointOrRegion(new AWSLogsClient(), clientEndpoint);
    }
}
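// Illustrative sketch, not part of the appender above: as the fallback to
// AWSLogsClientBuilder.defaultClient suggests, clientFactoryMethod is expected to name a
// static no-argument method that returns the client. The class name and region below are
// hypothetical placeholders for such a factory, e.g. configured as
// "com.example.logging.LogsClientFactory.createClient".
public class LogsClientFactory {
    public static AWSLogs createClient() {
        return AWSLogsClientBuilder.standard()
                .withRegion("us-east-1")
                .build();
    }
}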