@Test public void reportMetersCountersGaugesWithZeroValuesOnlyWhenConfigured() throws Exception { metricRegistry.register(ARBITRARY_GAUGE_NAME, (Gauge<Long>) () -> 0L); metricRegistry.meter(ARBITRARY_METER_NAME).mark(0); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(0); metricRegistry.timer(ARBITRARY_TIMER_NAME).update(-1L, TimeUnit.NANOSECONDS); buildReportWithSleep(reporterBuilder .withArithmeticMean() .withOneMinuteMeanRate() .withFiveMinuteMeanRate() .withFifteenMinuteMeanRate() .withZeroValuesSubmission() .withMeanRate()); verify(mockAmazonCloudWatchAsyncClient, times(1)).putMetricDataAsync(metricDataRequestCaptor.capture()); final PutMetricDataRequest putMetricDataRequest = metricDataRequestCaptor.getValue(); final List<MetricDatum> metricData = putMetricDataRequest.getMetricData(); for (final MetricDatum metricDatum : metricData) { assertThat(metricDatum.getValue()).isEqualTo(0.0); } }
@Test public void shouldReportCounterValueDelta() throws Exception { metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); final CloudWatchReporter cloudWatchReporter = reporterBuilder.build(); cloudWatchReporter.report(); MetricDatum metricDatum = firstMetricDatumFromCapturedRequest(); assertThat(metricDatum.getValue().intValue()).isEqualTo(2); metricDataRequestCaptor.getAllValues().clear(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); cloudWatchReporter.report(); metricDatum = firstMetricDatumFromCapturedRequest(); assertThat(metricDatum.getValue().intValue()).isEqualTo(6); verify(mockAmazonCloudWatchAsyncClient, times(2)).putMetricDataAsync(any(PutMetricDataRequest.class)); }
private MetricDatum metricDatumByDimensionFromCapturedRequest(final String dimensionValue) { final PutMetricDataRequest putMetricDataRequest = metricDataRequestCaptor.getValue(); final List<MetricDatum> metricData = putMetricDataRequest.getMetricData(); final Optional<MetricDatum> metricDatumOptional = metricData .stream() .filter(metricDatum -> metricDatum.getDimensions() .contains(new Dimension().withName(DIMENSION_NAME_TYPE).withValue(dimensionValue))) .findFirst(); if (metricDatumOptional.isPresent()) { return metricDatumOptional.get(); } throw new IllegalStateException("Could not find MetricDatum for Dimension value: " + dimensionValue); }
@Override protected void runOneIteration() throws Exception { try { _cloudWatch.putMetricData( new PutMetricDataRequest() .withNamespace(NAMESPACE) .withMetricData( new MetricDatum() .withTimestamp(new Date()) .withMetricName(ACTIVE_AND_PENDING_SCANS) .withValue((double) (_activeScanCount + _pendingScanCount)) .withUnit(StandardUnit.Count) .withDimensions(_dimensions))); } catch (AmazonClientException e) { _log.error("Failed to publish active and pending scans metric", e); } }
@Test public void sendMetricFromHeaderValues() throws Exception { template.send("direct:start", ExchangePattern.InOnly, new Processor() { public void process(Exchange exchange) throws Exception { exchange.getIn().setHeader(CwConstants.METRIC_NAMESPACE, "camel.apache.org/overridden"); exchange.getIn().setHeader(CwConstants.METRIC_NAME, "OverriddenMetric"); exchange.getIn().setHeader(CwConstants.METRIC_VALUE, Double.valueOf(3)); exchange.getIn().setHeader(CwConstants.METRIC_UNIT, StandardUnit.Bytes.toString()); exchange.getIn().setHeader(CwConstants.METRIC_TIMESTAMP, LATER); } }); ArgumentCaptor<PutMetricDataRequest> argument = ArgumentCaptor.forClass(PutMetricDataRequest.class); verify(cloudWatchClient).putMetricData(argument.capture()); assertEquals("camel.apache.org/overridden", argument.getValue().getNamespace()); assertEquals("OverriddenMetric", argument.getValue().getMetricData().get(0).getMetricName()); assertEquals(Double.valueOf(3), argument.getValue().getMetricData().get(0).getValue()); assertEquals(StandardUnit.Bytes.toString(), argument.getValue().getMetricData().get(0).getUnit()); assertEquals(LATER, argument.getValue().getMetricData().get(0).getTimestamp()); }
@Test public void sendManuallyCreatedMetric() throws Exception { template.send("direct:start", ExchangePattern.InOnly, new Processor() { public void process(Exchange exchange) throws Exception { MetricDatum metricDatum = new MetricDatum() .withMetricName("errorCount") .withValue(Double.valueOf(0)); exchange.getIn().setBody(metricDatum); } }); ArgumentCaptor<PutMetricDataRequest> argument = ArgumentCaptor.forClass(PutMetricDataRequest.class); verify(cloudWatchClient).putMetricData(argument.capture()); assertEquals("errorCount", argument.getValue().getMetricData().get(0).getMetricName()); assertEquals(Double.valueOf(0), argument.getValue().getMetricData().get(0).getValue()); }
@Test public void useDefaultValuesForMetricUnitAndMetricValue() throws Exception { template.send("direct:start", ExchangePattern.InOnly, new Processor() { public void process(Exchange exchange) throws Exception { exchange.getIn().setHeader(CwConstants.METRIC_NAME, "errorCount"); } }); ArgumentCaptor<PutMetricDataRequest> argument = ArgumentCaptor.forClass(PutMetricDataRequest.class); verify(cloudWatchClient).putMetricData(argument.capture()); assertEquals("errorCount", argument.getValue().getMetricData().get(0).getMetricName()); assertEquals(Double.valueOf(1), argument.getValue().getMetricData().get(0).getValue()); assertEquals(StandardUnit.Count.toString(), argument.getValue().getMetricData().get(0).getUnit()); }
@Test public void setsMetricDimensions() throws Exception { template.send("direct:start", ExchangePattern.InOnly, new Processor() { public void process(Exchange exchange) throws Exception { exchange.getIn().setHeader(CwConstants.METRIC_NAME, "errorCount"); Map<String, String> dimensionsMap = new LinkedHashMap<String, String>(); dimensionsMap.put("keyOne", "valueOne"); dimensionsMap.put("keyTwo", "valueTwo"); exchange.getIn().setHeader(CwConstants.METRIC_DIMENSIONS, dimensionsMap); } }); ArgumentCaptor<PutMetricDataRequest> argument = ArgumentCaptor.forClass(PutMetricDataRequest.class); verify(cloudWatchClient).putMetricData(argument.capture()); List<Dimension> dimensions = argument.getValue().getMetricData().get(0).getDimensions(); Dimension dimension = dimensions.get(0); assertThat(dimensions.size(), is(2)); assertEquals("keyOne", dimension.getName()); assertEquals("valueOne", dimension.getValue()); }
@Override public void send() throws IOException { final List<MetricDatum> datumList; synchronized (buffer) { datumList = new ArrayList<MetricDatum>(buffer); buffer.clear(); } // note: Each PutMetricData request is limited to 40 KB in size for HTTP POST requests. final PutMetricDataRequest request = new PutMetricDataRequest() .withNamespace(cloudWatchNamespace).withMetricData(datumList); try { awsCloudWatch.putMetricData(request); } catch (final Exception e) { // do not catch AmazonCloudWatchException specifically, otherwise e.g. ClassNotFoundException in Jenkins throw new IOException("Error connecting to AWS CloudWatch", e); } }
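The note in send() above mentions the 40 KB HTTP POST limit; the classic PutMetricData API also rejects requests carrying more than 20 MetricDatum entries, which is why several snippets in this section partition their data into chunks of 20. Below is a minimal sketch of a chunked variant of send(), assuming the same buffer, cloudWatchNamespace and awsCloudWatch fields; the sendChunked name and the MAX_DATUM_PER_REQUEST constant are introduced here purely for illustration.

// Sketch only: chunked variant of send() above, assuming the same buffer,
// cloudWatchNamespace and awsCloudWatch fields exist on the enclosing class.
private static final int MAX_DATUM_PER_REQUEST = 20; // illustrative constant, matches the 20-item partitions used elsewhere

public void sendChunked() throws IOException {
    final List<MetricDatum> datumList;
    synchronized (buffer) {
        datumList = new ArrayList<MetricDatum>(buffer);
        buffer.clear();
    }
    // split the drained buffer so no single request exceeds the per-call datum limit
    for (int start = 0; start < datumList.size(); start += MAX_DATUM_PER_REQUEST) {
        final int end = Math.min(start + MAX_DATUM_PER_REQUEST, datumList.size());
        final PutMetricDataRequest request = new PutMetricDataRequest()
                .withNamespace(cloudWatchNamespace)
                .withMetricData(datumList.subList(start, end));
        try {
            awsCloudWatch.putMetricData(request);
        } catch (final Exception e) {
            throw new IOException("Error connecting to AWS CloudWatch", e);
        }
    }
}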
@Override public void run() { try { List<MetricDatum> metricData = new ArrayList<>(); Date now = new Date(); metricData.addAll(heartbeats.entrySet().stream().map(entry -> new MetricDatum().withMetricName("Heartbeats") .withDimensions(new Dimension().withName("Client").withValue(entry.getKey())) .withTimestamp(now) .withUnit(StandardUnit.Count) .withValue(entry.getValue().doubleValue())).collect(Collectors.toList())); heartbeats.clear(); for (List<MetricDatum> chunk : partitionList(metricData, 20)) { awsClient.putMetricData(new PutMetricDataRequest().withNamespace(namespace).withMetricData(chunk)); } } catch (Throwable e) { log.error("Failed to publish CloudWatch metrics: {}", e.toString()); } }
protected void sendBatch(Map<String, Collection<MetricDatum>> datums) { for (final Map.Entry<String, Collection<MetricDatum>> e : datums.entrySet()) { for (final List<MetricDatum> batch : Lists.partition(Lists.newLinkedList(e.getValue()), BATCH_SIZE)) { cloudWatch.putMetricDataAsync(new PutMetricDataRequest().withNamespace(e.getKey()) .withMetricData(batch), new AsyncHandler<PutMetricDataRequest, Void>() { @Override public void onError(Exception exception) { LOG.error("PutMetricData failed", exception); LOG.info("Requeueing metric data."); queuedDatums.putAll(e.getKey(), batch); } @Override public void onSuccess(PutMetricDataRequest request, Void result) { LOG.info("Successfully put " + request.getMetricData().size() + " datums for namespace " + request.getNamespace()); LOG.debug("Request", request); } }); } } }
private void addHistograms(SortedMap<String, Histogram> histograms, LinkedList<PutMetricDataRequest> requests, Date timestamp) { logger.debug("Adding Histograms..."); for (String name : histograms.keySet()) { Histogram histogram = histograms.get(name); Snapshot snapshot = histogram.getSnapshot(); checkAndAddDatum(MetricFilter.Stat.COUNT, name, histogram.getCount(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.MIN, name, snapshot.getMin(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.MAX, name, snapshot.getMax(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.MEAN, name, snapshot.getMean(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.STDDEV, name, snapshot.getStdDev(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.PERCENTILE_75, name, snapshot.get75thPercentile(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.PERCENTILE_95, name, snapshot.get95thPercentile(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.PERCENTILE_98, name, snapshot.get98thPercentile(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.PERCENTILE_99, name, snapshot.get99thPercentile(), requests, timestamp); checkAndAddDatum(MetricFilter.Stat.PERCENTILE_999, name, snapshot.get999thPercentile(), requests, timestamp); } }
@Override public boolean matches(Object o) { if (!(o instanceof PutMetricDataRequest)) { errorText = "Invalid arg type " + o; return false; } PutMetricDataRequest request = (PutMetricDataRequest) o; if (validators.length != request.getMetricData().size()) { errorText = "Got " + request.getMetricData().size() + " data elements, but had only " + validators.length + " validators."; return false; } for (int i = 0; i < request.getMetricData().size(); i++) { try { validators[i].validate(request.getMetricData().get(i)); } catch (Exception e) { errorText = e.getMessage(); return false; } } return true; }
@Override protected void runOnce() throws Exception { ClickEvent event = inputQueue.take(); String partitionKey = event.getSessionId(); ByteBuffer data = ByteBuffer.wrap( event.getPayload().getBytes("UTF-8")); recordsPut.getAndIncrement(); PutRecordResult res = kinesis.putRecord( STREAM_NAME, data, partitionKey); MetricDatum d = new MetricDatum() .withDimensions( new Dimension().withName("StreamName").withValue(STREAM_NAME), new Dimension().withName("ShardId").withValue(res.getShardId()), new Dimension().withName("Host").withValue( InetAddress.getLocalHost().toString())) .withValue(1.0) .withMetricName("RecordsPut"); cw.putMetricData(new PutMetricDataRequest() .withMetricData(d) .withNamespace("MySampleProducer")); }
/** * Send a datum object to this metric. * * @param value the measurement to record */ public void sendValue(double value) { MetricDatum datum = new MetricDatum(); datum.setMetricName(name); datum.setUnit(unit); datum.setValue(value); PutMetricDataRequest request = new PutMetricDataRequest(); request.setNamespace(namespace); request.withMetricData(datum); logger.info(String.format("logging %.2f to cloudwatch metric %s/%s", value, namespace, name)); client.putMetricData(request); }
private void postToCloudwatch(String namespace, List<CloudwatchStatistic> statistics) { List<MetricDatum> metricDatumList = statistics.stream() .map(CloudwatchStatistic::toMetricsDatum) .collect(toList()); PutMetricDataRequest request = new PutMetricDataRequest(); request.setNamespace(namespace); request.setMetricData(metricDatumList); amazonCloudWatch.putMetricData(request); }
@Test public void shouldNotInvokeCloudWatchClientInDryRunMode() throws Exception { metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); reporterBuilder.withDryRun().build().report(); verify(mockAmazonCloudWatchAsyncClient, never()).putMetricDataAsync(any(PutMetricDataRequest.class)); }
@Test public void shouldNotReportGaugeWhenMetricValueNotOfTypeNumber() throws Exception { metricRegistry.register(ARBITRARY_GAUGE_NAME, (Gauge<String>) () -> "bad value type"); reporterBuilder.build().report(); verify(mockAmazonCloudWatchAsyncClient, never()).putMetricDataAsync(any(PutMetricDataRequest.class)); }
@Test public void neverReportMetersCountersGaugesWithZeroValues() throws Exception { metricRegistry.register(ARBITRARY_GAUGE_NAME, (Gauge<Long>) () -> 0L); metricRegistry.meter(ARBITRARY_METER_NAME).mark(0); metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(0); buildReportWithSleep(reporterBuilder .withArithmeticMean() .withOneMinuteMeanRate() .withFiveMinuteMeanRate() .withFifteenMinuteMeanRate() .withMeanRate()); verify(mockAmazonCloudWatchAsyncClient, never()).putMetricDataAsync(any(PutMetricDataRequest.class)); }
@Test public void shouldNotReportUnchangedCounterValue() throws Exception { metricRegistry.counter(ARBITRARY_COUNTER_NAME).inc(); final CloudWatchReporter cloudWatchReporter = reporterBuilder.build(); cloudWatchReporter.report(); MetricDatum metricDatum = firstMetricDatumFromCapturedRequest(); assertThat(metricDatum.getValue().intValue()).isEqualTo(1); metricDataRequestCaptor.getAllValues().clear(); cloudWatchReporter.report(); verify(mockAmazonCloudWatchAsyncClient, times(1)).putMetricDataAsync(any(PutMetricDataRequest.class)); }
private List<Dimension> allDimensionsFromCapturedRequest() { final PutMetricDataRequest putMetricDataRequest = metricDataRequestCaptor.getValue(); final List<MetricDatum> metricData = putMetricDataRequest.getMetricData(); final List<Dimension> all = new LinkedList<>(); for (final MetricDatum metricDatum : metricData) { all.addAll(metricDatum.getDimensions()); } return all; }
public static void main(String[] args) { final String USAGE = "To run this example, supply a data point:\n" + "Ex: PutMetricData <data_point>\n"; if (args.length != 1) { System.out.println(USAGE); System.exit(1); } Double data_point = Double.parseDouble(args[0]); final AmazonCloudWatch cw = AmazonCloudWatchClientBuilder.defaultClient(); Dimension dimension = new Dimension() .withName("UNIQUE_PAGES") .withValue("URLS"); MetricDatum datum = new MetricDatum() .withMetricName("PAGES_VISITED") .withUnit(StandardUnit.None) .withValue(data_point) .withDimensions(dimension); PutMetricDataRequest request = new PutMetricDataRequest() .withNamespace("SITE/TRAFFIC") .withMetricData(datum); PutMetricDataResult response = cw.putMetricData(request); System.out.printf("Successfully put data point %f", data_point); }
public void process(Exchange exchange) throws Exception { List<MetricDatum> metricData = getMetricData(exchange); PutMetricDataRequest request = new PutMetricDataRequest() .withMetricData(metricData) .withNamespace(determineNameSpace(exchange)); log.info("Sending request [{}] from exchange [{}]...", request, exchange); getEndpoint().getCloudWatchClient().putMetricData(request); }
public void write(Query query) { PutMetricDataRequest metricDataRequest = new PutMetricDataRequest(); metricDataRequest.setNamespace(namespace); MetricDatum metricDatum = new MetricDatum(); // Convert the query value to a double for CloudWatch metricDatum.setMetricName(query.getKey()); metricDatum.setValue(convertToDouble(query.getValue())); metricDatum.setTimestamp(new Date()); metricDataRequest.withMetricData(metricDatum); cloudWatchClient.putMetricData(metricDataRequest); }
public void run() { try { PutMetricDataRequest putMetricDataRequest = new PutMetricDataRequest(); putMetricDataRequest.withMetricData(new MetricDatum() .withDimensions(hostDimension) .withMetricName("BytesPerEvent") .withValue(bytesHistogram.getSnapshot().getMean())); putMetricDataRequest.withMetricData(new MetricDatum() .withDimensions(hostDimension) .withMetricName("KplEventQueueCount") .withValue(producer.getKplQueueSize() * 1.0)); putMetricDataRequest.withMetricData(new MetricDatum() .withDimensions(hostDimension) .withMetricName("InternalEventQueueCount") .withValue(producer.getInternalQueueSize() * 1.0)); putMetricDataRequest.withMetricData(new MetricDatum() .withDimensions(hostDimension) .withMetricName("QueuedEventsPerSecond") .withValue(queuedMeter.getMeanRate())); putMetricDataRequest.withMetricData(new MetricDatum() .withDimensions(hostDimension) .withMetricName("CompletedEventsPerSecond") .withValue(completedMeter.getMeanRate())); putMetricDataRequest.withNamespace(appName); cloudWatch.putMetricData(putMetricDataRequest); LOGGER.debug("Published metrics to CloudWatch [{}].", this.toString()); } catch (Exception e) { LOGGER.error("Problem posting or reading cloudwatch metrics.", e); } }
/** * Collect the aggregated values (min, max, count, sum) into a * StatisticSet and send the data to Cloud Watch */ @Override public void run() { PutMetricDataRequest localPutMetricDataRequest = zeroValuePutMetricDataRequest; MetricDatum metricDatum = localPutMetricDataRequest.getMetricData().get(0); if (sampleCount > 0) { localPutMetricDataRequest = putMetricDataRequest; metricDatum = localPutMetricDataRequest.getMetricData().get(0); StatisticSet statisticSet = metricDatum.getStatisticValues(); synchronized (lock) { statisticSet.setMaximum(maximum); statisticSet.setMinimum(minimum); statisticSet.setSampleCount(sampleCount); statisticSet.setSum(sum); minimum = Double.MAX_VALUE; maximum = Double.MIN_VALUE; sampleCount = 0; sum = 0; } } metricDatum.setTimestamp(new Date()); if (log.isDebugEnabled()) { log.debug("sending " + localPutMetricDataRequest); } cloudWatchClient.putMetricData(localPutMetricDataRequest); }
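For reference, here is a minimal sketch of building a pre-aggregated datum like the one mutated in run() above, using the same AWS SDK v1 classes seen throughout this section; the namespace, metric name, unit and sample values are placeholders, the sendAggregatedSample name is assumed, and cloudWatchClient is the same client field used in run().

// Sketch only: a MetricDatum carrying a StatisticSet (min/max/count/sum) instead of a single value.
// Namespace, metric name, unit and numbers are placeholders for illustration.
private void sendAggregatedSample() {
    StatisticSet statisticSet = new StatisticSet()
            .withMinimum(12.0)
            .withMaximum(87.0)
            .withSampleCount(42.0)
            .withSum(1850.0);
    MetricDatum aggregated = new MetricDatum()
            .withMetricName("RequestLatency")
            .withUnit(StandardUnit.Milliseconds)
            .withTimestamp(new Date())
            .withStatisticValues(statisticSet);
    cloudWatchClient.putMetricData(new PutMetricDataRequest()
            .withNamespace("MyApp/Sample")
            .withMetricData(aggregated));
}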
@Override public synchronized void persist(Map<ControlledMetric<?, ?>, MetricValueDetails> metricValues, long timeMillis) throws IOException { Map<String, List<MetricDatum>> metricMap = buildMetricsMap(metricValues); // now write them to cloud-watch for (Map.Entry<String, List<MetricDatum>> entry : metricMap.entrySet()) { String nameSpace = nameSpacePrefix + ": " + MiscUtils.capitalize(entry.getKey()); List<MetricDatum> datumList = entry.getValue(); // we need to build multiple requests to post X datum at a time int endIndex; for (int startIndex = 0; startIndex < datumList.size(); startIndex = endIndex) { endIndex = startIndex + MAX_NUM_DATUM_ALLOWED_PER_POST; if (endIndex > datumList.size()) { endIndex = datumList.size(); } List<MetricDatum> requestDatumList; if (startIndex == 0 && endIndex == datumList.size()) { // no need to make a sub-list requestDatumList = datumList; } else { requestDatumList = datumList.subList(startIndex, endIndex); } PutMetricDataRequest request = new PutMetricDataRequest().withNamespace(nameSpace).withMetricData(requestDatumList); try { cloudWatchClient.putMetricData(request); } catch (Exception e) { throw new IOException("Could not publish metrics to CloudWatch", e); } } } }
@Override @SuppressWarnings("rawtypes") public void report( SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { logger.info("Starting metrics publishing to AWS CloudWatch."); LinkedList<PutMetricDataRequest> requests = new LinkedList<>(); addMetricData(gauges, counters, histograms, meters, timers, requests, new Date()); if (requests.isEmpty()) { logger.debug("No metric data to send to AWS."); return; } for (PutMetricDataRequest request : requests) { try { for (MetricDatum datum : request.getMetricData()) { logger.debug("Sending metric " + datum); } cloudWatch.putMetricData(request); } catch (Exception e) { logger.error("Failed to log metrics to CloudWatch discarding metrics for this attempt...",e); return; } } logger.info("Finished metrics publishing to AWS CloudWatch."); }
@SuppressWarnings("rawtypes") private void addMetricData(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers, LinkedList<PutMetricDataRequest> requests, Date timestamp) { addGauges(gauges, requests, timestamp); addCounters(counters, requests, timestamp); addHistograms(histograms, requests, timestamp); addMeters(meters, requests, timestamp); addTimers(timers, requests, timestamp); }
private void addCounters(SortedMap<String, Counter> counters, LinkedList<PutMetricDataRequest> requests, Date timestamp) { logger.debug("Adding Counters..."); for (String name : counters.keySet()) { Counter counter = counters.get(name); addDatum(filter.getMatchingMetricDescriptor(name, Stat.ALL).getAlias(), counter.getCount(), requests, timestamp); } }
private void addTimers(SortedMap<String, Timer> timers, LinkedList<PutMetricDataRequest> requests, Date timestamp) { logger.debug("Adding Timers..."); for (String name : timers.keySet()) { Timer timer = timers.get(name); checkAndAddDatum(MetricFilter.Stat.COUNT, name, timer.getCount(), requests, timestamp); addMetered(name, timer, requests, timestamp); } }
@SuppressWarnings("rawtypes") private void addGauges(SortedMap<String, Gauge> gauges, LinkedList<PutMetricDataRequest> requests, Date timestamp) { logger.debug("Adding Gauges..."); for (String name : gauges.keySet()) { Gauge<?> gauge = gauges.get(name); if (!(gauge.getValue() instanceof Number)) { logger.warn("Encountered Gauge with non-numeric value. Gauge:{}, Value:{}", name, gauge.getValue()); continue; } Double value = ((Number) gauge.getValue()).doubleValue(); addDatum(filter.getMatchingMetricDescriptor(name, Stat.ALL).getAlias(), value, requests, timestamp); } }
private void addDatum(String name, double value, LinkedList<PutMetricDataRequest> requests, Date timestamp) { if (logger.isDebugEnabled()) { logger.debug("Adding Datum {} with value {} at {}", name, value, timestamp); } if (requests.isEmpty() || requests.getLast().getMetricData().size() == MAX_CLOUDWATCH_DATUM_PER_REQUEST) { requests.add(createRequest()); } PutMetricDataRequest request = requests.getLast(); MetricDatum datum = new MetricDatum().withTimestamp(timestamp).withValue(value).withMetricName(name).withUnit(StandardUnit.None).withDimensions(createDimensions()); request.withMetricData(datum); }
/** * Ensure all metrics are published even when the max number of CloudWatch metrics (per request) is exceeded. */ @Test public void testPublishInMultipleCloudWatchRequests() throws InterruptedException { MetricRegistry metricRegistry = new MetricRegistry(); StringBuilder filter = new StringBuilder(); for (int i = 0; i < MetricsCloudWatchReporter.MAX_CLOUDWATCH_DATUM_PER_REQUEST + 1; i++) { String metric = "UnitTestCounter" + i; metricRegistry.counter(metric).inc(); if (i > 0) { filter.append(","); } filter.append(metric).append("=").append(metric); } final AmazonCloudWatch amazonCloudWatch = Mockito.mock(AmazonCloudWatch.class); reporter = new MetricsCloudWatchReporter( APP_NAME, APP_VERSION, APP_ENVIRONMENT, filter.toString(), 2, TimeUnit.SECONDS, metricRegistry, createCloudWatchFactory(amazonCloudWatch)); reporter.start(); Mockito.verify(amazonCloudWatch, Mockito.never()).putMetricData(Mockito.any(PutMetricDataRequest.class)); Thread.sleep(3000); Mockito.verify(amazonCloudWatch, Mockito.times(2)).putMetricData(Mockito.any(PutMetricDataRequest.class)); }
private void sendCloudWatchConsistencyAlert() { MetricDatum datum = new MetricDatum(); datum.setMetricName(cloudWatchConsistencyMetric); datum.setUnit(StandardUnit.Count); datum.setValue(1.0); PutMetricDataRequest request = new PutMetricDataRequest(); request.setNamespace(namespace); request.setMetricData(Collections.singleton(datum)); cloudWatch.putMetricData(request); }
private void sendCloudWatchTimeoutAlert() { MetricDatum datum = new MetricDatum(); datum.setMetricName(cloudWatchTimeoutMetric); datum.setUnit(StandardUnit.Count); datum.setValue(1.0); PutMetricDataRequest request = new PutMetricDataRequest(); request.setNamespace(namespace); request.setMetricData(Collections.singleton(datum)); cloudWatch.putMetricData(request); }
private void sendData( Collection<MetricDatum> metricData ) { PutMetricDataRequest request = new PutMetricDataRequest(); request.setNamespace( namespace ); request.setMetricData( metricData ); metricsClient.putMetricData( request ); }
private MetricDatum firstMetricDatumFromCapturedRequest() { final PutMetricDataRequest putMetricDataRequest = metricDataRequestCaptor.getValue(); return putMetricDataRequest.getMetricData().get(0); }
private List<Dimension> firstMetricDatumDimensionsFromCapturedRequest() { final PutMetricDataRequest putMetricDataRequest = metricDataRequestCaptor.getValue(); final MetricDatum metricDatum = putMetricDataRequest.getMetricData().get(0); return metricDatum.getDimensions(); }
@Override public void write(ArrayList<Stat> stats, long invokeTimeMs, Set<Tag> tags) { Date dt = new Date(); dt.setTime(invokeTimeMs); Collection<Dimension> parentDims = tagsToDimensions(tags); List<MetricDatum> metrics = new ArrayList<MetricDatum>(); /* * Create CW metric objects from Bender's internal Stat objects */ for (Stat stat : stats) { /* * Dimensions are CW's version of metric tags, so a conversion must be done. */ Collection<Dimension> metricDims = tagsToDimensions(stat.getTags()); metricDims.addAll(parentDims); MetricDatum metric = new MetricDatum(); metric.setMetricName(stat.getName()); // TODO: add units to Stat object SYSTEMS-870 metric.setUnit(StandardUnit.None); metric.setTimestamp(dt); metric.setDimensions(metricDims); metric.setValue((double) stat.getValue()); metrics.add(metric); } /* * Not well documented in the Javadocs, but CW only allows 20 metrics per request. */ List<List<MetricDatum>> chunks = ListUtils.partition(metrics, 20); for (List<MetricDatum> chunk : chunks) { PutMetricDataRequest req = new PutMetricDataRequest(); req.withMetricData(chunk); req.setNamespace(namespace); this.client.putMetricData(req); } }