/**
 * Accept a multipart upload and stream it straight back as a forced download.
 *
 * @param file the uploaded part; must not be empty.
 * @return a streaming response echoing the uploaded bytes as {@code sample.txt}.
 * @throws ValidationException when the uploaded file is empty.
 */
@PostMapping("/")
@ResponseBody
ResponseEntity<StreamingResponseBody> post(@RequestPart final MultipartFile file) throws Exception {
    if (file.isEmpty()) {
        throw new ValidationException("file was empty");
    }
    final HttpHeaders headers = new HttpHeaders();
    headers.setContentDispositionFormData("filename", "sample.txt");
    // Stream the upload in chunks instead of file.getBytes(), which buffered
    // the whole upload in memory and defeats the point of a streaming body.
    final StreamingResponseBody body = out -> {
        try (final java.io.InputStream in = file.getInputStream()) {
            final byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        }
    };
    return ResponseEntity.ok()
            .headers(headers)
            .contentType(MediaType.valueOf("application/force-download"))
            .body(body);
}
@RequestMapping(path = "config", method = RequestMethod.GET) public ResponseEntity<StreamingResponseBody> getConfig() { HttpHeaders headers = new HttpHeaders(); // 'produces' in annotation does not work with stream headers.setContentType(MediaType.APPLICATION_JSON); headers.set(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"cluman_config.json\""); return new ResponseEntity<>((os) -> { appConfigService.write(MimeTypeUtils.APPLICATION_JSON_VALUE, os); }, headers, HttpStatus.OK); }
@Test
public void whenNoCursorsThenLatestOffsetsAreUsed() throws NakadiException, IOException, InvalidCursorException {
    // Given: an event type with two partitions whose latest offsets are 87 and 34.
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
    final List<PartitionStatistics> tps2 = ImmutableList.of(
            new KafkaPartitionStatistics(timeline, 0, 0, 87),
            new KafkaPartitionStatistics(timeline, 1, 0, 34));
    when(timelineService.getActiveTimeline(any())).thenReturn(timeline);
    when(topicRepositoryMock.loadTopicStatistics(eq(Collections.singletonList(timeline))))
            .thenReturn(tps2);
    // Capture the EventStreamConfig the controller hands to the stream factory;
    // the captor is wired directly into the stubbing below.
    final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
    final EventStream eventStreamMock = mock(EventStream.class);
    when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
            .thenReturn(eventStreamMock);
    // When: streaming without explicit cursors (last argument is null).
    final StreamingResponseBody responseBody = createStreamingResponseBody(1, 0, 1, 1, 0, null);
    responseBody.writeTo(new ByteArrayOutputStream());
    // Then: the cursors default to the latest stored offset of each partition.
    final EventStreamConfig streamConfig = configCaptor.getValue();
    assertThat(
            streamConfig.getCursors(),
            equalTo(tps2.stream().map(PartitionStatistics::getLast).collect(Collectors.toList())));
}
/**
 * Turn a Hystrix command that yields an {@link InputStream} into a streaming
 * response body that copies the command result to the HTTP output stream.
 *
 * <p>The command's input stream is always closed, whether the copy succeeds or
 * fails; the original implementation closed it only on failure, leaking the
 * underlying HTTP connection on the success path.
 *
 * @param command the command producing the content to stream.
 * @return a body that writes the command result to the response.
 */
public static StreamingResponseBody toStreaming(final HystrixCommand<InputStream> command) {
    return outputStream -> {
        final Observable<InputStream> stream = command.toObservable();
        stream.toBlocking().subscribe(inputStream -> {
            try {
                try {
                    IOUtils.copyLarge(inputStream, outputStream);
                    outputStream.flush();
                } catch (IOException e) {
                    LOGGER.error("Unable to fully copy command result '{}'.", command.getClass(), e);
                }
            } finally {
                // Release the command result unconditionally.
                try {
                    inputStream.close();
                } catch (IOException closingException) {
                    LOGGER.warn("could not close command result, a http connection may be leaked !", closingException);
                }
            }
        }, TDPException::rethrowOrWrap);
    };
}
/** * Apply the preparation to the dataset out of the given IDs. * * @param preparationId the preparation id to apply on the dataset. * @param datasetId the dataset id to transform. * @param formatName The output {@link ExportFormat format}. This format also set the MIME response type. * @param stepId the preparation step id to use (default is 'head'). * @param name the transformation name. * @param exportParams additional (optional) export parameters. */ //@formatter:off @RequestMapping(value = "/apply/preparation/{preparationId}/dataset/{datasetId}/{format}", method = GET) @ApiOperation(value = "Transform the given preparation to the given format on the given dataset id", notes = "This operation transforms the dataset using preparation id in the provided format.") @VolumeMetered public StreamingResponseBody applyOnDataset(@ApiParam(value = "Preparation id to apply.") @PathVariable(value = "preparationId") final String preparationId, @ApiParam(value = "DataSet id to transform.") @PathVariable(value = "datasetId") final String datasetId, @ApiParam(value = "Output format") @PathVariable("format") final String formatName, @ApiParam(value = "Step id", defaultValue = "head") @RequestParam(value = "stepId", required = false, defaultValue = "head") final String stepId, @ApiParam(value = "Name of the transformation", defaultValue = "untitled") @RequestParam(value = "name", required = false, defaultValue = "untitled") final String name, @RequestParam final Map<String, String> exportParams) { //@formatter:on final ExportParameters exportParameters = new ExportParameters(); exportParameters.setPreparationId(preparationId); exportParameters.setDatasetId(datasetId); exportParameters.setExportType(formatName); exportParameters.setStepId(stepId); exportParameters.setExportName(name); exportParameters.getArguments().putAll(exportParams); return executeSampleExportStrategy(exportParameters); }
/** * Add the following preparation in cache. * * @param preparation the preparation to cache. * @param stepId the preparation step id. */ private void addPreparationInCache(Preparation preparation, String stepId) { final ExportParameters exportParameters = new ExportParameters(); exportParameters.setPreparationId(preparation.getId()); exportParameters.setExportType("JSON"); exportParameters.setStepId(stepId); exportParameters.setDatasetId(preparation.getDataSetId()); final StreamingResponseBody streamingResponseBody = executeSampleExportStrategy(exportParameters); try { // the result is not important here as it will be cached ! streamingResponseBody.writeTo(new NullOutputStream()); } catch (IOException e) { throw new TDPException(UNEXPECTED_EXCEPTION, e); } }
@Test public void shouldUsedVersionedPreparation() throws IOException { // Given final ExportParameters parameters = new ExportParameters(); parameters.setExportType("JSON"); parameters.setPreparationId("prep-1234"); parameters.setStepId("step-1234"); final Preparation preparation = new Preparation(); preparation.setId("prep-1234"); preparation.setHeadId("step-1234"); configurePreparation(preparation, "prep-1234", "step-1234"); // When final StreamingResponseBody body = strategy.execute(parameters); body.writeTo(new NullOutputStream()); // Then final ArgumentCaptor<Configuration> captor = ArgumentCaptor.forClass(Configuration.class); verify(transformer).buildExecutable(any(), captor.capture()); assertEquals("prep-1234", captor.getValue().getPreparationId()); assertEquals("step-1234", captor.getValue().getPreparation().getHeadId()); }
@Test public void shouldUsedHeadPreparation() throws IOException { // Given final ExportParameters parameters = new ExportParameters(); parameters.setExportType("JSON"); parameters.setPreparationId("prep-1234"); parameters.setStepId("head"); final Preparation preparation = new Preparation(); preparation.setId("prep-1234"); preparation.setHeadId("head"); configurePreparation(preparation, "prep-1234", "head"); // When final StreamingResponseBody body = strategy.execute(parameters); body.writeTo(new NullOutputStream()); // Then final ArgumentCaptor<Configuration> captor = ArgumentCaptor.forClass(Configuration.class); verify(transformer).buildExecutable(any(), captor.capture()); assertEquals("prep-1234", captor.getValue().getPreparationId()); assertEquals("head", captor.getValue().getPreparation().getHeadId()); }
/** * Returns a list containing all data sets metadata that are compatible with a preparation identified by * <tt>preparationId</tt>: its id. If no compatible data set is found an empty list is returned. The base data set * of the preparation with id <tt>preparationId</tt> is never returned in the list. * * @param preparationId the specified preparation id * @param sort the sort criterion: either name or date. * @param order the sorting order: either asc or desc */ @RequestMapping(value = "/api/preparations/{id}/basedatasets", method = RequestMethod.GET, produces = APPLICATION_JSON_VALUE) @ApiOperation(value = "Get all data sets that are compatible with a preparation.", notes = "Returns the list of data sets the current user is allowed to see and that are compatible with the preparation.") @Timed public StreamingResponseBody listCompatibleDatasets( @PathVariable(value = "id") @ApiParam(name = "id", value = "Preparation id.") String preparationId, @ApiParam(value = "Sort key (by name or date), defaults to 'date'.") @RequestParam(defaultValue = "creationDate", required = false) Sort sort, @ApiParam(value = "Order for sort key (desc or asc), defaults to 'desc'.") @RequestParam(defaultValue = "desc", required = false) Order order) { if (LOG.isDebugEnabled()) { LOG.debug("Listing compatible datasets (pool: {} )...", getConnectionStats()); } try { // get the preparation final Preparation preparation = internalGetPreparation(preparationId); // to list compatible datasets String dataSetId = preparation.getDataSetId(); HystrixCommand<InputStream> listCommand = getCommand(CompatibleDataSetList.class, dataSetId, sort, order); return CommandHelper.toStreaming(listCommand); } finally { if (LOG.isDebugEnabled()) { LOG.debug("Listing compatible datasets (pool: {}) done.", getConnectionStats()); } } }
@RequestMapping(value = "/api/preparations/preview/add", method = POST, consumes = APPLICATION_JSON_VALUE, produces = APPLICATION_JSON_VALUE) @ApiOperation(value = "Get a preview between the head step and a new appended transformation") public StreamingResponseBody previewAdd(@RequestBody @Valid final PreviewAddParameters input) { //@formatter:on Preparation preparation = null; List<Action> actions = new ArrayList<>(0); // get preparation details with dealing with preparations if (StringUtils.isNotBlank(input.getPreparationId())) { preparation = internalGetPreparation(input.getPreparationId()); actions = internalGetActions(preparation.getId()); } final HystrixCommand<InputStream> transformation = getCommand(PreviewAdd.class, input, preparation, actions); return executePreviewCommand(transformation); }
@RequestMapping(value = "/api/datasets/preview/{id}", method = GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "Get a data set by id.", produces = APPLICATION_JSON_VALUE, notes = "Get a data set based on given id.")
@Timed
public ResponseEntity<StreamingResponseBody> preview(@ApiParam(value = "Id of the data set to get") @PathVariable(value = "id") String id,
        @RequestParam(defaultValue = "true") @ApiParam(name = "metadata", value = "Include metadata information in the response") boolean metadata,
        @RequestParam(defaultValue = "") @ApiParam(name = "sheetName", value = "Sheet name to preview") String sheetName) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Requesting dataset #{} (pool: {})...", id, getConnectionStats());
    }
    try {
        // Delegate retrieval to the preview command and stream its result back.
        final GenericCommand<InputStream> previewCommand = getCommand(DataSetPreview.class, id, metadata, sheetName);
        return toStreaming(previewCommand);
    } finally {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Request dataset #{} (pool: {}) done.", id, getConnectionStats());
        }
    }
}
/** * Get the suggested action dynamic params. Dynamic params depends on the context (dataset / preparation / actual * transformations) */ @RequestMapping(value = "/api/transform/suggest/{action}/params", method = GET, produces = APPLICATION_JSON_VALUE) @ApiOperation(value = "Get the transformation dynamic parameters", notes = "Returns the transformation parameters.") @Timed public ResponseEntity<StreamingResponseBody> suggestActionParams( @ApiParam(value = "Transformation name.") @PathVariable("action") final String action, @ApiParam(value = "Suggested dynamic transformation input (preparation id or dataset id") @Valid final DynamicParamsInput dynamicParamsInput) { // get preparation/dataset content HystrixCommand<InputStream> inputData; final String preparationId = dynamicParamsInput.getPreparationId(); if (isNotBlank(preparationId)) { inputData = getCommand(PreparationGetContent.class, preparationId, dynamicParamsInput.getStepId()); } else { inputData = getCommand(DataSetGet.class, dynamicParamsInput.getDatasetId(), false, false); } // get params, passing content in the body final GenericCommand<InputStream> getActionDynamicParams = getCommand(SuggestActionParams.class, inputData, action, dynamicParamsInput.getColumnId()); return CommandHelper.toStreaming(getActionDynamicParams); }
/** * Download attached file. */ @GetMapping("/file/{id:[a-f0-9]{64}}/{key:[a-f0-9]{64}}") public ResponseEntity<StreamingResponseBody> file(@PathVariable("id") final String id, @PathVariable("key") final String keyHex, final HttpSession session) { final KeyIv keyIv = new KeyIv(BaseEncoding.base16().lowerCase().decode(keyHex), resolveFileIv(id, session)); final DecryptedFile decryptedFile = messageService.resolveStoredFile(id, keyIv); final HttpHeaders headers = new HttpHeaders(); // Set application/octet-stream instead of the original mime type to force download headers.setContentType(MediaType.APPLICATION_OCTET_STREAM); if (decryptedFile.getName() != null) { headers.setContentDispositionFormData("attachment", decryptedFile.getName(), StandardCharsets.UTF_8); } headers.setContentLength(decryptedFile.getOriginalFileSize()); final StreamingResponseBody body = out -> { try (final InputStream in = messageService.getStoredFileInputStream(id, keyIv)) { ByteStreams.copy(in, out); out.flush(); } messageService.burnFile(id); }; return new ResponseEntity<>(body, headers, HttpStatus.OK); }
/**
 * Stream the image stored at {fileLocation}/{imageDir}/{id}/{fileName} back to the client.
 *
 * <p>Uses try-with-resources for the file and channel: the original leaked both
 * when an IOException occurred mid-read. Each chunk is written with a single
 * array write instead of byte-by-byte.
 *
 * @throws ResourceNotFoundException (from the body) when the file cannot be read.
 */
@Override
public ResponseEntity<StreamingResponseBody> readFile(String fileLocation, String imageDir, String id, String fileName) {
    StreamingResponseBody streamingResponseBody = new StreamingResponseBody() {
        @Override
        public void writeTo(OutputStream outputStream) {
            String fileStr = fileLocation + File.separator + imageDir + File.separator + id + File.separator + fileName;
            try (RandomAccessFile file = new RandomAccessFile(fileStr, "r");
                 FileChannel inChannel = file.getChannel()) {
                ByteBuffer buffer = ByteBuffer.allocate(1024);
                while (inChannel.read(buffer) > 0) {
                    buffer.flip();
                    // Heap buffers from allocate() expose their backing array directly.
                    outputStream.write(buffer.array(), 0, buffer.limit());
                    buffer.clear();
                    outputStream.flush();
                }
            } catch (IOException e) {
                logger.error("Image Not Found : error " + e.getMessage());
                throw new ResourceNotFoundException("Image Not Found : " + id + "/" + fileName);
            }
        }
    };
    HttpHeaders headers = new HttpHeaders();
    headers.add(HttpHeaders.CONTENT_TYPE, "image/*");
    return new ResponseEntity<StreamingResponseBody>(streamingResponseBody, headers, HttpStatus.OK);
}
/**
 * Stream an image stored in Dropbox at /{imageDir}/{id}/{fileName}.
 *
 * <p>NOTE(review): fileLocation is unused by this Dropbox-backed implementation;
 * the remote path is rooted at SEPARATOR — confirm this matches the interface contract.
 *
 * @throws ResourceNotFoundException (from the body) when the download fails.
 */
@Override
public ResponseEntity<StreamingResponseBody> readFile(String fileLocation, String imageDir, String id, String fileName) {
    StreamingResponseBody streamingResponseBody = new StreamingResponseBody() {
        @Override
        public void writeTo(OutputStream outputStream) {
            try {
                String fileStr = SEPARATOR + imageDir + SEPARATOR + id + SEPARATOR + fileName;
                DbxRequestConfig config = new DbxRequestConfig(APP_IDENTIFIER);
                DbxClientV2 client = new DbxClientV2(config, ACCESS_TOKEN);
                // Download directly into the response output stream.
                client.files().download(fileStr).download(outputStream);
            } catch (Exception e) {
                // Pass the exception as the last argument so the stack trace is
                // preserved; the original logged only e.getMessage().
                logger.error(e.getMessage(), e);
                throw new ResourceNotFoundException("Image Not Found : " + id + "/" + fileName);
            }
        }
    };
    HttpHeaders headers = new HttpHeaders();
    headers.add(HttpHeaders.CONTENT_TYPE, "image/*");
    return new ResponseEntity<StreamingResponseBody>(streamingResponseBody, headers, HttpStatus.OK);
}
/**
 * Stream video.
 *
 * @param video Name of the video file on the server.
 * @return Video stream.
 * @throws FileNotFoundException When the file not found.
 */
@RequestMapping(method = RequestMethod.GET, value = "/{video:.+}", produces = {MediaType.APPLICATION_OCTET_STREAM_VALUE})
public StreamingResponseBody stream(@PathVariable String video) throws FileNotFoundException {
    File videoFile = videos.get(video);
    // Unknown names previously surfaced as a NullPointerException from the
    // FileInputStream constructor; throw the declared exception instead.
    if (videoFile == null) {
        throw new FileNotFoundException("video not found: " + video);
    }
    final InputStream videoFileStream = new FileInputStream(videoFile);
    return (os) -> {
        readAndWrite(videoFileStream, os);
    };
}
@Test
public void whenBatchLimitLowerThan1ThenUnprocessableEntity() throws Exception {
    // batch_limit == 0 must be rejected before any streaming starts.
    final StreamingResponseBody body =
            controller.streamEvents("abc", 0, 0, null, 10, null, null, requestMock, responseMock, FULL_ACCESS_CLIENT);

    final Problem expected = Problem.valueOf(UNPROCESSABLE_ENTITY, "batch_limit can't be lower than 1");
    assertThat(responseToString(body), jsonHelper.matchesObject(expected));
}
@Test
public void whenTopicNotExistsThenTopicNotFound() throws IOException, NakadiException {
    // An unknown event type must surface as a 404 problem in the stream body.
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenThrow(NoSuchEventTypeException.class);

    final StreamingResponseBody body = createStreamingResponseBody();

    final Problem expected = Problem.valueOf(NOT_FOUND, "topic not found");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenStreamLimitLowerThanBatchLimitThenUnprocessableEntity() throws NakadiException, IOException {
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

    // batch_limit (20) greater than stream_limit (10) is an invalid combination.
    final StreamingResponseBody body = createStreamingResponseBody(20, 10, 0, 0, 0, null);

    final Problem expected = Problem.valueOf(UNPROCESSABLE_ENTITY, "stream_limit can't be lower than batch_limit");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenStreamTimeoutLowerThanBatchTimeoutThenUnprocessableEntity() throws NakadiException, IOException {
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

    // batch_flush_timeout (20) greater than stream_timeout (10) is invalid.
    final StreamingResponseBody body = createStreamingResponseBody(0, 0, 20, 10, 0, null);

    final Problem expected = Problem.valueOf(UNPROCESSABLE_ENTITY, "stream_timeout can't be lower than batch_flush_timeout");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenBatchLimitLowerThan1ThenUnprocessableEntity() throws NakadiException, IOException {
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

    // batch_limit == 0 must be rejected up front.
    final StreamingResponseBody body = createStreamingResponseBody(0, 0, 0, 0, 0, null);

    final Problem expected = Problem.valueOf(UNPROCESSABLE_ENTITY, "batch_limit can't be lower than 1");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenWrongCursorsFormatThenBadRequest() throws NakadiException, IOException {
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

    // An unparsable X-nakadi-cursors header must yield a 400 problem.
    final StreamingResponseBody body = createStreamingResponseBody(0, 0, 0, 0, 0, "cursors_with_wrong_format");

    final Problem expected = Problem.valueOf(BAD_REQUEST, "incorrect syntax of X-nakadi-cursors header");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenInvalidCursorsThenPreconditionFailed() throws Exception {
    // Given: creating a consumer for the requested cursor fails as UNAVAILABLE.
    final NakadiCursor cursor = NakadiCursor.of(timeline, "0", "000000000000000000");
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
    when(timelineService.createEventConsumer(eq(KAFKA_CLIENT_ID), any()))
            .thenThrow(new InvalidCursorException(CursorError.UNAVAILABLE, cursor));

    // When
    final StreamingResponseBody body =
            createStreamingResponseBody(1, 0, 0, 0, 0, "[{\"partition\":\"0\",\"offset\":\"00000000000000000\"}]");

    // Then: the stream reports a 412 problem for the unavailable offset.
    final Problem expected =
            Problem.valueOf(PRECONDITION_FAILED, "offset 000000000000000000 for partition 0 is unavailable");
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenNakadiExceptionIsThrownThenServiceUnavailable() throws NakadiException, IOException {
    // A ServiceUnavailableException during lookup maps to a 503 problem.
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenThrow(ServiceUnavailableException.class);

    final StreamingResponseBody body = createStreamingResponseBody();

    final Problem expected = Problem.valueOf(SERVICE_UNAVAILABLE);
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void whenExceptionIsThrownThenInternalServerError() throws NakadiException, IOException {
    // Any unexpected runtime exception maps to a generic 500 problem.
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenThrow(NullPointerException.class);

    final StreamingResponseBody body = createStreamingResponseBody();

    final Problem expected = Problem.valueOf(INTERNAL_SERVER_ERROR);
    assertThat(responseToString(body), TestUtils.JSON_TEST_HELPER.matchesObject(expected));
}
@Test
public void testAccessDenied() throws Exception {
    // Given: the event type exists but stream-read authorization is denied.
    when(eventTypeRepository.findByName(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
    // The original stubbed authorizeStreamRead twice; in Mockito the last
    // stubbing wins, so keep only the one carrying the detailed exception.
    Mockito.doThrow(mockAccessDeniedException()).when(authorizationValidator).authorizeStreamRead(any());

    // When
    final StreamingResponseBody responseBody = createStreamingResponseBody(0, 0, 0, 0, 0, null);

    // Then: the stream reports a 403 problem with the denial details.
    final Problem expectedProblem = Problem.valueOf(FORBIDDEN, "Access on READ some-type:some-name denied");
    assertThat(responseToString(responseBody), TestUtils.JSON_TEST_HELPER.matchesObject(expectedProblem));
}
/**
 * Invoke the controller's streamEvents with the given stream parameters and the
 * shared test fixtures (event type name, mocks, full-access client).
 */
private StreamingResponseBody createStreamingResponseBody(final Integer batchLimit, final Integer streamLimit,
        final Integer batchTimeout, final Integer streamTimeout, final Integer streamKeepAliveLimit,
        final String cursorsStr) throws IOException {
    return controller.streamEvents(TEST_EVENT_TYPE_NAME, batchLimit, streamLimit, batchTimeout, streamTimeout,
            streamKeepAliveLimit, cursorsStr, requestMock, responseMock, FULL_ACCESS_CLIENT);
}
public static ResponseEntity<StreamingResponseBody> toStreaming(final GenericCommand<InputStream> command) { final Observable<InputStream> stream = command.toObservable(); return stream.map(is -> { // Content for the response entity final StreamingResponseBody body = outputStream -> { try { IOUtils.copyLarge(is, outputStream); outputStream.flush(); } catch (IOException e) { try { is.close(); } catch (IOException closingException) { LOGGER.warn("could not close command result, a http connection may be leaked !", closingException); } LOGGER.error("Unable to fully copy command result '{}'.", command.getClass(), e); } }; // copy all headers from the command response so that the mime-type is correctly forwarded. Command has // the correct headers due to call to toBlocking() below. final MultiValueMap<String, String> headers = new HttpHeaders(); final HttpStatus status = command.getStatus(); for (Header header : command.getCommandResponseHeaders()) { headers.put(header.getName(), Collections.singletonList(header.getValue())); } return new ResponseEntity<>(body, headers, status == null ? HttpStatus.OK : status); }).toBlocking().first(); }
@Test
public void testCommandToStreamingWithHeader() throws Exception {
    // Given: a command that returns "test" with a custom header and NO_CONTENT.
    GenericCommand<InputStream> command = new CommandHelperTestCommand();

    // When
    final ResponseEntity<StreamingResponseBody> response = CommandHelper.toStreaming(command);
    final ByteArrayOutputStream content = new ByteArrayOutputStream();
    response.getBody().writeTo(content);

    // Then: body, status and headers are all forwarded.
    assertEquals("test", content.toString());
    assertEquals(HttpStatus.NO_CONTENT, response.getStatusCode());
    assertEquals("custom value", response.getHeaders().getFirst("custom"));
}
@Test
public void testCommandToStreamingWithNoHeader() throws Exception {
    // Given: a plain Hystrix command whose result is the string "test".
    HystrixCommand<InputStream> command = new CommandHelperTestCommand();

    // When
    final StreamingResponseBody body = CommandHelper.toStreaming(command);
    final ByteArrayOutputStream content = new ByteArrayOutputStream();
    body.writeTo(content);

    // Then: the command result is copied verbatim to the output.
    assertEquals("test", content.toString());
}
@Test public void shouldUpdateStepRowMetadataForEachPersistentPreparation() throws Exception { // given when(folderRepository.locateEntry(any(), any())).thenReturn(mock(Folder.class)); when(service.execute(any(ExportParameters.class))).thenReturn(mock(StreamingResponseBody.class)); // when task.run(); // then verify(service, times(preparations.size())).execute(any()); }
/**
 * Run the transformation described by the given export parameters.
 *
 * @param parameters the export parameters (preparation/dataset ids, format, step...).
 * @return a streaming body that writes the transformation result.
 */
@RequestMapping(value = "/apply", method = POST)
@ApiOperation(value = "Run the transformation given the provided export parameters", notes = "This operation transforms the dataset or preparation using parameters in export parameters.")
@VolumeMetered
public StreamingResponseBody execute(@ApiParam(value = "Preparation id to apply.") @RequestBody @Valid final ExportParameters parameters) {
    return executeSampleExportStrategy(parameters);
}
/** * Export the dataset to the given format. * * @param datasetId the dataset id to transform. * @param formatName The output {@link ExportFormat format}. This format also set the MIME response type. * @param name the transformation name. * @param exportParams additional (optional) export parameters. */ //@formatter:off @RequestMapping(value = "/export/dataset/{datasetId}/{format}", method = GET) @ApiOperation(value = "Export the given dataset") @Timed public StreamingResponseBody exportDataset( @ApiParam(value = "DataSet id to transform.") @PathVariable(value = "datasetId") final String datasetId, @ApiParam(value = "Output format") @PathVariable("format") final String formatName, @ApiParam(value = "Name of the transformation", defaultValue = "untitled") @RequestParam(value = "name", required = false, defaultValue = "untitled") final String name, @RequestParam final Map<String, String> exportParams) { //@formatter:on return applyOnDataset(null, datasetId, formatName, null, name, exportParams); }
@RequestMapping(value = "/dictionary", method = GET, produces = APPLICATION_OCTET_STREAM_VALUE) @ApiOperation(value = "Get current dictionary (as serialized object).") @Timed public StreamingResponseBody getDictionary() { return outputStream -> { // Serialize it to output LOG.debug("Returning DQ dictionaries"); TdqCategories result = TdqCategoriesFactory.createFullTdqCategories(); try (ObjectOutputStream oos = new ObjectOutputStream(new GZIPOutputStream(outputStream))) { oos.writeObject(result); } }; }
@Override public StreamingResponseBody execute(ExportParameters parameters) { final String formatName = parameters.getExportType(); final ExportFormat format = getFormat(formatName); ExportUtils.setExportHeaders(parameters.getExportName(), // parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING), // format); return outputStream -> performOptimizedTransform(parameters, outputStream); }
@Override public StreamingResponseBody execute(ExportParameters parameters) { final TransformationCacheKey contentKey = getCacheKey(parameters); ExportUtils.setExportHeaders(parameters.getExportName(), // parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING), // getFormat(parameters.getExportType())); return outputStream -> { try (InputStream cachedContent = contentCache.get(contentKey)) { IOUtils.copy(cachedContent, outputStream); } }; }
@Override public StreamingResponseBody execute(final ExportParameters parameters) { final String formatName = parameters.getExportType(); final ExportFormat format = getFormat(formatName); ExportUtils.setExportHeaders(parameters.getExportName(), // parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING), // format); return outputStream -> performPreparation(parameters, outputStream); }
@Override public StreamingResponseBody execute(ExportParameters parameters) { final String formatName = parameters.getExportType(); final ExportFormat format = getFormat(formatName); ExportUtils.setExportHeaders(parameters.getExportName(), // parameters.getArguments().get(ExportFormat.PREFIX + CSVFormat.ParametersCSV.ENCODING), // format); return outputStream -> executeApplyPreparation(parameters, outputStream); }