@Test
public void verifyServiceAttributeFilterAllowedAttributes() {
    // Policy should release only the whitelisted attributes attr1 and attr3.
    final ReturnAllowedAttributeReleasePolicy policy = new ReturnAllowedAttributeReleasePolicy();
    policy.setAllowedAttributes(Arrays.asList("attr1", "attr3"));

    final Principal p = mock(Principal.class);
    final Map<String, Object> map = new HashMap<>();
    map.put("attr1", "value1");
    map.put("attr2", "value2");
    map.put("attr3", Arrays.asList("v3", "v4"));
    when(p.getAttributes()).thenReturn(map);
    when(p.getId()).thenReturn("principalId");

    final Map<String, Object> attr = policy.getAttributes(p);
    // assertEquals takes (expected, actual); the original had them reversed,
    // which produces misleading failure messages.
    assertEquals(2, attr.size());
    assertTrue(attr.containsKey("attr1"));
    assertTrue(attr.containsKey("attr3"));

    // Round-trip the policy to verify it serializes and retains its state.
    final byte[] data = SerializationUtils.serialize(policy);
    final ReturnAllowedAttributeReleasePolicy p2 = SerializationUtils.deserialize(data);
    assertNotNull(p2);
    assertEquals(policy.getAllowedAttributes(), p2.getAllowedAttributes());
}
@Test
public void verifyServiceAttributeFilterAllAttributes() {
    // ReturnAllAttributeReleasePolicy must release every principal attribute.
    final ReturnAllAttributeReleasePolicy policy = new ReturnAllAttributeReleasePolicy();
    final Principal p = mock(Principal.class);
    final Map<String, Object> map = new HashMap<>();
    map.put("attr1", "value1");
    map.put("attr2", "value2");
    map.put("attr3", Arrays.asList("v3", "v4"));
    when(p.getAttributes()).thenReturn(map);
    when(p.getId()).thenReturn("principalId");

    final Map<String, Object> attr = policy.getAttributes(p);
    // assertEquals takes (expected, actual); the original had them reversed.
    assertEquals(map.size(), attr.size());

    // Round-trip the policy to verify it is serializable.
    final byte[] data = SerializationUtils.serialize(policy);
    final ReturnAllAttributeReleasePolicy p2 = SerializationUtils.deserialize(data);
    assertNotNull(p2);
}
@Test
public void testMessageSliceWithStreamWriteFailure() throws IOException {
    try (MessageAssembler assembler = newMessageAssembler("testMessageSliceWithStreamWriteFailure")) {
        // Make every write/flush on the file-backed stream fail.
        final IOException ioFailure = new IOException("mock IOException");
        doThrow(ioFailure).when(mockFiledBackedStream).write(any(byte[].class), anyInt(), anyInt());
        doThrow(ioFailure).when(mockFiledBackedStream).write(any(byte[].class));
        doThrow(ioFailure).when(mockFiledBackedStream).write(anyInt());
        doThrow(ioFailure).when(mockFiledBackedStream).flush();

        final MessageSliceIdentifier sliceId = new MessageSliceIdentifier(IDENTIFIER, 1);
        final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
        final MessageSlice slice = new MessageSlice(sliceId, SerializationUtils.serialize(payload), 1, 1,
                SlicedMessageState.INITIAL_SLICE_HASH_CODE, testProbe.ref());
        assembler.handleMessage(slice, testProbe.ref());

        // The assembler must report the failure, drop its state and clean up.
        final MessageSliceReply sliceReply = testProbe.expectMsgClass(MessageSliceReply.class);
        assertFailedMessageSliceReply(sliceReply, IDENTIFIER, false);
        assertEquals("Failure cause", ioFailure, sliceReply.getFailure().get().getCause());
        assertFalse("MessageAssembler did not remove state for " + sliceId, assembler.hasState(sliceId));
        verify(mockFiledBackedStream).cleanup();
    }
}
@Test
public void verifyServiceAttributeFilterAllowedAttributesWithARegexFilter() {
    // Whitelist three attributes, then apply a regex filter matching only "v3";
    // only attr3 (whose values include v3) should survive.
    final ReturnAllowedAttributeReleasePolicy policy = new ReturnAllowedAttributeReleasePolicy();
    policy.setAllowedAttributes(Arrays.asList("attr1", "attr3", "another"));
    policy.setAttributeFilter(new RegisteredServiceRegexAttributeFilter("v3"));

    final Principal p = mock(Principal.class);
    final Map<String, Object> map = new HashMap<>();
    map.put("attr1", "value1");
    map.put("attr2", "value2");
    map.put("attr3", Arrays.asList("v3", "v4"));
    when(p.getAttributes()).thenReturn(map);
    when(p.getId()).thenReturn("principalId");

    final Map<String, Object> attr = policy.getAttributes(p);
    // assertEquals takes (expected, actual); the original had them reversed.
    assertEquals(1, attr.size());
    assertTrue(attr.containsKey("attr3"));

    // Round-trip the policy to verify serialized state (including the filter).
    final byte[] data = SerializationUtils.serialize(policy);
    final ReturnAllowedAttributeReleasePolicy p2 = SerializationUtils.deserialize(data);
    assertNotNull(p2);
    assertEquals(policy.getAllowedAttributes(), p2.getAllowedAttributes());
    assertEquals(policy.getAttributeFilter(), p2.getAttributeFilter());
}
@Test
public void testMessageSliceWithByteSourceFailure() throws IOException {
    try (MessageAssembler assembler = newMessageAssembler("testMessageSliceWithByteSourceFailure")) {
        // Fail both ways of opening the assembled-message byte source.
        final IOException ioFailure = new IOException("mock IOException");
        doThrow(ioFailure).when(mockByteSource).openStream();
        doThrow(ioFailure).when(mockByteSource).openBufferedStream();

        final MessageSliceIdentifier sliceId = new MessageSliceIdentifier(IDENTIFIER, 1);
        final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
        final MessageSlice slice = new MessageSlice(sliceId, SerializationUtils.serialize(payload), 1, 1,
                SlicedMessageState.INITIAL_SLICE_HASH_CODE, testProbe.ref());
        assembler.handleMessage(slice, testProbe.ref());

        // Expect a failed reply carrying the IOException and full state cleanup.
        final MessageSliceReply sliceReply = testProbe.expectMsgClass(MessageSliceReply.class);
        assertFailedMessageSliceReply(sliceReply, IDENTIFIER, false);
        assertEquals("Failure cause", ioFailure, sliceReply.getFailure().get().getCause());
        assertFalse("MessageAssembler did not remove state for " + sliceId, assembler.hasState(sliceId));
        verify(mockFiledBackedStream).cleanup();
    }
}
/**
 * Requests a security token from the STS for the given relying party and, when
 * successful, stores it (base64-encoded serialized form) as an authentication
 * attribute on the builder. Does nothing if the transaction carries no
 * username/password credential.
 */
private void invokeSecurityTokenServiceForToken(final AuthenticationTransaction transaction,
                                                final AuthenticationBuilder builder,
                                                final WSFederationRegisteredService rp,
                                                final SecurityTokenServiceClient sts) {
    // Locate the first username/password credential in the transaction, if any.
    final UsernamePasswordCredential up = transaction.getCredentials()
        .stream()
        .filter(UsernamePasswordCredential.class::isInstance)
        .map(UsernamePasswordCredential.class::cast)
        .findFirst()
        .orElse(null);
    if (up != null) {
        try {
            sts.getProperties().put(SecurityConstants.USERNAME, up.getUsername());
            // The "password" sent to the STS is the cipher-encoded username,
            // not the user's actual password.
            final String uid = credentialCipherExecutor.encode(up.getUsername());
            sts.getProperties().put(SecurityConstants.PASSWORD, uid);
            final SecurityToken token = sts.requestSecurityToken(rp.getAppliesTo());
            // Serialize + base64 so the token can travel as a String attribute.
            final String tokenStr = EncodingUtils.encodeBase64(SerializationUtils.serialize(token));
            builder.addAttribute(WSFederationConstants.SECURITY_TOKEN_ATTRIBUTE, tokenStr);
        } catch (final Exception e) {
            // NOTE(review): only the message is propagated here -- the original
            // cause and stack trace are lost. Consider a cause-preserving
            // constructor if AuthenticationException offers one.
            throw new AuthenticationException(e.getMessage());
        }
    }
}
/**
 * Generates a proxy re-encryption key that transforms ciphertexts encrypted
 * under {@code encryptionKeyName} so they can be decrypted with the
 * destination key pair.
 *
 * @param encryptionKeyName name of the source key pair held by the provider
 * @param dstEncryptionKeyName name of the destination key (kept for the interface)
 * @param destinationKey the destination key pair material
 * @return the encoded re-encryption key
 * @throws IOException if the provider lookup fails or the key parameters are invalid
 */
@Override
public byte[] generateReEncryptionKey(String encryptionKeyName, String dstEncryptionKeyName,
        KeyPairMaterial destinationKey) throws IOException {
    BBS98BCReEncryptionKeyGenerator reEncryptionKeyGenerator = new BBS98BCReEncryptionKeyGenerator();
    KeyPairVersion srcKey = provider.getKeyPairVersion(encryptionKeyName);
    KeyPairSpec srcKeySpec = new KeyPairSpec(srcKey.getPublicMaterial().getMaterial(),
            srcKey.getPrivateMaterial().getMaterial(), reEncryptionKeyGenerator.getAlgorithm());
    KeyPairSpec dstKeySpec = new KeyPairSpec(destinationKey.getPublic(), destinationKey.getPrivate(),
            reEncryptionKeyGenerator.getAlgorithm());
    // The generator must be initialized with the destination key's EC parameters.
    ECPrivateKey dstSk = SerializationUtils.deserialize(dstKeySpec.getPrivate().getKeyMaterial());
    try {
        reEncryptionKeyGenerator.initialize(dstSk.getParameters());
        ReEncryptionKey reKey = reEncryptionKeyGenerator.generateReEncryptionKey(
                srcKeySpec.getPrivate().getKeyMaterial(), dstKeySpec.getPrivate().getKeyMaterial());
        return reKey.getEncoded();
    } catch (InvalidAlgorithmParameterException e) {
        // Previously the failure was printed to stderr and null was returned,
        // forcing every caller to null-check; propagate it (cause preserved)
        // via the already-declared IOException instead.
        throw new IOException("Unable to generate re-encryption key for " + encryptionKeyName, e);
    }
}
@Test
public void testSingleMessageSlice() {
    try (MessageAssembler assembler = newMessageAssembler("testSingleMessageSlice")) {
        // Use a real (spied) file-backed stream so assembly actually writes bytes.
        final FileBackedOutputStream spiedStream = spy(new FileBackedOutputStream(100000000, null));
        doReturn(spiedStream).when(mockFiledBackedStreamFactory).newInstance();

        final MessageSliceIdentifier sliceId = new MessageSliceIdentifier(IDENTIFIER, 1);
        final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
        final MessageSlice slice = new MessageSlice(sliceId, SerializationUtils.serialize(payload), 1, 1,
                SlicedMessageState.INITIAL_SLICE_HASH_CODE, testProbe.ref());
        assembler.handleMessage(slice, testProbe.ref());

        // A one-slice message should be acked, assembled, and its state dropped.
        final MessageSliceReply sliceReply = testProbe.expectMsgClass(MessageSliceReply.class);
        assertSuccessfulMessageSliceReply(sliceReply, IDENTIFIER, 1);
        assertAssembledMessage(mockAssembledMessageCallback, payload, testProbe.ref());
        assertFalse("MessageAssembler did not remove state for " + sliceId, assembler.hasState(sliceId));
        verify(spiedStream).cleanup();
    }
}
/**
 * Builds the TaskInfo for a job run using the customized (non-command) executor.
 * Task-level resources come from the job configuration; executor-level
 * resources come from the app configuration.
 *
 * @return the fully-populated TaskInfo
 */
private Protos.TaskInfo buildCustomizedExecutorTaskInfo(final TaskContext taskContext, final CloudAppConfiguration appConfig,
        final CloudJobConfiguration jobConfig, final ShardingContexts shardingContexts,
        final Protos.Offer offer, final Protos.CommandInfo command) {
    Protos.TaskInfo.Builder result = Protos.TaskInfo.newBuilder()
            .setTaskId(Protos.TaskID.newBuilder().setValue(taskContext.getId()).build())
            .setName(taskContext.getTaskName()).setSlaveId(offer.getSlaveId())
            .addResources(buildResource("cpus", jobConfig.getCpuCount(), offer.getResourcesList()))
            .addResources(buildResource("mem", jobConfig.getMemoryMB(), offer.getResourcesList()))
            .setData(ByteString.copyFrom(new TaskInfoData(shardingContexts, jobConfig).serialize()));
    Protos.ExecutorInfo.Builder executorBuilder = Protos.ExecutorInfo.newBuilder()
            .setExecutorId(Protos.ExecutorID.newBuilder().setValue(taskContext.getExecutorId(jobConfig.getAppName())))
            .setCommand(command)
            .addResources(buildResource("cpus", appConfig.getCpuCount(), offer.getResourcesList()))
            .addResources(buildResource("mem", appConfig.getMemoryMB(), offer.getResourcesList()));
    if (env.getJobEventRdbConfiguration().isPresent()) {
        // Pass the RDB event-trace config to the executor. The original code
        // chained a build() here and discarded the result; setData alone is
        // sufficient since it mutates the builder in place.
        executorBuilder.setData(ByteString.copyFrom(SerializationUtils.serialize(env.getJobEventRdbConfigurationMap())));
    }
    return result.setExecutor(executorBuilder.build()).build();
}
@Test
public void testAssembledMessageStateExpiration() throws IOException {
    final int expiryDuration = 200;
    try (MessageAssembler assembler = newMessageAssemblerBuilder("testAssembledMessageStateExpiration")
            .expireStateAfterInactivity(expiryDuration, TimeUnit.MILLISECONDS).build()) {
        // Send slice 1 of 2 so the assembler holds partial state.
        final MessageSliceIdentifier sliceId = new MessageSliceIdentifier(IDENTIFIER, 1);
        final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
        final MessageSlice slice = new MessageSlice(sliceId, SerializationUtils.serialize(payload), 1, 2,
                SlicedMessageState.INITIAL_SLICE_HASH_CODE, testProbe.ref());
        assembler.handleMessage(slice, testProbe.ref());

        final MessageSliceReply sliceReply = testProbe.expectMsgClass(MessageSliceReply.class);
        assertSuccessfulMessageSliceReply(sliceReply, IDENTIFIER, 1);
        assertTrue("MessageAssembler should have remove state for " + sliceId, assembler.hasState(sliceId));

        // After the inactivity window the partial state must be evicted and cleaned up.
        Uninterruptibles.sleepUninterruptibly(expiryDuration + 50, TimeUnit.MILLISECONDS);
        assertFalse("MessageAssembler did not remove state for " + sliceId, assembler.hasState(sliceId));
        verify(mockFiledBackedStream).cleanup();
    }
}
/**
 * Executor task entry point: reports RUNNING, deserializes the task payload,
 * runs the job (one-shot or daemon), and reports the terminal state.
 */
@Override
public void run() {
    // Ensure job classes resolve against this component's class loader.
    Thread.currentThread().setContextClassLoader(TaskThread.class.getClassLoader());
    executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder().setTaskId(taskInfo.getTaskId()).setState(Protos.TaskState.TASK_RUNNING).build());
    // The task data is a serialized map carrying the sharding context and the
    // raw job configuration map.
    Map<String, Object> data = SerializationUtils.deserialize(taskInfo.getData().toByteArray());
    ShardingContexts shardingContexts = (ShardingContexts) data.get("shardingContext");
    @SuppressWarnings("unchecked")
    JobConfigurationContext jobConfig = new JobConfigurationContext((Map<String, String>) data.get("jobConfigContext"));
    try {
        ElasticJob elasticJob = getElasticJobInstance(jobConfig);
        final CloudJobFacade jobFacade = new CloudJobFacade(shardingContexts, jobConfig, jobEventBus);
        if (jobConfig.isTransient()) {
            // Transient jobs run once and the task finishes immediately.
            JobExecutorFactory.getJobExecutor(elasticJob, jobFacade).execute();
            executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder().setTaskId(taskInfo.getTaskId()).setState(Protos.TaskState.TASK_FINISHED).build());
        } else {
            // Daemon jobs are handed to a scheduler and keep the task alive.
            new DaemonTaskScheduler(elasticJob, jobConfig, jobFacade, executorDriver, taskInfo.getTaskId()).init();
        }
        // CHECKSTYLE:OFF
    } catch (final Throwable ex) {
        // CHECKSTYLE:ON
        // Report the error state (with a transformed stack trace) before
        // stopping the driver, then rethrow so the failure is not swallowed.
        log.error("Elastic-Job-Cloud-Executor error", ex);
        executorDriver.sendStatusUpdate(Protos.TaskStatus.newBuilder().setTaskId(taskInfo.getTaskId()).setState(Protos.TaskState.TASK_ERROR).setMessage(ExceptionUtil.transform(ex)).build());
        executorDriver.stop();
        throw ex;
    }
}
@Test
public void testSlicingWithFailure() {
    LOG.info("testSlicingWithFailure starting");

    // Force at least two slices by halving the serialized message size.
    final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
    final int sliceSize = SerializationUtils.serialize(payload).length / 2;
    try (MessageSlicer slicer = newMessageSlicer("testSlicingWithFailure", sliceSize)) {
        final boolean sliced = slice(slicer, IDENTIFIER, payload, sendToProbe.ref(), replyToProbe.ref(),
                mockOnFailureCallback);
        assertTrue(sliced);

        // Feed back a failed reply; the failure callback must fire and the
        // slicer must drop its state.
        final MessageSlice firstSlice = sendToProbe.expectMsgClass(MessageSlice.class);
        final MessageSliceException failure = new MessageSliceException("mock failure",
                new IOException("mock IOException"));
        slicer.handleMessage(MessageSliceReply.failed(firstSlice.getIdentifier(), failure, sendToProbe.ref()));
        assertFailureCallback(IOException.class);
        assertFalse("MessageSlicer did not remove state for " + firstSlice.getIdentifier(),
                slicer.hasState(firstSlice.getIdentifier()));
    }

    LOG.info("testSlicingWithFailure ending");
}
/**
 * Applies a biome change received over the network. On the client the change
 * is scheduled on the client thread; on the server it is scheduled on the
 * owning dimension's world thread. The change is skipped if the target chunk
 * is not loaded at execution time.
 */
@Override
public void handleData(NBTTagCompound data) {
    Biome b = Biome.getBiome(data.getInteger("biomeID"));
    // "blockPosData" carries a serialized DimBlockPos (dimension id + position).
    DimBlockPos dbp = SerializationUtils.deserialize(data.getByteArray("blockPosData"));
    if (MC.getSide() == Side.CLIENT) {
        // Defer to the client thread; world access is not thread-safe.
        VoidApi.proxy.getClientListener().addScheduledTask(() -> {
            if (VoidApi.proxy.getClientWorld().isBlockLoaded(dbp.pos))
                ChunkUtils.setBiomeAt(b, dbp.pos, VoidApi.proxy.getClientWorld());}
        );
    } else {
        // Defer to the dimension's server world thread for the same reason.
        WorldServer ws = DimensionManager.getWorld(dbp.dim);
        ws.addScheduledTask(() -> {
            if (ws.isBlockLoaded(dbp.pos))
                ChunkUtils.setBiomeAt(b, dbp.pos, ws);}
        );
    }
}
@SuppressWarnings("deprecation") @Override public MockResponse createResponse(MockRequest request) { Optional<String> targetKey = getObjectKey(request); Optional<Bucket> targetBucket = getBucket(request); Optional<S3Object> source = getSourceFromHeader(request); if (request.utils().areAllPresent(targetKey, targetBucket, source)) { S3Object copy = SerializationUtils.clone(source.get()); copy.setKey(targetKey.get()); targetBucket.get().getObjects().add(copy); } return new MockResponse( successBody("CopyObject", "<LastModified>" + ISO8601Utils.format(new Date()) + "</LastModified>\n" + " <ETag>" + UUID.randomUUID().toString() + "</ETag>")); }
/**
 * Receives an uploaded attachment stream and saves it through the
 * AttachmentSupport instance reconstructed from the request headers; writes
 * back the stored attachment name (URL-encoded) on success, or an error
 * message with a 500 status on failure.
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    String fileName = URLDecoder.decode(request.getHeader("File-Name"), Charsets.UTF_8.name());
    // SECURITY NOTE(review): this Java-deserializes a client-supplied header.
    // Native deserialization of untrusted data is a well-known RCE vector --
    // confirm the header is integrity-protected upstream, or replace the
    // serialized object with a safer representation.
    AttachmentSupport attachmentSuppport = (AttachmentSupport) SerializationUtils
            .deserialize(Base64.decodeBase64(request.getHeader("Attachment-Support")));
    try {
        String attachmentName = attachmentSuppport.saveAttachment(fileName, request.getInputStream());
        response.getWriter().print(URLEncoder.encode(attachmentName, Charsets.UTF_8.name()));
        response.setStatus(HttpServletResponse.SC_OK);
    } catch (Exception e) {
        logger.error("Error uploading attachment.", e);
        // Fall back to a generic message when the exception carries none.
        if (e.getMessage() != null)
            response.getWriter().print(e.getMessage());
        else
            response.getWriter().print("Internal server error");
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    }
}
/**
 * Returns the commit count for the project, consulting the in-memory cache
 * first and falling back to the per-project store (read in a read-only
 * transaction). A missing persisted value is treated as zero.
 */
@Override
public int getCommitCount(Project project) {
    Integer commitCount = commitCountCache.get(project.getId());
    if (commitCount == null) {
        Environment env = getEnv(project.getId().toString());
        Store store = getStore(env, DEFAULT_STORE);
        commitCount = env.computeInReadonlyTransaction(new TransactionalComputable<Integer>() {

            @Override
            public Integer compute(Transaction txn) {
                byte[] bytes = getBytes(store.get(txn, COMMIT_COUNT_KEY));
                if (bytes != null) {
                    return (Integer) SerializationUtils.deserialize(bytes);
                } else {
                    // No persisted count yet -- treat as zero.
                    return 0;
                }
            }

        });
        // NOTE(review): the get/put pair is not atomic, so concurrent callers
        // may both compute the value. Harmless here (both compute the same
        // number), but a computeIfAbsent-style cache would avoid the
        // duplicated work -- confirm the cache type supports it.
        commitCountCache.put(project.getId(), commitCount);
    }
    return commitCount;
}
@Test
public void testReadWriteObject() throws Exception {
    // Populate a profile, round-trip it through Java serialization, and verify
    // every token survives intact.
    OidcProfile profile = new OidcProfile();
    profile.setAccessToken(populatedAccessToken);
    profile.setIdTokenString(ID_TOKEN);
    profile.setRefreshToken(new RefreshToken(REFRESH_TOKEN));

    byte[] result = SerializationUtils.serialize(profile);
    profile = SerializationUtils.deserialize(result);

    assertNotNull("accessToken", profile.getAccessToken());
    assertNotNull("value", profile.getAccessToken().getValue());
    // assertEquals takes (expected, actual); the original had them reversed,
    // which produces misleading failure messages.
    assertEquals(populatedAccessToken.getLifetime(), profile.getAccessToken().getLifetime());
    assertEquals(populatedAccessToken.getScope(), profile.getAccessToken().getScope());
    assertEquals(ID_TOKEN, profile.getIdTokenString());
    assertEquals(REFRESH_TOKEN, profile.getRefreshToken().getValue());
}
/**
 * Queues a request: pushes its URL onto the spider's Redis list and stores the
 * serialized request in a hash keyed by md5(url). POST requests are never
 * deduplicated; other methods are skipped when already seen.
 */
@Override
public void push(Request request, ISpider spider) {
    if (Const.HttpMethod.POST == request.getMethod() || !isDuplicate(request, spider)) {
        log.debug("push to queue {}", request.getUrl());
        // Borrow the connection only when we actually write, and let
        // try-with-resources return it to the pool. The original acquired the
        // connection before the guard but closed it only inside the branch,
        // leaking it whenever the duplicate check rejected the request.
        try (Jedis jedis = pool.getResource()) {
            jedis.rpush(getQueueKey(spider), request.getUrl());
            String field = DigestUtils.md5Hex(request.getUrl());
            byte[] data = SerializationUtils.serialize(request);
            jedis.hset((ITEM_PREFIX + spider.getName()).getBytes(), field.getBytes(), data);
        }
    }
}
/**
 * Pops the next queued URL for the spider and returns its stored Request,
 * or null when the queue is empty.
 */
@Override
public synchronized Request poll(ISpider spider) {
    try (Jedis jedis = pool.getResource()) {
        final String url = jedis.lpop(getQueueKey(spider));
        if (url == null) {
            return null;
        }
        // Requests are stored in a hash keyed by md5(url).
        final byte[] hashKey = (ITEM_PREFIX + spider.getName()).getBytes();
        final byte[] field = DigestUtils.md5Hex(url).getBytes();
        return SerializationUtils.deserialize(jedis.hget(hashKey, field));
    }
}
@Before
public void setup() {
    MockitoAnnotations.initMocks(this);

    // Serialize a three-entry map and split it into three chunks.
    data = new HashMap<>();
    data.put("key1", "value1");
    data.put("key2", "value2");
    data.put("key3", "value3");
    byteString = ByteString.copyFrom(SerializationUtils.serialize((Serializable) data));
    chunk1 = getNextChunk(byteString, 0, 10);
    chunk2 = getNextChunk(byteString, 10, 10);
    chunk3 = getNextChunk(byteString, 20, byteString.size());

    // Spy the output stream so cleanup() interactions can be verified later.
    fbos = spy(new FileBackedOutputStream(100000000, "target"));
    final FileBackedOutputStreamFactory streamFactory = mock(FileBackedOutputStreamFactory.class);
    doReturn(fbos).when(streamFactory).newInstance();
    doReturn(streamFactory).when(mockContext).getFileBackedOutputStreamFactory();
}
@Test
public void testSliceWithFileBackedOutputStream() throws IOException {
    LOG.info("testSliceWithFileBackedOutputStream starting");

    // Pre-serialize the message into a file-backed stream and slice from that.
    final BytesMessage payload = new BytesMessage(new byte[]{1, 2, 3});
    final FileBackedOutputStream fileBackedStream = FILE_BACKED_STREAM_FACTORY.newInstance();
    try (ObjectOutputStream out = new ObjectOutputStream(fileBackedStream)) {
        out.writeObject(payload);
    }

    try (MessageSlicer slicer = newMessageSlicer("testSliceWithFileBackedOutputStream",
            SerializationUtils.serialize(payload).length)) {
        final SliceOptions options = SliceOptions.builder()
                .identifier(IDENTIFIER)
                .fileBackedOutputStream(fileBackedStream)
                .sendTo(ACTOR_SYSTEM.actorSelection(sendToProbe.ref().path()))
                .replyTo(replyToProbe.ref())
                .onFailureCallback(mockOnFailureCallback)
                .build();
        slicer.slice(options);

        // Round-trip through the assembler and check the original message arrives.
        final MessageSlice sliceMessage = sendToProbe.expectMsgClass(MessageSlice.class);
        assembler.handleMessage(sliceMessage, sendToProbe.ref());
        assertAssembledMessage(payload, replyToProbe.ref());
    }

    LOG.info("testSliceWithFileBackedOutputStream ending");
}
@Test public void testSingleSlice() { LOG.info("testSingleSlice starting"); // Slice a message where the serialized size is equal to the messageSliceSize. In this case it should // just send the original message. final BytesMessage message = new BytesMessage(new byte[]{1, 2, 3}); try (MessageSlicer slicer = newMessageSlicer("testSingleSlice", SerializationUtils.serialize(message).length)) { final boolean wasSliced = slice(slicer, IDENTIFIER, message, sendToProbe.ref(), replyToProbe.ref(), mockOnFailureCallback); assertFalse(wasSliced); final BytesMessage sentMessage = sendToProbe.expectMsgClass(BytesMessage.class); assertEquals("Sent message", message, sentMessage); } LOG.info("testSingleSlice ending"); }
@Test
public void verifyAttributeFilterMappedAttributes() {
    // attr1 is remapped to newAttr1; unmapped attributes must not be released.
    final ReturnMappedAttributeReleasePolicy policy = new ReturnMappedAttributeReleasePolicy();
    final Map<String, String> mappedAttr = new HashMap<>();
    mappedAttr.put("attr1", "newAttr1");
    policy.setAllowedAttributes(mappedAttr);

    final Principal p = mock(Principal.class);
    final Map<String, Object> map = new HashMap<>();
    map.put("attr1", "value1");
    map.put("attr2", "value2");
    map.put("attr3", Arrays.asList("v3", "v4"));
    when(p.getAttributes()).thenReturn(map);
    when(p.getId()).thenReturn("principalId");

    final Map<String, Object> attr = policy.getAttributes(p);
    // assertEquals takes (expected, actual); the original had them reversed.
    assertEquals(1, attr.size());
    assertTrue(attr.containsKey("newAttr1"));

    // Round-trip the policy to verify serialized state.
    final byte[] data = SerializationUtils.serialize(policy);
    final ReturnMappedAttributeReleasePolicy p2 = SerializationUtils.deserialize(data);
    assertNotNull(p2);
    assertEquals(policy.getAllowedAttributes(), p2.getAllowedAttributes());
}
/**
 * Extracts and deserializes the security token carried in the CAS assertion's
 * principal attributes (the inverse of how the token is stored at
 * authentication time: base64 of the serialized SecurityToken).
 *
 * @throws UnauthorizedServiceException if the assertion carries no security token attribute
 */
private static SecurityToken validateSecurityTokenInAssertion(final Assertion assertion, final HttpServletRequest request,
        final HttpServletResponse response) {
    LOGGER.debug("Validating security token in CAS assertion...");
    final AttributePrincipal principal = assertion.getPrincipal();
    if (!principal.getAttributes().containsKey(WSFederationConstants.SECURITY_TOKEN_ATTRIBUTE)) {
        throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_UNAUTHZ_SERVICE);
    }
    // Attribute value is the base64 form of the serialized SecurityToken.
    final String token = (String) principal.getAttributes().get(WSFederationConstants.SECURITY_TOKEN_ATTRIBUTE);
    final byte[] securityTokenBin = EncodingUtils.decodeBase64(token);
    return SerializationUtils.deserialize(securityTokenBin);
}
/**
 * Builds the DynamoDB attribute-value map for a ticket row. Plain metadata
 * columns come from the original ticket, while the ENCODED column stores the
 * serialized (possibly encrypted) ticket as a binary blob.
 *
 * @param ticket the ticket supplying metadata
 * @param encTicket the encoded ticket to persist
 * @return the attribute values keyed by column name
 */
public Map<String, AttributeValue> buildTableAttributeValuesMapFromTicket(final Ticket ticket, final Ticket encTicket) {
    final Map<String, AttributeValue> row = new HashMap<>();
    row.put(ColumnNames.ID.getName(), new AttributeValue(encTicket.getId()));
    row.put(ColumnNames.PREFIX.getName(), new AttributeValue(encTicket.getPrefix()));
    row.put(ColumnNames.CREATION_TIME.getName(), new AttributeValue(ticket.getCreationTime().toString()));
    row.put(ColumnNames.COUNT_OF_USES.getName(),
            new AttributeValue().withN(Integer.toString(ticket.getCountOfUses())));
    row.put(ColumnNames.TIME_TO_LIVE.getName(),
            new AttributeValue().withN(Long.toString(ticket.getExpirationPolicy().getTimeToLive())));
    row.put(ColumnNames.TIME_TO_IDLE.getName(),
            new AttributeValue().withN(Long.toString(ticket.getExpirationPolicy().getTimeToIdle())));
    // The full ticket travels as serialized bytes.
    final ByteBuffer encodedBytes = ByteBuffer.wrap(SerializationUtils.serialize(encTicket));
    row.put(ColumnNames.ENCODED.getName(), new AttributeValue().withB(encodedBytes));
    LOGGER.debug("Created attribute values [{}] based on provided ticket [{}]", row, encTicket.getId());
    return row;
}
/**
 * Creates a security-token ticket holding the base64-encoded serialized token
 * and registers it as a descendant of the granting ticket.
 */
@Override
public SecurityTokenTicket create(final TicketGrantingTicket ticket, final SecurityToken securityToken) {
    final String encodedToken = EncodingUtils.encodeBase64(SerializationUtils.serialize(securityToken));
    final String ticketId = ticketUniqueTicketIdGenerator.getNewTicketId(SecurityTokenTicket.PREFIX);
    final SecurityTokenTicket result =
            new DefaultSecurityTokenTicket(ticketId, ticket, this.expirationPolicy, encodedToken);
    // Link the new ticket to its granting ticket so cascading operations find it.
    ticket.getDescendantTickets().add(result.getId());
    return result;
}
/**
 * Rebuilds both private keys from their serialized material and delegates to
 * the engine to derive the proxy re-encryption key.
 */
@Override
public ReEncryptionKey generateReEncryptionKey(byte[] keyMaterialFrom, byte[] keyMaterialTo) {
    final PrivateKey sourceKey = SerializationUtils.deserialize(keyMaterialFrom);
    final PrivateKey targetKey = SerializationUtils.deserialize(keyMaterialTo);
    return new BBS98ReEncryptionKeySpec(engine.rekeygen(sourceKey, targetKey));
}
/**
 * Publishes the serialized task to its target queue and records it with the
 * duplicate remover.
 */
private void publish(Task task) throws IOException {
    // Priority-enabled brokers need explicit properties carrying the task
    // priority; otherwise the stock persistent-message properties suffice.
    final AMQP.BasicProperties props;
    if (withPriority) {
        props = new AMQP.BasicProperties.Builder()
                .contentType("application/octet-stream")
                .deliveryMode(2)
                .priority((int) task.getPriority())
                .build();
    } else {
        props = MessageProperties.PERSISTENT_BASIC;
    }
    channel.basicPublish(EXCHANGE, dispatcher.getTargetQueue(task), props, SerializationUtils.serialize(task));
    duplicateRemover.record(task);
}
/**
 * Declares a manual-ack consumer on every target queue. Each delivery is
 * deserialized into a Task and tracked; a watchdog nacks (requeues) the task
 * if it is neither taken nor acknowledged within the timeout.
 */
private void declareConsumers() throws IOException {
    for (String queueName : targetQueueNames) {
        channel.basicConsume(queueName, false, new DefaultConsumer(channel) {
            @Override
            public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties,
                    byte[] body) throws IOException {
                if (isSuspend.get()) {
                    // Requeue and STOP: the original fell through and still
                    // processed the rejected delivery, which later caused a
                    // second nack of the same delivery tag from the watchdog.
                    channel.basicNack(envelope.getDeliveryTag(), false, true);
                    return;
                }
                Task task = SerializationUtils.deserialize(body);
                tasks.put(task, envelope.getDeliveryTag());
                // Watchdog: requeue the task if it times out untaken/unacked.
                checker.schedule(() -> {
                    Task task0 = task;
                    synchronized (tasks) {
                        try {
                            if (tasks.containsKey(task0)) {
                                channel.basicNack(tasks.remove(task0), false, true);
                                logger.warn("Task {} pending timeout (not taken), re-add to default_task_queue.", task0);
                            } else {
                                synchronized (pendingTasks) {
                                    if (pendingTasks.containsKey(task0)) {
                                        channel.basicNack(pendingTasks.remove(task0), false, true);
                                        logger.warn("Task {} pending timeout (no feedback), re-add to default_task_queue.", task0);
                                    }
                                }
                            }
                        } catch (IOException e) {
                            // Log through the normal pipeline instead of dumping
                            // the stack trace to stderr.
                            logger.warn("Failed to nack timed-out task {}", task0, e);
                        }
                    }
                }, timeoutInSeconds, TimeUnit.SECONDS);
            }
        });
    }
}
/**
 * Publishes a new task to the intake queue and bumps the input counter.
 * Silently drops tasks while the dispatcher is suspended.
 */
@Override
public void addTask(String executorId, Task task) {
    if (isSuspend.get()) {
        return;
    }
    try {
        channel.basicPublish("", NEW_TASK_QUEUE, MessageProperties.PERSISTENT_BASIC,
                SerializationUtils.serialize(task));
        totalTaskInputCount.incrementAndGet();
    } catch (IOException e) {
        // Route through the logger instead of printStackTrace so the failure
        // is visible in normal log output.
        logger.warn("Failed to publish task {} to {}", task, NEW_TASK_QUEUE, e);
    }
}
/**
 * Restores a Checkpoint from its serialized form; only the current format
 * version is accepted.
 *
 * @throws IOException if the version does not match {@code VERSION}
 */
@Override
public Checkpoint deserialize(int version, byte[] bytes) throws IOException {
    if (version != VERSION) {
        throw new IOException("Invalid format version for serialized Pravega Checkpoint: " + version);
    }
    final Object restored = SerializationUtils.deserialize(bytes);
    return (Checkpoint) restored;
}
@SuppressWarnings("checkstyle:IllegalCatch") private static void saveSnapshotsToFile(DatastoreSnapshotList snapshots, String fileName, SettableFuture<RpcResult<Void>> returnFuture) { try (FileOutputStream fos = new FileOutputStream(fileName)) { SerializationUtils.serialize(snapshots, fos); returnFuture.set(newSuccessfulResult()); LOG.info("Successfully backed up datastore to file {}", fileName); } catch (IOException | RuntimeException e) { onDatastoreBackupFailure(fileName, returnFuture, e); } }
/**
 * Framework registration callback. When the executor data carries RDB
 * event-trace settings, builds a data source from them and wires up the job
 * event bus against that database.
 */
@Override
public void registered(final ExecutorDriver executorDriver, final Protos.ExecutorInfo executorInfo,
        final Protos.FrameworkInfo frameworkInfo, final Protos.SlaveInfo slaveInfo) {
    if (executorInfo.getData().isEmpty()) {
        return;
    }
    final Map<String, String> config = SerializationUtils.deserialize(executorInfo.getData().toByteArray());
    final BasicDataSource rdbDataSource = new BasicDataSource();
    rdbDataSource.setDriverClassName(config.get("event_trace_rdb_driver"));
    rdbDataSource.setUrl(config.get("event_trace_rdb_url"));
    rdbDataSource.setUsername(config.get("event_trace_rdb_username"));
    rdbDataSource.setPassword(config.get("event_trace_rdb_password"));
    jobEventBus = new JobEventBus(new JobEventRdbConfiguration(rdbDataSource));
}
private byte[] serialize(final Map<String, String> jobConfigurationContext) { // CHECKSTYLE:OFF LinkedHashMap<String, Object> result = new LinkedHashMap<>(2, 1); // CHECKSTYLE:ON ShardingContexts shardingContexts = new ShardingContexts(taskId, "test_job", 1, "", Collections.singletonMap(1, "a")); result.put("shardingContext", shardingContexts); result.put("jobConfigContext", jobConfigurationContext); return SerializationUtils.serialize(result); }