private void recoverJobShuffleInfo(String jobIdStr, byte[] data)
    throws IOException {
  JobID jobId;
  try {
    jobId = JobID.forName(jobIdStr);
  } catch (IllegalArgumentException e) {
    throw new IOException("Bad job ID " + jobIdStr + " in state store", e);
  }

  // Rebuild the job's secret token from its persisted protobuf form.
  JobShuffleInfoProto proto = JobShuffleInfoProto.parseFrom(data);
  String user = proto.getUser();
  TokenProto tokenProto = proto.getJobToken();
  Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
      tokenProto.getIdentifier().toByteArray(),
      tokenProto.getPassword().toByteArray(),
      new Text(tokenProto.getKind()),
      new Text(tokenProto.getService()));
  addJobToken(jobId, user, jobToken);
}
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  if (stateDb != null) {
    // Serialize the job token so shuffle state survives a restart.
    TokenProto tokenProto = TokenProto.newBuilder()
        .setIdentifier(ByteString.copyFrom(jobToken.getIdentifier()))
        .setPassword(ByteString.copyFrom(jobToken.getPassword()))
        .setKind(jobToken.getKind().toString())
        .setService(jobToken.getService().toString())
        .build();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken);
}
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken, String userFolder)
    throws IOException {
  if (stateDb != null) {
    TokenProto tokenProto = TokenProto.newBuilder()
        .setIdentifier(ByteString.copyFrom(jobToken.getIdentifier()))
        .setPassword(ByteString.copyFrom(jobToken.getPassword()))
        .setKind(jobToken.getKind().toString())
        .setService(jobToken.getService().toString())
        .build();
    // This overload also persists the user's folder alongside the token.
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto)
        .setUserFolder(userFolder).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken, userFolder);
}
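// A minimal round-trip sketch (not part of ShuffleHandler) tying the record
// and recover methods above together. It assumes it lives in the same class,
// so that stateDb, bytes(), and the proto types are in scope; stateDb.get()
// is the standard org.iq80.leveldb.DB lookup.
private void roundTripJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  // Persist: key = job ID string, value = serialized JobShuffleInfoProto.
  recordJobShuffleInfo(jobId, user, jobToken);
  // Recover: parse the stored bytes and re-register the token.
  byte[] value = stateDb.get(bytes(jobId.toString()));
  recoverJobShuffleInfo(jobId.toString(), value);
}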
public static TokenProto convert(Token<?> tok) {
  return TokenProto.newBuilder()
      .setIdentifier(ByteString.copyFrom(tok.getIdentifier()))
      .setPassword(ByteString.copyFrom(tok.getPassword()))
      .setKind(tok.getKind().toString())
      .setService(tok.getService().toString())
      .build();
}
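// Hedged usage sketch for the converter above (illustrative values; the
// PBHelper import path may differ across Hadoop versions). Every TokenProto
// field mirrors the corresponding Token field verbatim.
Token<BlockTokenIdentifier> token = new Token<>("id".getBytes(),
    "pw".getBytes(), new Text("kind"), new Text("host:port"));
TokenProto proto = PBHelper.convert(token);
assert proto.getKind().equals("kind");
assert proto.getService().equals("host:port");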
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(String blockPoolId,
    long[] blockIds, List<Token<BlockTokenIdentifier>> tokens)
    throws IOException {
  List<TokenProto> tokensProtos = new ArrayList<TokenProto>(tokens.size());
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request =
      GetHdfsBlockLocationsRequestProto.newBuilder()
          .setBlockPoolId(blockPoolId)
          .addAllBlockIds(Longs.asList(blockIds))
          .addAllTokens(tokensProtos)
          .build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blockPoolId, blockIds, volumeIds,
      volumeIndexes);
}
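// Hedged decoding sketch for the response above. volumeIndexes holds one
// entry per block, indexing into volumeIds; treating an out-of-range index
// as "volume unknown" is an assumption here, so verify it against your
// Hadoop version before relying on it.
for (int i = 0; i < blockIds.length; i++) {
  int idx = volumeIndexes.get(i);
  byte[] volumeId = idx < volumeIds.size() ? volumeIds.get(idx) : null;
  // blockIds[i] lives on volumeId (null when the datanode could not say).
}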
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(), new Text("kind"),
      new Text("service"));
  TokenProto tokenProto = PBHelper.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
  compare(token, token2);
}
@Override
public List<Token> getContainersToIncrease() {
  if (containersToIncrease != null) {
    return containersToIncrease;
  }
  // Lazily materialize the list from the proto on first access.
  IncreaseContainersResourceRequestProtoOrBuilder p =
      viaProto ? proto : builder;
  List<TokenProto> list = p.getIncreaseContainersList();
  containersToIncrease = new ArrayList<>();
  for (TokenProto c : list) {
    containersToIncrease.add(convertFromProtoFormat(c));
  }
  return containersToIncrease;
}
private void addIncreaseContainersToProto() {
  maybeInitBuilder();
  builder.clearIncreaseContainers();
  if (this.containersToIncrease == null) {
    return;
  }
  // Wrap the token list in an Iterable that converts each element to its
  // proto form only when the builder actually iterates over it.
  Iterable<TokenProto> iterable = new Iterable<TokenProto>() {
    @Override
    public Iterator<TokenProto> iterator() {
      return new Iterator<TokenProto>() {
        Iterator<Token> iter = containersToIncrease.iterator();

        @Override
        public boolean hasNext() {
          return iter.hasNext();
        }

        @Override
        public TokenProto next() {
          return convertToProtoFormat(iter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllIncreaseContainers(iterable);
}
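// The anonymous Iterable above converts lazily: no TokenProto is built until
// addAllIncreaseContainers() actually iterates. A hedged, generic sketch of
// the same pattern (illustrative helper, not YARN code):
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;

final class LazyConverting<S, T> implements Iterable<T> {
  private final List<S> source;
  private final Function<S, T> convert;

  LazyConverting(List<S> source, Function<S, T> convert) {
    this.source = source;
    this.convert = convert;
  }

  @Override
  public Iterator<T> iterator() {
    Iterator<S> it = source.iterator();
    return new Iterator<T>() {
      @Override public boolean hasNext() { return it.hasNext(); }
      // Convert on demand, one element per next() call.
      @Override public T next() { return convert.apply(it.next()); }
      @Override public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }
}
// Hypothetical usage: builder.addAllIncreaseContainers(
//     new LazyConverting<>(containersToIncrease, this::convertToProtoFormat));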
public static TokenProto convert(Token<?> tok) {
  return TokenProto.newBuilder()
      .setIdentifier(getByteString(tok.getIdentifier()))
      .setPassword(getByteString(tok.getPassword()))
      .setKind(tok.getKind().toString())
      .setService(tok.getService().toString())
      .build();
}
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(), new Text("kind"),
      new Text("service"));
  TokenProto tokenProto = PBHelperClient.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelperClient.convert(tokenProto);
  compare(token, token2);
}
public static Token<DelegationTokenIdentifier> convertDelegationToken(
    TokenProto blockToken) {
  return new Token<>(blockToken.getIdentifier().toByteArray(),
      blockToken.getPassword().toByteArray(),
      new Text(blockToken.getKind()),
      new Text(blockToken.getService()));
}
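// Hedged usage sketch for the converter above. Field values are illustrative;
// all four TokenProto fields are required by hadoop-common's Security.proto.
TokenProto proto = TokenProto.newBuilder()
    .setIdentifier(ByteString.copyFrom("id".getBytes()))
    .setPassword(ByteString.copyFrom("pw".getBytes()))
    .setKind("HDFS_DELEGATION_TOKEN")
    .setService("ha-hdfs:mycluster") // hypothetical service name
    .build();
Token<DelegationTokenIdentifier> token = convertDelegationToken(proto);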
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
    List<Token<BlockTokenIdentifier>> tokens) throws IOException {
  // Convert to proto objects
  List<ExtendedBlockProto> blocksProtos = new ArrayList<>(blocks.size());
  List<TokenProto> tokensProtos = new ArrayList<>(tokens.size());
  for (ExtendedBlock b : blocks) {
    blocksProtos.add(PBHelper.convert(b));
  }
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request =
      GetHdfsBlockLocationsRequestProto.newBuilder()
          .addAllBlocks(blocksProtos)
          .addAllTokens(tokensProtos)
          .build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[]{}),
      volumeIds, volumeIndexes);
}
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token = new Token<>("identifier".getBytes(),
      "password".getBytes(), new Text("kind"), new Text("service"));
  TokenProto tokenProto = PBHelper.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
  compare(token, token2);
}
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
    List<Token<BlockTokenIdentifier>> tokens) throws IOException {
  // Convert to proto objects
  List<ExtendedBlockProto> blocksProtos =
      new ArrayList<ExtendedBlockProto>(blocks.size());
  List<TokenProto> tokensProtos = new ArrayList<TokenProto>(tokens.size());
  for (ExtendedBlock b : blocks) {
    blocksProtos.add(PBHelper.convert(b));
  }
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request =
      GetHdfsBlockLocationsRequestProto.newBuilder()
          .addAllBlocks(blocksProtos)
          .addAllTokens(tokensProtos)
          .build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[] {}),
      volumeIds, volumeIndexes);
}
private TokenPBImpl convertFromProtoFormat(TokenProto p) {
  return new TokenPBImpl(p);
}
private TokenProto convertToProtoFormat(Token t) {
  return ((TokenPBImpl) t).getProto();
}
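// Hedged round-trip sketch for the two helpers above. Token.newInstance is
// the public YARN factory and, with the default record factory, returns a
// TokenPBImpl (an assumption worth checking in your build).
Token token = Token.newInstance("id".getBytes(), "kind", "pw".getBytes(),
    "host:port");
TokenProto proto = ((TokenPBImpl) token).getProto(); // convertToProtoFormat
Token restored = new TokenPBImpl(proto);             // convertFromProtoFormat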