Java class org.apache.hadoop.security.proto.SecurityProtos.TokenProto: example source code

The snippets below show how TokenProto is used across several Hadoop-derived projects. Where multiple forks carry a byte-identical copy of a method, the projects are listed together on one header.

Projects: hadoop / aliyun-oss-hadoop-fs / big-c / hadoop-2.6.0-cdh5.4.3 / FlexMap    File: ShuffleHandler.java
private void recoverJobShuffleInfo(String jobIdStr, byte[] data)
    throws IOException {
  JobID jobId;
  try {
    jobId = JobID.forName(jobIdStr);
  } catch (IllegalArgumentException e) {
    throw new IOException("Bad job ID " + jobIdStr + " in state store", e);
  }

  JobShuffleInfoProto proto = JobShuffleInfoProto.parseFrom(data);
  String user = proto.getUser();
  TokenProto tokenProto = proto.getJobToken();
  Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
      tokenProto.getIdentifier().toByteArray(),
      tokenProto.getPassword().toByteArray(),
      new Text(tokenProto.getKind()), new Text(tokenProto.getService()));
  addJobToken(jobId, user, jobToken);
}
Projects: hadoop / aliyun-oss-hadoop-fs / big-c / hadoop-2.6.0-cdh5.4.3 / FlexMap    File: ShuffleHandler.java
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  if (stateDb != null) {
    TokenProto tokenProto = TokenProto.newBuilder()
        .setIdentifier(ByteString.copyFrom(jobToken.getIdentifier()))
        .setPassword(ByteString.copyFrom(jobToken.getPassword()))
        .setKind(jobToken.getKind().toString())
        .setService(jobToken.getService().toString())
        .build();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken);
}
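Taken together, recordJobShuffleInfo and recoverJobShuffleInfo round-trip a job token through TokenProto and the shuffle state store. A minimal self-contained sketch of the same serialize/parse cycle (the token values are made up for illustration; it assumes hadoop-common and its bundled protobuf classes are on the classpath):

import com.google.protobuf.ByteString;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenProtoRoundTrip {
  public static void main(String[] args) throws Exception {
    // Hypothetical token values, for illustration only.
    Token<TokenIdentifier> original = new Token<TokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(),
        new Text("kind"), new Text("service"));

    // Token -> TokenProto -> byte[]: the direction recordJobShuffleInfo takes.
    byte[] stored = TokenProto.newBuilder()
        .setIdentifier(ByteString.copyFrom(original.getIdentifier()))
        .setPassword(ByteString.copyFrom(original.getPassword()))
        .setKind(original.getKind().toString())
        .setService(original.getService().toString())
        .build().toByteArray();

    // byte[] -> TokenProto -> Token: the direction recoverJobShuffleInfo takes.
    TokenProto parsed = TokenProto.parseFrom(stored);
    Token<TokenIdentifier> restored = new Token<TokenIdentifier>(
        parsed.getIdentifier().toByteArray(),
        parsed.getPassword().toByteArray(),
        new Text(parsed.getKind()), new Text(parsed.getService()));

    System.out.println(restored.getService()); // prints: service
  }
}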
Project: hops    File: ShuffleHandler.java (this fork adds a userFolder parameter)
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken, String userFolder) throws IOException {
  if (stateDb != null) {
    TokenProto tokenProto = TokenProto.newBuilder()
        .setIdentifier(ByteString.copyFrom(jobToken.getIdentifier()))
        .setPassword(ByteString.copyFrom(jobToken.getPassword()))
        .setKind(jobToken.getKind().toString())
        .setService(jobToken.getService().toString())
        .build();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).setUserFolder(userFolder).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken, userFolder);
}
Projects: hadoop / big-c / hadoop-2.6.0-cdh5.4.3 / hadoop-plus / FlexMap    File: PBHelper.java
public static TokenProto convert(Token<?> tok) {
  return TokenProto.newBuilder().
            setIdentifier(ByteString.copyFrom(tok.getIdentifier())).
            setPassword(ByteString.copyFrom(tok.getPassword())).
            setKind(tok.getKind().toString()).
            setService(tok.getService().toString()).build(); 
}
Projects: hadoop / big-c / hadoop-2.6.0-cdh5.4.3 / FlexMap    File: ClientDatanodeProtocolTranslatorPB.java
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(String blockPoolId,
    long[] blockIds,
    List<Token<BlockTokenIdentifier>> tokens) throws IOException {
  List<TokenProto> tokensProtos = 
      new ArrayList<TokenProto>(tokens.size());
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request = 
      GetHdfsBlockLocationsRequestProto.newBuilder()
      .setBlockPoolId(blockPoolId)
      .addAllBlockIds(Longs.asList(blockIds))
      .addAllTokens(tokensProtos)
      .build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blockPoolId, blockIds,
      volumeIds, volumeIndexes);
}
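getHdfsBlocksMetadata returns two parallel structures: a list of distinct volume ids and, for each block, an index into that list. A short sketch of how a caller can expand the pair into one volume id per block (volumeIdPerBlock is an illustrative helper, not part of the HDFS API):

import java.util.ArrayList;
import java.util.List;

// Expand (volumeIds, volumeIndexes) into one volume id per block.
// Illustrative only; consumers of HdfsBlocksMetadata do the equivalent pairing.
static List<byte[]> volumeIdPerBlock(List<byte[]> volumeIds,
    List<Integer> volumeIndexes) {
  List<byte[]> perBlock = new ArrayList<byte[]>(volumeIndexes.size());
  for (int index : volumeIndexes) {
    perBlock.add(volumeIds.get(index));
  }
  return perBlock;
}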
Projects: hadoop / big-c / hadoop-2.6.0-cdh5.4.3 / hadoop-plus / FlexMap    File: TestPBHelper.java
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(), new Text("kind"),
      new Text("service"));
  TokenProto tokenProto = PBHelper.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
  compare(token, token2);
}
Projects: aliyun-oss-hadoop-fs / hops    File: IncreaseContainersResourceRequestPBImpl.java
@Override
public List<Token> getContainersToIncrease() {
  if (containersToIncrease != null) {
    return containersToIncrease;
  }
  IncreaseContainersResourceRequestProtoOrBuilder p =
      viaProto ? proto : builder;
  List<TokenProto> list = p.getIncreaseContainersList();
  containersToIncrease = new ArrayList<>();
  for (TokenProto c : list) {
    containersToIncrease.add(convertFromProtoFormat(c));
  }
  return containersToIncrease;
}
Projects: aliyun-oss-hadoop-fs / hops    File: IncreaseContainersResourceRequestPBImpl.java
private void addIncreaseContainersToProto() {
  maybeInitBuilder();
  builder.clearIncreaseContainers();
  if (this.containersToIncrease == null) {
    return;
  }
  Iterable<TokenProto> iterable = new Iterable<TokenProto>() {
    @Override
    public Iterator<TokenProto> iterator() {
      return new Iterator<TokenProto>() {
        Iterator<Token> iter = containersToIncrease.iterator();

        @Override
        public boolean hasNext() {
          return iter.hasNext();
        }

        @Override
        public TokenProto next() {
          return convertToProtoFormat(iter.next());
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllIncreaseContainers(iterable);
}
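The anonymous Iterable above converts each Token to a TokenProto lazily: conversion only happens while the protobuf builder iterates inside addAllIncreaseContainers, so no intermediate List<TokenProto> is materialized. The same pattern extracted as a generic helper, as a sketch (mapLazily is not a Hadoop API, and it uses a Java 8 lambda where the original sticks to anonymous classes):

import java.util.Iterator;
import java.util.function.Function;

// Generic form of the lazy conversion above. Iterable is a functional
// interface, so its single iterator() method can be a lambda.
static <A, B> Iterable<B> mapLazily(Iterable<A> source, Function<A, B> fn) {
  return () -> new Iterator<B>() {
    private final Iterator<A> inner = source.iterator();
    @Override public boolean hasNext() { return inner.hasNext(); }
    @Override public B next() { return fn.apply(inner.next()); }
    @Override public void remove() { throw new UnsupportedOperationException(); }
  };
}

// With it, the method above could shrink to:
// builder.addAllIncreaseContainers(mapLazily(containersToIncrease, this::convertToProtoFormat));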
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static TokenProto convert(Token<?> tok) {
  return TokenProto.newBuilder().
      setIdentifier(getByteString(tok.getIdentifier())).
      setPassword(getByteString(tok.getPassword())).
      setKind(tok.getKind().toString()).
      setService(tok.getService().toString()).build();
}
Project: aliyun-oss-hadoop-fs    File: TestPBHelper.java
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(), new Text("kind"),
      new Text("service"));
  TokenProto tokenProto = PBHelperClient.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelperClient.convert(tokenProto);
  compare(token, token2);
}
Project: hops    File: PBHelper.java
public static Token<DelegationTokenIdentifier> convertDelegationToken(
    TokenProto blockToken) {
  return new Token<>(
      blockToken.getIdentifier().toByteArray(),
      blockToken.getPassword().toByteArray(), new Text(blockToken.getKind()),
      new Text(blockToken.getService()));
}
Project: hops    File: ClientDatanodeProtocolTranslatorPB.java
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
    List<Token<BlockTokenIdentifier>> tokens) throws IOException {
  // Convert to proto objects
  List<ExtendedBlockProto> blocksProtos =
      new ArrayList<>(blocks.size());
  List<TokenProto> tokensProtos = new ArrayList<>(tokens.size());
  for (ExtendedBlock b : blocks) {
    blocksProtos.add(PBHelper.convert(b));
  }
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request =
      GetHdfsBlockLocationsRequestProto.newBuilder()
          .addAllBlocks(blocksProtos).addAllTokens(tokensProtos).build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[]{}),
      volumeIds, volumeIndexes);
}
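Note the signature difference between the forks: hops and hadoop-plus (below) identify each block with an ExtendedBlock, so the request carries both ExtendedBlockProto and TokenProto lists, while the hadoop / big-c / hadoop-2.6.0-cdh5.4.3 / FlexMap variant above passes a single block-pool ID plus a long[] of block IDs. The TokenProto list is built the same way in both.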
Project: hops    File: TestPBHelper.java
@Test
public void testConvertBlockToken() {
  Token<BlockTokenIdentifier> token =
      new Token<>("identifier".getBytes(),
          "password".getBytes(), new Text("kind"), new Text("service"));
  TokenProto tokenProto = PBHelper.convert(token);
  Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
  compare(token, token2);
}
Project: hadoop-plus    File: ClientDatanodeProtocolTranslatorPB.java
@Override
public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
    List<Token<BlockTokenIdentifier>> tokens) throws IOException {
  // Convert to proto objects
  List<ExtendedBlockProto> blocksProtos = 
      new ArrayList<ExtendedBlockProto>(blocks.size());
  List<TokenProto> tokensProtos = 
      new ArrayList<TokenProto>(tokens.size());
  for (ExtendedBlock b : blocks) {
    blocksProtos.add(PBHelper.convert(b));
  }
  for (Token<BlockTokenIdentifier> t : tokens) {
    tokensProtos.add(PBHelper.convert(t));
  }
  // Build the request
  GetHdfsBlockLocationsRequestProto request = 
      GetHdfsBlockLocationsRequestProto.newBuilder()
      .addAllBlocks(blocksProtos)
      .addAllTokens(tokensProtos)
      .build();
  // Send the RPC
  GetHdfsBlockLocationsResponseProto response;
  try {
    response = rpcProxy.getHdfsBlockLocations(NULL_CONTROLLER, request);
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
  // List of volumes in the response
  List<ByteString> volumeIdsByteStrings = response.getVolumeIdsList();
  List<byte[]> volumeIds = new ArrayList<byte[]>(volumeIdsByteStrings.size());
  for (ByteString bs : volumeIdsByteStrings) {
    volumeIds.add(bs.toByteArray());
  }
  // Array of indexes into the list of volumes, one per block
  List<Integer> volumeIndexes = response.getVolumeIndexesList();
  // Parsed HdfsVolumeId values, one per block
  return new HdfsBlocksMetadata(blocks.toArray(new ExtendedBlock[] {}), 
      volumeIds, volumeIndexes);
}
Project: hadoop    Files: CancelDelegationTokenRequestPBImpl.java / GetDelegationTokenResponsePBImpl.java / RenewDelegationTokenRequestPBImpl.java (each defines the same pair of converters)
private TokenPBImpl convertFromProtoFormat(TokenProto p) {
  return new TokenPBImpl(p);
}
private TokenProto convertToProtoFormat(Token t) {
  return ((TokenPBImpl) t).getProto();
}
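These converter pairs support the standard PBImpl accessor pattern: a getter lazily unwraps the stored TokenProto into a TokenPBImpl and caches it, and the setter path writes the cached record back into the proto builder. A hedged sketch of a getter in that style (the field and proto accessor names are illustrative, not copied from the Hadoop source):

// Illustrative getter in the PBImpl style; hasToken/getToken and the
// delegationToken field are hypothetical names.
private Token delegationToken = null;

public Token getDelegationToken() {
  if (delegationToken != null) {
    return delegationToken;                               // already unwrapped and cached
  }
  GetDelegationTokenResponseProtoOrBuilder p = viaProto ? proto : builder;
  if (!p.hasToken()) {
    return null;                                          // field never set
  }
  delegationToken = convertFromProtoFormat(p.getToken()); // TokenProto -> TokenPBImpl
  return delegationToken;
}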