Java class org.apache.hadoop.io.DataInputByteBuffer example source code
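
org.apache.hadoop.io.DataInputByteBuffer is a DataInput implementation that reads from one or more java.nio.ByteBuffer instances supplied through its reset(ByteBuffer...) method. In the examples below it is almost always paired with DataOutputBuffer to round-trip Writable data (typically serialized security tokens) through a ByteBuffer. A minimal self-contained round trip, assuming only hadoop-common on the classpath, looks like this:

import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class DataInputByteBufferRoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize a couple of values into a growable byte array.
    DataOutputBuffer out = new DataOutputBuffer();
    out.writeInt(13562);          // e.g. a shuffle port
    out.writeUTF("hello-tokens"); // e.g. a service name

    // Wrap only the valid region of the backing array.
    ByteBuffer buf = ByteBuffer.wrap(out.getData(), 0, out.getLength());

    // Read the values back through the DataInput interface.
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(buf);
    System.out.println(in.readInt());  // 13562
    System.out.println(in.readUTF());  // hello-tokens
  }
}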

Project: hadoop    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
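    // rewind() moves the buffer position back to 0 so the whole token
    // blob is visible to readTokenStorageStream() below.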
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: aliyun-oss-hadoop-fs    File: YarnServerSecurityUtils.java
/**
 * Parses the container launch context and returns a Credentials instance
 * that contains all the tokens from the launch context.
 * @param launchContext the container launch context to read tokens from
 * @return the Credentials instance
 * @throws IOException if the token storage stream cannot be read
 */
public static Credentials parseCredentials(
    ContainerLaunchContext launchContext) throws IOException {
  Credentials credentials = new Credentials();
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }

  return credentials;
}
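
The token buffer parsed above is produced with the mirror-image pattern: serialize a Credentials object through a DataOutputBuffer and wrap the result. A hedged sketch follows; the helper name is ours, but the same pattern appears verbatim in the AMLauncher and YarnClientImpl examples further down.

// Sketch: how the tokens ByteBuffer consumed by parseCredentials is
// typically produced. The method name is illustrative, not from the source.
private static void setContainerTokens(ContainerLaunchContext clc,
    Credentials credentials) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  clc.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}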
Project: big-c    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hadoop-2.6.0-cdh5.4.3    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws IOException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hadoop-plus    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws YarnException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    try {
      credentials.readTokenStorageStream(buf);
      if (LOG.isDebugEnabled()) {
        for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
          LOG.debug(tk.getService() + " = " + tk.toString());
        }
      }
    } catch (IOException e) {
      throw RPCUtil.getRemoteException(e);
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hops    File: YarnServerSecurityUtils.java
/**
 * Parses the container launch context and returns a Credentials instance
 * that contains all the tokens from the launch context.
 * @param launchContext the container launch context to read tokens from
 * @return the Credentials instance
 * @throws IOException if the token storage stream cannot be read
 */
public static Credentials parseCredentials(
    ContainerLaunchContext launchContext) throws IOException {
  Credentials credentials = new Credentials();
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    credentials.readTokenStorageStream(buf);
    if (LOG.isDebugEnabled()) {
      for (Token<? extends TokenIdentifier> tk : credentials
          .getAllTokens()) {
        LOG.debug(tk.getService() + " = " + tk.toString());
      }
    }
  }

  return credentials;
}
Project: hadoop-TCP    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws YarnException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    try {
      credentials.readTokenStorageStream(buf);
      if (LOG.isDebugEnabled()) {
        for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
          LOG.debug(tk.getService() + " = " + tk.toString());
        }
      }
    } catch (IOException e) {
      throw RPCUtil.getRemoteException(e);
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hardfs    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws YarnException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    try {
      credentials.readTokenStorageStream(buf);
      if (LOG.isDebugEnabled()) {
        for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
          LOG.debug(tk.getService() + " = " + tk.toString());
        }
      }
    } catch (IOException e) {
      throw RPCUtil.getRemoteException(e);
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hadoop-on-lustre2    File: ContainerManagerImpl.java
private Credentials parseCredentials(ContainerLaunchContext launchContext)
    throws YarnException {
  Credentials credentials = new Credentials();
  // //////////// Parse credentials
  ByteBuffer tokens = launchContext.getTokens();

  if (tokens != null) {
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokens.rewind();
    buf.reset(tokens);
    try {
      credentials.readTokenStorageStream(buf);
      if (LOG.isDebugEnabled()) {
        for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {
          LOG.debug(tk.getService() + " = " + tk.toString());
        }
      }
    } catch (IOException e) {
      throw RPCUtil.getRemoteException(e);
    }
  }
  // //////////// End of parsing credentials
  return credentials;
}
Project: hadoop-on-lustre2    File: MemoryRMStateStore.java
@Override
public synchronized void storeApplicationAttemptStateInternal(
    ApplicationAttemptId appAttemptId,
    ApplicationAttemptStateDataPBImpl attemptStateData)
    throws Exception {
  Credentials credentials = null;
  if (attemptStateData.getAppAttemptTokens() != null) {
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    credentials = new Credentials();
    dibb.reset(attemptStateData.getAppAttemptTokens());
    credentials.readTokenStorageStream(dibb);
  }
  ApplicationAttemptState attemptState =
      new ApplicationAttemptState(appAttemptId,
        attemptStateData.getMasterContainer(), credentials,
        attemptStateData.getStartTime());

  ApplicationState appState = state.getApplicationState().get(
      attemptState.getAttemptId().getApplicationId());
  if (appState == null) {
    throw new YarnRuntimeException("Application doesn't exist");
  }
  appState.attempts.put(attemptState.getAttemptId(), attemptState);
}
Project: hadoop    File: AMLauncher.java
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Project: hadoop    File: RMAppImpl.java
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: hadoop    File: RMAppManager.java
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: hadoop    File: TestAMAuthorization.java
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
Project: hadoop    File: YarnClientImpl.java
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
Project: hadoop    File: ShuffleHandler.java
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
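
ShuffleHandler produces this metadata with the reverse helper: the port is written through a DataOutputBuffer and wrapped into a ByteBuffer. A sketch of that write side; treat the body as illustrative of the pattern rather than a verbatim copy of the upstream method.

// Sketch: serializing the shuffle port that deserializeMetaData reads back.
public static ByteBuffer serializeMetaData(int port) throws IOException {
  DataOutputBuffer portDob = new DataOutputBuffer();
  portDob.writeInt(port);
  return ByteBuffer.wrap(portDob.getData(), 0, portDob.getLength());
}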
Project: hadoop    File: ShuffleHandler.java
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
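
The secret buffer read here is built by the MapReduce client when it registers the shuffle service. Since Token implements Writable, it can serialize itself to any DataOutput; a hedged sketch of that write side:

// Sketch: producing the service data that deserializeServiceData consumes.
static ByteBuffer serializeServiceData(Token<JobTokenIdentifier> jobToken)
    throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  jobToken.write(dob); // Token implements Writable
  return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}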
Project: aliyun-oss-hadoop-fs    File: AMLauncher.java
@Private
@VisibleForTesting
protected void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = container.getTokens();
  if (tokens != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Project: aliyun-oss-hadoop-fs    File: RMAppImpl.java
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: aliyun-oss-hadoop-fs    File: RMAppManager.java
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: aliyun-oss-hadoop-fs    File: TestAMAuthorization.java
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
Project: aliyun-oss-hadoop-fs    File: YarnClientImpl.java
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
Project: aliyun-oss-hadoop-fs    File: ShuffleHandler.java
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
Project: aliyun-oss-hadoop-fs    File: ShuffleHandler.java
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
Project: twill    File: YarnUtils.java
/**
 * Decodes {@link Credentials} from the given buffer.
 * If the buffer is null or empty, it returns an empty Credentials.
 */
public static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
  Credentials credentials = new Credentials();
  if (buffer != null && buffer.hasRemaining()) {
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(buffer);
    credentials.readTokenStorageStream(in);
  }
  return credentials;
}
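
Twill pairs this decoder with an encoder going the other way. A hedged sketch of the reverse direction; the pattern mirrors decodeCredentials above, but treat the exact signature as illustrative.

// Sketch: the reverse of decodeCredentials.
public static ByteBuffer encodeCredentials(Credentials credentials)
    throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(out);
  return ByteBuffer.wrap(out.getData(), 0, out.getLength());
}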
Project: big-c    File: AMLauncher.java
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Project: big-c    File: RMAppImpl.java
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: big-c    File: RMAppManager.java
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: big-c    File: TestAMAuthorization.java
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
Project: big-c    File: YarnClientImpl.java
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
Project: big-c    File: ShuffleHandler.java
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
Project: big-c    File: ShuffleHandler.java
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
Project: hadoop-2.6.0-cdh5.4.3    File: AMLauncher.java
private void setupTokens(
    ContainerLaunchContext container, ContainerId containerID)
    throws IOException {
  Map<String, String> environment = container.getEnvironment();
  environment.put(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV,
      application.getWebProxyBase());
  // Set AppSubmitTime and MaxAppAttempts to be consumable by the AM.
  ApplicationId applicationId =
      application.getAppAttemptId().getApplicationId();
  environment.put(
      ApplicationConstants.APP_SUBMIT_TIME_ENV,
      String.valueOf(rmContext.getRMApps()
          .get(applicationId)
          .getSubmitTime()));
  environment.put(ApplicationConstants.MAX_APP_ATTEMPTS_ENV,
      String.valueOf(rmContext.getRMApps().get(
          applicationId).getMaxAppAttempts()));

  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  if (container.getTokens() != null) {
    // TODO: Don't do this kind of checks everywhere.
    dibb.reset(container.getTokens());
    credentials.readTokenStorageStream(dibb);
  }

  // Add AMRMToken
  Token<AMRMTokenIdentifier> amrmToken = createAndSetAMRMToken();
  if (amrmToken != null) {
    credentials.addToken(amrmToken.getService(), amrmToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  container.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}
Project: hadoop-2.6.0-cdh5.4.3    File: RMAppImpl.java
protected Credentials parseCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = submissionContext.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: hadoop-2.6.0-cdh5.4.3    File: RMAppManager.java
protected Credentials parseCredentials(
    ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Project: hadoop-2.6.0-cdh5.4.3    File: MemoryRMStateStore.java
@Override
public synchronized void storeApplicationAttemptStateInternal(
    ApplicationAttemptId appAttemptId,
    ApplicationAttemptStateData attemptStateData)
    throws Exception {
  Credentials credentials = null;
  if (attemptStateData.getAppAttemptTokens() != null) {
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    credentials = new Credentials();
    dibb.reset(attemptStateData.getAppAttemptTokens());
    credentials.readTokenStorageStream(dibb);
  }
  ApplicationAttemptState attemptState =
      new ApplicationAttemptState(appAttemptId,
        attemptStateData.getMasterContainer(), credentials,
        attemptStateData.getStartTime(),
        attemptStateData.getMemorySeconds(),
        attemptStateData.getVcoreSeconds());

  ApplicationState appState = state.getApplicationState().get(
      attemptState.getAttemptId().getApplicationId());
  if (appState == null) {
    throw new YarnRuntimeException("Application doesn't exist");
  }
  appState.attempts.put(attemptState.getAttemptId(), attemptState);
}
Project: hadoop-2.6.0-cdh5.4.3    File: MemoryRMStateStore.java
@Override
public synchronized void updateApplicationAttemptStateInternal(
    ApplicationAttemptId appAttemptId,
    ApplicationAttemptStateData attemptStateData)
    throws Exception {
  Credentials credentials = null;
  if (attemptStateData.getAppAttemptTokens() != null) {
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    credentials = new Credentials();
    dibb.reset(attemptStateData.getAppAttemptTokens());
    credentials.readTokenStorageStream(dibb);
  }
  ApplicationAttemptState updatedAttemptState =
      new ApplicationAttemptState(appAttemptId,
        attemptStateData.getMasterContainer(), credentials,
        attemptStateData.getStartTime(), attemptStateData.getState(),
        attemptStateData.getFinalTrackingUrl(),
        attemptStateData.getDiagnostics(),
        attemptStateData.getFinalApplicationStatus(),
        attemptStateData.getAMContainerExitStatus(),
        attemptStateData.getFinishTime(),
        attemptStateData.getMemorySeconds(),
        attemptStateData.getVcoreSeconds());

  ApplicationState appState =
      state.getApplicationState().get(
        updatedAttemptState.getAttemptId().getApplicationId());
  if (appState == null) {
    throw new YarnRuntimeException("Application doesn't exist");
  }
  LOG.info("Updating final state " + updatedAttemptState.getState()
      + " for attempt: " + updatedAttemptState.getAttemptId());
  appState.attempts.put(updatedAttemptState.getAttemptId(),
    updatedAttemptState);
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestAMAuthorization.java
public Credentials getContainerCredentials() throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  containerTokens.rewind();
  buf.reset(containerTokens);
  credentials.readTokenStorageStream(buf);
  return credentials;
}
Project: hadoop-2.6.0-cdh5.4.3    File: YarnClientImpl.java
private void addTimelineDelegationToken(
    ContainerLaunchContext clc) throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // If the timeline delegation token is already in the CLC, no need to add
  // one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token : credentials
      .getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
Project: hadoop-2.6.0-cdh5.4.3    File: ShuffleHandler.java
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}