Example source code for the Java class org.apache.hadoop.mapreduce.security.SecureShuffleUtils

Project: hadoop    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
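
The hash handshake exercised by this test can also be run on its own. Below is a minimal, self-contained sketch of the URL-hash exchange between a reducer and the shuffle server, assuming both sides share the same job-token secret; the secret bytes and port are made up for illustration, and this is not the actual ShuffleHandler code.

import java.io.IOException;
import java.net.URL;

import javax.crypto.SecretKey;

import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;

public class ShuffleHashHandshakeSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical shared job-token secret; in the test above it is derived
    // from the Token<JobTokenIdentifier> password.
    SecretKey key = JobTokenSecretManager.createSecretKey("job-token-secret".getBytes());
    URL url = new URL("http://127.0.0.1:8080"
        + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");

    // Reducer side: hash the canonical form of the URL and send it in the
    // SecureShuffleUtils.HTTP_HEADER_URL_HASH request header.
    String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
    String encHash = SecureShuffleUtils.hashFromString(msgToEncode, key);

    // Server side: rebuild the message from the requested URL and check the
    // received hash; verifyReply throws IOException on a mismatch.
    SecureShuffleUtils.verifyReply(encHash, msgToEncode, key);

    // Server side: reply with an HMAC of the hash it received, sent back in
    // the SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH response header.
    String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

    // Reducer side: confirm the reply really is the HMAC of the hash it sent,
    // mirroring verifyConnection() in Fetcher.java below.
    SecureShuffleUtils.verifyReply(replyHash, encHash, key);
    System.out.println("shuffle URL-hash handshake verified");
  }
}
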
Project: hadoop    File: Fetcher.java
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: hadoop    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.info("for url="+msgToEncode+" sent hash and received reply");
}
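
For reference, the failure path of verifyReply can be seen in isolation: when the reply header is not the HMAC of the hash the client sent, verifyReply throws IOException, which Fetcher.copyFromHost then treats as a failed copy in the tests further down. A minimal sketch, using a made-up secret:

import java.io.IOException;

import javax.crypto.SecretKey;

import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;

public class ReplyHashMismatchSketch {
  public static void main(String[] args) throws IOException {
    SecretKey key = JobTokenSecretManager.createSecretKey("job-token-secret".getBytes());
    String encHash = SecureShuffleUtils.hashFromString("some-url-message", key);

    // A reply computed over a different message is not the HMAC of encHash,
    // so the verification below is expected to throw IOException.
    String badReply = SecureShuffleUtils.generateHash("tampered".getBytes(), key);
    try {
      SecureShuffleUtils.verifyReply(badReply, encHash, key);
      System.out.println("unexpected: verification passed");
    } catch (IOException expected) {
      System.out.println("verification failed as expected: " + expected.getMessage());
    }
  }
}
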
Project: hadoop    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: aliyun-oss-hadoop-fs    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
Project: aliyun-oss-hadoop-fs    File: Fetcher.java
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: aliyun-oss-hadoop-fs    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.debug("for url="+msgToEncode+" sent hash and received reply");
}
Project: aliyun-oss-hadoop-fs    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: big-c    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
Project: big-c    File: Fetcher.java
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: big-c    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.info("for url="+msgToEncode+" sent hash and received reply");
}
Project: big-c    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
Project: hadoop-2.6.0-cdh5.4.3    File: Fetcher.java
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: hadoop-2.6.0-cdh5.4.3    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.info("for url="+msgToEncode+" sent hash and received reply");
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop-plus    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: FlexMap    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
Project: FlexMap    File: Fetcher.java
private void setupConnectionsWithRetry(MapHost host,
    Set<TaskAttemptID> remaining, URL url) throws IOException {
  openConnectionWithRetry(host, remaining, url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: FlexMap    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.info("for url="+msgToEncode+" sent hash and received reply");
}
Project: FlexMap    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hops    File: TestShuffleHandler.java
private static int getShuffleResponseCode(ShuffleHandler shuffle,
    Token<JobTokenIdentifier> jt) throws IOException {
  URL url = new URL("http://127.0.0.1:"
      + shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
      + "/mapOutput?job=job_12345_0001&reduce=0&map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  String encHash = SecureShuffleUtils.hashFromString(
      SecureShuffleUtils.buildMsgFrom(url),
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  conn.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  int rc = conn.getResponseCode();
  conn.disconnect();
  return rc;
}
Project: hops    File: Fetcher.java
private void setupConnectionsWithRetry(URL url) throws IOException {
  openConnectionWithRetry(url);
  if (stopped) {
    return;
  }

  // generate hash of the url
  String msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
  String encHash = SecureShuffleUtils.hashFromString(msgToEncode,
      shuffleSecretKey);

  setupShuffleConnection(encHash);
  connect(connection, connectionTimeout);
  // verify that the thread wasn't stopped during calls to connect
  if (stopped) {
    return;
  }

  verifyConnection(url, msgToEncode, encHash);
}
Project: hops    File: Fetcher.java
private void verifyConnection(URL url, String msgToEncode, String encHash)
    throws IOException {
  // Validate response code
  int rc = connection.getResponseCode();
  if (rc != HttpURLConnection.HTTP_OK) {
    throw new IOException(
        "Got invalid response code " + rc + " from " + url +
        ": " + connection.getResponseMessage());
  }
  // get the shuffle version
  if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
      connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
          connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
    throw new IOException("Incompatible shuffle response version");
  }
  // get the replyHash which is HMac of the encHash we sent to the server
  String replyHash = connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
  if(replyHash==null) {
    throw new IOException("security validation of TT Map output failed");
  }
  LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
  // verify that replyHash is HMac of encHash
  SecureShuffleUtils.verifyReply(replyHash, encHash, shuffleSecretKey);
  LOG.debug("for url="+msgToEncode+" sent hash and received reply");
}
Project: hops    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop-TCP    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hardfs    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop-on-lustre2    File: TestFetcher.java
@Test(timeout=30000)
public void testCopyFromHostConnectionTimeout() throws Exception {
  when(connection.getInputStream()).thenThrow(
      new SocketTimeoutException("This is a fake timeout :)"));

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
  verify(ss).copyFailed(map2ID, host, false, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop    File: Fetcher.java
private void setupShuffleConnection(String encHash) {
  // put url hash into http header
  connection.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  // set the read timeout
  connection.setReadTimeout(readTimeout);
  // put shuffle version into http header
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
}
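
For quick reference, the constants used above are plain HTTP header names defined on SecureShuffleUtils and ShuffleHeader. A small sketch that simply prints them follows; the values noted in the comments are the usual ones (e.g. "UrlHash" and "1.0.0"), but the code only echoes whatever the constants actually hold.

import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;

public class ShuffleHeaderNamesSketch {
  public static void main(String[] args) {
    // Request header carrying the client's HMAC of the URL (typically "UrlHash").
    System.out.println(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
    // Response header carrying the server's HMAC of that hash (typically "ReplyHash").
    System.out.println(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
    // Shuffle protocol name/version headers with their expected default values.
    System.out.println(ShuffleHeader.HTTP_HEADER_NAME + " = "
        + ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    System.out.println(ShuffleHeader.HTTP_HEADER_VERSION + " = "
        + ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  }
}
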
Project: hadoop    File: TestFetcher.java
@Test
public void testReduceOutOfDiskSpace() throws Throwable {
  LOG.info("testReduceOutOfDiskSpace");

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));

  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
  .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
  .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
  .thenReturn(replyHash);
  when(connection.getInputStream()).thenReturn(in);

  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
  .thenThrow(new DiskErrorException("No disk space available"));

  underTest.copyFromHost(host);
  verify(ss).reportLocalError(any(IOException.class));
}
Project: hadoop    File: TestFetcher.java
@Test
public void testCopyFromHostBogusHeader() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(
      "\u00010 BOGUS DATA\nBOGUS DATA\nBOGUS DATA\n".getBytes());
  when(connection.getInputStream()).thenReturn(in);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss).copyFailed(map2ID, host, true, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop    File: TestFetcher.java
@Test
public void testCopyFromHostIncompatibleShuffleVersion() throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);

  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
        r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }

  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);

  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop    File: TestFetcher.java
@Test
public void testCopyFromHostIncompatibleShuffleVersionWithRetry()
    throws Exception {
  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn("mapreduce").thenReturn("other").thenReturn("other");
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn("1.0.1").thenReturn("1.0.0").thenReturn("1.0.1");
  when(connection.getHeaderField(
      SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH)).thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(new byte[0]);
  when(connection.getInputStream()).thenReturn(in);

  for (int i = 0; i < 3; ++i) {
    Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry, 
        id, ss, mm, r, metrics, except, key, connection);
    underTest.copyFromHost(host);
  }

  verify(connection, times(3)).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs, times(3)).increment(1);
  verify(ss, times(3)).copyFailed(map1ID, host, false, false);
  verify(ss, times(3)).copyFailed(map2ID, host, false, false);

  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss, times(3)).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop    File: TestFetcher.java
@Test
public void testCopyFromHostWait() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  //Defaults to null, which is what we want to test
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(null);

  underTest.copyFromHost(host);

  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH, 
        encHash);
  verify(allErrs, never()).increment(1);
  verify(ss, never()).copyFailed(map1ID, host, true, false);
  verify(ss, never()).copyFailed(map2ID, host, true, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}
Project: hadoop    File: TestFetcher.java
@SuppressWarnings("unchecked")
@Test(timeout=10000) 
public void testCopyFromHostCompressFailure() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);

  doThrow(new java.lang.InternalError()).when(immo)
      .shuffle(any(MapHost.class), any(InputStream.class), anyLong(), 
          anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);

  verify(connection)
      .addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH, 
        encHash);
  verify(ss, times(1)).copyFailed(map1ID, host, true, false);
}
Project: hadoop    File: TestFetcher.java
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryThenTimeout() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200)
    .thenThrow(new SocketTimeoutException("forced timeout"));
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);
  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, false, false);
}
Project: hadoop    File: TestFetcher.java
@SuppressWarnings("unchecked")
@Test(timeout=10000)
public void testCopyFromHostWithRetryUnreserve() throws Exception {
  InMemoryMapOutput<Text, Text> immo = mock(InMemoryMapOutput.class);
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(jobWithRetry,
      id, ss, mm, r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));
  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());
  when(connection.getInputStream()).thenReturn(in);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);

  // Verify that unreserve occurs if an exception happens after shuffle
  // buffer is reserved.
  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
      .thenReturn(immo);
  doThrow(new IOException("forced error")).when(immo).shuffle(
      any(MapHost.class), any(InputStream.class), anyLong(),
      anyLong(), any(ShuffleClientMetrics.class), any(Reporter.class));

  underTest.copyFromHost(host);
  verify(immo).abort();
}
Project: aliyun-oss-hadoop-fs    File: Fetcher.java
private void setupShuffleConnection(String encHash) {
  // put url hash into http header
  connection.addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
  // set the read timeout
  connection.setReadTimeout(readTimeout);
  // put shuffle version into http header
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
}
Project: aliyun-oss-hadoop-fs    File: TestFetcher.java
@Test
public void testReduceOutOfDiskSpace() throws Throwable {
  LOG.info("testReduceOutOfDiskSpace");

  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);
  ShuffleHeader header = new ShuffleHeader(map1ID.toString(), 10, 10, 1);
  ByteArrayOutputStream bout = new ByteArrayOutputStream();
  header.write(new DataOutputStream(bout));

  ByteArrayInputStream in = new ByteArrayInputStream(bout.toByteArray());

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
  .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
  .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
  .thenReturn(replyHash);
  when(connection.getInputStream()).thenReturn(in);

  when(mm.reserve(any(TaskAttemptID.class), anyLong(), anyInt()))
  .thenThrow(new DiskErrorException("No disk space available"));

  underTest.copyFromHost(host);
  verify(ss).reportLocalError(any(IOException.class));
}
Project: aliyun-oss-hadoop-fs    File: TestFetcher.java
@Test
public void testCopyFromHostBogusHeader() throws Exception {
  Fetcher<Text,Text> underTest = new FakeFetcher<Text,Text>(job, id, ss, mm,
      r, metrics, except, key, connection);

  String replyHash = SecureShuffleUtils.generateHash(encHash.getBytes(), key);

  when(connection.getResponseCode()).thenReturn(200);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  when(connection.getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))
      .thenReturn(ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  when(connection.getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH))
      .thenReturn(replyHash);
  ByteArrayInputStream in = new ByteArrayInputStream(
      "\u00010 BOGUS DATA\nBOGUS DATA\nBOGUS DATA\n".getBytes());
  when(connection.getInputStream()).thenReturn(in);

  underTest.copyFromHost(host);

  verify(connection).addRequestProperty(
      SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);

  verify(allErrs).increment(1);
  verify(ss).copyFailed(map1ID, host, true, false);
  verify(ss).copyFailed(map2ID, host, true, false);

  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
  verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map2ID));
}