Java class org.apache.hadoop.hdfs.web.URLUtils: example source code

Project: hadoop-plus    File: HsftpFileSystem.java
@Override
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = new URL("https", nnUri.getHost(), 
      nnUri.getPort(), path + '?' + query);
  HttpsURLConnection conn = (HttpsURLConnection)URLUtils.openConnection(url);
  // bypass hostname verification
  conn.setHostnameVerifier(new DummyHostnameVerifier());
  conn.setRequestMethod("GET");
  conn.connect();

  // check cert expiration date
  final int warnDays = ExpWarnDays;
  if (warnDays > 0) { // make sure only check once
    ExpWarnDays = 0;
    long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
    X509Certificate[] clientCerts = (X509Certificate[]) conn
        .getLocalCertificates();
    if (clientCerts != null) {
      for (X509Certificate cert : clientCerts) {
        long expTime = cert.getNotAfter().getTime();
        if (expTime < expTimeThreshold) {
          StringBuilder sb = new StringBuilder();
          sb.append("\n Client certificate "
              + cert.getSubjectX500Principal().getName());
          int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
          sb.append(" have " + dayOffSet + " days to expire");
          LOG.warn(sb.toString());
        }
      }
    }
  }
  return (HttpURLConnection) conn;
}
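
The certificate-expiration warning in the snippet above recurs in every Hsftp variant on this page. The following is a standalone sketch of the same check; the class and method names are illustrative and not part of the Hadoop API, and it substitutes System.currentTimeMillis() and System.err for Hadoop's Time.now() and LOG.

import java.security.cert.X509Certificate;
import java.util.Date;
import javax.net.ssl.HttpsURLConnection;

public class CertExpiryCheck {
  private static final long MS_PER_DAY = 24L * 60 * 60 * 1000;

  /** Warns about every local client certificate that expires within warnDays. */
  static void warnIfExpiringSoon(HttpsURLConnection conn, int warnDays) {
    long threshold = System.currentTimeMillis() + warnDays * MS_PER_DAY;
    java.security.cert.Certificate[] certs = conn.getLocalCertificates();
    if (certs == null) {
      return; // no client certificate was presented
    }
    for (java.security.cert.Certificate c : certs) {
      if (!(c instanceof X509Certificate)) {
        continue;
      }
      X509Certificate cert = (X509Certificate) c;
      long expTime = cert.getNotAfter().getTime();
      if (expTime < threshold) {
        long daysLeft = (expTime - System.currentTimeMillis()) / MS_PER_DAY;
        System.err.println("Client certificate "
            + cert.getSubjectX500Principal().getName()
            + " expires in " + daysLeft + " days ("
            + new Date(expTime) + ")");
      }
    }
  }
}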
Project: hadoop-plus    File: HftpFileSystem.java
/**
 * Open an HTTP connection to the namenode to read file data and metadata.
 * @param path The path component of the URL
 * @param query The query component of the URL
 */
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = getNamenodeURL(path, query);
  final HttpURLConnection connection =
      (HttpURLConnection)URLUtils.openConnection(url);
  connection.setRequestMethod("GET");
  connection.connect();
  return connection;
}
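
Here URLUtils.openConnection(url) acts as a small connection factory; judging from the test snippets further down, it also applies URLUtils.SOCKET_TIMEOUT to the connection it returns. A hedged equivalent using only java.net is sketched below; the class name, default timeout, and example URL are all illustrative assumptions.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;

public class TimedUrlOpener {
  // Illustrative default; URLUtils.SOCKET_TIMEOUT plays this role in the snippets above.
  public static int socketTimeoutMs = 60 * 1000;

  /** Opens a connection and applies the configured connect/read timeouts. */
  public static URLConnection openConnection(URL url) throws IOException {
    URLConnection connection = url.openConnection();
    connection.setConnectTimeout(socketTimeoutMs);
    connection.setReadTimeout(socketTimeoutMs);
    return connection;
  }

  public static void main(String[] args) throws IOException {
    // Placeholder host; a real call would target an actual namenode HTTP address.
    URL url = new URL("http://namenode.example:50070/data/path?op=OPEN");
    HttpURLConnection conn = (HttpURLConnection) openConnection(url);
    conn.setRequestMethod("GET");
    conn.connect();
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}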
Project: hops    File: HsftpFileSystem.java
@Override
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url =
      new URL("https", nnUri.getHost(), nnUri.getPort(), path + '?' + query);
  HttpsURLConnection conn = (HttpsURLConnection) URLUtils.openConnection(url);
  // bypass hostname verification
  conn.setHostnameVerifier(new DummyHostnameVerifier());
  conn.setRequestMethod("GET");
  conn.connect();

  // check cert expiration date
  final int warnDays = ExpWarnDays;
  if (warnDays > 0) { // make sure only check once
    ExpWarnDays = 0;
    long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
    X509Certificate[] clientCerts =
        (X509Certificate[]) conn.getLocalCertificates();
    if (clientCerts != null) {
      for (X509Certificate cert : clientCerts) {
        long expTime = cert.getNotAfter().getTime();
        if (expTime < expTimeThreshold) {
          StringBuilder sb = new StringBuilder();
          sb.append("\n Client certificate " +
              cert.getSubjectX500Principal().getName());
          int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
          sb.append(" have " + dayOffSet + " days to expire");
          LOG.warn(sb.toString());
        }
      }
    }
  }
  return (HttpURLConnection) conn;
}
Project: hadoop-TCP    File: HsftpFileSystem.java
@Override
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
      nnUri.getPort(), path + '?' + query);
  HttpsURLConnection conn = (HttpsURLConnection)URLUtils.openConnection(url);
  // bypass hostname verification
  conn.setHostnameVerifier(new DummyHostnameVerifier());
  conn.setRequestMethod("GET");
  conn.connect();

  // check cert expiration date
  final int warnDays = ExpWarnDays;
  if (warnDays > 0) { // make sure only check once
    ExpWarnDays = 0;
    long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
    X509Certificate[] clientCerts = (X509Certificate[]) conn
        .getLocalCertificates();
    if (clientCerts != null) {
      for (X509Certificate cert : clientCerts) {
        long expTime = cert.getNotAfter().getTime();
        if (expTime < expTimeThreshold) {
          StringBuilder sb = new StringBuilder();
          sb.append("\n Client certificate "
              + cert.getSubjectX500Principal().getName());
          int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
          sb.append(" have " + dayOffSet + " days to expire");
          LOG.warn(sb.toString());
        }
      }
    }
  }
  return (HttpURLConnection) conn;
}
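
The Hsftp snippets also install DummyHostnameVerifier, a Hadoop-internal verifier that accepts any hostname, before connecting over HTTPS. A minimal stand-in with the same behavior is sketched below; the class name and example URL are illustrative, and skipping hostname verification is only reasonable in the controlled setups the original code targets.

import java.io.IOException;
import java.net.URL;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;

public class AcceptAllHostnames implements HostnameVerifier {
  @Override
  public boolean verify(String hostname, SSLSession session) {
    // Accept every hostname; certificate chain validation still applies.
    return true;
  }

  public static void main(String[] args) throws IOException {
    URL url = new URL("https://namenode.example:50470/data/path?op=OPEN");
    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
    conn.setHostnameVerifier(new AcceptAllHostnames());
    conn.setRequestMethod("GET");
    conn.connect();
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}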
Project: hadoop-TCP    File: HftpFileSystem.java
/**
 * Open an HTTP connection to the namenode to read file data and metadata.
 * @param path The path component of the URL
 * @param query The query component of the URL
 */
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = getNamenodeURL(path, query);
  final HttpURLConnection connection =
      (HttpURLConnection)URLUtils.openConnection(url);
  connection.setRequestMethod("GET");
  connection.connect();
  return connection;
}
Project: hardfs    File: HsftpFileSystem.java
@Override
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
      nnUri.getPort(), path + '?' + query);
  HttpsURLConnection conn = (HttpsURLConnection)URLUtils.openConnection(url);
  // bypass hostname verification
  conn.setHostnameVerifier(new DummyHostnameVerifier());
  conn.setRequestMethod("GET");
  conn.connect();

  // check cert expiration date
  final int warnDays = ExpWarnDays;
  if (warnDays > 0) { // make sure only check once
    ExpWarnDays = 0;
    long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
    X509Certificate[] clientCerts = (X509Certificate[]) conn
        .getLocalCertificates();
    if (clientCerts != null) {
      for (X509Certificate cert : clientCerts) {
        long expTime = cert.getNotAfter().getTime();
        if (expTime < expTimeThreshold) {
          StringBuilder sb = new StringBuilder();
          sb.append("\n Client certificate "
              + cert.getSubjectX500Principal().getName());
          int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
          sb.append(" have " + dayOffSet + " days to expire");
          LOG.warn(sb.toString());
        }
      }
    }
  }
  return (HttpURLConnection) conn;
}
Project: hardfs    File: HftpFileSystem.java
/**
 * Open an HTTP connection to the namenode to read file data and metadata.
 * @param path The path component of the URL
 * @param query The query component of the URL
 */
protected HttpURLConnection openConnection(String path, String query)
    throws IOException {
  query = addDelegationTokenParam(query);
  final URL url = getNamenodeURL(path, query);
  final HttpURLConnection connection =
      (HttpURLConnection)URLUtils.openConnection(url);
  connection.setRequestMethod("GET");
  connection.connect();
  return connection;
}
Project: hadoop-plus    File: HftpFileSystem.java
protected HttpURLConnection openConnection() throws IOException {
  return (HttpURLConnection)URLUtils.openConnection(url);
}
Project: hadoop-plus    File: TestHftpURLTimeouts.java
@BeforeClass
public static void setup() {
  URLUtils.SOCKET_TIMEOUT = 5;
}
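
Each TestHftpURLTimeouts class shrinks URLUtils.SOCKET_TIMEOUT to 5 (presumably milliseconds) so that connection attempts fail almost immediately during the test. The same effect on a plain HttpURLConnection is sketched below; the unroutable address is a common trick for forcing a connect timeout and is only illustrative.

import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;

public class ConnectTimeoutDemo {
  public static void main(String[] args) throws Exception {
    // An address that typically does not answer, so the connect attempt
    // hangs until the timeout fires (illustrative only).
    URL url = new URL("http://10.255.255.1:50070/");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setConnectTimeout(5);  // same tiny timeout the tests assign to SOCKET_TIMEOUT
    conn.setReadTimeout(5);
    try {
      conn.connect();
      System.out.println("Unexpectedly connected: HTTP " + conn.getResponseCode());
    } catch (SocketTimeoutException expected) {
      System.out.println("Timed out as expected: " + expected.getMessage());
    } finally {
      conn.disconnect();
    }
  }
}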
Project: hops    File: TestHftpURLTimeouts.java
@BeforeClass
public static void setup() {
  URLUtils.SOCKET_TIMEOUT = 5;
}
Project: hadoop-TCP    File: HftpFileSystem.java
protected HttpURLConnection openConnection() throws IOException {
  return (HttpURLConnection)URLUtils.openConnection(url);
}
Project: hadoop-TCP    File: TestHftpURLTimeouts.java
@BeforeClass
public static void setup() {
  URLUtils.SOCKET_TIMEOUT = 5;
}
Project: hardfs    File: HftpFileSystem.java
protected HttpURLConnection openConnection() throws IOException {
  return (HttpURLConnection)URLUtils.openConnection(url);
}
Project: hardfs    File: TestHftpURLTimeouts.java
@BeforeClass
public static void setup() {
  URLUtils.SOCKET_TIMEOUT = 5;
}