Example source code for the Java class org.apache.hadoop.hdfs.web.URLConnectionFactory
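
org.apache.hadoop.hdfs.web.URLConnectionFactory builds HttpURLConnection instances with HDFS-specific timeout, SSL, and SPNEGO settings. As a quick orientation before the per-project snippets, here is a minimal, self-contained sketch (not taken from any of the projects listed; the endpoint URL and the response handling are illustrative assumptions) showing the two calls that recur below: newDefaultURLConnectionFactory(Configuration) to build the factory and openConnection(URL, boolean) to open a connection.

import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;

public class URLConnectionFactoryExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Build a factory that applies the default connect/read timeouts
    // and any SSL settings found in the Configuration.
    URLConnectionFactory factory =
        URLConnectionFactory.newDefaultURLConnectionFactory(conf);

    // Open a connection; the boolean argument enables SPNEGO (Kerberos)
    // authentication, mirroring factory.openConnection(url, true) in the
    // DelegationTokenFetcher code below.
    URL url = new URL("http://localhost:50070/jmx");  // placeholder endpoint
    HttpURLConnection conn =
        (HttpURLConnection) factory.openConnection(url, false);
    try {
      System.out.println("HTTP response code: " + conn.getResponseCode());
    } finally {
      conn.disconnect();
    }
  }
}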

Project: hadoop    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: aliyun-oss-hadoop-fs    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  conf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getClientSSLConfigFileName());
  conf.set(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
      KeyStoreTestUtil.getServerSSLConfigFileName());
}
Project: aliyun-oss-hadoop-fs    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsServerConstants.INVALID_TXID, HdfsServerConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: big-c    File: DelegationTokenFetcher.java
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
    throws IOException, AuthenticationException {
  HttpURLConnection conn = null;

  try {
    conn = (HttpURLConnection) factory.openConnection(url, true);
    if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
      String msg = conn.getResponseMessage();

      throw new IOException("Error when dealing remote token: " + msg);
    }
  } catch (IOException ie) {
    LOG.info("Error when dealing remote token:", ie);
    IOException e = getExceptionFromResponse(conn);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  }
  return conn;
}
Project: big-c    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: hadoop-2.6.0-cdh5.4.3    File: DelegationTokenFetcher.java
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
    throws IOException, AuthenticationException {
  HttpURLConnection conn = null;

  try {
    conn = (HttpURLConnection) factory.openConnection(url, true);
    if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
      String msg = conn.getResponseMessage();

      throw new IOException("Error when dealing remote token: " + msg);
    }
  } catch (IOException ie) {
    LOG.info("Error when dealing remote token:", ie);
    IOException e = getExceptionFromResponse(conn);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  }
  return conn;
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: FlexMap    File: DelegationTokenFetcher.java
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
    throws IOException, AuthenticationException {
  HttpURLConnection conn = null;

  try {
    conn = (HttpURLConnection) factory.openConnection(url, true);
    if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
      String msg = conn.getResponseMessage();

      throw new IOException("Error when dealing remote token: " + msg);
    }
  } catch (IOException ie) {
    LOG.info("Error when dealing remote token:", ie);
    IOException e = getExceptionFromResponse(conn);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  }
  return conn;
}
Project: FlexMap    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: hadoop-on-lustre2    File: DelegationTokenFetcher.java
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
    throws IOException, AuthenticationException {
  HttpURLConnection conn = null;

  try {
    conn = (HttpURLConnection) factory.openConnection(url, true);
    if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
      String msg = conn.getResponseMessage();

      throw new IOException("Error when dealing remote token: " + msg);
    }
  } catch (IOException ie) {
    LOG.info("Error when dealing remote token:", ie);
    IOException e = getExceptionFromResponse(conn);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  }
  return conn;
}
Project: hadoop-on-lustre2    File: TestEditLogFileInputStream.java
@Test
public void testReadURL() throws Exception {
  HttpURLConnection conn = mock(HttpURLConnection.class);
  doReturn(new ByteArrayInputStream(FAKE_LOG_DATA)).when(conn).getInputStream();
  doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
  doReturn(Integer.toString(FAKE_LOG_DATA.length)).when(conn).getHeaderField("Content-Length");

  URLConnectionFactory factory = mock(URLConnectionFactory.class);
  doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
      anyBoolean());

  URL url = new URL("http://localhost/fakeLog");
  EditLogInputStream elis = EditLogFileInputStream.fromUrl(factory, url,
      HdfsConstants.INVALID_TXID, HdfsConstants.INVALID_TXID, false);
  // Read the edit log and verify that we got all of the data.
  EnumMap<FSEditLogOpCodes, Holder<Integer>> counts = FSImageTestUtil
      .countEditLogOpTypes(elis);
  assertThat(counts.get(FSEditLogOpCodes.OP_ADD).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_SET_GENSTAMP_V1).held, is(1));
  assertThat(counts.get(FSEditLogOpCodes.OP_CLOSE).held, is(1));

  // Check that length header was picked up.
  assertEquals(FAKE_LOG_DATA.length, elis.length());
  elis.close();
}
Project: hadoop    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: hadoop    File: DelegationTokenFetcher.java
/**
 * Cancel a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
Project: hadoop    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: hadoop    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
Project: aliyun-oss-hadoop-fs    File: ConfRefreshTokenBasedAccessTokenProvider.java
void refresh() throws IOException {
  try {
    OkHttpClient client = new OkHttpClient();
    client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
        TimeUnit.MILLISECONDS);
    client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
              TimeUnit.MILLISECONDS);

    String bodyString = Utils.postBody(GRANT_TYPE, REFRESH_TOKEN,
        REFRESH_TOKEN, refreshToken,
        CLIENT_ID, clientId);

    RequestBody body = RequestBody.create(URLENCODED, bodyString);

    Request request = new Request.Builder()
        .url(refreshURL)
        .post(body)
        .build();
    Response responseBody = client.newCall(request).execute();

    if (responseBody.code() != HttpStatus.SC_OK) {
      throw new IllegalArgumentException("Received invalid http response: "
          + responseBody.code() + ", text = " + responseBody.toString());
    }

    ObjectMapper mapper = new ObjectMapper();
    Map<?, ?> response = mapper.reader(Map.class)
        .readValue(responseBody.body().string());


    String newExpiresIn = response.get(EXPIRES_IN).toString();
    accessTokenTimer.setExpiresIn(newExpiresIn);

    accessToken = response.get(ACCESS_TOKEN).toString();
  } catch (Exception e) {
    throw new IOException("Exception while refreshing access token", e);
  }
}
Project: aliyun-oss-hadoop-fs    File: CredentialBasedAccessTokenProvider.java
void refresh() throws IOException {
  try {
    OkHttpClient client = new OkHttpClient();
    client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
        TimeUnit.MILLISECONDS);
    client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
        TimeUnit.MILLISECONDS);

    String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
        GRANT_TYPE, CLIENT_CREDENTIALS,
        CLIENT_ID, clientId);

    RequestBody body = RequestBody.create(URLENCODED, bodyString);

    Request request = new Request.Builder()
        .url(refreshURL)
        .post(body)
        .build();
    Response responseBody = client.newCall(request).execute();

    if (responseBody.code() != HttpStatus.SC_OK) {
      throw new IllegalArgumentException("Received invalid http response: "
          + responseBody.code() + ", text = " + responseBody.toString());
    }

    ObjectMapper mapper = new ObjectMapper();
    Map<?, ?> response = mapper.reader(Map.class)
        .readValue(responseBody.body().string());

    String newExpiresIn = response.get(EXPIRES_IN).toString();
    timer.setExpiresIn(newExpiresIn);

    accessToken = response.get(ACCESS_TOKEN).toString();

  } catch (Exception e) {
    throw new IOException("Unable to obtain access token from credential", e);
  }
}
Project: aliyun-oss-hadoop-fs    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: aliyun-oss-hadoop-fs    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: big-c    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: big-c    File: DelegationTokenFetcher.java
/**
 * Cancel a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
Project: big-c    File: DelegationTokenFetcher.java
/**
 * Renew a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to renew
 * @return the Date that the token will expire next.
 * @throws IOException
 * @throws AuthenticationException
 */
static public long renewDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
      .append(RenewDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());

  HttpURLConnection connection = null;
  BufferedReader in = null;
  try {
    connection = run(factory, new URL(buf.toString()));
    in = new BufferedReader(new InputStreamReader(
        connection.getInputStream(), Charsets.UTF_8));
    long result = Long.parseLong(in.readLine());
    return result;
  } catch (IOException ie) {
    LOG.info("error in renew over HTTP", ie);
    IOException e = getExceptionFromResponse(connection);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  } finally {
    IOUtils.cleanup(LOG, in);
    if (connection != null) {
      connection.disconnect();
    }
  }
}
Project: big-c    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: big-c    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
Project: hadoop-2.6.0-cdh5.4.3    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: hadoop-2.6.0-cdh5.4.3    File: DelegationTokenFetcher.java
/**
 * Cancel a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
Project: hadoop-2.6.0-cdh5.4.3    File: DelegationTokenFetcher.java
/**
 * Renew a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to renew
 * @return the Date that the token will expire next.
 * @throws IOException
 * @throws AuthenticationException
 */
static public long renewDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
      .append(RenewDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());

  HttpURLConnection connection = null;
  BufferedReader in = null;
  try {
    connection = run(factory, new URL(buf.toString()));
    in = new BufferedReader(new InputStreamReader(
        connection.getInputStream(), Charsets.UTF_8));
    long result = Long.parseLong(in.readLine());
    return result;
  } catch (IOException ie) {
    LOG.info("error in renew over HTTP", ie);
    IOException e = getExceptionFromResponse(connection);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  } finally {
    IOUtils.cleanup(LOG, in);
    if (connection != null) {
      connection.disconnect();
    }
  }
}
Project: hadoop-2.6.0-cdh5.4.3    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
Project: FlexMap    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: FlexMap    File: DelegationTokenFetcher.java
/**
 * Cancel a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
Project: FlexMap    File: DelegationTokenFetcher.java
/**
 * Renew a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to renew
 * @return the Date that the token will expire next.
 * @throws IOException
 * @throws AuthenticationException
 */
static public long renewDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
      .append(RenewDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());

  HttpURLConnection connection = null;
  BufferedReader in = null;
  try {
    connection = run(factory, new URL(buf.toString()));
    in = new BufferedReader(new InputStreamReader(
        connection.getInputStream(), Charsets.UTF_8));
    long result = Long.parseLong(in.readLine());
    return result;
  } catch (IOException ie) {
    LOG.info("error in renew over HTTP", ie);
    IOException e = getExceptionFromResponse(connection);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  } finally {
    IOUtils.cleanup(LOG, in);
    if (connection != null) {
      connection.disconnect();
    }
  }
}
Project: FlexMap    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: FlexMap    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
Project: hadoop-on-lustre2    File: DFSck.java
public DFSck(Configuration conf, PrintStream out) throws IOException {
  super(conf);
  this.ugi = UserGroupInformation.getCurrentUser();
  this.out = out;
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
}
Project: hadoop-on-lustre2    File: DelegationTokenFetcher.java
/**
 * Cancel a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to cancel
 * @throws IOException
 * @throws AuthenticationException
 */
static public void cancelDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
      .append(CancelDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());
  HttpURLConnection conn = run(factory, new URL(buf.toString()));
  conn.disconnect();
}
Project: hadoop-on-lustre2    File: DelegationTokenFetcher.java
/**
 * Renew a Delegation Token.
 * @param nnAddr the NameNode's address
 * @param tok the token to renew
 * @return the Date that the token will expire next.
 * @throws IOException
 * @throws AuthenticationException
 */
static public long renewDelegationToken(URLConnectionFactory factory,
    URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
    AuthenticationException {
  StringBuilder buf = new StringBuilder(nnAddr.toString())
      .append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
      .append(RenewDelegationTokenServlet.TOKEN).append("=")
      .append(tok.encodeToUrlString());

  HttpURLConnection connection = null;
  BufferedReader in = null;
  try {
    connection = run(factory, new URL(buf.toString()));
    in = new BufferedReader(new InputStreamReader(
        connection.getInputStream(), Charsets.UTF_8));
    long result = Long.parseLong(in.readLine());
    return result;
  } catch (IOException ie) {
    LOG.info("error in renew over HTTP", ie);
    IOException e = getExceptionFromResponse(connection);

    if (e != null) {
      LOG.info("rethrowing exception from HTTP request: "
          + e.getLocalizedMessage());
      throw e;
    }
    throw ie;
  } finally {
    IOUtils.cleanup(LOG, in);
    if (connection != null) {
      connection.disconnect();
    }
  }
}
Project: hadoop-on-lustre2    File: QuorumJournalManager.java
QuorumJournalManager(Configuration conf,
    URI uri, NamespaceInfo nsInfo,
    AsyncLogger.Factory loggerFactory) throws IOException {
  Preconditions.checkArgument(conf != null, "must be configured");

  this.conf = conf;
  this.uri = uri;
  this.nsInfo = nsInfo;
  this.loggers = new AsyncLoggerSet(createLoggers(loggerFactory));
  this.connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);

  // Configure timeouts.
  this.startSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_START_SEGMENT_TIMEOUT_DEFAULT);
  this.prepareRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_PREPARE_RECOVERY_TIMEOUT_DEFAULT);
  this.acceptRecoveryTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_ACCEPT_RECOVERY_TIMEOUT_DEFAULT);
  this.finalizeSegmentTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_FINALIZE_SEGMENT_TIMEOUT_DEFAULT);
  this.selectInputStreamsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_SELECT_INPUT_STREAMS_TIMEOUT_DEFAULT);
  this.getJournalStateTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_GET_JOURNAL_STATE_TIMEOUT_DEFAULT);
  this.newEpochTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_NEW_EPOCH_TIMEOUT_DEFAULT);
  this.writeTxnsTimeoutMs = conf.getInt(
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_KEY,
      DFSConfigKeys.DFS_QJOURNAL_WRITE_TXNS_TIMEOUT_DEFAULT);
}
Project: hadoop-on-lustre2    File: TestNameNodeHttpServer.java
@BeforeClass
public static void setUp() throws Exception {
  File base = new File(BASEDIR);
  FileUtil.fullyDelete(base);
  base.mkdirs();
  conf = new Configuration();
  keystoresDir = new File(BASEDIR).getAbsolutePath();
  sslConfDir = KeyStoreTestUtil.getClasspathDir(TestNameNodeHttpServer.class);
  KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
  connectionFactory = URLConnectionFactory
      .newDefaultURLConnectionFactory(conf);
}
Project: hadoop    File: EditLogFileInputStream.java
public URLLog(URLConnectionFactory connectionFactory, URL url) {
  this.connectionFactory = connectionFactory;
  this.isSpnegoEnabled = UserGroupInformation.isSecurityEnabled();
  this.url = url;
}