Java class org.apache.hadoop.hdfs.web.resources.PutOpParam — example source code

Project: hadoop    File: WebHdfsHandler.java
public void handle(ChannelHandlerContext ctx, HttpRequest req)
  throws IOException, URISyntaxException {
  String op = params.op();
  HttpMethod method = req.getMethod();
  if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
    && method == PUT) {
    onCreate(ctx);
  } else if (PostOpParam.Op.APPEND.name().equalsIgnoreCase(op)
    && method == POST) {
    onAppend(ctx);
  } else if (GetOpParam.Op.OPEN.name().equalsIgnoreCase(op)
    && method == GET) {
    onOpen(ctx);
  } else if(GetOpParam.Op.GETFILECHECKSUM.name().equalsIgnoreCase(op)
    && method == GET) {
    onGetFileChecksum(ctx);
  } else {
    throw new IllegalArgumentException("Invalid operation " + op);
  }
}
Project: aliyun-oss-hadoop-fs    File: WebHdfsHandler.java
public void handle(ChannelHandlerContext ctx, HttpRequest req)
  throws IOException, URISyntaxException {
  String op = params.op();
  HttpMethod method = req.method();
  if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
    && method == PUT) {
    onCreate(ctx);
  } else if (PostOpParam.Op.APPEND.name().equalsIgnoreCase(op)
    && method == POST) {
    onAppend(ctx);
  } else if (GetOpParam.Op.OPEN.name().equalsIgnoreCase(op)
    && method == GET) {
    onOpen(ctx);
  } else if(GetOpParam.Op.GETFILECHECKSUM.name().equalsIgnoreCase(op)
    && method == GET) {
    onGetFileChecksum(ctx);
  } else {
    throw new IllegalArgumentException("Invalid operation " + op);
  }
}
Project: big-c    File: WebHdfsHandler.java
public void handle(ChannelHandlerContext ctx, HttpRequest req)
  throws IOException, URISyntaxException {
  String op = params.op();
  HttpMethod method = req.getMethod();
  if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
    && method == PUT) {
    onCreate(ctx);
  } else if (PostOpParam.Op.APPEND.name().equalsIgnoreCase(op)
    && method == POST) {
    onAppend(ctx);
  } else if (GetOpParam.Op.OPEN.name().equalsIgnoreCase(op)
    && method == GET) {
    onOpen(ctx);
  } else if(GetOpParam.Op.GETFILECHECKSUM.name().equalsIgnoreCase(op)
    && method == GET) {
    onGetFileChecksum(ctx);
  } else {
    throw new IllegalArgumentException("Invalid operation " + op);
  }
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
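
Client side, the CREATE op used above is what FileSystem.create() issues over HTTP PUT when the file system URI uses the webhdfs scheme. A minimal, self-contained sketch; the NameNode host, port, and path are placeholders and not taken from the snippets above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class WebHdfsCreateExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical NameNode HTTP endpoint; replace with a real cluster address.
    FileSystem fs = FileSystem.get(URI.create("webhdfs://namenode.example.com:50070"), conf);
    // Internally this builds a PUT request carrying op=CREATE (PutOpParam.Op.CREATE).
    try (FSDataOutputStream out = fs.create(new Path("/tmp/webhdfs-example.txt"))) {
      out.writeBytes("hello via WebHDFS\n");
    }
    fs.close();
  }
}
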
Project: hadoop-plus    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDirectory());
}
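
The test above relies on the WebHDFS server matching the op= query parameter case-insensitively. For comparison, a minimal sketch of the same MKDIRS call issued as a raw REST request; the endpoint, user, and path are placeholders, and the mixed-case "mkDIrs" is accepted just like MKDIRS:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RawMkdirsExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical NameNode HTTP endpoint; MKDIRS is an HTTP PUT operation in WebHDFS.
    URL url = new URL(
        "http://namenode.example.com:50070/webhdfs/v1/test/testCaseInsensitive"
        + "?user.name=hdfs&Op=mkDIrs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.connect();
    // On success the NameNode responds with a small JSON body, e.g. {"boolean":true}.
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream()))) {
      for (String line; (line = in.readLine()) != null; ) {
        System.out.println(line);
      }
    } finally {
      conn.disconnect();
    }
  }
}
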
Project: hops    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced =
      new URL(url.toString().replace(op.toQueryString(), "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn =
      (HttpURLConnection) replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in =
      new BufferedReader(new InputStreamReader(conn.getInputStream()));
  for (String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDirectory());
}
Project: hadoop-TCP    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hadoop-TCP    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDirectory());
}
Project: hadoop-on-lustre    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDir());
}
Project: hadoop-on-lustre    File: WebHdfsFileSystem.java
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  //initialize URI path and query
  final String path = PATH_PREFIX
      + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  final URL url;
  if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
    // Skip adding delegation token for getting or renewing delegation token,
    // because these operations require kerberos authentication.
    url = getNamenodeURL(path, query);
  } else {
    url = getNamenodeURL(path, addDt2Query(query));
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
Project: hadoop-on-lustre    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hadoop-on-lustre    File: DatanodeWebHdfsMethods.java
/** Handle HTTP PUT request for the root. */
@PUT
@Path("/")
@Consumes({"*/*"})
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response putRoot(
    final InputStream in,
    @Context final UserGroupInformation ugi,
    @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
        final DelegationParam delegation,
    @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
        final PutOpParam op,
    @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
        final PermissionParam permission,
    @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
        final OverwriteParam overwrite,
    @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
        final BufferSizeParam bufferSize,
    @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
        final ReplicationParam replication,
    @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
        final BlockSizeParam blockSize
    ) throws IOException, InterruptedException {
  return put(in, ugi, delegation, ROOT, op, permission, overwrite, bufferSize,
      replication, blockSize);
}
Project: hardfs    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hardfs    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDirectory());
}
Project: hadoop-on-lustre2    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new FsPathRunner(op, f,
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hortonworks-extension    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDir());
}
Project: hortonworks-extension    File: WebHdfsFileSystem.java
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  //initialize URI path and query
  final String path = PATH_PREFIX
      + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  final URL url;
  if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
    // Skip adding delegation token for getting or renewing delegation token,
    // because these operations require kerberos authentication.
    url = getNamenodeURL(path, query);
  } else {
    url = getNamenodeURL(path, addDt2Query(query));
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
Project: hortonworks-extension    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hortonworks-extension    File: DatanodeWebHdfsMethods.java
/** Handle HTTP PUT request for the root. */
@PUT
@Path("/")
@Consumes({"*/*"})
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response putRoot(
    final InputStream in,
    @Context final UserGroupInformation ugi,
    @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
        final DelegationParam delegation,
    @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
        final PutOpParam op,
    @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
        final PermissionParam permission,
    @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
        final OverwriteParam overwrite,
    @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
        final BufferSizeParam bufferSize,
    @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
        final ReplicationParam replication,
    @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
        final BlockSizeParam blockSize
    ) throws IOException, InterruptedException {
  return put(in, ugi, delegation, ROOT, op, permission, overwrite, bufferSize,
      replication, blockSize);
}
Project: hortonworks-extension    File: TestWebHdfsFileSystemContract.java
public void testCaseInsensitive() throws IOException {
  final Path p = new Path("/test/testCaseInsensitive");
  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
  final PutOpParam.Op op = PutOpParam.Op.MKDIRS;

  //replace query with mixed-case letters
  final URL url = webhdfs.toUrl(op, p);
  WebHdfsFileSystem.LOG.info("url      = " + url);
  final URL replaced = new URL(url.toString().replace(op.toQueryString(),
      "Op=mkDIrs"));
  WebHdfsFileSystem.LOG.info("replaced = " + replaced);

  //connect with the replaced URL.
  final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
  conn.setRequestMethod(op.getType().toString());
  conn.connect();
  final BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream()));
  for(String line; (line = in.readLine()) != null; ) {
    WebHdfsFileSystem.LOG.info("> " + line);
  }

  //check if the command succeeds.
  assertTrue(fs.getFileStatus(p).isDir());
}
Project: hortonworks-extension    File: WebHdfsFileSystem.java
URL toUrl(final HttpOpParam.Op op, final Path fspath,
    final Param<?,?>... parameters) throws IOException {
  //initialize URI path and query
  final String path = PATH_PREFIX
      + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
  final String query = op.toQueryString()
      + '&' + new UserParam(ugi)
      + Param.toSortedString("&", parameters);
  final URL url;
  if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
      || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
    // Skip adding delegation token for getting or renewing delegation token,
    // because these operations require kerberos authentication.
    url = getNamenodeURL(path, query);
  } else {
    url = getNamenodeURL(path, addDt2Query(query));
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace("url=" + url);
  }
  return url;
}
Project: hortonworks-extension    File: WebHdfsFileSystem.java
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  statistics.incrementWriteOps(1);

  final HttpOpParam.Op op = PutOpParam.Op.CREATE;
  return new Runner(op, f, 
      new PermissionParam(applyUMask(permission)),
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize))
    .run()
    .write(bufferSize);
}
Project: hortonworks-extension    File: DatanodeWebHdfsMethods.java
/** Handle HTTP PUT request for the root. */
@PUT
@Path("/")
@Consumes({"*/*"})
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response putRoot(
    final InputStream in,
    @Context final UserGroupInformation ugi,
    @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
        final DelegationParam delegation,
    @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
        final PutOpParam op,
    @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
        final PermissionParam permission,
    @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
        final OverwriteParam overwrite,
    @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
        final BufferSizeParam bufferSize,
    @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
        final ReplicationParam replication,
    @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
        final BlockSizeParam blockSize
    ) throws IOException, InterruptedException {
  return put(in, ugi, delegation, ROOT, op, permission, overwrite, bufferSize,
      replication, blockSize);
}
Project: hadoop    File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testEncodedPathUrl() throws IOException, URISyntaxException{
  Configuration conf = new Configuration();

  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(
      uri, conf);

  // Construct a file path that contains percentage-encoded string
  String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
  Path fsPath = new Path(pathName);
  URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
  // We should get back the original file path after cycling back and decoding
  Assert.assertEquals(WebHdfsFileSystem.PATH_PREFIX + pathName,
      encodedPathUrl.toURI().getPath());
}
Project: aliyun-oss-hadoop-fs    File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testEncodedPathUrl() throws IOException, URISyntaxException{
  Configuration conf = new Configuration();

  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(
      uri, conf);

  // Construct a file path that contains percentage-encoded string
  String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
  Path fsPath = new Path(pathName);
  URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
  // We should get back the original file path after cycling back and decoding
  Assert.assertEquals(WebHdfsFileSystem.PATH_PREFIX + pathName,
      encodedPathUrl.toURI().getPath());
}
Project: aliyun-oss-hadoop-fs    File: TestWebHdfsDataLocality.java
@Test
public void testChooseDatanodeBeforeNamesystemInit() throws Exception {
  NameNode nn = mock(NameNode.class);
  when(nn.getNamesystem()).thenReturn(null);
  exception.expect(IOException.class);
  exception.expectMessage("Namesystem has not been intialized yet.");
  NamenodeWebHdfsMethods.chooseDatanode(nn, "/path", PutOpParam.Op.CREATE, 0,
      DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT, null);
}
Project: big-c    File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testEncodedPathUrl() throws IOException, URISyntaxException{
  Configuration conf = new Configuration();

  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(
      uri, conf);

  // Construct a file path that contains percentage-encoded string
  String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
  Path fsPath = new Path(pathName);
  URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
  // We should get back the original file path after cycling back and decoding
  Assert.assertEquals(WebHdfsFileSystem.PATH_PREFIX + pathName,
      encodedPathUrl.toURI().getPath());
}
Project: hadoop-2.6.0-cdh5.4.3    File: DatanodeWebHdfsMethods.java
/** Handle HTTP PUT request for the root. */
@PUT
@Path("/")
@Consumes({"*/*"})
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response putRoot(
    final InputStream in,
    @Context final UserGroupInformation ugi,
    @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
        final DelegationParam delegation,
    @QueryParam(NamenodeAddressParam.NAME)
    @DefaultValue(NamenodeAddressParam.DEFAULT)
        final NamenodeAddressParam namenode,
    @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
        final PutOpParam op,
    @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
        final PermissionParam permission,
    @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
        final OverwriteParam overwrite,
    @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
        final BufferSizeParam bufferSize,
    @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
        final ReplicationParam replication,
    @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
        final BlockSizeParam blockSize
    ) throws IOException, InterruptedException {
  return put(in, ugi, delegation, namenode, ROOT, op, permission,
          overwrite, bufferSize, replication, blockSize);
}
Project: hadoop-2.6.0-cdh5.4.3    File: DatanodeWebHdfsMethods.java
/** Handle HTTP PUT request. */
@PUT
@Path("{" + UriFsPathParam.NAME + ":.*}")
@Consumes({"*/*"})
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response put(
    final InputStream in,
    @Context final UserGroupInformation ugi,
    @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
        final DelegationParam delegation,
    @QueryParam(NamenodeAddressParam.NAME)
    @DefaultValue(NamenodeAddressParam.DEFAULT)
        final NamenodeAddressParam namenode,
    @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
    @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
        final PutOpParam op,
    @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
        final PermissionParam permission,
    @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
        final OverwriteParam overwrite,
    @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
        final BufferSizeParam bufferSize,
    @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
        final ReplicationParam replication,
    @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
        final BlockSizeParam blockSize
    ) throws IOException, InterruptedException {

  final String nnId = namenode.getValue();
  init(ugi, delegation, nnId, path, op, permission,
      overwrite, bufferSize, replication, blockSize);

  return ugi.doAs(new PrivilegedExceptionAction<Response>() {
    @Override
    public Response run() throws IOException, URISyntaxException {
      return put(in, nnId, path.getAbsolutePath(), op,
              permission, overwrite, bufferSize, replication, blockSize);
    }
  });
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testEncodedPathUrl() throws IOException, URISyntaxException{
  Configuration conf = new Configuration();

  final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(
      uri, conf);

  // Construct a file path that contains percentage-encoded string
  String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
  Path fsPath = new Path(pathName);
  URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
  // We should get back the original file path after cycling back and decoding
  Assert.assertEquals(WebHdfsFileSystem.PATH_PREFIX + pathName,
      encodedPathUrl.toURI().getPath());
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.MKDIRS;
  final Map<?, ?> json = run(op, f,
      new PermissionParam(applyUMask(permission)));
  return (Boolean)json.get("boolean");
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
/**
 * Create a symlink pointing to the destination path.
 * @see org.apache.hadoop.fs.Hdfs#createSymlink(Path, Path, boolean) 
 */
public void createSymlink(Path destination, Path f, boolean createParent
    ) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.CREATESYMLINK;
  run(op, f, new DestinationParam(makeQualified(destination).toUri().getPath()),
      new CreateParentParam(createParent));
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public boolean rename(final Path src, final Path dst) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.RENAME;
  final Map<?, ?> json = run(op, src,
      new DestinationParam(makeQualified(dst).toUri().getPath()));
  return (Boolean)json.get("boolean");
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@SuppressWarnings("deprecation")
@Override
public void rename(final Path src, final Path dst,
    final Options.Rename... options) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.RENAME;
  run(op, src, new DestinationParam(makeQualified(dst).toUri().getPath()),
      new RenameOptionSetParam(options));
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public void setOwner(final Path p, final String owner, final String group
    ) throws IOException {
  if (owner == null && group == null) {
    throw new IOException("owner == null && group == null");
  }

  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.SETOWNER;
  run(op, p, new OwnerParam(owner), new GroupParam(group));
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public void setPermission(final Path p, final FsPermission permission
    ) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.SETPERMISSION;
  run(op, p, new PermissionParam(permission));
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public boolean setReplication(final Path p, final short replication
   ) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.SETREPLICATION;
  final Map<?, ?> json = run(op, p, new ReplicationParam(replication));
  return (Boolean)json.get("boolean");
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
@Override
public void setTimes(final Path p, final long mtime, final long atime
    ) throws IOException {
  statistics.incrementWriteOps(1);
  final HttpOpParam.Op op = PutOpParam.Op.SETTIMES;
  run(op, p, new ModificationTimeParam(mtime), new AccessTimeParam(atime));
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
private synchronized long renewDelegationToken(final Token<?> token
    ) throws IOException {
  final HttpOpParam.Op op = PutOpParam.Op.RENEWDELEGATIONTOKEN;
  TokenArgumentParam dtargParam = new TokenArgumentParam(
      token.encodeToUrlString());
  final Map<?, ?> m = run(op, null, dtargParam);
  return (Long) m.get("long");
}
Project: hadoop-plus    File: WebHdfsFileSystem.java
private synchronized void cancelDelegationToken(final Token<?> token
    ) throws IOException {
  final HttpOpParam.Op op = PutOpParam.Op.CANCELDELEGATIONTOKEN;
  TokenArgumentParam dtargParam = new TokenArgumentParam(
      token.encodeToUrlString());
  run(op, null, dtargParam);
}
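
All of the operations used in these snippets (CREATE, MKDIRS, RENAME, CREATESYMLINK, SETOWNER, SETPERMISSION, SETREPLICATION, SETTIMES, RENEWDELEGATIONTOKEN, CANCELDELEGATIONTOKEN) are values of PutOpParam.Op, which is what ties them to the HTTP PUT method. A small sketch that lists them, assuming a Hadoop HDFS artifact providing org.apache.hadoop.hdfs.web.resources is on the classpath:

import org.apache.hadoop.hdfs.web.resources.PutOpParam;

public class ListPutOps {
  public static void main(String[] args) {
    for (PutOpParam.Op op : PutOpParam.Op.values()) {
      // Each PutOpParam.Op reports PUT as its HTTP type; toQueryString() yields "op=<NAME>".
      System.out.println(op.toQueryString() + " -> " + op.getType()
          + ", expects HTTP " + op.getExpectedHttpResponseCode());
    }
  }
}
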