Example source code for the Java class org.apache.hadoop.hdfs.protocol.proto.HdfsProtos

Project: hadoop    File: FSDirectory.java
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
                                      EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Project: hadoop    File: PBHelper.java
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  ContentSummary.Builder builder = new ContentSummary.Builder();
  builder.length(cs.getLength()).
      fileCount(cs.getFileCount()).
      directoryCount(cs.getDirectoryCount()).
      quota(cs.getQuota()).
      spaceConsumed(cs.getSpaceConsumed()).
      spaceQuota(cs.getSpaceQuota());
  if (cs.hasTypeQuotaInfos()) {
    for (HdfsProtos.StorageTypeQuotaInfoProto info :
        cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
      StorageType type = PBHelper.convertStorageType(info.getType());
      builder.typeConsumed(type, info.getConsumed());
      builder.typeQuota(type, info.getQuota());
    }
  }
  return builder.build();
}
Project: hadoop    File: PBHelper.java
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
  builder.setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
      setSpaceQuota(cs.getSpaceQuota());

  if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
    HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
        HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
    for (StorageType t: StorageType.getTypesSupportingQuota()) {
      HdfsProtos.StorageTypeQuotaInfoProto info =
          HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
              setType(convertStorageType(t)).
              setConsumed(cs.getTypeConsumed(t)).
              setQuota(cs.getTypeQuota(t)).
              build();
      isb.addTypeQuotaInfo(info);
    }
    builder.setTypeQuotaInfos(isb);
  }
  return builder.build();
}
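A minimal round-trip sketch for the two ContentSummary converters above, assuming PBHelper sits in its usual org.apache.hadoop.hdfs.protocolPB package; the wrapper class and the sample values are placeholders, not part of any of the listed projects.

import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;

public class ContentSummaryRoundTrip {
  public static void main(String[] args) {
    // Build a ContentSummary with a few representative values.
    ContentSummary original = new ContentSummary.Builder()
        .length(4096L)
        .fileCount(3L)
        .directoryCount(1L)
        .quota(100L)
        .spaceConsumed(12288L)
        .spaceQuota(1024L * 1024L)
        .build();

    // Convert to the protobuf form and back using the converters shown above.
    ContentSummaryProto proto = PBHelper.convert(original);
    ContentSummary restored = PBHelper.convert(proto);

    // The scalar fields survive the round trip unchanged.
    System.out.println(original.getLength() == restored.getLength());
    System.out.println(original.getSpaceConsumed() == restored.getSpaceConsumed());
  }
}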
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static CipherOption convert(HdfsProtos.CipherOptionProto proto) {
  if (proto != null) {
    CipherSuite suite = null;
    if (proto.getSuite() != null) {
      suite = convert(proto.getSuite());
    }
    byte[] inKey = null;
    if (proto.getInKey() != null) {
      inKey = proto.getInKey().toByteArray();
    }
    byte[] inIv = null;
    if (proto.getInIv() != null) {
      inIv = proto.getInIv().toByteArray();
    }
    byte[] outKey = null;
    if (proto.getOutKey() != null) {
      outKey = proto.getOutKey().toByteArray();
    }
    byte[] outIv = null;
    if (proto.getOutIv() != null) {
      outIv = proto.getOutIv().toByteArray();
    }
    return new CipherOption(suite, inKey, inIv, outKey, outIv);
  }
  return null;
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.CipherOptionProto convert(CipherOption option) {
  if (option != null) {
    HdfsProtos.CipherOptionProto.Builder builder =
        HdfsProtos.CipherOptionProto.newBuilder();
    if (option.getCipherSuite() != null) {
      builder.setSuite(convert(option.getCipherSuite()));
    }
    if (option.getInKey() != null) {
      builder.setInKey(getByteString(option.getInKey()));
    }
    if (option.getInIv() != null) {
      builder.setInIv(getByteString(option.getInIv()));
    }
    if (option.getOutKey() != null) {
      builder.setOutKey(getByteString(option.getOutKey()));
    }
    if (option.getOutIv() != null) {
      builder.setOutIv(getByteString(option.getOutIv()));
    }
    return builder.build();
  }
  return null;
}
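A minimal round-trip sketch for the CipherOption converters above; the wrapper class and the 16-byte key/IV arrays are placeholders, and PBHelperClient is assumed to be in org.apache.hadoop.hdfs.protocolPB.

import org.apache.hadoop.crypto.CipherOption;
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;

public class CipherOptionRoundTrip {
  public static void main(String[] args) {
    // Placeholder key/IV material (16 bytes each for AES/CTR/NoPadding).
    byte[] key = new byte[16];
    byte[] iv = new byte[16];

    CipherOption original =
        new CipherOption(CipherSuite.AES_CTR_NOPADDING, key, iv, key, iv);

    // To protobuf and back; null fields would simply be left unset.
    HdfsProtos.CipherOptionProto proto = PBHelperClient.convert(original);
    CipherOption restored = PBHelperClient.convert(proto);

    System.out.println(restored.getCipherSuite() == CipherSuite.AES_CTR_NOPADDING);
    System.out.println(restored.getInKey().length);  // 16
  }
}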
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  ContentSummary.Builder builder = new ContentSummary.Builder();
  builder.length(cs.getLength()).
      fileCount(cs.getFileCount()).
      directoryCount(cs.getDirectoryCount()).
      quota(cs.getQuota()).
      spaceConsumed(cs.getSpaceConsumed()).
      spaceQuota(cs.getSpaceQuota());
  if (cs.hasTypeQuotaInfos()) {
    for (HdfsProtos.StorageTypeQuotaInfoProto info :
        cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
      StorageType type = convertStorageType(info.getType());
      builder.typeConsumed(type, info.getConsumed());
      builder.typeQuota(type, info.getQuota());
    }
  }
  return builder.build();
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
  builder.setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
      setSpaceQuota(cs.getSpaceQuota());

  if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
    HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
        HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
    for (StorageType t: StorageType.getTypesSupportingQuota()) {
      HdfsProtos.StorageTypeQuotaInfoProto info =
          HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
              setType(convertStorageType(t)).
              setConsumed(cs.getTypeConsumed(t)).
              setQuota(cs.getTypeQuota(t)).
              build();
      isb.addTypeQuotaInfo(info);
    }
    builder.setTypeQuotaInfos(isb);
  }
  return builder.build();
}
Project: aliyun-oss-hadoop-fs    File: FSDirectory.java
private void addEncryptionZone(INodeWithAdditionalFields inode,
    XAttrFeature xaf) {
  if (xaf == null) {
    return;
  }
  XAttr xattr = xaf.getXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE);
  if (xattr == null) {
    return;
  }
  try {
    final HdfsProtos.ZoneEncryptionInfoProto ezProto =
        HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
            xattr.getValue());
    ezManager.unprotectedAddEncryptionZone(inode.getId(),
        PBHelperClient.convert(ezProto.getSuite()),
        PBHelperClient.convert(ezProto.getCryptoProtocolVersion()),
        ezProto.getKeyName());
  } catch (InvalidProtocolBufferException e) {
    NameNode.LOG.warn("Error parsing protocol buffer of " +
        "EZ XAttr " + xattr.getName() + " dir:" + inode.getFullPathName());
  }
}
Project: aliyun-oss-hadoop-fs    File: FSDirEncryptionZoneOp.java
/**
 * Set the FileEncryptionInfo for an INode.
 *
 * @param fsd  the FSDirectory
 * @param src  the path of the file whose FileEncryptionInfo is being set
 * @param info the file encryption information
 * @throws IOException if setting the extended attribute fails
 */
static void setFileEncryptionInfo(final FSDirectory fsd, final String src,
    final FileEncryptionInfo info) throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelperClient.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);
  fsd.writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(fsd, src, xAttrs,
                                      EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    fsd.writeUnlock();
  }
}
Project: big-c    File: FSDirectory.java
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    FSDirXAttrOp.unprotectedSetXAttrs(this, src, xAttrs,
                                      EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Project: big-c    File: PBHelper.java
public static ContentSummary convert(ContentSummaryProto cs) {
  if (cs == null) return null;
  ContentSummary.Builder builder = new ContentSummary.Builder();
  builder.length(cs.getLength()).
      fileCount(cs.getFileCount()).
      directoryCount(cs.getDirectoryCount()).
      quota(cs.getQuota()).
      spaceConsumed(cs.getSpaceConsumed()).
      spaceQuota(cs.getSpaceQuota());
  if (cs.hasTypeQuotaInfos()) {
    for (HdfsProtos.StorageTypeQuotaInfoProto info :
        cs.getTypeQuotaInfos().getTypeQuotaInfoList()) {
      StorageType type = PBHelper.convertStorageType(info.getType());
      builder.typeConsumed(type, info.getConsumed());
      builder.typeQuota(type, info.getQuota());
    }
  }
  return builder.build();
}
Project: big-c    File: PBHelper.java
public static ContentSummaryProto convert(ContentSummary cs) {
  if (cs == null) return null;
  ContentSummaryProto.Builder builder = ContentSummaryProto.newBuilder();
  builder.setLength(cs.getLength()).
      setFileCount(cs.getFileCount()).
      setDirectoryCount(cs.getDirectoryCount()).
      setQuota(cs.getQuota()).
      setSpaceConsumed(cs.getSpaceConsumed()).
      setSpaceQuota(cs.getSpaceQuota());

  if (cs.isTypeQuotaSet() || cs.isTypeConsumedAvailable()) {
    HdfsProtos.StorageTypeQuotaInfosProto.Builder isb =
        HdfsProtos.StorageTypeQuotaInfosProto.newBuilder();
    for (StorageType t: StorageType.getTypesSupportingQuota()) {
      HdfsProtos.StorageTypeQuotaInfoProto info =
          HdfsProtos.StorageTypeQuotaInfoProto.newBuilder().
              setType(convertStorageType(t)).
              setConsumed(cs.getTypeConsumed(t)).
              setQuota(cs.getTypeQuota(t)).
              build();
      isb.addTypeQuotaInfo(info);
    }
    builder.setTypeQuotaInfos(isb);
  }
  return builder.build();
}
Project: hadoop-2.6.0-cdh5.4.3    File: FSDirectory.java
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    unprotectedSetXAttrs(src, xAttrs, EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Project: FlexMap    File: FSDirectory.java
/**
 * Set the FileEncryptionInfo for an INode.
 */
void setFileEncryptionInfo(String src, FileEncryptionInfo info)
    throws IOException {
  // Make the PB for the xattr
  final HdfsProtos.PerFileEncryptionInfoProto proto =
      PBHelper.convertPerFileEncInfo(info);
  final byte[] protoBytes = proto.toByteArray();
  final XAttr fileEncryptionAttr =
      XAttrHelper.buildXAttr(CRYPTO_XATTR_FILE_ENCRYPTION_INFO, protoBytes);
  final List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
  xAttrs.add(fileEncryptionAttr);

  writeLock();
  try {
    unprotectedSetXAttrs(src, xAttrs, EnumSet.of(XAttrSetFlag.CREATE));
  } finally {
    writeUnlock();
  }
}
Project: hops    File: PBHelper.java
public static LocatedBlockProto convert(LocatedBlock b) {
  if (b == null) {
    return null;
  }
  Builder builder = LocatedBlockProto.newBuilder();
  DatanodeInfo[] locs = b.getLocations();
  for (int i = 0; i < locs.length; i++) {
    builder.addLocs(i, PBHelper.convert(locs[i]));
    // For compatibility with newer clients
    builder.addStorageIDs("HopsFS_Hack_Storage_ID");
    builder.addStorageTypes(HdfsProtos.StorageTypeProto.DISK);
    builder.addIsCached(false);
  }

  builder = builder.setB(PBHelper.convert(b.getBlock()))
      .setBlockToken(PBHelper.convert(b.getBlockToken()))
      .setCorrupt(b.isCorrupt()).setOffset(b.getStartOffset());
  if (b.isPhantomBlock() && b.isDataSet()) {
    builder.setData(ByteString.copyFrom(b.getData()));
  }
  return builder.build();
}
Project: hadoop-oss    File: KeyRotationBC.java
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
        FileEncryptionInfo info) {
    if (info == null) {
        return null;
    }
    return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
            .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
            .setIv(getByteString(info.getIV()))
            .setEzKeyVersionName(info.getEzKeyVersionName())
            .build();
}
Project: hadoop    File: FSDirectory.java
/**
 * This method is always called with writeLock of FSDirectory held.
 */
public final void addToInodeMap(INode inode) {
  if (inode instanceof INodeWithAdditionalFields) {
    inodeMap.put(inode);
    if (!inode.isSymlink()) {
      final XAttrFeature xaf = inode.getXAttrFeature();
      if (xaf != null) {
        final List<XAttr> xattrs = xaf.getXAttrs();
        for (XAttr xattr : xattrs) {
          final String xaName = XAttrHelper.getPrefixName(xattr);
          if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
            try {
              final HdfsProtos.ZoneEncryptionInfoProto ezProto =
                  HdfsProtos.ZoneEncryptionInfoProto.parseFrom(
                      xattr.getValue());
              ezManager.unprotectedAddEncryptionZone(inode.getId(),
                  PBHelper.convert(ezProto.getSuite()),
                  PBHelper.convert(ezProto.getCryptoProtocolVersion()),
                  ezProto.getKeyName());
            } catch (InvalidProtocolBufferException e) {
              NameNode.LOG.warn("Error parsing protocol buffer of " +
                  "EZ XAttr " + xattr.getName());
            }
          }
        }
      }
    }
  }
}
Project: hadoop    File: PBHelper.java
/**
 * Copy the entries of {@code dnInfos} into a new list, starting at
 * {@code startIdx}.
 */
public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
    DatanodeInfo[] dnInfos, int startIdx) {
  if (dnInfos == null)
    return null;
  ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists
      .newArrayListWithCapacity(dnInfos.length);
  for (int i = startIdx; i < dnInfos.length; i++) {
    protos.add(convert(dnInfos[i]));
  }
  return protos;
}
Project: hadoop    File: PBHelper.java
public static HdfsProtos.FileEncryptionInfoProto convert(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.FileEncryptionInfoProto.newBuilder()
      .setSuite(convert(info.getCipherSuite()))
      .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .setKeyName(info.getKeyName())
      .build();
}
Project: hadoop    File: PBHelper.java
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
Project: hadoop    File: PBHelper.java
public static HdfsProtos.ZoneEncryptionInfoProto convert(
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (suite == null || version == null || keyName == null) {
    return null;
  }
  return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
      .setSuite(convert(suite))
      .setCryptoProtocolVersion(convert(version))
      .setKeyName(keyName)
      .build();
}
Project: hadoop    File: PBHelper.java
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Project: hadoop    File: PBHelper.java
public static FileEncryptionInfo convert(
    HdfsProtos.PerFileEncryptionInfoProto fileProto,
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (fileProto == null || suite == null || version == null ||
      keyName == null) {
    return null;
  }
  byte[] key = fileProto.getKey().toByteArray();
  byte[] iv = fileProto.getIv().toByteArray();
  String ezKeyVersionName = fileProto.getEzKeyVersionName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
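The converters above split the encryption metadata into two protobuf halves: ZoneEncryptionInfoProto carries the zone-wide suite, protocol version, and key name, while PerFileEncryptionInfoProto carries only the per-file key material; the four-argument convert recombines them into a full FileEncryptionInfo. A minimal sketch of that flow, assuming the usual org.apache.hadoop.hdfs.protocolPB location of PBHelper (the wrapper class, key bytes, and key names are placeholders):

import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;

public class EncryptionInfoConversion {
  public static void main(String[] args) {
    // Placeholder encrypted data encryption key (EDEK) and IV.
    byte[] edek = new byte[16];
    byte[] iv = new byte[16];

    FileEncryptionInfo info = new FileEncryptionInfo(
        CipherSuite.AES_CTR_NOPADDING,
        CryptoProtocolVersion.ENCRYPTION_ZONES,
        edek, iv, "myKey", "myKey@0");

    // Per-file half: key, IV, and EZ key version name only.
    HdfsProtos.PerFileEncryptionInfoProto fileProto =
        PBHelper.convertPerFileEncInfo(info);

    // Zone half: suite, protocol version, and key name shared by the zone.
    HdfsProtos.ZoneEncryptionInfoProto zoneProto = PBHelper.convert(
        info.getCipherSuite(), info.getCryptoProtocolVersion(), info.getKeyName());

    // Recombine the two halves into a full FileEncryptionInfo.
    FileEncryptionInfo restored = PBHelper.convert(fileProto,
        PBHelper.convert(zoneProto.getSuite()),
        PBHelper.convert(zoneProto.getCryptoProtocolVersion()),
        zoneProto.getKeyName());

    System.out.println(restored.getKeyName());           // myKey
    System.out.println(restored.getEzKeyVersionName());  // myKey@0
  }
}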
Project: hadoop    File: FSImageLoader.java
static long getFileSize(FsImageProto.INodeSection.INodeFile f) {
  long size = 0;
  for (HdfsProtos.BlockProto p : f.getBlocksList()) {
    size += p.getNumBytes();
  }
  return size;
}
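getFileSize simply sums the numBytes of every BlockProto attached to an INodeFile message. Because the helper is package-private to the offline image viewer, the minimal sketch below builds an INodeFile message directly and repeats the same summation; the block IDs and sizes are placeholders.

import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto;

public class FileSizeFromBlocks {
  public static void main(String[] args) {
    // An INodeFile message with two blocks: 128 MB and 1 MB.
    FsImageProto.INodeSection.INodeFile file =
        FsImageProto.INodeSection.INodeFile.newBuilder()
            .addBlocks(HdfsProtos.BlockProto.newBuilder()
                .setBlockId(1L).setGenStamp(1001L)
                .setNumBytes(128L * 1024 * 1024))
            .addBlocks(HdfsProtos.BlockProto.newBuilder()
                .setBlockId(2L).setGenStamp(1002L)
                .setNumBytes(1L * 1024 * 1024))
            .build();

    // Same summation that getFileSize performs above.
    long size = 0;
    for (HdfsProtos.BlockProto p : file.getBlocksList()) {
      size += p.getNumBytes();
    }
    System.out.println(size);  // 135266304
  }
}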
Project: hadoop    File: TestPBHelper.java
@Test
public void testChecksumTypeProto() {
  assertEquals(DataChecksum.Type.NULL,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL));
  assertEquals(DataChecksum.Type.CRC32,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32));
  assertEquals(DataChecksum.Type.CRC32C,
      PBHelper.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C));
  assertEquals(PBHelper.convert(DataChecksum.Type.NULL),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32);
  assertEquals(PBHelper.convert(DataChecksum.Type.CRC32C),
      HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
/**
 * Copy the entries of {@code dnInfos} into a new list, starting at
 * {@code startIdx}.
 */
public static List<? extends HdfsProtos.DatanodeInfoProto> convert(
    DatanodeInfo[] dnInfos, int startIdx) {
  if (dnInfos == null)
    return null;
  ArrayList<HdfsProtos.DatanodeInfoProto> protos = Lists
      .newArrayListWithCapacity(dnInfos.length);
  for (int i = startIdx; i < dnInfos.length; i++) {
    protos.add(convert(dnInfos[i]));
  }
  return protos;
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static CipherSuite convert(HdfsProtos.CipherSuiteProto proto) {
  switch (proto) {
  case AES_CTR_NOPADDING:
    return CipherSuite.AES_CTR_NOPADDING;
  default:
    // Set to UNKNOWN and stash the unknown enum value
    CipherSuite suite = CipherSuite.UNKNOWN;
    suite.setUnknownValue(proto.getNumber());
    return suite;
  }
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.CipherSuiteProto convert(CipherSuite suite) {
  switch (suite) {
  case UNKNOWN:
    return HdfsProtos.CipherSuiteProto.UNKNOWN;
  case AES_CTR_NOPADDING:
    return HdfsProtos.CipherSuiteProto.AES_CTR_NOPADDING;
  default:
    return null;
  }
}
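A minimal sketch of how the two suite converters above behave: a known suite maps to its matching proto value and back, while any proto value other than AES_CTR_NOPADDING hits the default branch and comes back as CipherSuite.UNKNOWN with the raw proto number stashed via setUnknownValue. The wrapper class is a placeholder.

import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;

public class CipherSuiteConversion {
  public static void main(String[] args) {
    // Known suite: converts to the matching proto value and back.
    HdfsProtos.CipherSuiteProto proto =
        PBHelperClient.convert(CipherSuite.AES_CTR_NOPADDING);
    CipherSuite suite = PBHelperClient.convert(proto);
    System.out.println(suite == CipherSuite.AES_CTR_NOPADDING);  // true

    // Proto value with no matching Java constant: mapped to UNKNOWN, keeping
    // the original enum number so it can still be reported to the caller.
    CipherSuite unknown =
        PBHelperClient.convert(HdfsProtos.CipherSuiteProto.UNKNOWN);
    System.out.println(unknown == CipherSuite.UNKNOWN);  // true
  }
}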
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static List<HdfsProtos.CipherOptionProto> convertCipherOptions(
    List<CipherOption> options) {
  if (options != null) {
    List<HdfsProtos.CipherOptionProto> protos =
        Lists.newArrayListWithCapacity(options.size());
    for (CipherOption option : options) {
      protos.add(convert(option));
    }
    return protos;
  }
  return null;
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static List<CipherOption> convertCipherOptionProtos(
    List<HdfsProtos.CipherOptionProto> protos) {
  if (protos != null) {
    List<CipherOption> options =
        Lists.newArrayListWithCapacity(protos.size());
    for (HdfsProtos.CipherOptionProto proto : protos) {
      options.add(convert(proto));
    }
    return options;
  }
  return null;
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.FileEncryptionInfoProto convert(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.FileEncryptionInfoProto.newBuilder()
      .setSuite(convert(info.getCipherSuite()))
      .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .setKeyName(info.getKeyName())
      .build();
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static FileEncryptionInfo convert(
    HdfsProtos.FileEncryptionInfoProto proto) {
  if (proto == null) {
    return null;
  }
  CipherSuite suite = convert(proto.getSuite());
  CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
  byte[] key = proto.getKey().toByteArray();
  byte[] iv = proto.getIv().toByteArray();
  String ezKeyVersionName = proto.getEzKeyVersionName();
  String keyName = proto.getKeyName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.PerFileEncryptionInfoProto convertPerFileEncInfo(
    FileEncryptionInfo info) {
  if (info == null) {
    return null;
  }
  return HdfsProtos.PerFileEncryptionInfoProto.newBuilder()
      .setKey(getByteString(info.getEncryptedDataEncryptionKey()))
      .setIv(getByteString(info.getIV()))
      .setEzKeyVersionName(info.getEzKeyVersionName())
      .build();
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.ZoneEncryptionInfoProto convert(
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (suite == null || version == null || keyName == null) {
    return null;
  }
  return HdfsProtos.ZoneEncryptionInfoProto.newBuilder()
      .setSuite(convert(suite))
      .setCryptoProtocolVersion(convert(version))
      .setKeyName(keyName)
      .build();
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static FileEncryptionInfo convert(
    HdfsProtos.PerFileEncryptionInfoProto fileProto,
    CipherSuite suite, CryptoProtocolVersion version, String keyName) {
  if (fileProto == null || suite == null || version == null ||
      keyName == null) {
    return null;
  }
  byte[] key = fileProto.getKey().toByteArray();
  byte[] iv = fileProto.getIv().toByteArray();
  String ezKeyVersionName = fileProto.getEzKeyVersionName();
  return new FileEncryptionInfo(suite, version, key, iv, keyName,
      ezKeyVersionName);
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static ECSchema convertECSchema(HdfsProtos.ECSchemaProto schema) {
  List<HdfsProtos.ECSchemaOptionEntryProto> optionsList =
      schema.getOptionsList();
  Map<String, String> options = new HashMap<>(optionsList.size());
  for (HdfsProtos.ECSchemaOptionEntryProto option : optionsList) {
    options.put(option.getKey(), option.getValue());
  }
  return new ECSchema(schema.getCodecName(), schema.getDataUnits(),
      schema.getParityUnits(), options);
}
Project: aliyun-oss-hadoop-fs    File: PBHelperClient.java
public static HdfsProtos.ECSchemaProto convertECSchema(ECSchema schema) {
  HdfsProtos.ECSchemaProto.Builder builder =
      HdfsProtos.ECSchemaProto.newBuilder()
      .setCodecName(schema.getCodecName())
      .setDataUnits(schema.getNumDataUnits())
      .setParityUnits(schema.getNumParityUnits());
  Set<Map.Entry<String, String>> entrySet =
      schema.getExtraOptions().entrySet();
  for (Map.Entry<String, String> entry : entrySet) {
    builder.addOptions(HdfsProtos.ECSchemaOptionEntryProto.newBuilder()
        .setKey(entry.getKey()).setValue(entry.getValue()).build());
  }
  return builder.build();
}
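A minimal round-trip sketch for the ECSchema converters above; the codec name, the extra option, and the wrapper class are placeholders.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.io.erasurecode.ECSchema;

public class ECSchemaRoundTrip {
  public static void main(String[] args) {
    // A (6, 3) schema with one extra codec option.
    Map<String, String> options = new HashMap<>();
    options.put("someOption", "someValue");
    ECSchema schema = new ECSchema("rs", 6, 3, options);

    // To protobuf and back using the converters shown above.
    HdfsProtos.ECSchemaProto proto = PBHelperClient.convertECSchema(schema);
    ECSchema restored = PBHelperClient.convertECSchema(proto);

    System.out.println(restored.getCodecName());       // rs
    System.out.println(restored.getNumDataUnits());    // 6
    System.out.println(restored.getNumParityUnits());  // 3
  }
}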
Project: aliyun-oss-hadoop-fs    File: PBHelper.java
private static HdfsProtos.StorageUuidsProto convertStorageIDs(String[] targetStorageIDs) {
  HdfsProtos.StorageUuidsProto.Builder builder = HdfsProtos.StorageUuidsProto.newBuilder();
  for (String storageUuid : targetStorageIDs) {
    builder.addStorageUuids(storageUuid);
  }
  return builder.build();
}
Project: aliyun-oss-hadoop-fs    File: PBHelper.java
private static String[] convert(HdfsProtos.StorageUuidsProto targetStorageUuidsProto) {
  List<String> storageUuidsList = targetStorageUuidsProto
      .getStorageUuidsList();
  String[] storageUuids = new String[storageUuidsList.size()];
  for (int i = 0; i < storageUuidsList.size(); i++) {
    storageUuids[i] = storageUuidsList.get(i);
  }
  return storageUuids;
}
Project: aliyun-oss-hadoop-fs    File: PBHelper.java
public static BlockECRecoveryInfo convertBlockECRecoveryInfo(
    BlockECRecoveryInfoProto blockEcRecoveryInfoProto) {
  ExtendedBlockProto blockProto = blockEcRecoveryInfoProto.getBlock();
  ExtendedBlock block = PBHelperClient.convert(blockProto);

  DatanodeInfosProto sourceDnInfosProto = blockEcRecoveryInfoProto
      .getSourceDnInfos();
  DatanodeInfo[] sourceDnInfos = PBHelperClient.convert(sourceDnInfosProto);

  DatanodeInfosProto targetDnInfosProto = blockEcRecoveryInfoProto
      .getTargetDnInfos();
  DatanodeInfo[] targetDnInfos = PBHelperClient.convert(targetDnInfosProto);

  HdfsProtos.StorageUuidsProto targetStorageUuidsProto = blockEcRecoveryInfoProto
      .getTargetStorageUuids();
  String[] targetStorageUuids = convert(targetStorageUuidsProto);

  StorageTypesProto targetStorageTypesProto = blockEcRecoveryInfoProto
      .getTargetStorageTypes();
  StorageType[] convertStorageTypes = PBHelperClient.convertStorageTypes(
      targetStorageTypesProto.getStorageTypesList(), targetStorageTypesProto
          .getStorageTypesList().size());

  byte[] liveBlkIndices = blockEcRecoveryInfoProto.getLiveBlockIndices()
      .toByteArray();
  ErasureCodingPolicy ecPolicy =
      PBHelperClient.convertErasureCodingPolicy(
          blockEcRecoveryInfoProto.getEcPolicy());
  return new BlockECRecoveryInfo(block, sourceDnInfos, targetDnInfos,
      targetStorageUuids, convertStorageTypes, liveBlkIndices, ecPolicy);
}