Java 类org.apache.hadoop.mapreduce.lib.partition.KeyFieldHelper.KeyDescription 实例源码

项目:hadoop    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:aliyun-oss-hadoop-fs    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:aliyun-oss-hadoop-fs    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:aliyun-oss-hadoop-fs    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:big-c    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:big-c    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:big-c    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hadoop-plus    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-plus    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop-plus    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:FlexMap    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:FlexMap    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:FlexMap    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hops    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hops    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hops    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hadoop-TCP    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-TCP    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop-TCP    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hadoop-on-lustre    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for the given key by hashing the key fields
 * selected by the configured key specifications. Falls back to hashing
 * the whole key string when no specs are configured.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of reducers
 * @return the chosen partition in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  // UTF-8 is mandated on every JVM; using the Charset overload avoids the
  // impossible UnsupportedEncodingException of getBytes(String).
  keyBytes = key.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // word-boundary table, computed once and reused by every key spec
  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
    // this spec selects no field in this key; skip it
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    // fold the selected field's bytes into the running hash
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-on-lustre    文件:KeyFieldBasedComparator.java   
/**
 * Raw-bytes comparator over serialized keys. Each serialized key starts
 * with a vint length header; the payload after that header is compared
 * field-by-field according to the configured key specifications, falling
 * back to plain lexicographic byte comparison when no specs are set.
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // size of the vint length header preceding each key's payload
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  if (allKeySpecs.size() == 0) {
    // no field specs: compare the raw payloads (headers skipped)
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // word-boundary tables for each key, computed once and shared by all
  // key specs in the loop below
  int []lengthIndicesFirst = 
    keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int []lengthIndicesSecond = 
    keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    // [start, end] byte offsets of the field this spec selects in each key
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
      lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1, 
      lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
      lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2, 
      lengthIndicesSecond, keySpec);
    int result;
    // the first spec that distinguishes the two keys decides the order
    if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2, 
        startCharSecond, endCharSecond, keySpec)) != 0) {
      return result;
    }
  }
  return 0;
}
项目:hadoop-on-lustre    文件:KeyFieldBasedComparator.java   
/**
 * Compares one selected field of two serialized keys, honoring the key
 * spec's numeric (-n) and reverse (-r) options. A key whose field is
 * absent (start offset -1) sorts before a key whose field is present,
 * unless the spec is reversed.
 *
 * @return negative, zero, or positive per the Comparator contract
 */
private int compareByteSequence(byte[] first, int start1, int end1, 
    byte[] second, int start2, int end2, KeyDescription key) {
  // missing field in the first key: it sorts first (last when reversed)
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  // missing field in the second key: mirror of the case above
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  // end offsets are inclusive, hence the +1 when deriving lengths
  int cmp = key.numeric
      ? numericalCompare(first, start1, end1, second, start2, end2)
      : compareBytes(first, start1, end1 - start1 + 1,
          second, start2, end2 - start2 + 1);
  return key.reverse ? -cmp : cmp;
}
项目:hardfs    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for a record by hashing only the configured key
 * fields of its key's string form.
 *
 * With no key specs configured, the whole key string's hashCode is used.
 * An empty key always maps to partition 0. Fields that a spec cannot
 * locate in the key are skipped.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of partitions
 * @return partition index in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  // No field specs: hash the entire key string.
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  byte[] keyBytes;
  try {
    keyBytes = key.toString().getBytes("UTF-8");
  } catch (UnsupportedEncodingException e) {
    // UTF-8 support is mandated by the Java platform; unreachable.
    throw new RuntimeException("The current system does not " +
        "support UTF-8 encoding!", e);
  }
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // Word boundaries computed once, shared by all key specs.
  int[] lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0,
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0,
        keyBytes.length, lengthIndicesFirst, keySpec);
    // no key found! continue
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length,
        lengthIndicesFirst, keySpec);
    // Fold each located field into the running hash.
    currentHash = hashCode(keyBytes, startChar, endChar,
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hardfs    文件:KeyFieldBasedComparator.java   
/**
 * Compares two serialized Text keys field-by-field according to the
 * configured key specifications.
 *
 * Each buffer starts with a vint-encoded length (written by Text), which
 * is skipped before field extraction. When no key specs are configured,
 * falls back to a plain byte-wise comparison of the whole keys.
 *
 * @param b1 buffer holding the first serialized key
 * @param s1 start offset of the first key in {@code b1}
 * @param l1 length of the first serialized key
 * @param b2 buffer holding the second serialized key
 * @param s2 start offset of the second key in {@code b2}
 * @param l2 length of the second serialized key
 * @return negative, zero, or positive per the first differing key field
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // Number of bytes used by the leading vint (the serialized Text length).
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  // No field specs: compare the raw key bytes directly.
  if (allKeySpecs.isEmpty()) {
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // Word-boundary indices, computed once and reused for every key spec.
  int[] lengthIndicesFirst =
      keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int[] lengthIndicesSecond =
      keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    // First differing field decides the ordering.
    int result = compareByteSequence(b1, startCharFirst, endCharFirst,
        b2, startCharSecond, endCharSecond, keySpec);
    if (result != 0) {
      return result;
    }
  }
  return 0; // all key fields compared equal
}
项目:hardfs    文件:KeyFieldBasedComparator.java   
/**
 * Compares a single key field of two byte buffers, honouring the numeric
 * and reverse flags carried by the key specification.
 *
 * A start offset of -1 marks a missing field: a missing field orders
 * before a present one (and after it when the spec is reversed).
 *
 * @param first  buffer holding the first field
 * @param start1 inclusive start of the first field, or -1 if absent
 * @param end1   inclusive end of the first field
 * @param second buffer holding the second field
 * @param start2 inclusive start of the second field, or -1 if absent
 * @param end2   inclusive end of the second field
 * @param key    spec supplying the numeric/reverse comparison flags
 * @return negative, zero, or positive comparison result
 */
private int compareByteSequence(byte[] first, int start1, int end1,
    byte[] second, int start2, int end2, KeyDescription key) {
  // Missing-field guards: absent sorts low, flipped under reverse.
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  int compareResult;
  if (key.numeric) {
    compareResult = numericalCompare(first, start1, end1,
        second, start2, end2);
  } else {
    // End offsets are inclusive, hence the +1 when converting to lengths.
    compareResult = compareBytes(first, start1, end1 - start1 + 1,
        second, start2, end2 - start2 + 1);
  }
  return key.reverse ? -compareResult : compareResult;
}
项目:hadoop-on-lustre2    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for a record by hashing only the configured key
 * fields of its key's string form.
 *
 * With no key specs configured, the whole key string's hashCode is used.
 * An empty key always maps to partition 0. Fields that a spec cannot
 * locate in the key are skipped.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of partitions
 * @return partition index in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  // No field specs: hash the entire key string.
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  byte[] keyBytes;
  try {
    keyBytes = key.toString().getBytes("UTF-8");
  } catch (UnsupportedEncodingException e) {
    // UTF-8 support is mandated by the Java platform; unreachable.
    throw new RuntimeException("The current system does not " +
        "support UTF-8 encoding!", e);
  }
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // Word boundaries computed once, shared by all key specs.
  int[] lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0,
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0,
        keyBytes.length, lengthIndicesFirst, keySpec);
    // no key found! continue
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length,
        lengthIndicesFirst, keySpec);
    // Fold each located field into the running hash.
    currentHash = hashCode(keyBytes, startChar, endChar,
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hadoop-on-lustre2    文件:KeyFieldBasedComparator.java   
/**
 * Compares two serialized Text keys field-by-field according to the
 * configured key specifications.
 *
 * Each buffer starts with a vint-encoded length (written by Text), which
 * is skipped before field extraction. When no key specs are configured,
 * falls back to a plain byte-wise comparison of the whole keys.
 *
 * @param b1 buffer holding the first serialized key
 * @param s1 start offset of the first key in {@code b1}
 * @param l1 length of the first serialized key
 * @param b2 buffer holding the second serialized key
 * @param s2 start offset of the second key in {@code b2}
 * @param l2 length of the second serialized key
 * @return negative, zero, or positive per the first differing key field
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // Number of bytes used by the leading vint (the serialized Text length).
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  // No field specs: compare the raw key bytes directly.
  if (allKeySpecs.isEmpty()) {
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // Word-boundary indices, computed once and reused for every key spec.
  int[] lengthIndicesFirst =
      keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int[] lengthIndicesSecond =
      keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    // First differing field decides the ordering.
    int result = compareByteSequence(b1, startCharFirst, endCharFirst,
        b2, startCharSecond, endCharSecond, keySpec);
    if (result != 0) {
      return result;
    }
  }
  return 0; // all key fields compared equal
}
项目:hadoop-on-lustre2    文件:KeyFieldBasedComparator.java   
/**
 * Compares a single key field of two byte buffers, honouring the numeric
 * and reverse flags carried by the key specification.
 *
 * A start offset of -1 marks a missing field: a missing field orders
 * before a present one (and after it when the spec is reversed).
 *
 * @param first  buffer holding the first field
 * @param start1 inclusive start of the first field, or -1 if absent
 * @param end1   inclusive end of the first field
 * @param second buffer holding the second field
 * @param start2 inclusive start of the second field, or -1 if absent
 * @param end2   inclusive end of the second field
 * @param key    spec supplying the numeric/reverse comparison flags
 * @return negative, zero, or positive comparison result
 */
private int compareByteSequence(byte[] first, int start1, int end1,
    byte[] second, int start2, int end2, KeyDescription key) {
  // Missing-field guards: absent sorts low, flipped under reverse.
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  int compareResult;
  if (key.numeric) {
    compareResult = numericalCompare(first, start1, end1,
        second, start2, end2);
  } else {
    // End offsets are inclusive, hence the +1 when converting to lengths.
    compareResult = compareBytes(first, start1, end1 - start1 + 1,
        second, start2, end2 - start2 + 1);
  }
  return key.reverse ? -compareResult : compareResult;
}
项目:hanoi-hadoop-2.0.0-cdh    文件:KeyFieldBasedPartitioner.java   
/**
 * Computes the partition for a record by hashing only the configured key
 * fields of its key's string form.
 *
 * With no key specs configured, the whole key string's hashCode is used.
 * An empty key always maps to partition 0. Fields that a spec cannot
 * locate in the key are skipped.
 *
 * @param key the map output key
 * @param value the map output value (unused)
 * @param numReduceTasks total number of partitions
 * @return partition index in [0, numReduceTasks)
 */
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  // No field specs: hash the entire key string.
  if (allKeySpecs.isEmpty()) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  byte[] keyBytes;
  try {
    keyBytes = key.toString().getBytes("UTF-8");
  } catch (UnsupportedEncodingException e) {
    // UTF-8 support is mandated by the Java platform; unreachable.
    throw new RuntimeException("The current system does not " +
        "support UTF-8 encoding!", e);
  }
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  // Word boundaries computed once, shared by all key specs.
  int[] lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0,
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0,
        keyBytes.length, lengthIndicesFirst, keySpec);
    // no key found! continue
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length,
        lengthIndicesFirst, keySpec);
    // Fold each located field into the running hash.
    currentHash = hashCode(keyBytes, startChar, endChar,
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}
项目:hanoi-hadoop-2.0.0-cdh    文件:KeyFieldBasedComparator.java   
/**
 * Compares two serialized Text keys field-by-field according to the
 * configured key specifications.
 *
 * Each buffer starts with a vint-encoded length (written by Text), which
 * is skipped before field extraction. When no key specs are configured,
 * falls back to a plain byte-wise comparison of the whole keys.
 *
 * @param b1 buffer holding the first serialized key
 * @param s1 start offset of the first key in {@code b1}
 * @param l1 length of the first serialized key
 * @param b2 buffer holding the second serialized key
 * @param s2 start offset of the second key in {@code b2}
 * @param l2 length of the second serialized key
 * @return negative, zero, or positive per the first differing key field
 */
public int compare(byte[] b1, int s1, int l1,
    byte[] b2, int s2, int l2) {
  // Number of bytes used by the leading vint (the serialized Text length).
  int n1 = WritableUtils.decodeVIntSize(b1[s1]);
  int n2 = WritableUtils.decodeVIntSize(b2[s2]);
  List<KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();

  // No field specs: compare the raw key bytes directly.
  if (allKeySpecs.isEmpty()) {
    return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
  }

  // Word-boundary indices, computed once and reused for every key spec.
  int[] lengthIndicesFirst =
      keyFieldHelper.getWordLengths(b1, s1 + n1, s1 + l1);
  int[] lengthIndicesSecond =
      keyFieldHelper.getWordLengths(b2, s2 + n2, s2 + l2);

  for (KeyDescription keySpec : allKeySpecs) {
    int startCharFirst = keyFieldHelper.getStartOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int endCharFirst = keyFieldHelper.getEndOffset(b1, s1 + n1, s1 + l1,
        lengthIndicesFirst, keySpec);
    int startCharSecond = keyFieldHelper.getStartOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    int endCharSecond = keyFieldHelper.getEndOffset(b2, s2 + n2, s2 + l2,
        lengthIndicesSecond, keySpec);
    // First differing field decides the ordering.
    int result = compareByteSequence(b1, startCharFirst, endCharFirst,
        b2, startCharSecond, endCharSecond, keySpec);
    if (result != 0) {
      return result;
    }
  }
  return 0; // all key fields compared equal
}
项目:hanoi-hadoop-2.0.0-cdh    文件:KeyFieldBasedComparator.java   
/**
 * Compares a single key field of two byte buffers, honouring the numeric
 * and reverse flags carried by the key specification.
 *
 * A start offset of -1 marks a missing field: a missing field orders
 * before a present one (and after it when the spec is reversed).
 *
 * @param first  buffer holding the first field
 * @param start1 inclusive start of the first field, or -1 if absent
 * @param end1   inclusive end of the first field
 * @param second buffer holding the second field
 * @param start2 inclusive start of the second field, or -1 if absent
 * @param end2   inclusive end of the second field
 * @param key    spec supplying the numeric/reverse comparison flags
 * @return negative, zero, or positive comparison result
 */
private int compareByteSequence(byte[] first, int start1, int end1,
    byte[] second, int start2, int end2, KeyDescription key) {
  // Missing-field guards: absent sorts low, flipped under reverse.
  if (start1 == -1) {
    return key.reverse ? 1 : -1;
  }
  if (start2 == -1) {
    return key.reverse ? -1 : 1;
  }
  int compareResult;
  if (key.numeric) {
    compareResult = numericalCompare(first, start1, end1,
        second, start2, end2);
  } else {
    // End offsets are inclusive, hence the +1 when converting to lengths.
    compareResult = compareBytes(first, start1, end1 - start1 + 1,
        second, start2, end2 - start2 + 1);
  }
  return key.reverse ? -compareResult : compareResult;
}
项目:mapreduce-fork    文件:KeyFieldBasedPartitioner.java   
public int getPartition(K2 key, V2 value, int numReduceTasks) {
  byte[] keyBytes;

  List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
  if (allKeySpecs.size() == 0) {
    return getPartition(key.toString().hashCode(), numReduceTasks);
  }

  try {
    keyBytes = key.toString().getBytes("UTF-8");
  } catch (UnsupportedEncodingException e) {
    throw new RuntimeException("The current system does not " +
        "support UTF-8 encoding!", e);
  }
  // return 0 if the key is empty
  if (keyBytes.length == 0) {
    return 0;
  }

  int []lengthIndicesFirst = keyFieldHelper.getWordLengths(keyBytes, 0, 
      keyBytes.length);
  int currentHash = 0;
  for (KeyDescription keySpec : allKeySpecs) {
    int startChar = keyFieldHelper.getStartOffset(keyBytes, 0, 
      keyBytes.length, lengthIndicesFirst, keySpec);
     // no key found! continue
    if (startChar < 0) {
      continue;
    }
    int endChar = keyFieldHelper.getEndOffset(keyBytes, 0, keyBytes.length, 
        lengthIndicesFirst, keySpec);
    currentHash = hashCode(keyBytes, startChar, endChar, 
        currentHash);
  }
  return getPartition(currentHash, numReduceTasks);
}