Example source code for the Java class org.apache.hadoop.io.UTF8
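The snippets below, collected from a number of Hadoop-derived projects, show how org.apache.hadoop.io.UTF8 is used in practice. UTF8 has long been deprecated in favor of org.apache.hadoop.io.Text, and it only handles strings whose encoded form is under 64KB. As a warm-up, here is a minimal round trip through the two static helpers (writeString/readString) that the snippets rely on; the class name Utf8RoundTrip is ours, everything else is the stock Hadoop API:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.UTF8;

public class Utf8RoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize a String with the (deprecated) UTF8 static helper.
    DataOutputBuffer out = new DataOutputBuffer();
    UTF8.writeString(out, "hello hadoop");

    // Deserialize it back from the raw buffer contents.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    System.out.println(UTF8.readString(in)); // prints "hello hadoop"
  }
}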

Project: hadoop    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
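Nearly identical setupJob methods recur across the forks below; only the project header changes. For new code outside these benchmarks, the non-deprecated Text type is a near drop-in replacement for UTF8 in the key/value wiring. A sketch under that assumption (not part of MRBench itself; the Map and Reduce implementations would need matching type parameters):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class TextWiring {
  // Sketch: the key/value wiring from setupJob above, with Text
  // substituted for the deprecated UTF8.
  static JobConf withText(JobConf jobConf) {
    jobConf.setOutputValueClass(Text.class);
    jobConf.setMapOutputKeyClass(Text.class);
    jobConf.setMapOutputValueClass(Text.class);
    return jobConf;
  }
}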
Project: aliyun-oss-hadoop-fs    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: big-c    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hadoop-2.6.0-cdh5.4.3    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
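writeTest above and the readTest/seekTest variants below share one driver pattern: SequenceFiles under CONTROL_DIR describe the work, each mapper performs the actual I/O and emits (UTF8 task name, LongWritable counter) pairs, and a single LongSumReducer task folds them into one UTF8-keyed result file.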
Project: hadoop-2.6.0-cdh5.4.3    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-2.6.0-cdh5.4.3    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hadoop-EAR    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-EAR    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-EAR    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-EAR    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);

  return jobConf; 
}
Project: hadoop-EAR    File: PosixUserNameChecker.java
@Override
public boolean isValidUserName(String username) {
  if (username == null || username.length() == 0)
    return false;
  int len = username.length();
  char[] carray = UTF8.getCharArray(len);
  username.getChars(0, len, carray, 0);
  char fc = carray[0];
  if (!((fc >= 'a' && fc <= 'z') || fc == '_')) {
    return false;
  }
  for (int i = 1; i < len; i++) {
    char c = carray[i];
    if (!((c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-'
        || c == '_' || (c == '$' && i == len - 1))) {
      return false;
    }
  }
  return true;
}
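A quick driver for the checker above (hypothetical usage; PosixUserNameChecker and the UTF8.getCharArray buffer helper are specific to the hadoop-EAR fork, not mainline Hadoop):

// Hypothetical usage; assumes PosixUserNameChecker is on the classpath.
PosixUserNameChecker checker = new PosixUserNameChecker();
System.out.println(checker.isValidUserName("hdfs"));    // true
System.out.println(checker.isValidUserName("9admin"));  // false: first char must be [a-z] or '_'
System.out.println(checker.isValidUserName("build$"));  // true: a trailing '$' is permitted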
Project: hadoop-plus    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: FlexMap    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hops    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: HiveKa    File: KafkaKey.java
@Override
public void readFields(DataInput in) throws IOException {
  this.leaderId = UTF8.readString(in);
  this.partition = in.readInt();
  this.beginOffset = in.readLong();
  this.offset = in.readLong();
  this.checksum = in.readLong();
  this.topic = in.readUTF();
  this.time = in.readLong();
  this.server = in.readUTF(); // left for legacy
  this.service = in.readUTF(); // left for legacy
  this.partitionMap = new MapWritable();
  try {
    this.partitionMap.readFields(in);
  } catch (IOException e) {
    this.setServer(this.server);
    this.setService(this.service);
  }
}
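The try/catch at the end is a compatibility shim: records written before partitionMap existed end right after the legacy service field, so reading the trailing MapWritable throws, and the catch block falls back to the server and service values just read (the setters presumably rebuild the equivalent map entries).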
Project: hadoop-TCP    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hadoop-on-lustre    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-on-lustre    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-on-lustre    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-on-lustre    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hardfs    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hadoop-on-lustre2    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: RDFS    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: RDFS    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: RDFS    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: RDFS    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);

  return jobConf; 
}
Project: hadoop-0.20    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-0.20    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-0.20    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hadoop-0.20    File: MRBench.java
/**
 * Create the job configuration.
 */
private static JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);

  return jobConf; 
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hanoi-hadoop-2.0.0-cdh    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: mapreduce-fork    File: MRBench.java
/**
 * Create the job configuration.
 */
private JobConf setupJob(int numMaps, int numReduces, String jarFile) {
  JobConf jobConf = new JobConf(getConf());
  jobConf.setJarByClass(MRBench.class);
  FileInputFormat.addInputPath(jobConf, INPUT_DIR);

  jobConf.setInputFormat(TextInputFormat.class);
  jobConf.setOutputFormat(TextOutputFormat.class);

  jobConf.setOutputValueClass(UTF8.class);

  jobConf.setMapOutputKeyClass(UTF8.class);
  jobConf.setMapOutputValueClass(UTF8.class);

  if (null != jarFile) {
    jobConf.setJar(jarFile);
  }
  jobConf.setMapperClass(Map.class);
  jobConf.setReducerClass(Reduce.class);

  jobConf.setNumMapTasks(numMaps);
  jobConf.setNumReduceTasks(numReduces);
  jobConf
      .setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
  return jobConf; 
}
Project: hortonworks-extension    File: TestFileSystem.java
public static void writeTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(DATA_DIR, true);
  fs.delete(WRITE_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(WriteMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, WRITE_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hortonworks-extension    File: TestFileSystem.java
public static void readTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(ReadMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}
Project: hortonworks-extension    File: TestFileSystem.java
public static void seekTest(FileSystem fs, boolean fastCheck)
  throws Exception {

  fs.delete(READ_DIR, true);

  JobConf job = new JobConf(conf, TestFileSystem.class);
  job.setBoolean("fs.test.fastCheck", fastCheck);

  FileInputFormat.setInputPaths(job, CONTROL_DIR);
  job.setInputFormat(SequenceFileInputFormat.class);

  job.setMapperClass(SeekMapper.class);
  job.setReducerClass(LongSumReducer.class);

  FileOutputFormat.setOutputPath(job, READ_DIR);
  job.setOutputKeyClass(UTF8.class);
  job.setOutputValueClass(LongWritable.class);
  job.setNumReduceTasks(1);
  JobClient.runJob(job);
}