Java class org.apache.hadoop.io.FloatWritable: example source code
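Before the project snippets, a minimal round-trip sketch may help orient readers. It uses only the FloatWritable and java.io APIs that appear throughout this page; the class name FloatWritableRoundTrip is just an illustrative choice, not taken from any of the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableRoundTrip {
    public static void main(String[] args) throws Exception {
        // Wrap a primitive float in the Writable box used as a MapReduce key/value type.
        FloatWritable original = new FloatWritable(3.14f);

        // Serialize with write(DataOutput) ...
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // ... and read it back with readFields(DataInput).
        FloatWritable copy = new FloatWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get()); // prints 3.14
    }
}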

Project: Hadoop-Codes    File: MaximumAverageDriver.java
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "maxaverage");

    job.setMapperClass(MaximumAverageMapper.class);
    job.setReducerClass(MaximumAverageReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    if (!job.waitForCompletion(true))
        return;
}
Project: Hadoop-Codes    File: MaxTempDriver.java
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "maxtemp");

    job.setMapperClass(MaxTempMapper.class);
    job.setReducerClass(MaxTempReducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    if (!job.waitForCompletion(true))
        return;
}
Project: hadoop    File: TestTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
Project: hadoop    File: TestTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
Project: hadoop    File: TestTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
Project: hadoop    File: TestTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
Project: hadoop    File: TestJoinTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
Project: hadoop    File: TestJoinTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
Project: hadoop    File: TestJoinTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
Project: hadoop    File: TestJoinTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
Project: aliyun-oss-hadoop-fs    File: TestTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
Project: incubator-hivemall    File: PLSAPredictUDAF.java
@Override
public Object terminate(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    PLSAPredictAggregationBuffer myAggr = (PLSAPredictAggregationBuffer) agg;
    float[] topicDistr = myAggr.get();

    SortedMap<Float, Integer> sortedDistr = new TreeMap<Float, Integer>(
        Collections.reverseOrder());
    for (int i = 0; i < topicDistr.length; i++) {
        sortedDistr.put(topicDistr[i], i);
    }

    List<Object[]> result = new ArrayList<Object[]>();
    for (Map.Entry<Float, Integer> e : sortedDistr.entrySet()) {
        Object[] struct = new Object[2];
        struct[0] = new IntWritable(e.getValue().intValue()); // label
        struct[1] = new FloatWritable(e.getKey().floatValue()); // probability
        result.add(struct);
    }
    return result;
}
Project: aliyun-oss-hadoop-fs    File: TestTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
Project: aliyun-oss-hadoop-fs    File: TestTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
Project: aliyun-oss-hadoop-fs    File: TestJoinTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
Project: Camel    File: HdfsProducerTest.java
@Test
public void testWriteFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    float aFloat = 12.34f;
    template.sendBody("direct:write_float", aFloat);

    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-float");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    float rFloat = ((FloatWritable) value).get();
    assertEquals(rFloat, aFloat, 0.0F);

    IOHelper.close(reader);
}
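The test above only reads back the SequenceFile that the Camel route produced. For reference, here is a minimal sketch of the writing side using the classic SequenceFile API; the class name, output path, and the choice of NullWritable as key are illustrative assumptions and not part of the Camel test (the Camel HDFS component may use a different key class).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;

public class FloatSequenceFileWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("file:///tmp/test-float.seq"); // hypothetical output location
        FileSystem fs = FileSystem.get(path.toUri(), conf);

        // Same (key, value) layout the reader expects: a Writable key and a FloatWritable value.
        SequenceFile.Writer writer = SequenceFile.createWriter(
                fs, conf, path, NullWritable.class, FloatWritable.class);
        try {
            writer.append(NullWritable.get(), new FloatWritable(12.34f));
        } finally {
            writer.close();
        }
    }
}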
Project: aliyun-oss-hadoop-fs    File: HadoopPlatform.java
@Override
public void init() throws IOException {
  registerKey(NullWritable.class.getName(), NullWritableSerializer.class);
  registerKey(Text.class.getName(), TextSerializer.class);
  registerKey(LongWritable.class.getName(), LongWritableSerializer.class);
  registerKey(IntWritable.class.getName(), IntWritableSerializer.class);
  registerKey(Writable.class.getName(), DefaultSerializer.class);
  registerKey(BytesWritable.class.getName(), BytesWritableSerializer.class);
  registerKey(BooleanWritable.class.getName(), BoolWritableSerializer.class);
  registerKey(ByteWritable.class.getName(), ByteWritableSerializer.class);
  registerKey(FloatWritable.class.getName(), FloatWritableSerializer.class);
  registerKey(DoubleWritable.class.getName(), DoubleWritableSerializer.class);
  registerKey(VIntWritable.class.getName(), VIntWritableSerializer.class);
  registerKey(VLongWritable.class.getName(), VLongWritableSerializer.class);

  LOG.info("Hadoop platform inited");
}
Project: incubator-hivemall    File: LDAPredictUDAF.java
@Override
public Object terminate(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    OnlineLDAPredictAggregationBuffer myAggr = (OnlineLDAPredictAggregationBuffer) agg;
    float[] topicDistr = myAggr.get();

    SortedMap<Float, Integer> sortedDistr = new TreeMap<Float, Integer>(
        Collections.reverseOrder());
    for (int i = 0; i < topicDistr.length; i++) {
        sortedDistr.put(topicDistr[i], i);
    }

    List<Object[]> result = new ArrayList<Object[]>();
    for (Map.Entry<Float, Integer> e : sortedDistr.entrySet()) {
        Object[] struct = new Object[2];
        struct[0] = new IntWritable(e.getValue()); // label
        struct[1] = new FloatWritable(e.getKey()); // probability
        result.add(struct);
    }
    return result;
}
Project: GeoCrawler    File: NodeDumper.java
/**
 * Outputs the url with the appropriate number of inlinks, outlinks, or for
 * score.
 */
public void map(Text key, Node node,
    OutputCollector<FloatWritable, Text> output, Reporter reporter)
    throws IOException {

  float number = 0;
  if (inlinks) {
    number = node.getNumInlinks();
  } else if (outlinks) {
    number = node.getNumOutlinks();
  } else {
    number = node.getInlinkScore();
  }

  // number collected with negative to be descending
  output.collect(new FloatWritable(-number), key);
}
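The map above emits -number as the key so that the default ascending sort over FloatWritable keys yields a descending ranking. As an alternative sketch (not part of NodeDumper), the same effect could be achieved by registering a reversed comparator for the key class; the class name below is hypothetical.

import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

// Hypothetical helper: sorts FloatWritable keys in descending order by
// inverting the result of the default comparator.
public class DescendingFloatComparator extends WritableComparator {
    public DescendingFloatComparator() {
        super(FloatWritable.class, true);
    }

    @Override
    @SuppressWarnings("rawtypes")
    public int compare(WritableComparable a, WritableComparable b) {
        return -super.compare(a, b);
    }
}

With the old MapReduce API this could be registered via JobConf.setOutputKeyComparatorClass(DescendingFloatComparator.class), letting the map emit the raw score without negation.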
Project: indexr    File: IndexRSerde.java
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : " + inspector.getPrimitiveCategory());
    }
}
Project: GeoCrawler    File: NodeDumper.java
/**
 * Outputs the host or domain as key for this record and numInlinks,
 * numOutlinks or score as the value.
 */
public void map(Text key, Node node,
    OutputCollector<Text, FloatWritable> output, Reporter reporter)
    throws IOException {

  float number = 0;
  if (inlinks) {
    number = node.getNumInlinks();
  } else if (outlinks) {
    number = node.getNumOutlinks();
  } else {
    number = node.getInlinkScore();
  }

  if (host) {
    key.set(URLUtil.getHost(key.toString()));
  } else {
    key.set(URLUtil.getDomainName(key.toString()));
  }

  output.collect(key, new FloatWritable(number));
}
Project: GeoCrawler    File: NodeDumper.java
/**
 * Outputs either the sum or the top value for this record.
 */
public void reduce(Text key, Iterator<FloatWritable> values,
    OutputCollector<Text, FloatWritable> output, Reporter reporter)
    throws IOException {

  long numCollected = 0;
  float sumOrMax = 0;
  float val = 0;

  // collect all values, this time with the url as key
  while (values.hasNext() && (numCollected < topn)) {
    val = values.next().get();

    if (sum) {
      sumOrMax += val;
    } else {
      if (sumOrMax < val) {
        sumOrMax = val;
      }
    }

    numCollected++;
  }

  output.collect(key, new FloatWritable(sumOrMax));
}
Project: incubator-hivemall    File: ConvertToDenseModelUDAF.java
public boolean merge(List<FloatWritable> other) {
    if (other == null) {
        return true;
    }
    if (partial == null) {
        this.partial = new ArrayList<FloatWritable>(other);
        return true;
    }
    final int nDims = other.size();
    for (int i = 0; i < nDims; i++) {
        FloatWritable x = other.set(i, null);
        if (x != null) {
            partial.set(i, x);
        }
    }
    return true;
}
Project: big-c    File: TestJoinTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
Project: big-c    File: TestJoinTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
Project: big-c    File: TestJoinTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
Project: Hadoop-Codes    File: MaximumAverageMapper.java
public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {

    String line = value.toString();
    String[] a = line.split(" ");
    System.out.println(line);
    int sum = 0;
    for (String i : a) {
        sum += Integer.parseInt(i);
    }
    // Cast before dividing so the average keeps its fractional part
    // (sum / a.length alone would do integer division).
    float avg = (float) sum / a.length;
    System.out.println(avg);
    context.write(new Text("maxavg"), new FloatWritable(avg));

}
Project: Hadoop-Codes    File: MaximumAverageReducer.java
public void reduce(Text key, Iterable<FloatWritable> values, Context context)
        throws IOException, InterruptedException {

    float max = 0;
    for (FloatWritable val : values) {
        if (val.get() > max) {
            max = val.get();
        }
    }
    context.write(key, new FloatWritable(max));
}
Project: hadoop    File: TestPipeApplication.java
@Override
public boolean next(FloatWritable key, NullWritable value)
        throws IOException {
  progress = key;
  index++;
  return index <= 10;
}
Project: hadoop    File: OutputHandler.java
/**
 * Create a handler that will handle any records output from the application.
 * @param collector the "real" collector that takes the output
 * @param reporter the reporter for reporting progress
 */
public OutputHandler(OutputCollector<K, V> collector, Reporter reporter, 
                     RecordReader<FloatWritable,NullWritable> recordReader,
                     String expectedDigest) {
  this.reporter = reporter;
  this.collector = collector;
  this.recordReader = recordReader;
  this.expectedDigest = expectedDigest;
}
Project: hadoop    File: TypedBytesWritableOutput.java
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}
Project: hadoop    File: TypedBytesWritableInput.java
public Class<? extends Writable> readType() throws IOException {
  Type type = in.readType();
  if (type == null) {
    return null;
  }
  switch (type) {
  case BYTES:
    return BytesWritable.class;
  case BYTE:
    return ByteWritable.class;
  case BOOL:
    return BooleanWritable.class;
  case INT:
    return VIntWritable.class;
  case LONG:
    return VLongWritable.class;
  case FLOAT:
    return FloatWritable.class;
  case DOUBLE:
    return DoubleWritable.class;
  case STRING:
    return Text.class;
  case VECTOR:
    return ArrayWritable.class;
  case MAP:
    return MapWritable.class;
  case WRITABLE:
    return Writable.class;
  default:
    throw new RuntimeException("unknown type");
  }
}
Project: aliyun-oss-hadoop-fs    File: TestPipeApplication.java
@Override
public boolean next(FloatWritable key, NullWritable value)
        throws IOException {
  progress = key;
  index++;
  return index <= 10;
}
Project: incubator-hivemall    File: AngularDistanceUDF.java
@Override
public FloatWritable evaluate(DeferredObject[] arguments) throws HiveException {
    List<String> ftvec1 = HiveUtils.asStringList(arguments[0], arg0ListOI);
    List<String> ftvec2 = HiveUtils.asStringList(arguments[1], arg1ListOI);
    float d = 1.f - AngularSimilarityUDF.angularSimilarity(ftvec1, ftvec2);
    return new FloatWritable(d);
}
Project: incubator-hivemall    File: EuclidSimilarity.java
@Override
public FloatWritable evaluate(DeferredObject[] arguments) throws HiveException {
    List<String> ftvec1 = HiveUtils.asStringList(arguments[0], arg0ListOI);
    List<String> ftvec2 = HiveUtils.asStringList(arguments[1], arg1ListOI);
    float d = (float) EuclidDistanceUDF.euclidDistance(ftvec1, ftvec2);
    float sim = 1.0f / (1.0f + d);
    return new FloatWritable(sim);
}
Project: aliyun-oss-hadoop-fs    File: TestTaskContext.java
public void testTaskContext() {
  TaskContext context = new TaskContext(null, null, null, null, null, null, null);

  context.setInputKeyClass(IntWritable.class);
  assertEquals(IntWritable.class.getName(), context.getInputKeyClass().getName());

  context.setInputValueClass(Text.class);
  assertEquals(Text.class.getName(), context.getInputValueClass().getName()); 

  context.setOutputKeyClass(LongWritable.class);
  assertEquals(LongWritable.class.getName(), context.getOutputKeyClass().getName()); 

  context.setOutputValueClass(FloatWritable.class);
  assertEquals(FloatWritable.class.getName(), context.getOutputValueClass().getName()); 
}
Project: aliyun-oss-hadoop-fs    File: BytesFactory.java
public static void updateObject(Writable obj, byte[] seed) {
  if (obj instanceof IntWritable) {
    ((IntWritable)obj).set(Ints.fromByteArray(seed));
  } else if (obj instanceof FloatWritable) {
    ((FloatWritable)obj).set(r.nextFloat());
  } else if (obj instanceof DoubleWritable) {
    ((DoubleWritable)obj).set(r.nextDouble());
  } else if (obj instanceof LongWritable) {
    ((LongWritable)obj).set(Longs.fromByteArray(seed));
  } else if (obj instanceof VIntWritable) {
    ((VIntWritable)obj).set(Ints.fromByteArray(seed));
  } else if (obj instanceof VLongWritable) {
    ((VLongWritable)obj).set(Longs.fromByteArray(seed));
  } else if (obj instanceof BooleanWritable) {
    ((BooleanWritable)obj).set(seed[0] % 2 == 1);
  } else if (obj instanceof Text) {
    ((Text)obj).set(BytesUtil.toStringBinary(seed));
  } else if (obj instanceof ByteWritable) {
    ((ByteWritable)obj).set(seed.length > 0 ? seed[0] : 0);
  } else if (obj instanceof BytesWritable) {
    ((BytesWritable)obj).set(seed, 0, seed.length);
  } else if (obj instanceof UTF8) {
    ((UTF8)obj).set(BytesUtil.toStringBinary(seed));
  } else if (obj instanceof MockValueClass) {
    ((MockValueClass)obj).set(seed);
  } else {
    throw new IllegalArgumentException("unknown writable: " +
                                       obj.getClass().getName());
  }
}
Project: aliyun-oss-hadoop-fs    File: OutputHandler.java
/**
 * Create a handler that will handle any records output from the application.
 * @param collector the "real" collector that takes the output
 * @param reporter the reporter for reporting progress
 */
public OutputHandler(OutputCollector<K, V> collector, Reporter reporter, 
                     RecordReader<FloatWritable,NullWritable> recordReader,
                     String expectedDigest) {
  this.reporter = reporter;
  this.collector = collector;
  this.recordReader = recordReader;
  this.expectedDigest = expectedDigest;
}
Project: aliyun-oss-hadoop-fs    File: TypedBytesWritableOutput.java
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable<?>) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}
Project: incubator-hivemall    File: ZScoreUDF.java
public FloatWritable evaluate(float value, float mean, float stddev) {
    if (stddev == 0.f) {
        return new FloatWritable(0.f);
    }
    float v = (value - mean) / stddev;
    return new FloatWritable(v);
}