Example source code for the Java class org.apache.hadoop.io.FloatWritable

MaximumAverageDriver.java (source file)
Project: Hadoop-Codes

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "maxaverage");
    job.setMapperClass(MaximumAverageMapper.class);
    job.setReducerClass(MaximumAverageReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);
    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    if (!job.waitForCompletion(true)) {
        return;
    }
}
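Every snippet on this page exercises that one class. As a standalone refresher (a minimal sketch, assuming only hadoop-common on the classpath; the class name is illustrative), FloatWritable is a mutable, reusable box around a Java float that serializes through write/readFields:

import java.io.*;
import org.apache.hadoop.io.FloatWritable;

public class FloatWritableDemo {
    public static void main(String[] args) throws IOException {
        FloatWritable w = new FloatWritable(1.5f);
        w.set(2.5f);                            // mutable, so instances can be reused per record
        // Round-trip through the 4-byte Writable wire format:
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        w.write(new DataOutputStream(bos));
        FloatWritable copy = new FloatWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(copy.get());         // 2.5
        System.out.println(w.compareTo(copy));  // 0 (WritableComparable natural order)
    }
}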
MaxTempDriver.java (source file)
Project: Hadoop-Codes
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "maxtemp");
    job.setMapperClass(MaxTempMapper.class);
    job.setReducerClass(MaxTempReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(FloatWritable.class);
    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    if (!job.waitForCompletion(true)) {
        return;
    }
}
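Both drivers follow the same template. One hedged note: neither snippet calls setJarByClass, which local runs tolerate but cluster submissions usually need so that Hadoop can locate the jar containing the mapper and reducer classes. The usual one-line addition (not present in the original snippets) would be:

// Typically added before waitForCompletion when the job is submitted to a cluster:
job.setJarByClass(MaxTempDriver.class); // lets Hadoop ship the jar holding the job classes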
TestTupleWritable.java (source file)
Project: hadoop
private Writable[] makeRandomWritables() {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    return writs;
}
TestTupleWritable.java (source file)
Project: hadoop
public void testIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable t = new TupleWritable(writs);
    for (int i = 0; i < 6; ++i) {
        t.setWritten(i);
    }
    verifIter(writs, t, 0);
}
TestTupleWritable.java (source file)
Project: hadoop
public void testNestedIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
TestTupleWritable.java (source file)
Project: hadoop
public void testWritable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    sTuple.write(new DataOutputStream(out));
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TupleWritable dTuple = new TupleWritable();
    dTuple.readFields(new DataInputStream(in));
    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
TestJoinTupleWritable.java (source file)
Project: hadoop
private Writable[] makeRandomWritables() {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    return writs;
}
TestJoinTupleWritable.java (source file)
Project: hadoop
public void testIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable t = new TupleWritable(writs);
    for (int i = 0; i < 6; ++i) {
        t.setWritten(i);
    }
    verifIter(writs, t, 0);
}
TestJoinTupleWritable.java (source file)
Project: hadoop
public void testNestedIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
TestJoinTupleWritable.java (source file)
Project: hadoop
public void testWritable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    sTuple.write(new DataOutputStream(out));
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TupleWritable dTuple = new TupleWritable();
    dTuple.readFields(new DataInputStream(in));
    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
TestTupleWritable.java (source file)
Project: aliyun-oss-hadoop-fs
private Writable[] makeRandomWritables() {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    return writs;
}
PLSAPredictUDAF.java (source file)
Project: incubator-hivemall
@Override
public Object terminate(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    PLSAPredictAggregationBuffer myAggr = (PLSAPredictAggregationBuffer) agg;
    float[] topicDistr = myAggr.get();
    SortedMap<Float, Integer> sortedDistr =
            new TreeMap<Float, Integer>(Collections.reverseOrder());
    for (int i = 0; i < topicDistr.length; i++) {
        sortedDistr.put(topicDistr[i], i);
    }
    List<Object[]> result = new ArrayList<Object[]>();
    for (Map.Entry<Float, Integer> e : sortedDistr.entrySet()) {
        Object[] struct = new Object[2];
        struct[0] = new IntWritable(e.getValue().intValue()); // label
        struct[1] = new FloatWritable(e.getKey().floatValue()); // probability
        result.add(struct);
    }
    return result;
}
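The reverse-ordered TreeMap is what yields topics sorted by descending probability; note that a map keyed by probability keeps only one topic per distinct float value, so exact ties collapse to a single entry. A minimal sketch of the pattern, with hypothetical probabilities not taken from the project:

// Hypothetical values, for illustration only.
float[] topicDistr = {0.2f, 0.5f, 0.3f};
SortedMap<Float, Integer> sorted =
        new TreeMap<Float, Integer>(Collections.reverseOrder());
for (int i = 0; i < topicDistr.length; i++) {
    sorted.put(topicDistr[i], i); // probability -> topic index
}
// Iteration order: (0.5, topic 1), (0.3, topic 2), (0.2, topic 0)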
TestTupleWritable.java (source file)
Project: aliyun-oss-hadoop-fs
public void testNestedIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
TestTupleWritable.java (source file)
Project: aliyun-oss-hadoop-fs
public void testWritable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    sTuple.write(new DataOutputStream(out));
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TupleWritable dTuple = new TupleWritable();
    dTuple.readFields(new DataInputStream(in));
    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
TestJoinTupleWritable.java (source file)
Project: aliyun-oss-hadoop-fs
public void testIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable t = new TupleWritable(writs);
    for (int i = 0; i < 6; ++i) {
        t.setWritten(i);
    }
    verifIter(writs, t, 0);
}
HdfsProducerTest.java (source file)
Project: Camel
@Test
public void testWriteFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    float aFloat = 12.34f;
    template.sendBody("direct:write_float", aFloat);
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-float");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    float rFloat = ((FloatWritable) value).get();
    assertEquals(rFloat, aFloat, 0.0F);
    IOHelper.close(reader);
}
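For context, the write side that this test exercises through Camel can be sketched directly against the SequenceFile API. The key class and value below are assumptions for illustration, not taken from the Camel component:

// Hypothetical standalone writer, in the same old-style API as the Reader above.
SequenceFile.Writer writer = SequenceFile.createWriter(fs1, conf, file1,
        NullWritable.class, FloatWritable.class);
try {
    writer.append(NullWritable.get(), new FloatWritable(12.34f));
} finally {
    writer.close();
}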
HadoopPlatform.java (source file)
Project: aliyun-oss-hadoop-fs
@Override
public void init() throws IOException {
    registerKey(NullWritable.class.getName(), NullWritableSerializer.class);
    registerKey(Text.class.getName(), TextSerializer.class);
    registerKey(LongWritable.class.getName(), LongWritableSerializer.class);
    registerKey(IntWritable.class.getName(), IntWritableSerializer.class);
    registerKey(Writable.class.getName(), DefaultSerializer.class);
    registerKey(BytesWritable.class.getName(), BytesWritableSerializer.class);
    registerKey(BooleanWritable.class.getName(), BoolWritableSerializer.class);
    registerKey(ByteWritable.class.getName(), ByteWritableSerializer.class);
    registerKey(FloatWritable.class.getName(), FloatWritableSerializer.class);
    registerKey(DoubleWritable.class.getName(), DoubleWritableSerializer.class);
    registerKey(VIntWritable.class.getName(), VIntWritableSerializer.class);
    registerKey(VLongWritable.class.getName(), VLongWritableSerializer.class);
    LOG.info("Hadoop platform inited");
}
LDAPredictUDAF.java (source file)
Project: incubator-hivemall
@Override
public Object terminate(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    OnlineLDAPredictAggregationBuffer myAggr = (OnlineLDAPredictAggregationBuffer) agg;
    float[] topicDistr = myAggr.get();
    SortedMap<Float, Integer> sortedDistr =
            new TreeMap<Float, Integer>(Collections.reverseOrder());
    for (int i = 0; i < topicDistr.length; i++) {
        sortedDistr.put(topicDistr[i], i);
    }
    List<Object[]> result = new ArrayList<Object[]>();
    for (Map.Entry<Float, Integer> e : sortedDistr.entrySet()) {
        Object[] struct = new Object[2];
        struct[0] = new IntWritable(e.getValue()); // label
        struct[1] = new FloatWritable(e.getKey()); // probability
        result.add(struct);
    }
    return result;
}
NodeDumper.java (source file)
Project: GeoCrawler
/**
 * Outputs the url with the appropriate number of inlinks, outlinks, or its
 * score.
 */
public void map(Text key, Node node,
        OutputCollector<FloatWritable, Text> output, Reporter reporter)
        throws IOException {
    float number = 0;
    if (inlinks) {
        number = node.getNumInlinks();
    } else if (outlinks) {
        number = node.getNumOutlinks();
    } else {
        number = node.getInlinkScore();
    }
    // negate so that the shuffle's ascending key sort yields descending order
    output.collect(new FloatWritable(-number), key);
}
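The negation works because FloatWritable has a natural ascending order (it is a WritableComparable), so sorting negated scores ascending is the same as sorting the original scores descending. A small illustration outside MapReduce, with hypothetical scores and a TreeMap standing in for the shuffle's key sort:

// Hypothetical scores and URLs, for illustration only.
SortedMap<FloatWritable, Text> byScore = new TreeMap<FloatWritable, Text>();
byScore.put(new FloatWritable(-0.9f), new Text("http://a"));
byScore.put(new FloatWritable(-0.3f), new Text("http://b"));
byScore.put(new FloatWritable(-0.1f), new Text("http://c"));
// Ascending over negated keys yields descending scores: 0.9, 0.3, 0.1
for (Map.Entry<FloatWritable, Text> e : byScore.entrySet()) {
    System.out.println(-e.getKey().get() + "\t" + e.getValue());
}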
IndexRSerde.java (source file)
Project: indexr
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : "
                    + inspector.getPrimitiveCategory());
    }
}
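As a hedged usage sketch of the FLOAT branch (createPrimitive itself is private to the SerDe; the inspector below comes from Hive's standard factory and the value is illustrative, not from the indexr project):

// Illustrative only: how a Java float reaches the FLOAT branch above.
FloatObjectInspector oi = PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
Object hiveValue = Float.valueOf(12.5f);                // what Hive hands the SerDe
FloatWritable w = new FloatWritable(oi.get(hiveValue)); // -> FloatWritable(12.5)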
NodeDumper.java (source file)
Project: GeoCrawler
/**
 * Outputs the host or domain as key for this record and numInlinks,
 * numOutlinks or score as the value.
 */
public void map(Text key, Node node,
        OutputCollector<Text, FloatWritable> output, Reporter reporter)
        throws IOException {
    float number = 0;
    if (inlinks) {
        number = node.getNumInlinks();
    } else if (outlinks) {
        number = node.getNumOutlinks();
    } else {
        number = node.getInlinkScore();
    }
    if (host) {
        key.set(URLUtil.getHost(key.toString()));
    } else {
        key.set(URLUtil.getDomainName(key.toString()));
    }
    output.collect(key, new FloatWritable(number));
}
NodeDumper.java (source file)
Project: GeoCrawler
/**
 * Outputs either the sum or the top value for this record.
 */
public void reduce(Text key, Iterator<FloatWritable> values,
        OutputCollector<Text, FloatWritable> output, Reporter reporter)
        throws IOException {
    long numCollected = 0;
    float sumOrMax = 0;
    float val = 0;
    // collect all values, this time with the url as key
    while (values.hasNext() && (numCollected < topn)) {
        val = values.next().get();
        if (sum) {
            sumOrMax += val;
        } else {
            if (sumOrMax < val) {
                sumOrMax = val;
            }
        }
        numCollected++;
    }
    output.collect(key, new FloatWritable(sumOrMax));
}
ConvertToDenseModelUDAF.java (source file)
Project: incubator-hivemall
public boolean merge(List<FloatWritable> other) {
    if (other == null) {
        return true;
    }
    if (partial == null) {
        this.partial = new ArrayList<FloatWritable>(other);
        return true;
    }
    final int nDims = other.size();
    for (int i = 0; i < nDims; i++) {
        FloatWritable x = other.set(i, null);
        if (x != null) {
            partial.set(i, x);
        }
    }
    return true;
}
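In effect, non-null slots in the incoming list overwrite the local partial, and each consumed slot of other is nulled out by other.set(i, null). A worked example with hypothetical three-dimensional buffers:

// Hypothetical buffers, for illustration only.
List<FloatWritable> partial = new ArrayList<FloatWritable>(Arrays.asList(
        new FloatWritable(1f), null, new FloatWritable(3f)));
List<FloatWritable> other = new ArrayList<FloatWritable>(Arrays.asList(
        null, new FloatWritable(2f), null));
// After merge(other): partial holds [1.0, 2.0, 3.0]
// and other has been drained to [null, null, null].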
TestJoinTupleWritable.java (source file)
Project: big-c
private Writable[] makeRandomWritables() {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    return writs;
}
TestJoinTupleWritable.java (source file)
Project: big-c
public void testIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable t = new TupleWritable(writs);
    for (int i = 0; i < 6; ++i) {
        t.setWritten(i);
    }
    verifIter(writs, t, 0);
}
TestJoinTupleWritable.java (source file)
Project: big-c
public void testNestedIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
MaximumAverageMapper.java (source file)
Project: Hadoop-Codes
public void map(Object key, Text value, Context context)
        throws IOException, InterruptedException {
    String line = value.toString();
    String[] a = line.split(" ");
    System.out.println(line);
    int sum = 0;
    for (String i : a) {
        sum += Integer.parseInt(i);
    }
    float avg = (float) sum / a.length; // cast first, or integer division truncates the average
    System.out.println(avg);
    context.write(new Text("maxavg"), new FloatWritable(avg));
}
MaximumAverageReducer.java (source file)
Project: Hadoop-Codes
public void reduce(Text key, Iterable<FloatWritable> values, Context context)
        throws IOException, InterruptedException {
    float max = 0;
    for (FloatWritable val : values) {
        if (val.get() > max) {
            max = val.get();
        }
    }
    context.write(key, new FloatWritable(max));
}
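Paired with the maxaverage driver at the top of this page, the mapper and reducer together compute the maximum per-line average. A hypothetical trace over two input lines:

// map("60 70 80") -> ("maxavg", 70.0)   // (60 + 70 + 80) / 3
// map("90 10")    -> ("maxavg", 50.0)   // (90 + 10) / 2
// reduce("maxavg", [70.0, 50.0]) -> ("maxavg", 70.0)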
TestPipeApplication.java (source file)
Project: hadoop
@Override
public boolean next(FloatWritable key, NullWritable value)
        throws IOException {
    progress = key;
    index++;
    return index <= 10;
}
OutputHandler.java (source file)
Project: hadoop
/**
 * Create a handler that will handle any records output from the application.
 * @param collector the "real" collector that takes the output
 * @param reporter the reporter for reporting progress
 */
public OutputHandler(OutputCollector<K, V> collector, Reporter reporter,
                     RecordReader<FloatWritable, NullWritable> recordReader,
                     String expectedDigest) {
    this.reporter = reporter;
    this.collector = collector;
    this.recordReader = recordReader;
    this.expectedDigest = expectedDigest;
}