/**
 * Sleeps for a very long time so that the task appears hung; used to
 * exercise the framework's task-timeout handling.
 *
 * @param key      input key (ignored)
 * @param value    input value (ignored)
 * @param out      output collector (never written to)
 * @param reporter progress reporter (unused)
 * @throws IOException never thrown; declared to satisfy the Mapper contract
 */
public void map(WritableComparable key, Writable value,
OutputCollector<WritableComparable, Writable> out, Reporter reporter)
throws IOException {
try {
Thread.sleep(1000000);
} catch (InterruptedException e) {
// Restore the interrupt status instead of swallowing it, so callers
// up the stack (and the task runner) can observe the interruption.
Thread.currentThread().interrupt();
}
}
java类org.apache.hadoop.io.WritableComparable的实例源码
UtilsForTests.java 文件源码
项目:hadoop
阅读 29
收藏 0
点赞 0
评论 0
ThreadedMapBenchmark.java 文件源码
项目:hadoop
阅读 19
收藏 0
点赞 0
评论 0
/**
 * Emits random key/value pairs until {@code numBytesToWrite} bytes have
 * been produced. Key lengths are drawn uniformly from
 * [minKeySize, minKeySize + keySizeRange) and value lengths from the
 * analogous value range; input key/value are ignored.
 *
 * @param key      input key (ignored)
 * @param value    input value (ignored)
 * @param output   collector receiving the random records
 * @param reporter used for counters and periodic status updates
 * @throws IOException if the collector fails
 */
public void map(WritableComparable key,
Writable value,
OutputCollector<BytesWritable, BytesWritable> output,
Reporter reporter) throws IOException {
int itemCount = 0;
while (numBytesToWrite > 0) {
// Random key length within the configured range (guard nextInt(0)).
int keyLength = minKeySize
+ (keySizeRange != 0
? random.nextInt(keySizeRange)
: 0);
randomKey.setSize(keyLength);
randomizeBytes(randomKey.getBytes(), 0, randomKey.getLength());
int valueLength = minValueSize
+ (valueSizeRange != 0
? random.nextInt(valueSizeRange)
: 0);
randomValue.setSize(valueLength);
randomizeBytes(randomValue.getBytes(), 0, randomValue.getLength());
output.collect(randomKey, randomValue);
numBytesToWrite -= keyLength + valueLength;
// Fix: count the actual bytes written, not 1 per record, so the
// BYTES_WRITTEN counter matches the numBytesToWrite accounting above.
reporter.incrCounter(Counters.BYTES_WRITTEN, keyLength + valueLength);
reporter.incrCounter(Counters.RECORDS_WRITTEN, 1);
if (++itemCount % 200 == 0) {
reporter.setStatus("wrote record " + itemCount + ". "
+ numBytesToWrite + " bytes left.");
}
}
reporter.setStatus("done with " + itemCount + " records.");
}
MRBench.java 文件源码
项目:hadoop
阅读 22
收藏 0
点赞 0
评论 0
/**
 * Runs each input line through {@link #process} and emits the result as
 * the output key with an empty value.
 *
 * @param key      input key (ignored)
 * @param value    the input line
 * @param output   collector receiving (processed-line, "") pairs
 * @param reporter progress reporter (unused)
 * @throws IOException if the collector fails
 */
public void map(WritableComparable key, Text value,
OutputCollector<UTF8, UTF8> output,
Reporter reporter) throws IOException
{
output.collect(new UTF8(process(value.toString())), new UTF8(""));
}
TestComparators.java 文件源码
项目:hadoop
阅读 29
收藏 0
点赞 0
评论 0
/**
 * Compares two {@link IntWritable} keys by their value divided by 100, in
 * descending order: keys falling in the same bucket of 100 compare equal.
 *
 * @return -1, 0, or 1 for the reversed ordering of the buckets
 */
public int compare (WritableComparable v1, WritableComparable v2) {
int bucket1 = ((IntWritable) v1).get() / 100;
int bucket2 = ((IntWritable) v2).get() / 100;
// Swap the operands to reverse the natural ordering; Integer.compare
// returns exactly -1, 0, or 1, matching the original branch results.
return Integer.compare(bucket2, bucket1);
}
TestJobCounters.java 文件源码
项目:hadoop
阅读 49
收藏 0
点赞 0
评论 0
/**
 * Identity mapper that loads memory via {@code loader} before emitting, so
 * the task's memory-related counters are inflated for the test.
 *
 * @param key      input key, emitted unchanged
 * @param val      input value, emitted unchanged
 * @param output   collector receiving the pass-through record
 * @param reporter progress reporter (unused)
 * @throws IOException if the collector fails
 */
public void map(WritableComparable key, Writable val,
OutputCollector<WritableComparable, Writable> output,
Reporter reporter)
throws IOException {
// configure() must have run and set up the loader.
assertNotNull("Mapper not configured!", loader);
loader.load();            // grow memory usage before emitting
output.collect(key, val); // pass the record through unchanged
}
TestJobCounters.java 文件源码
项目:hadoop
阅读 20
收藏 0
点赞 0
评论 0
/**
 * Reducer that loads memory via {@code loader} and then emits the key as
 * both output key and output value; the values iterator is ignored.
 *
 * @param key      input key, emitted as both key and value
 * @param val      values iterator (not consumed)
 * @param output   collector receiving the (key, key) pair
 * @param reporter progress reporter (unused)
 * @throws IOException if the collector fails
 */
public void reduce(WritableComparable key, Iterator<Writable> val,
OutputCollector<WritableComparable, Writable> output,
Reporter reporter)
throws IOException {
// configure() must have run and set up the loader.
assertNotNull("Reducer not configured!", loader);
loader.load(); // grow memory usage before emitting
// NOTE: emits the key in the value position as well; val is untouched.
output.collect(key, key);
}
TestWrappedRecordReaderClassloader.java 文件源码
项目:hadoop
阅读 19
收藏 0
点赞 0
评论 0
/**
 * Pulls the key and value classes for this fake input format from the job
 * configuration; both default to {@code NullWritable} when unset.
 *
 * @param job the job configuration to read from
 */
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
// The two lookups are independent; order does not matter.
valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
NullWritable.class, WritableComparable.class);
keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
NullWritable.class, WritableComparable.class);
}
TestWrappedRecordReaderClassloader.java 文件源码
项目:hadoop
阅读 18
收藏 0
点赞 0
评论 0
/**
 * Asserts that this reader was constructed under the {@code Fake_ClassLoader}
 * installed by the composite record reader machinery, then reads the key and
 * value classes from the job configuration (defaulting to NullWritable).
 *
 * @param job the job configuration to check and read from
 */
@SuppressWarnings("unchecked")
public RR_ClassLoaderChecker(JobConf job) {
// Fail fast if the expected class loader was not inherited.
assertTrue("The class loader has not been inherited from "
+ CompositeRecordReader.class.getSimpleName(),
job.getClassLoader() instanceof Fake_ClassLoader);
// The two lookups are independent; order does not matter.
valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
NullWritable.class, WritableComparable.class);
keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
NullWritable.class, WritableComparable.class);
}
TestDatamerge.java 文件源码
项目:hadoop
阅读 23
收藏 0
点赞 0
评论 0
/**
 * Pulls the key and value classes for this fake input format from the job
 * configuration; the key defaults to {@code IncomparableKey} and the value
 * to {@code NullWritable} when unset.
 *
 * @param job the job configuration to read from
 */
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
// The two lookups are independent; order does not matter.
valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
NullWritable.class, WritableComparable.class);
keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
IncomparableKey.class, WritableComparable.class);
}
MapReduceTestUtil.java 文件源码
项目:hadoop
阅读 27
收藏 0
点赞 0
评论 0
/**
 * Sleeps for a very long time so that the task appears hung; used to
 * exercise the framework's task-timeout handling (new mapreduce API).
 *
 * @param key     input key (ignored)
 * @param value   input value (ignored)
 * @param context task context (never written to)
 * @throws IOException never thrown; declared to satisfy the Mapper contract
 */
public void map(WritableComparable<?> key, Writable value, Context context)
throws IOException {
try {
Thread.sleep(1000000);
} catch (InterruptedException e) {
// Restore the interrupt status instead of swallowing it, so callers
// up the stack (and the task runner) can observe the interruption.
Thread.currentThread().interrupt();
}
}