Example source code for the Java class org.apache.hadoop.io.MapWritable

XPathApplierReducer.java source (project: alfred-mpi)

@Override
public void reduce(Text key, Iterable<MapWritable> listOfMaps, Context context) throws IOException, InterruptedException {
    for (MapWritable partialResultMap : listOfMaps) {
        for (Writable attributeText : partialResultMap.keySet()) {
            MapWritable partialInsideMap = (MapWritable) partialResultMap.get(attributeText);
            MapWritable partialOutputMap = new MapWritable();
            // Copy the inner rule -> value map entry by entry into a fresh MapWritable.
            for (Writable rule : partialInsideMap.keySet()) {
                Text regola = (Text) rule;
                Text valore = (Text) partialInsideMap.get(rule);
                partialOutputMap.put(new Text(regola.toString()), new Text(valore.toString()));
            }
            // Collect the copied map under its attribute name in the reducer-level result map.
            result.put((Text) attributeText, partialOutputMap);
        }
    }
    context.write(key, result);
}
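MapWritable stores raw Writable keys and values with no generics, which is why the reducer above copies each inner map entry by entry and casts as it goes. A minimal, self-contained sketch of the same nesting pattern, written for illustration only and not taken from alfred-mpi:

import java.util.Map;

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class NestedMapWritableSketch {
    public static void main(String[] args) {
        // Inner map: rule name -> value, both stored as Text.
        MapWritable rules = new MapWritable();
        rules.put(new Text("rule1"), new Text("42"));

        // Outer map: attribute name -> inner rule map.
        MapWritable byAttribute = new MapWritable();
        byAttribute.put(new Text("attributeA"), rules);

        // Reading back requires casts, because MapWritable is a Map<Writable, Writable>.
        for (Map.Entry<Writable, Writable> attribute : byAttribute.entrySet()) {
            MapWritable inner = (MapWritable) attribute.getValue();
            for (Map.Entry<Writable, Writable> rule : inner.entrySet()) {
                System.out.println(attribute.getKey() + " / " + rule.getKey() + " = " + rule.getValue());
            }
        }
    }
}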
KVHiveSerDe.java source (project: HiveKVStorageHandler2)
@Override
public Object deserialize(Writable wrtbl) throws SerDeException {
    MapWritable input = (MapWritable) wrtbl;
    Text t = new Text();
    row.clear();
    // Look up each configured key name in the incoming MapWritable row.
    for (int i = 0; i < fieldCount; i++) {
        t.set(majorMinorKeys.get(i));
        Writable value = input.get(t);
        if (value != null && !NullWritable.get().equals(value)) {
            row.add(value.toString());
        } else {
            row.add(null);
        }
    }
    return row;
}
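The deserializer above expects a MapWritable row whose keys are the configured major/minor key names and whose values are Text, with NullWritable marking a missing field. A hedged sketch of how such a row could be assembled on the record reader side; the key names here are placeholders, not the storage handler's actual configuration:

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;

public class KVRowSketch {
    // Hypothetical helper: builds one row in the shape KVHiveSerDe.deserialize() consumes.
    public static MapWritable buildRow() {
        MapWritable row = new MapWritable();
        row.put(new Text("majorKey1"), new Text("order-1001"));
        row.put(new Text("minorKey1"), new Text("2015-06-01"));
        // A missing column can be carried as NullWritable; deserialize() turns it into a null cell.
        row.put(new Text("minorKey2"), NullWritable.get());
        return row;
    }
}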
SSTableRecordReader.java source (project: cassowary)
@Override
public boolean next(MapWritable key, MapWritable value) throws IOException {
    // if we're starting out
    if (hiveRowIterator == null)
        hiveRowIterator = new HiveRowIterator(startPosition, endPosition, partitioner, keyAliasNames, columnAliasNames, metadata, columnNames, rateLimiter);
    if (!hiveRowIterator.hasNext())
        return false;
    Pair<MapWritable, MapWritable> nextPair = hiveRowIterator.next();
    key.clear();
    key.putAll(nextPair.left);
    value.clear();
    value.putAll(nextPair.right);
    return true;
}
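The next(key, value) signature follows the old org.apache.hadoop.mapred.RecordReader contract, where the caller allocates the key and value once and the reader refills them on every call. A minimal sketch of how such a reader is typically driven; the reader instance itself is assumed to be constructed elsewhere:

import java.io.IOException;

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.mapred.RecordReader;

public class RecordReaderDriverSketch {
    // Drains any old-API RecordReader<MapWritable, MapWritable>, such as the one above.
    static void readAll(RecordReader<MapWritable, MapWritable> reader) throws IOException {
        MapWritable key = reader.createKey();
        MapWritable value = reader.createValue();
        while (reader.next(key, value)) {
            // key and value are reused across calls; copy them if they must outlive this iteration.
            System.out.println(key.size() + " key columns, " + value.size() + " value columns");
        }
        reader.close();
    }
}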
TransactionalRegionServer.java source (project: hbase-secondary-index)
/**
 * {@inheritDoc}
 */
@Override
protected void handleReportForDutyResponse(final MapWritable c)
        throws IOException {
    super.handleReportForDutyResponse(c);
    initializeTHLog();
    String n = Thread.currentThread().getName();
    UncaughtExceptionHandler handler = new UncaughtExceptionHandler() {
        public void uncaughtException(final Thread t, final Throwable e) {
            abort("Set stop flag in " + t.getName(), e);
            LOG.fatal("Set stop flag in " + t.getName(), e);
        }
    };
    setDaemonThreadRunning(this.cleanOldTransactionsThread, n
        + ".oldTransactionCleaner", handler);
    setDaemonThreadRunning(this.transactionLeases,
        "Transactional leases");
}
GenericRecordExportMapper.java source (project: aliyun-maxcompute-data-collectors)
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);

    Configuration conf = context.getConfiguration();

    // Instantiate a copy of the user's class to hold and parse the record.
    String recordClassName = conf.get(
        ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
    if (null == recordClassName) {
        throw new IOException("Export table class name ("
            + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
            + ") is not set!");
    }

    try {
        Class cls = Class.forName(recordClassName, true,
            Thread.currentThread().getContextClassLoader());
        recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException(cnfe);
    }

    if (null == recordImpl) {
        throw new IOException("Could not instantiate object of type "
            + recordClassName);
    }

    columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
        MapWritable.class);
}
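setup() recovers the column-type map with DefaultStringifier.load, so the job-configuration side must have stored a MapWritable under the same configuration key beforehand. A hedged sketch of that storing step; the column names and types are invented for illustration, and the key is passed in rather than guessing the value of the AVRO_COLUMN_TYPES_MAP constant:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ColumnTypesConfigSketch {
    // 'key' would be the AVRO_COLUMN_TYPES_MAP constant referenced in the mapper above.
    public static void storeColumnTypes(Configuration conf, String key) throws IOException {
        // Column name -> database type name, both carried as Text.
        MapWritable columnTypes = new MapWritable();
        columnTypes.put(new Text("Age"), new Text("INTEGER"));
        columnTypes.put(new Text("Name"), new Text("VARCHAR"));
        // DefaultStringifier serializes the map and stores it in the configuration as a Base64 string.
        DefaultStringifier.store(conf, columnTypes, key);
    }
}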
TestJdbcExportJob.java source (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithNoColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"),
        DefaultStringifier.load(job.getConfiguration(),
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
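This test, and the ones that follow, compare the loaded key set against an asSetOfText helper that is not shown on this page. A plausible reconstruction, offered as an assumption rather than the project's verbatim code:

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.io.Text;

// Hypothetical reconstruction of the helper the tests rely on;
// in the real test class it would be a private member of TestJdbcExportJob.
final class AsSetOfTextSketch {
    static Set<Text> asSetOfText(String... values) {
        Set<Text> set = new HashSet<Text>();
        for (String value : values) {
            set.add(new Text(value));
        }
        return set;
    }
}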
TestJdbcExportJob.java source (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithAllColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name", "Gender" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"),
        DefaultStringifier.load(job.getConfiguration(),
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
TestJdbcExportJob.java source (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithOneColumnSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Gender" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Gender"),
        DefaultStringifier.load(job.getConfiguration(),
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
TestJdbcExportJob.java source (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithSomeColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name"),
        DefaultStringifier.load(job.getConfiguration(),
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
TestJdbcExportJob.java source (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithMoreColumnsSpecified() throws Exception {
    SqoopOptions opts = new SqoopOptions();
    opts.setExportDir("myexportdir");
    String[] columns = { "Age", "Name", "Gender", "Address" };
    opts.setColumns(columns);
    JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
    Job job = new Job();
    jdbcExportJob.configureInputFormat(job, null, null, null);
    assertEquals(asSetOfText("Age", "Name", "Gender"),
        DefaultStringifier.load(job.getConfiguration(),
            AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}