@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();
  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();

  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();

  assertEquals(orig, deser);
}
Java source examples for the org.apache.hadoop.io.DataOutputBuffer class
Source file (snippet above): TestWritableSerialization.java (project: hadoop)
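Every example on this page follows the same in-memory round trip: bytes are written into a DataOutputBuffer, and its backing array is then handed to a DataInputBuffer, ByteBuffer, or ByteArrayInputStream for reading back. Because getData() returns the backing array, which may be larger than what was written, the valid region is always bounded by getLength(). Below is a minimal, self-contained sketch of that pattern; it is not taken from any of the projects listed here and uses Text purely as an example Writable.

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferRoundTrip {
  public static void main(String[] args) throws Exception {
    // Write a Writable into an in-memory, reusable output buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // Hand the backing array to a DataInputBuffer; the readable region
    // is [0, getLength()), not the full length of getData().
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());

    Text copy = new Text();
    copy.readFields(in);
    System.out.println(copy);  // prints "hello"
  }
}

The same getData()/getLength() pairing appears in each of the snippets that follow.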
Source file: SerializationTestUtil.java (project: hadoop)
/**
 * A utility that tests serialization/deserialization.
 * @param conf configuration to use, "io.serializations" is read to
 *             determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer
      = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer
      = factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
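A hypothetical caller of the utility above (not part of SerializationTestUtil.java): with a default Configuration, whose "io.serializations" value includes WritableSerialization, a Writable such as Text round-trips directly.

Configuration conf = new Configuration();
Text before = new Text("round-trip me");
// testSerialization serializes 'before' into a DataOutputBuffer and
// deserializes it again from a DataInputBuffer, as shown above.
Text after = SerializationTestUtil.testSerialization(conf, before);
assertEquals(before, after);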
Source file: TestKeyValueCompression.java (project: ditb)
@Test
public void testKVWithTags() throws Exception {
  CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);
  DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
  KeyValueCompression.writeKV(buf, createKV(1), ctx);
  KeyValueCompression.writeKV(buf, createKV(0), ctx);
  KeyValueCompression.writeKV(buf, createKV(2), ctx);

  ctx.clear();

  DataInputStream in = new DataInputStream(new ByteArrayInputStream(
      buf.getData(), 0, buf.getLength()));

  KeyValue readBack = KeyValueCompression.readKV(in, ctx);
  List<Tag> tags = readBack.getTags();
  assertEquals(1, tags.size());
}
Source file: IndexColumnDescriptor.java (project: ditb)
/**
 * @return Convert this instance to the pb column family type
 */
@Override
public ColumnFamilySchema convert() {
  try {
    DataOutputBuffer indexout = new DataOutputBuffer();
    // System.out.println("winter write indexColumn descripter, indexType is: "
    // + valueOfIndexType(indexType));
    indexout.writeInt(valueOfIndexType(indexType));
    indexout.writeInt(indexes.size());
    for (IndexDescriptor indexDescriptor : indexes.values()) {
      indexDescriptor.write(indexout);
    }
    super.setValue(INDEX, indexout.getData());
  } catch (IOException e1) {
    e1.printStackTrace();
  }
  return super.convert();
}
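The read side would mirror the write order above (index type, then the descriptor count, then each descriptor). The following is a sketch only, under the assumption that IndexDescriptor follows the usual Writable convention and exposes a no-arg constructor plus readFields(DataInput); that is not confirmed from the ditb source here. Note also that setValue(INDEX, indexout.getData()) stores the whole backing array, so a reader must rely on the counts it reads rather than on the array length.

// Sketch: assumes IndexDescriptor() and IndexDescriptor.readFields(DataInput)
// exist and mirror the IndexDescriptor.write(DataOutput) call in convert().
DataInputBuffer in = new DataInputBuffer();
byte[] raw = getValue(INDEX);
in.reset(raw, raw.length);
int indexType = in.readInt();   // written first in convert()
int count = in.readInt();       // number of index descriptors
for (int i = 0; i < count; i++) {
  IndexDescriptor d = new IndexDescriptor();
  d.readFields(in);
  // ... collect d into the indexes map ...
}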
Source file: AzkabanSequenceFileReader.java (project: azkaban)
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer,
    CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();

  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);

    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
Source file: Utils.java (project: flink)
public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();

  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }

    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
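The receiving side reverses this layout. Below is a minimal sketch, not taken from the Flink source, of how a launched process could recover the credentials from the tokens ByteBuffer set above; it assumes a heap buffer at position 0, which is what ByteBuffer.wrap produces here.

// Sketch: inverse of the write path above. readTokenStorageStream parses
// what Credentials#writeTokenStorageToStream produced.
ByteBuffer tokens = amContainer.getTokens();  // the buffer set by setTokensFor
DataInputBuffer dib = new DataInputBuffer();
dib.reset(tokens.array(), tokens.arrayOffset(), tokens.remaining());
Credentials read = new Credentials();
read.readTokenStorageStream(dib);
UserGroupInformation.getCurrentUser().addCredentials(read);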
Source file: TestPBRecordImpl.java (project: aliyun-oss-hadoop-fs)
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
      LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
      new LocalizerStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);

  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Source file: TestPBRecordImpl.java (project: aliyun-oss-hadoop-fs)
@Test(timeout=10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb =
      (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD =
      LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD =
      new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
Source file: TestAppManager.java (project: aliyun-oss-hadoop-fs)
@Test
public void testRMAppSubmitWithInvalidTokens() throws Exception {
  // Setup invalid security tokens
  DataOutputBuffer dob = new DataOutputBuffer();
  ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0,
      dob.getLength());
  asContext.getAMContainerSpec().setTokens(securityTokens);
  try {
    appMonitor.submitApplication(asContext, "test");
    Assert.fail("Application submission should fail because" +
        " Tokens are invalid.");
  } catch (YarnException e) {
    // Exception is expected
    Assert.assertTrue("The thrown exception is not" +
        " java.io.EOFException",
        e.getMessage().contains("java.io.EOFException"));
  }

  int timeoutSecs = 0;
  while ((getAppEventType() == RMAppEventType.KILL) &&
      timeoutSecs++ < 20) {
    Thread.sleep(1000);
  }
  Assert.assertEquals("app event type sent is wrong",
      RMAppEventType.APP_REJECTED, getAppEventType());
  asContext.getAMContainerSpec().setTokens(null);
}
Source file: TestRMAppTransitions.java (project: aliyun-oss-hadoop-fs)
@Test (timeout = 30000)
public void testAppRecoverPath() throws IOException {
  LOG.info("--- START: testAppRecoverPath ---");
  ApplicationSubmissionContext sub =
      Records.newRecord(ApplicationSubmissionContext.class);
  ContainerLaunchContext clc =
      Records.newRecord(ContainerLaunchContext.class);
  Credentials credentials = new Credentials();
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(securityTokens);
  sub.setAMContainerSpec(clc);
  testCreateAppSubmittedRecovery(sub);
}