Java code examples for the class org.apache.hadoop.io.DataOutputBuffer

TestContainerLocalizer.java source code (project: aliyun-oss-hadoop-fs)

@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok)
    throws IOException {
  Credentials creds = new Credentials();
  byte[] password = new byte[20];
  Text kind = new Text();
  Text service = new Text();
  Text alias = new Text();
  for (int i = 0; i < nTok; ++i) {
    byte[] identifier = ("idef" + i).getBytes();
    r.nextBytes(password);
    kind.set("kind" + i);
    service.set("service" + i);
    alias.set("token" + i);
    Token token = new Token(identifier, password, kind, service);
    creds.addToken(alias, token);
  }
  DataOutputBuffer buf = new DataOutputBuffer();
  creds.writeTokenStorageToStream(buf);
  DataInputBuffer ret = new DataInputBuffer();
  ret.reset(buf.getData(), 0, buf.getLength());
  return ret;
}
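Nearly every example on this page follows the same round-trip idiom: serialize into a DataOutputBuffer, then hand getData()/getLength() to a DataInputBuffer to read the bytes back. A minimal, self-contained sketch of that idiom (the demo class name is illustrative, not from any listed project):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferRoundTrip {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);        // serialize into the buffer

    DataInputBuffer in = new DataInputBuffer();
    // getData() returns the backing array; getLength() bounds the valid bytes
    in.reset(out.getData(), 0, out.getLength());

    Text copy = new Text();
    copy.readFields(in);                 // deserialize from the same bytes
    System.out.println(copy);            // prints "hello"
  }
}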
TFile.java source code (project: hadoop-oss)
public void write(DataOutput out) throws IOException {
  if (firstKey == null) {
    Utils.writeVInt(out, 0);
    return;
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  Utils.writeVInt(dob, firstKey.size());
  dob.write(firstKey.buffer());
  Utils.writeVInt(out, dob.size());
  out.write(dob.getData(), 0, dob.getLength());
  for (TFileIndexEntry entry : index) {
    dob.reset();
    entry.write(dob);
    Utils.writeVInt(out, dob.getLength());
    out.write(dob.getData(), 0, dob.getLength());
  }
}
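The write method above length-prefixes each serialized index entry with a VInt before copying the scratch buffer to the real output. A sketch of a matching read side, assuming the public Utils class from org.apache.hadoop.io.file.tfile; the helper class and names here are hypothetical:

import java.io.DataInput;
import java.io.IOException;
import org.apache.hadoop.io.file.tfile.Utils;

class VIntFramedReader { // hypothetical name
  // Reads back one record written as writeVInt(length) + raw bytes,
  // mirroring the loop in the write method above.
  static byte[] readLengthPrefixed(DataInput in) throws IOException {
    int len = Utils.readVInt(in);   // the VInt length prefix
    byte[] record = new byte[len];
    in.readFully(record);           // the serialized entry bytes
    return record;
  }
}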
TestDelegationToken.java source code (project: hadoop-oss)
private boolean testDelegationTokenIdentiferSerializationRoundTrip(Text owner,
    Text renewer, Text realUser) throws IOException {
  TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
      owner, renewer, realUser);
  DataOutputBuffer out = new DataOutputBuffer();
  dtid.writeImpl(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  try {
    TestDelegationTokenIdentifier dtid2 =
        new TestDelegationTokenIdentifier();
    dtid2.readFields(in);
    assertTrue(dtid.equals(dtid2));
    return true;
  } catch (IOException e) {
    return false;
  }
}
CryptoStreamsTestBase.java source code (project: hadoop-oss)
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for (int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();
    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");
  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
TestCryptoStreams.java source code (project: hadoop-oss)
@Override
protected OutputStream getOutputStream(int bufferSize, byte[] key, byte[] iv)
    throws IOException {
  DataOutputBuffer out = new DataOutputBuffer() {
    @Override
    public void flush() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
    @Override
    public void close() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
  };
  return new CryptoOutputStream(new FakeOutputStream(out),
      codec, bufferSize, key, iv);
}
TestIndexedSort.java source code (project: hadoop-oss)
public WritableSortable(int j) throws IOException {
  seed = r.nextLong();
  r.setSeed(seed);
  Text t = new Text();
  StringBuilder sb = new StringBuilder();
  indices = new int[j];
  offsets = new int[j];
  check = new String[j];
  DataOutputBuffer dob = new DataOutputBuffer();
  for (int i = 0; i < j; ++i) {
    indices[i] = i;
    offsets[i] = dob.getLength();
    genRandom(t, r.nextInt(15) + 1, sb);
    t.write(dob);
    check[i] = t.toString();
  }
  eob = dob.getLength();
  bytes = dob.getData();
  comparator = WritableComparator.get(Text.class);
}
TestWritableSerialization.java source code (project: hadoop-oss)
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();
  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();
  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}
SerializationTestUtil.java source code (project: hadoop-oss)
/**
 * A utility that tests serialization/deserialization.
 * @param conf configuration to use, "io.serializations" is read to
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer
      = factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer
      = factory.getDeserializer(GenericsUtil.getClass(before));
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
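A sketch of how this utility might be called, assuming a default Configuration (whose io.serializations list already includes WritableSerialization, so a Text round-trips as a Writable); the demo class name is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;

public class SerializationTestUtilDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Round-trip a Text value through testSerialization above.
    Text after = SerializationTestUtil.testSerialization(conf, new Text("hello"));
    System.out.println(after); // prints "hello"
  }
}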
TestMainframeDatasetInputSplit.java source code (project: aliyun-maxcompute-data-collectors)
@Test
public void testWriteRead() {
  mfDatasetInputSplit.addDataset("dataSet1");
  mfDatasetInputSplit.addDataset("dataSet2");
  DataOutputBuffer dob = new DataOutputBuffer();
  DataInputBuffer dib = new DataInputBuffer();
  MainframeDatasetInputSplit mfReader = new MainframeDatasetInputSplit();
  try {
    mfDatasetInputSplit.write(dob);
    dib.reset(dob.getData(), dob.getLength());
    mfReader.readFields(dib);
    Assert.assertNotNull("MFReader get data from tester", mfReader);
    Assert.assertEquals(2, mfReader.getLength());
    Assert.assertEquals("dataSet1", mfReader.getNextDataset());
    Assert.assertEquals("dataSet2", mfReader.getNextDataset());
  } catch (IOException ioe) {
    Assert.fail("No IOException should be thrown!");
  } catch (InterruptedException ie) {
    Assert.fail("No InterruptedException should be thrown!");
  }
}
TestPBRecordImpl.java source code (project: hadoop)
@Test(timeout=10000)
public void testLocalResourceStatusSerDe() throws Exception {
  LocalResourceStatus rsrcS = createLocalResourceStatus();
  assertTrue(rsrcS instanceof LocalResourceStatusPBImpl);
  LocalResourceStatusPBImpl rsrcPb = (LocalResourceStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalResourceStatusProto rsrcPbD =
      LocalResourceStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalResourceStatus rsrcD =
      new LocalResourceStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResource());
  assertEquals(createResource(), rsrcD.getResource());
}
TestPBRecordImpl.java source code (project: hadoop)
@Test(timeout=10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD =
      LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD =
      new LocalizerStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
TestPBRecordImpl.java source code (project: hadoop)
@Test(timeout=10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb =
      (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD =
      LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD =
      new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
TestRMAppTransitions.java source code (project: hadoop)
@Test (timeout = 30000)
public void testAppRecoverPath() throws IOException {
  LOG.info("--- START: testAppRecoverPath ---");
  ApplicationSubmissionContext sub =
      Records.newRecord(ApplicationSubmissionContext.class);
  ContainerLaunchContext clc =
      Records.newRecord(ContainerLaunchContext.class);
  Credentials credentials = new Credentials();
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(securityTokens);
  sub.setAMContainerSpec(clc);
  testCreateAppSubmittedRecovery(sub);
}
TestMerge.java source code (project: hadoop)
public KeyValueWriter(Configuration conf, OutputStream output,
                      Class<K> kyClass, Class<V> valClass
                     ) throws IOException {
  keyClass = kyClass;
  valueClass = valClass;
  dataBuffer = new DataOutputBuffer();
  SerializationFactory serializationFactory
      = new SerializationFactory(conf);
  keySerializer
      = (Serializer<K>)serializationFactory.getSerializer(keyClass);
  keySerializer.open(dataBuffer);
  valueSerializer
      = (Serializer<V>)serializationFactory.getSerializer(valueClass);
  valueSerializer.open(dataBuffer);
  outputStream = new DataOutputStream(output);
}
TestIFileStreams.java source code (project: hadoop)
public void testIFileStream() throws Exception {
  final int DLEN = 100;
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  // 104 = DLEN + 4: IFileOutputStream appends a 4-byte checksum to the data
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}
TestWritableJobConf.java source code (project: hadoop)
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
      factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
      factory.getDeserializer(GenericsUtil.getClass(conf));
  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
Chain.java source code (project: hadoop)
private <E> E makeCopyForPassByValue(Serialization<E> serialization,
                                     E obj) throws IOException {
  Serializer<E> ser =
      serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));
  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();
  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
      getChainJobConf());
  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
EditLogBackupOutputStream.java source code (project: hadoop)
EditLogBackupOutputStream(NamenodeRegistration bnReg, // backup node
    JournalInfo journalInfo) // active name-node
    throws IOException {
  super();
  this.bnRegistration = bnReg;
  this.journalInfo = journalInfo;
  InetSocketAddress bnAddress =
      NetUtils.createSocketAddr(bnRegistration.getAddress());
  try {
    this.backupNode = NameNodeProxies.createNonHAProxy(new HdfsConfiguration(),
        bnAddress, JournalProtocol.class, UserGroupInformation.getCurrentUser(),
        true).getProxy();
  } catch (IOException e) {
    Storage.LOG.error("Error connecting to: " + bnAddress, e);
    throw e;
  }
  this.doubleBuf = new EditsDoubleBuffer(DEFAULT_BUFFER_SIZE);
  this.out = new DataOutputBuffer(DEFAULT_BUFFER_SIZE);
}
TestDelegationTokenRemoteFetcher.java source code (project: hadoop)
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);
  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  // note: getData() returns the whole backing array, which can be longer
  // than getLength()
  int fileLength = out.getData().length;
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData());
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
TestJspHelper.java source code (project: hadoop)
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState :
        HdfsServerConstants.ReplicaState.values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result =
          HdfsServerConstants.ReplicaState.read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
TestDFSPacket.java source code (project: hadoop)
@Test
public void testPacket() throws Exception {
  Random r = new Random(12345L);
  byte[] data = new byte[chunkSize];
  r.nextBytes(data);
  byte[] checksum = new byte[checksumSize];
  r.nextBytes(checksum);
  DataOutputBuffer os = new DataOutputBuffer(data.length * 2);
  byte[] packetBuf = new byte[data.length * 2];
  DFSPacket p = new DFSPacket(packetBuf, maxChunksPerPacket,
      0, 0, checksumSize, false);
  p.setSyncBlock(true);
  p.writeData(data, 0, data.length);
  p.writeChecksum(checksum, 0, checksum.length);
  p.writeTo(os);
  // we have set syncBlock to true, so the header has the maximum length
  int headerLen = PacketHeader.PKT_MAX_HEADER_LEN;
  byte[] readBuf = os.getData();
  assertArrayRegionsEqual(readBuf, headerLen, checksum, 0, checksum.length);
  assertArrayRegionsEqual(readBuf, headerLen + checksum.length, data, 0, data.length);
}
TestRecordFactory.java source code (project: hadoop)
public static void testFactory(long targetBytes, long targetRecs)
    throws Exception {
  final Configuration conf = new Configuration();
  final GridmixKey key = new GridmixKey();
  final GridmixRecord val = new GridmixRecord();
  LOG.info("Target bytes/records: " + targetBytes + "/" + targetRecs);
  final RecordFactory f = new AvgRecordFactory(targetBytes, targetRecs, conf);
  targetRecs = targetRecs <= 0 && targetBytes >= 0
      ? Math.max(1,
          targetBytes
              / conf.getInt(AvgRecordFactory.GRIDMIX_MISSING_REC_SIZE,
                  64 * 1024))
      : targetRecs;
  long records = 0L;
  final DataOutputBuffer out = new DataOutputBuffer();
  while (f.next(key, val)) {
    ++records;
    key.write(out);
    val.write(out);
  }
  assertEquals(targetRecs, records);
  assertEquals(targetBytes, out.getLength());
}
TestGridmixRecord.java source code (project: hadoop)
static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max)
    throws Exception {
  final Random r = new Random();
  final long s = r.nextLong();
  r.setSeed(s);
  LOG.info("eqSeed: " + s);
  assertEquals(x.fixedBytes(), y.fixedBytes());
  final int min = x.fixedBytes() + 1;
  final DataOutputBuffer out1 = new DataOutputBuffer();
  final DataOutputBuffer out2 = new DataOutputBuffer();
  for (int i = min; i < max; ++i) {
    final long seed = r.nextLong();
    setSerialize(x, seed, i, out1);
    setSerialize(y, seed, i, out2);
    assertEquals(x, y);
    assertEquals(x.hashCode(), y.hashCode());
    // verify written contents match
    assertEquals(out1.getLength(), out2.getLength());
    // assumes that writes will grow buffer deterministically
    assertEquals("Bad test", out1.getData().length, out2.getData().length);
    assertArrayEquals(out1.getData(), out2.getData());
  }
}
StreamXmlRecordReader.java source code (project: hadoop)
public synchronized boolean next(Text key, Text value) throws IOException {
  numNext++;
  if (pos_ >= end_) {
    return false;
  }
  DataOutputBuffer buf = new DataOutputBuffer();
  if (!readUntilMatchBegin()) {
    return false;
  }
  if (pos_ >= end_ || !readUntilMatchEnd(buf)) {
    return false;
  }
  // There is only one elem..key/value splitting is not done here.
  byte[] record = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, record, 0, record.length);
  numRecStats(record, 0, record.length);
  key.set(record);
  value.set("");
  return true;
}