Example source code for the Java class org.apache.hadoop.io.SequenceFile.Reader
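
Most of the snippets below follow the same pattern: open a SequenceFile.Reader (either through the option-based constructor with Reader.file(path) or the older (fs, path, conf) constructor), instantiate key and value objects from the classes recorded in the file header, iterate with next(key, value) until it returns false, and close the reader. The following is a minimal, self-contained sketch of that pattern; the input path and the class name are placeholders and are not taken from any of the projects below.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Reader;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceFileReadSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Placeholder path; pass a real SequenceFile location as the first argument.
    Path path = new Path(args.length > 0 ? args[0] : "/tmp/example.seq");
    try (SequenceFile.Reader reader =
             new SequenceFile.Reader(conf, Reader.file(path))) {
      // The key and value classes are stored in the file header, so they can be
      // instantiated reflectively without knowing them at compile time.
      Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
      Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
      while (reader.next(key, value)) {
        System.out.println(key + "\t" + value);
      }
    }
  }
}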

TestSequenceFileSerialization.java (project: hadoop-oss)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
DistCpV1.java (project: hadoop)
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
TestSequenceFileSerialization.java (project: hadoop)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: aliyun-oss-hadoop-fs)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
SequenceReader.java (project: GeoCrawler)
@Override
public int count(String path) throws FileNotFoundException {
  Path file = new Path(path);
  SequenceFile.Reader reader;
  int i = 0;
  try {
    reader = new SequenceFile.Reader(conf, Reader.file(file));
    Writable key = 
        (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = 
        (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);

    while(reader.next(key, value)) {
      i++;
    }
    reader.close();
  } catch (FileNotFoundException fne) {
    // Rethrow with the original message so callers know which file was missing.
    throw new FileNotFoundException(fne.getMessage());
  } catch (IOException e) {
    LOG.error("Error occurred while reading file {} : ", file,
        StringUtils.stringifyException(e));
    throw new WebApplicationException();
  }
  return i;
}
LinkReader.java (project: GeoCrawler)
@Override
public int count(String path) throws FileNotFoundException {
  Path file = new Path(path);
  SequenceFile.Reader reader;
  int i = 0;
  try {
    reader = new SequenceFile.Reader(conf, Reader.file(file));
    Writable key = (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);

    while(reader.next(key, value)) {
      i++;
    }
    reader.close();
  } catch (FileNotFoundException fne) {
    // Rethrow with the original message so callers know which file was missing.
    throw new FileNotFoundException(fne.getMessage());
  } catch (IOException e) {
    LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
    throw new WebApplicationException();
  }
  return i;
}
DistCpV1.java (project: big-c)
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
DistCpV1.java (project: big-c)
/** Check whether the file list has duplicates. */
static private void checkDuplication(FileSystem fs, Path file, Path sorted,
  Configuration conf) throws IOException {
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
    new Text.Comparator(), Text.class, Text.class, conf);
  sorter.sort(file, sorted);
  try (SequenceFile.Reader in =
       new SequenceFile.Reader(conf, Reader.file(sorted))) {
    Text prevdst = null, curdst = new Text();
    Text prevsrc = null, cursrc = new Text(); 
    for(; in.next(curdst, cursrc); ) {
      if (prevdst != null && curdst.equals(prevdst)) {
        throw new DuplicationException(
          "Invalid input, there are duplicated files in the sources: "
          + prevsrc + ", " + cursrc);
      }
      prevdst = curdst;
      curdst = new Text();
      prevsrc = cursrc;
      cursrc = new Text();
    }
  }
}
TestSequenceFileSerialization.java (project: big-c)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
DistCache.java (project: TopPI)
private static TIntIntMap readIntIntMap(URI[] files, Configuration conf, String token, int size) throws IOException {
    TIntIntMap map = new TIntIntHashMap(size, Constants.DEFAULT_LOAD_FACTOR, -1, -1);
    for (URI file : files) {
        if (file.getPath().contains(token)) {
            SequenceFile.Reader reader = new SequenceFile.Reader(conf, Reader.file(new Path(file)));
            IntWritable key = new IntWritable();
            IntWritable value = new IntWritable();

            while (reader.next(key, value)) {
                map.put(key.get(), value.get());
            }

            reader.close();
        }
    }

    return map;
}
TestSequenceFileSerialization.java (project: hadoop-2.6.0-cdh5.4.3)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-EAR)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-plus)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
SequenceFileRead.java (project: big-data-lite)
public static void main(String[] args) throws IOException {
    String uri = args[0];
    String split = args[1];
    Configuration conf = new Configuration();
    Path path = new Path(uri);
    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(conf, Reader.file(path));
        Text key = new Text();
        OrdImageWritable value = new OrdImageWritable();

        int num = 0;

        while (reader.next(key, value)) {
            System.out.println(key.toString() + " " + value.getByteLength());
            ImageIO.write(value.getImage(), "jpg", new File("image" + split + "_" + num++ + ".jpg"));
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}
AggregationOutputDumpTool.java (project: Pinot)
private static void processFile(Path path) throws Exception {
  System.out.println("Processing file:" + path);
  SequenceFile.Reader reader = new SequenceFile.Reader(new Configuration(),
      Reader.file(path));
  System.out.println(reader.getKeyClass());
  System.out.println(reader.getValueClassName());
  WritableComparable<?> key = (WritableComparable<?>) reader.getKeyClass()
      .newInstance();
  Writable val = (Writable) reader.getValueClass().newInstance();

  while (reader.next(key, val)) {
    BytesWritable writable = (BytesWritable) key;
    DimensionKey dimensionKey = DimensionKey.fromBytes(writable.getBytes());
    System.out.println(dimensionKey);
  }
  // Close the reader to release the underlying stream.
  reader.close();
}
DistCpV1.java (project: hops)
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }

  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair(); 
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
DistCpV1.java (project: hops)
/** Check whether the file list has duplicates. */
static private void checkDuplication(FileSystem fs, Path file, Path sorted,
  Configuration conf) throws IOException {
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
    new Text.Comparator(), Text.class, Text.class, conf);
  sorter.sort(file, sorted);
  try (SequenceFile.Reader in =
       new SequenceFile.Reader(conf, Reader.file(sorted))) {
    Text prevdst = null, curdst = new Text();
    Text prevsrc = null, cursrc = new Text(); 
    for(; in.next(curdst, cursrc); ) {
      if (prevdst != null && curdst.equals(prevdst)) {
        throw new DuplicationException(
          "Invalid input, there are duplicated files in the sources: "
          + prevsrc + ", " + cursrc);
      }
      prevdst = curdst;
      curdst = new Text();
      prevsrc = cursrc;
      cursrc = new Text();
    }
  }
}
TestSequenceFileSerialization.java (project: hops)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(GenericTestUtils.getTempPath("testseqser.seq"));

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
CubeStatsReader.java (project: kylin)
public CubeStatsResult(Path path, int precision) throws IOException {
    Configuration hadoopConf = HadoopUtil.getCurrentConfiguration();
    Option seqInput = SequenceFile.Reader.file(path);
    try (Reader reader = new SequenceFile.Reader(hadoopConf, seqInput)) {
        LongWritable key = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), hadoopConf);
        BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), hadoopConf);
        while (reader.next(key, value)) {
            if (key.get() == 0L) {
                percentage = Bytes.toInt(value.getBytes());
            } else if (key.get() == -1) {
                mapperOverlapRatio = Bytes.toDouble(value.getBytes());
            } else if (key.get() == -2) {
                mapperNumber = Bytes.toInt(value.getBytes());
            } else if (key.get() > 0) {
                HLLCounter hll = new HLLCounter(precision);
                ByteArray byteArray = new ByteArray(value.getBytes());
                hll.readRegisters(byteArray.asBuffer());
                counterMap.put(key.get(), hll);
            }
        }
    }
}
HiveToBaseCuboidMapperPerformanceTest.java (project: kylin)
@Ignore("convenient trial tool for dev")
@Test
public void test() throws IOException, InterruptedException {
    Configuration hconf = HadoopUtil.getCurrentConfiguration();
    HiveToBaseCuboidMapper mapper = new HiveToBaseCuboidMapper();
    Context context = MockupMapContext.create(hconf, metadataUrl, cubeName, null);

    mapper.doSetup(context);

    Reader reader = new Reader(hconf, SequenceFile.Reader.file(srcPath));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hconf);
    Text value = new Text();

    while (reader.next(key, value)) {
        mapper.map(key, value, context);
    }

    reader.close();
}
SequenceFileUtility.java (project: openimaj)
/**
 * Return the metadata map. Read mode only.
 *
 * @return metadata
 */
public Map<Text, Text> getMetadata() {
    if (!isReader) {
        throw new UnsupportedOperationException("Cannot read metadata in write mode");
    }

    Reader reader = null;
    try {
        reader = createReader();
        final Map<Text, Text> metadata = reader.getMetadata().getMetadata();
        return metadata;
    } catch (final Exception e) {
        throw new RuntimeException(e);
    } finally {
        if (reader != null)
            try {
                reader.close();
            } catch (final IOException e1) {
                // Ignore failures while closing the reader.
            }
    }
}
SequenceFileUtility.java (project: openimaj)
/**
 * @return the compression codec in use for this file.
 */
public Class<? extends CompressionCodec> getCompressionCodecClass() {
    if (!isReader)
        return DefaultCodec.class;

    Reader reader = null;
    try {
        reader = createReader();
        if (reader.getCompressionCodec() == null)
            return null;
        return reader.getCompressionCodec().getClass();
    } catch (final Exception e) {
        throw new RuntimeException(e);
    } finally {
        if (reader != null)
            try {
                reader.close();
            } catch (final IOException e1) {
                // Ignore failures while closing the reader.
            }
    }
}
LshMapper.java (project: pss)
@Override
public void configure(JobConf job) {
    l = job.getInt(LshPartitionMain.L_PROPERTY, LshPartitionMain.L_VALUE);
    try {
        Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
        // System.out.println("local:" + localFiles[0].getName());
        // FileSystem fs = localFiles[0].getFileSystem(job);
        FileSystem fs = FileSystem.get(job);
        // Reader reader = new SequenceFile.Reader(fs, localFiles[0], job);
        Reader reader = new SequenceFile.Reader(fs, new Path("lshfile"), job);
        reader.next(lsh);
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
SeqReader.java (project: pss)
public static int readFile(Boolean printValues, FileSystem fs, Path inputPath,
        Configuration conf) throws IOException, InstantiationException, IllegalAccessException {
    int count = 0;

    Reader reader = new SequenceFile.Reader(fs, inputPath, conf);

    Writable key = (Writable) reader.getKeyClass().newInstance();
    Writable value = (Writable) reader.getValueClass().newInstance();

    System.out.println("key class:" + key.getClass().getName());
    System.out.println("value class:" + value.getClass().getName());

    while (reader.next(key, value)) {
        if (printValues)
            System.out.print("\nkey:" + key.toString() + ", value:" + value.toString());
        count++;
    }
    reader.close();
    System.out.println("\n" + inputPath.getName() + " has " + count + " records");
    return count;
}
LshMapper.java (project: pss)
@Override
public void configure(JobConf job) {
    l = job.getInt(MinHashLshDriver.L_PROPERTY, MinHashLshDriver.L_VALUE);
    try {
        Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
        // System.out.println("local:" + localFiles[0].getName());
        // FileSystem fs = localFiles[0].getFileSystem(job);
        FileSystem fs = FileSystem.get(job);
        // Reader reader = new SequenceFile.Reader(fs, localFiles[0], job);
        Reader reader = new SequenceFile.Reader(fs, new Path("lshfile"), job);
        reader.next(lsh);
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
TestSequenceFileSerialization.java (project: hadoop-TCP)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
BaseCuboidMapperPerformanceTest.java (project: Kylin)
@Ignore("convenient trial tool for dev")
@Test
public void test() throws IOException, InterruptedException {
    Configuration hconf = new Configuration();
    BaseCuboidMapper mapper = new BaseCuboidMapper();
    Context context = MockupMapContext.create(hconf, metadataUrl, cubeName, null);

    mapper.setup(context);

    Reader reader = new Reader(hconf, SequenceFile.Reader.file(srcPath));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hconf);
    Text value = new Text();

    while (reader.next(key, value)) {
        mapper.map(key, value, context);
    }

    reader.close();
}
TestSequenceFileSerialization.java (project: hadoop-on-lustre)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hardfs)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-on-lustre2)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}

