Example source code for the Java class org.apache.hadoop.io.SequenceFile.Writer
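The snippets below, collected from the projects named in each heading, show the common ways a SequenceFile.Writer is obtained and used: the option-based SequenceFile.createWriter factory, the older FileSystem-based overloads and constructors, stream-backed writers, and append mode. As a baseline for reading them, here is a minimal, self-contained sketch (not taken from any of the projects below, assuming a Hadoop 2.x or later client; the path is a placeholder) that writes two Writable pairs with the option-based API and reads them back:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Reader;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;

public class SequenceFileWriterSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/sketch.seq");   // placeholder path

    // Write a few key/value pairs with the option-based factory.
    Writer writer = SequenceFile.createWriter(conf,
        Writer.file(path),
        Writer.keyClass(IntWritable.class),
        Writer.valueClass(Text.class));
    try {
      writer.append(new IntWritable(1), new Text("one"));
      writer.append(new IntWritable(2), new Text("two"));
    } finally {
      writer.close();
    }

    // Read the pairs back with the matching option-based Reader.
    Reader reader = new Reader(conf, Reader.file(path));
    try {
      IntWritable key = new IntWritable();
      Text value = new Text();
      while (reader.next(key, value)) {
        System.out.println(key + "\t" + value);
      }
    } finally {
      reader.close();
    }
  }
}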

TestSequenceFileSerialization.java (project: hadoop-oss)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
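This test appends plain Long keys and String values rather than Writables, so it only works if the configuration registers a serializer for ordinary Java classes; the test's setUp (not shown in the snippet) registers Hadoop's JavaSerialization. A rough configuration fragment showing what that setup would look like, assuming the standard io.serializations property and serializer class names:

// Assumed setup, not part of the snippet above: register Java serialization so
// that Long and String can be used as SequenceFile key/value classes.
Configuration conf = new Configuration();
conf.setStrings("io.serializations",
    "org.apache.hadoop.io.serializer.JavaSerialization",
    "org.apache.hadoop.io.serializer.WritableSerialization");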
CircusTrainCopyListing.java (project: circus-train)
@Override
public void doBuildListing(Path pathToListFile, DistCpOptions options) throws IOException {
  try (Writer writer = newWriter(pathToListFile)) {

    Path sourceRootPath = getRootPath(getConf());

    for (Path sourcePath : options.getSourcePaths()) {

      FileSystem fileSystem = sourcePath.getFileSystem(getConf());
      FileStatus directory = fileSystem.getFileStatus(sourcePath);

      Map<String, CopyListingFileStatus> children = new FileStatusTreeTraverser(fileSystem)
          .preOrderTraversal(directory)
          .transform(new CopyListingFileStatusFunction(fileSystem, options))
          .uniqueIndex(new RelativePathFunction(sourceRootPath));

      for (Entry<String, CopyListingFileStatus> entry : children.entrySet()) {
        LOG.debug("Adding '{}' with relative path '{}'", entry.getValue().getPath(), entry.getKey());
        writer.append(new Text(entry.getKey()), entry.getValue());
        writer.sync();
      }
    }
  }
}
TestSequenceFileSerialization.java (project: hadoop)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: aliyun-oss-hadoop-fs)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: big-c)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
HdfsConsumerTest.java (project: Camel)
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
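createWriter here is a helper defined elsewhere in the Camel test class, so its body is not shown. A hypothetical stand-in (for illustration only, not the project's actual code) that would produce the same empty local SequenceFile:

// Hypothetical helper: create a writer for the given key/value classes;
// closing it right after writer.sync() leaves a header-only (empty) file.
private static SequenceFile.Writer createWriter(Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    return SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(file),
            SequenceFile.Writer.keyClass(keyClass),
            SequenceFile.Writer.valueClass(valueClass));
}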
TestSequenceFileSerialization.java (project: hadoop-2.6.0-cdh5.4.3)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-EAR)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
MahoutOutput.java (project: IReS-Platform)
public MahoutOutput(String output, FileSystem fs, Configuration conf) throws IOException {

        //clear the output dir
        Path basedir = new Path(output);
        if (fs.exists(basedir)) {
            fs.delete(basedir, true); //Delete existing Directory
        }
        fs.mkdirs(basedir);

        String dictOutput = output + "/dictionary.file";
        dictWriter = createWriter(conf, Writer.file(new Path(dictOutput)),
                Writer.keyClass(Text.class), Writer.valueClass(IntWritable.class));

        String vectorsPath = output + "/tfidf-vectors";
        tfidfWriter = new SequenceFile.Writer(fs, conf,
                new Path(vectorsPath), Text.class, VectorWritable.class);
    }
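This constructor mixes two generations of the API: dictWriter goes through the option-based createWriter factory, while tfidfWriter calls the FileSystem-based SequenceFile.Writer constructor, which is deprecated in recent Hadoop releases. A sketch of the option-based equivalent for the second writer (same key/value classes, reusing the conf and vectorsPath variables from the snippet above):

// Possible option-based equivalent of the deprecated constructor above.
tfidfWriter = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(new Path(vectorsPath)),
        SequenceFile.Writer.keyClass(Text.class),
        SequenceFile.Writer.valueClass(VectorWritable.class));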
TestSequenceFileSerialization.java (project: hadoop-plus)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
SequenceEventWriter.java (project: jetstream)
public SequenceEventWriterInstance(OutputStream stream,//
        Class<?> keyClass, //
        Class<?> valueClass,//
        CompressionType compressionType) {
    if (!(stream instanceof FSDataOutputStream)) {
        throw new RuntimeException(
                "OutputStream must be a FSDataOutputStream");
    }
    try {
        writer = SequenceFile.createWriter(hdfs.getHadoopConfig(),
                Writer.stream((FSDataOutputStream) stream),
                Writer.keyClass(keyClass),
                Writer.valueClass(valueClass),
                Writer.compression(compressionType));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
NNBench.java (project: hops)
/**
 * Create control files before a test run.
 * Number of files created is equal to the number of maps specified
 * 
 * @throws IOException on error
 */
private void createControlFiles() throws IOException {
  LOG.info("Creating " + numberOfMaps + " control files");

  for (int i = 0; i < numberOfMaps; i++) {
    String strFileName = "NNBench_Controlfile_" + i;
    Path filePath = new Path(new Path(baseDir, CONTROL_DIR_NAME),
            strFileName);

    SequenceFile.Writer writer = null;
    try {
      writer = SequenceFile.createWriter(getConf(), Writer.file(filePath),
          Writer.keyClass(Text.class), Writer.valueClass(LongWritable.class),
          Writer.compression(CompressionType.NONE));
      writer.append(new Text(strFileName), new LongWritable(i));
    } finally {
      if (writer != null) {
        writer.close();
      }
    }
  }
}
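The try/finally above is the pre-Java-7 way of guaranteeing the writer gets closed. SequenceFile.Writer is Closeable (the circus-train snippet earlier relies on this), so the body of the loop can also be written with try-with-resources; a sketch under that assumption:

// Same control-file write, letting try-with-resources close the writer.
try (SequenceFile.Writer writer = SequenceFile.createWriter(getConf(),
    Writer.file(filePath),
    Writer.keyClass(Text.class),
    Writer.valueClass(LongWritable.class),
    Writer.compression(CompressionType.NONE))) {
  writer.append(new Text(strFileName), new LongWritable(i));
}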
TestSequenceFileSerialization.java (project: hops)
@Test
public void testJavaSerialization() throws Exception {
  Path file = new Path(GenericTestUtils.getTempPath("testseqser.seq"));

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
SequenceFileUtility.java (project: openimaj)
@SuppressWarnings("unchecked")
private Writer createWriter(Map<String, String> metadata) throws IOException {
    final Metadata md = new Metadata();

    for (final Entry<String, String> e : metadata.entrySet()) {
        md.set(new Text(e.getKey()), new Text(e.getValue()));
    }
    final Class<K> keyClass = (Class<K>) ((ParameterizedType) getClass().getGenericSuperclass())
            .getActualTypeArguments()[0];
    final Class<V> valueClass = (Class<V>) ((ParameterizedType) getClass().getGenericSuperclass())
            .getActualTypeArguments()[1];

    return SequenceFile.createWriter(fileSystem, config, sequenceFilePath, keyClass, valueClass, compressionType,
            new DefaultCodec(), null,
            md);
}
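The Metadata attached here is written into the SequenceFile header and can be read back from a SequenceFile.Reader. A minimal sketch of retrieving it, reusing the config and sequenceFilePath fields from the snippet above:

// Read the per-file metadata back out of the header.
final SequenceFile.Reader reader = new SequenceFile.Reader(config,
        SequenceFile.Reader.file(sequenceFilePath));
try {
    final SequenceFile.Metadata md = reader.getMetadata();
    for (final Map.Entry<Text, Text> e : md.getMetadata().entrySet()) {
        System.out.println(e.getKey() + " = " + e.getValue());
    }
} finally {
    reader.close();
}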
InputFileDirectoryLoader.java (project: spring-usc)
public int run(String[] args) throws Exception {
  // Configuration processed by ToolRunner
  Properties p = new Properties();
  p.load(new FileInputStream(new File(args[0])));

  configure(p);
  String inputDirectoryName = p.getProperty("input.directory");
  File f = new File(inputDirectoryName);
  if (!f.exists() || !f.isDirectory()) {
    logger.error("Invalid input directory: " + inputDirectoryName);
    return -1;
  }
  String outputFileName = p.getProperty("output.file");
  Path outputPath = new Path(outputFileName);
  SequenceFile.Writer writer = SequenceFile.createWriter(getConf(), Writer.keyClass(Text.class),
      Writer.valueClass(Text.class), Writer.file(outputPath));
  for (File document : f.listFiles()) {
    String contents = FileUtils.readFileToString(document);
    writer.append(new Text(document.getName()), new Text(contents));
  }
  writer.close();
  return 0;
}
TestSequenceFileSerialization.java (project: hadoop-TCP)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-on-lustre)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hardfs)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-on-lustre2)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/testseqser.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
MergeSortRowIdMatcher.java (project: incubator-blur)
private void writeRowIds(Writer writer, SegmentReader segmentReader) throws IOException {
  Terms terms = segmentReader.terms(BlurConstants.ROW_ID);
  if (terms == null) {
    return;
  }
  TermsEnum termsEnum = terms.iterator(null);
  BytesRef rowId;
  long s = System.nanoTime();
  while ((rowId = termsEnum.next()) != null) {
    long n = System.nanoTime();
    if (n + _10_SECONDS > s) {
      _progressable.progress();
      s = System.nanoTime();
    }
    writer.append(new Text(rowId.utf8ToString()), NullWritable.get());
  }
}
SnapshotIndexDeletionPolicy.java (project: incubator-blur)
private synchronized void storeGenerations() throws IOException {
  FileSystem fileSystem = _path.getFileSystem(_configuration);
  FileStatus[] listStatus = fileSystem.listStatus(_path);
  SortedSet<FileStatus> existing = new TreeSet<FileStatus>(Arrays.asList(listStatus));
  long currentFile;
  if (!existing.isEmpty()) {
    FileStatus last = existing.last();
    currentFile = Long.parseLong(last.getPath().getName());
  } else {
    currentFile = 0;
  }
  Path path = new Path(_path, buffer(currentFile + 1));
  LOG.info("Creating new snapshot file [{0}]", path);
  FSDataOutputStream outputStream = fileSystem.create(path, false);
  Writer writer = SequenceFile.createWriter(_configuration, outputStream, Text.class, LongWritable.class,
      CompressionType.NONE, null);
  for (Entry<String, Long> e : _namesToGenerations.entrySet()) {
    writer.append(new Text(e.getKey()), new LongWritable(e.getValue()));
  }
  writer.close();
  outputStream.close();
  cleanupOldFiles(fileSystem, existing);
}
SequenceFileLoader.java (project: geolint)
/**
 * Traverse the directory recursively and add every file to the SequenceFile.
 * @param seq the SequenceFile writer to append to
 * @param pFile the file or directory to add
 */
private static void traverseAdd(Writer seq, File pFile) {

    if(pFile.isDirectory()) {
        for(File file:pFile.listFiles()) {
            traverseAdd(seq, file);
        }
    } else {
        try {
            addFile(seq, pFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
TestSequenceFileSerialization.java (project: RDFS)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hadoop-0.20)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
ConvertFastaForCloud.java (project: emr-sample-apps)
/**
 * Converts a FASTA file into a SequenceFile.
 * @param args args[0] is the input FASTA file, args[1] the output file
 * @throws IOException if the input cannot be read or the output cannot be written
 */
public static void main(String[] args) throws IOException {
    if (args.length != 2) {
        System.err.println("Usage: ConvertFastaForCloud file.fa outfile.br");
        System.exit(-1);
    }

    String infile = args[0];
    String outfile = args[1];

    System.err.println("Converting " + infile + " into " + outfile);

    JobConf config = new JobConf();

    SequenceFile.Writer writer = SequenceFile.createWriter(FileSystem.get(config), config,
            new Path(outfile), IntWritable.class, BytesWritable.class);

    convertFile(infile, writer);

    writer.close();

    System.err.println("min_seq_len: " + min_seq_len);
    System.err.println("max_seq_len: " + max_seq_len);
    System.err.println("Using DNAString version: " + DNAString.VERSION);
}
TestSequenceFileSerialization.java (project: hortonworks-extension)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileSerialization.java (project: hortonworks-extension)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
CreateSampleData.java (project: s3distcp)
private void createInputFiles(String inputPathString, long numFiles, long fileSize, String outputPath) {
  try {
    FileSystem fs = FileSystem.get(new URI(inputPathString), this.conf);
    fs.mkdirs(new Path(inputPathString));
    for (int fileNumber = 1; fileNumber <= numFiles; fileNumber++) {
      String inputFileName = join(inputPathString, Integer.valueOf(fileNumber));
      Path inputFilePath = new Path(inputFileName);
      fs.delete(inputFilePath, true);
      SequenceFile.Writer writer = SequenceFile.createWriter(fs, this.conf, inputFilePath,
          LongWritable.class, CreateFileInfo.class, SequenceFile.CompressionType.NONE);
      try {
        writer.append(new LongWritable(fileNumber),
            new CreateFileInfo(join(outputPath, Integer.valueOf(fileNumber)), fileSize));
      } finally {
        writer.close();
      }
    }
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
TestSequenceFileSerialization.java (project: hadoop-gpu)
public void testJavaSerialization() throws Exception {
  Path file = new Path(System.getProperty("test.build.data",".") +
      "/test.seq");

  fs.delete(file, true);
  Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
      String.class);

  writer.append(1L, "one");
  writer.append(2L, "two");

  writer.close();

  Reader reader = new Reader(fs, file, conf);
  assertEquals(1L, reader.next((Object) null));
  assertEquals("one", reader.getCurrentValue((Object) null));
  assertEquals(2L, reader.next((Object) null));
  assertEquals("two", reader.getCurrentValue((Object) null));
  assertNull(reader.next((Object) null));
  reader.close();

}
TestSequenceFileAppend.java (project: hadoop-oss)
@Test(timeout = 30000)
public void testAppendRecordCompression() throws Exception {
  GenericTestUtils.assumeInNativeProfile();

  Path file = new Path(ROOT_PATH, "testseqappendblockcompr.seq");
  fs.delete(file, true);

  Option compressOption = Writer.compression(CompressionType.RECORD,
      new GzipCodec());
  Writer writer = SequenceFile.createWriter(conf,
      SequenceFile.Writer.file(file),
      SequenceFile.Writer.keyClass(Long.class),
      SequenceFile.Writer.valueClass(String.class), compressOption);

  writer.append(1L, "one");
  writer.append(2L, "two");
  writer.close();

  verify2Values(file);

  writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(file),
      SequenceFile.Writer.keyClass(Long.class),
      SequenceFile.Writer.valueClass(String.class),
      SequenceFile.Writer.appendIfExists(true), compressOption);

  writer.append(3L, "three");
  writer.append(4L, "four");
  writer.close();

  verifyAll4Values(file);

  fs.deleteOnExit(file);
}

