Java class org.apache.hadoop.io.compress.DefaultCodec: example source code

TestIFile.java source code (project: hadoop)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}
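
The assignment `DefaultCodec codec = new GzipCodec();` compiles because GzipCodec extends DefaultCodec in Hadoop. For orientation, here is a minimal, self-contained sketch of using DefaultCodec on its own for a compress/decompress round trip; the class name, in-memory buffer, and sample string are illustrative assumptions, not part of the test above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class DefaultCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf); // DefaultCodec is Configurable; it needs a conf before use

    // Compress into an in-memory buffer.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    CompressionOutputStream compressed = codec.createOutputStream(buffer);
    compressed.write("hello, codec".getBytes("UTF-8")); // sample payload (illustrative)
    compressed.close();

    // Decompress and echo the original bytes.
    CompressionInputStream decompressed =
        codec.createInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    IOUtils.copyBytes(decompressed, System.out, 4096, true);
  }
}
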

TestIFile.java source code (project: hadoop)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}

TestIPCUtil.java source code (project: ditb)

/**
 * For running a few tests of methods herein.
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
    if (arg.startsWith(COUNT)) {
      count = Integer.parseInt(arg.replace(COUNT, ""));
    } else if (arg.startsWith(SIZE)) {
      size = Integer.parseInt(arg.replace(SIZE, ""));
    } else {
      usage(1);
    }
  }
  IPCUtil util = new IPCUtil(HBaseConfiguration.create());
  ((Log4JLogger) IPCUtil.LOG).getLogger().setLevel(Level.ALL);
  timerTests(util, count, size, new KeyValueCodec(), null);
  timerTests(util, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(util, count, size, new KeyValueCodec(), new GzipCodec());
}

TestIFile.java source code (project: aliyun-oss-hadoop-fs)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: aliyun-oss-hadoop-fs)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}

SequenceFileOutputFormat.java source code (project: aliyun-oss-hadoop-fs)

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
    Class<?> keyClass, Class<?> valueClass) throws IOException {
  Configuration conf = context.getConfiguration();
  CompressionCodec codec = null;
  CompressionType compressionType = CompressionType.NONE;
  if (getCompressOutput(context)) {
    // find the kind of compression to do
    compressionType = getOutputCompressionType(context);
    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
    codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
  }
  // get the path of the temporary output file
  Path file = getDefaultWorkFile(context, "");
  FileSystem fs = file.getFileSystem(conf);
  return SequenceFile.createWriter(fs, conf, file,
      keyClass, valueClass, compressionType, codec, context);
}
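
The getters used above (getCompressOutput, getOutputCompressionType, getOutputCompressorClass) read values that the job driver sets at submission time. Here is a hedged sketch of that driver-side configuration; the job name and Text key/value classes are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

public class CompressedSeqFileDriver {
  public static Job configure(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "compressed-seqfile"); // hypothetical job name
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    // These three calls back the getters used in getSequenceWriter() above.
    FileOutputFormat.setCompressOutput(job, true);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
    FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    return job;
  }
}
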

TestIFile.java source code (project: big-c)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: big-c)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}

SequenceFileOutputFormat.java source code (project: big-c)

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
    Class<?> keyClass, Class<?> valueClass) throws IOException {
  Configuration conf = context.getConfiguration();
  CompressionCodec codec = null;
  CompressionType compressionType = CompressionType.NONE;
  if (getCompressOutput(context)) {
    // find the kind of compression to do
    compressionType = getOutputCompressionType(context);
    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
    codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
  }
  // get the path of the temporary output file
  Path file = getDefaultWorkFile(context, "");
  FileSystem fs = file.getFileSystem(conf);
  return SequenceFile.createWriter(fs, conf, file,
      keyClass, valueClass, compressionType, codec, context);
}

CommonFileOutputFormat.java source code (project: LiteGraph)

protected DataOutputStream getDataOutputStream(final TaskAttemptContext job)
    throws IOException, InterruptedException {
  final Configuration conf = job.getConfiguration();
  boolean isCompressed = getCompressOutput(job);
  CompressionCodec codec = null;
  String extension = "";
  if (isCompressed) {
    final Class<? extends CompressionCodec> codecClass =
        getOutputCompressorClass(job, DefaultCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, conf);
    extension = codec.getDefaultExtension();
  }
  final Path file = super.getDefaultWorkFile(job, extension);
  final FileSystem fs = file.getFileSystem(conf);
  if (!isCompressed) {
    return new DataOutputStream(fs.create(file, false));
  } else {
    return new DataOutputStream(codec.createOutputStream(fs.create(file, false)));
  }
}
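
The file-name suffix chosen above comes from CompressionCodec.getDefaultExtension(). A small sketch of the values for a few stock codecs; the wrapper class is illustrative, and the printed suffixes are the standard ones shipped with Hadoop.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecExtensions {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // DefaultCodec wraps zlib/DEFLATE, hence the ".deflate" suffix.
    System.out.println(ReflectionUtils.newInstance(DefaultCodec.class, conf)
        .getDefaultExtension()); // .deflate
    System.out.println(ReflectionUtils.newInstance(GzipCodec.class, conf)
        .getDefaultExtension()); // .gz
    System.out.println(ReflectionUtils.newInstance(BZip2Codec.class, conf)
        .getDefaultExtension()); // .bz2
  }
}
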

TestIFile.java source code (project: hadoop-2.6.0-cdh5.4.3)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: hadoop-2.6.0-cdh5.4.3)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}

SequenceFileOutputFormat.java source code (project: hadoop-2.6.0-cdh5.4.3)

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
    Class<?> keyClass, Class<?> valueClass) throws IOException {
  Configuration conf = context.getConfiguration();
  CompressionCodec codec = null;
  CompressionType compressionType = CompressionType.NONE;
  if (getCompressOutput(context)) {
    // find the kind of compression to do
    compressionType = getOutputCompressionType(context);
    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
    codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
  }
  // get the path of the temporary output file
  Path file = getDefaultWorkFile(context, "");
  FileSystem fs = file.getFileSystem(conf);
  return SequenceFile.createWriter(fs, conf, file,
      keyClass, valueClass, compressionType, codec, context);
}

ReduceTask.java source code (project: hadoop-2.6.0-cdh5.4.3)

public MapOutputCopier(JobConf job, Reporter reporter, SecretKey jobTokenSecret) {
  setName("MapOutputCopier " + reduceTask.getTaskID() + "." + id);
  LOG.debug(getName() + " created");
  this.reporter = reporter;
  this.jobTokenSecret = jobTokenSecret;
  shuffleConnectionTimeout =
      job.getInt("mapreduce.reduce.shuffle.connect.timeout", STALLED_COPY_TIMEOUT);
  shuffleReadTimeout =
      job.getInt("mapreduce.reduce.shuffle.read.timeout", DEFAULT_READ_TIMEOUT);
  if (job.getCompressMapOutput()) {
    Class<? extends CompressionCodec> codecClass =
        job.getMapOutputCompressorClass(DefaultCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, job);
    decompressor = CodecPool.getDecompressor(codec);
  }
}
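
job.getCompressMapOutput() returns true only if the submitting job asked for map-output compression. A hedged sketch of the producer-side JobConf settings, together with the borrow/return discipline CodecPool expects on the consumer side; the class and method names below are illustrative, and the shuffle read itself is only indicated in a comment.

import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

public class ShuffleCompressionSketch {
  public static void configureAndBorrow() {
    JobConf job = new JobConf();
    // Producer side: ask map tasks to compress their output with DefaultCodec.
    job.setCompressMapOutput(true);
    job.setMapOutputCompressorClass(DefaultCodec.class);

    // Consumer side: borrow a decompressor from the pool and always return it.
    CompressionCodec codec = ReflectionUtils.newInstance(
        job.getMapOutputCompressorClass(DefaultCodec.class), job);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try {
      // ... wrap shuffle data with codec.createInputStream(rawStream, decompressor) ...
    } finally {
      CodecPool.returnDecompressor(decompressor);
    }
  }
}
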

SequenceFileAsBinaryOutputFormat.java source code (project: hadoop-2.6.0-cdh5.4.3)

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
    Class<?> keyClass, Class<?> valueClass) throws IOException {
  Configuration conf = context.getConfiguration();
  CompressionCodec codec = null;
  CompressionType compressionType = CompressionType.NONE;
  if (getCompressOutput(context)) {
    // find the kind of compression to do
    compressionType = getOutputCompressionType(context);
    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
    codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
  }
  // get the path of the temporary output file
  Path file = getDefaultWorkFile(context, "");
  FileSystem fs = file.getFileSystem(conf);
  return SequenceFile.createWriter(fs, conf, file,
      keyClass, valueClass, compressionType, codec, context);
}

TestIFile.java source code (project: hadoop-2.6.0-cdh5.4.3)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: hadoop-2.6.0-cdh5.4.3)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
}

ReduceTask.java source code (project: hadoop-EAR)

public MapOutputCopier(JobConf job, Reporter reporter) {
  setName("MapOutputCopier " + reduceTask.getTaskID() + "." + id);
  LOG.debug(getName() + " created");
  this.reporter = reporter;
  shuffleConnectionTimeout =
      job.getInt("mapreduce.reduce.shuffle.connect.timeout", STALLED_COPY_TIMEOUT);
  shuffleReadTimeout =
      job.getInt("mapreduce.reduce.shuffle.read.timeout", DEFAULT_READ_TIMEOUT);
  if (job.getCompressMapOutput()) {
    Class<? extends CompressionCodec> codecClass =
        job.getMapOutputCompressorClass(DefaultCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, job);
    decompressor = CodecPool.getDecompressor(codec);
  }
  setDaemon(true);
}

SequenceFileStoreFunc.java source code (project: hiped2)

@Override
public void setStoreLocation(String location, Job job) throws IOException {
  job.setOutputKeyClass(keyClass);
  job.setOutputValueClass(valueClass);
  if (compressionType != null && compressionCodecClass != null) {
    Class<? extends CompressionCodec> codecClass =
        FileOutputFormat.getOutputCompressorClass(job, DefaultCodec.class);
    SequenceFileOutputFormat.setOutputCompressorClass(job, codecClass);
    SequenceFileOutputFormat.setOutputCompressionType(job,
        SequenceFile.CompressionType.valueOf(compressionType));
  }
  FileOutputFormat.setOutputPath(job, new Path(location));
}

FsStateStore.java source code (project: Gobblin)

/**
 * See {@link StateStore#put(String, String, T)}.
 *
 * <p>
 * This implementation does not support putting the state object into an existing store,
 * as append is yet to be supported by the Hadoop SequenceFile (HADOOP-7139).
 * </p>
 */
@Override
public void put(String storeName, String tableName, T state) throws IOException {
  Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
  if (!this.fs.exists(tablePath) && !create(storeName, tableName)) {
    throw new IOException("Failed to create a state file for table " + tableName);
  }
  Closer closer = Closer.create();
  try {
    SequenceFile.Writer writer = closer.register(
        SequenceFile.createWriter(this.fs, this.conf, tablePath, Text.class, this.stateClass,
            SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
    writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
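
For context, a state record written this way can be read back with a plain SequenceFile.Reader. A minimal sketch, assuming a hypothetical store layout; the path and the reflective value instantiation are illustrative, not Gobblin API.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class StateReadBack {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path tablePath = new Path("state-store/myStore/myTable"); // hypothetical layout
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, tablePath, conf);
    try {
      Text key = new Text();
      // The value class is recorded in the file header; instantiate it reflectively.
      Writable state = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
      while (reader.next(key, state)) {
        System.out.println(key + " -> " + state);
      }
    } finally {
      reader.close();
    }
  }
}
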

FsStateStore.java source code (project: Gobblin)

/**
 * See {@link StateStore#putAll(String, String, Collection)}.
 *
 * <p>
 * This implementation does not support putting the state objects into an existing store,
 * as append is yet to be supported by the Hadoop SequenceFile (HADOOP-7139).
 * </p>
 */
@Override
public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
  Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
  if (!this.fs.exists(tablePath) && !create(storeName, tableName)) {
    throw new IOException("Failed to create a state file for table " + tableName);
  }
  Closer closer = Closer.create();
  try {
    SequenceFile.Writer writer = closer.register(
        SequenceFile.createWriter(this.fs, this.conf, tablePath, Text.class, this.stateClass,
            SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
    for (T state : states) {
      writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}

SequenceFileUtility.java source code (project: openimaj)

/**
 * @return the compression codec in use for this file.
 */
public Class<? extends CompressionCodec> getCompressionCodecClass() {
  if (!isReader)
    return DefaultCodec.class;

  Reader reader = null;
  try {
    reader = createReader();
    if (reader.getCompressionCodec() == null)
      return null;
    return reader.getCompressionCodec().getClass();
  } catch (final Exception e) {
    throw new RuntimeException(e);
  } finally {
    if (reader != null) {
      try {
        reader.close();
      } catch (final IOException e1) {
        // ignore failures while closing the reader
      }
    }
  }
}

TestIFile.java source code (project: hadoop-plus)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: hadoop-plus)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}

SequenceFileUtility.java source code (project: openimaj)

@SuppressWarnings("unchecked")
private Writer createWriter(Map<String, String> metadata) throws IOException {
  final Metadata md = new Metadata();
  for (final Entry<String, String> e : metadata.entrySet()) {
    md.set(new Text(e.getKey()), new Text(e.getValue()));
  }
  final Class<K> keyClass = (Class<K>) ((ParameterizedType) getClass().getGenericSuperclass())
      .getActualTypeArguments()[0];
  final Class<V> valueClass = (Class<V>) ((ParameterizedType) getClass().getGenericSuperclass())
      .getActualTypeArguments()[1];
  return SequenceFile.createWriter(fileSystem, config, sequenceFilePath, keyClass, valueClass,
      compressionType, new DefaultCodec(), null, md);
}
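
The Metadata written here travels in the SequenceFile header, so it can be recovered without scanning any records. A brief sketch; the file path and the "creator" metadata key are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class MetadataReadBack {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    SequenceFile.Reader reader =
        new SequenceFile.Reader(fs, new Path("data.seq"), conf); // hypothetical file
    try {
      // Header metadata is available immediately after opening the reader.
      Text value = reader.getMetadata().get(new Text("creator"));
      System.out.println("creator = " + value);
    } finally {
      reader.close();
    }
  }
}
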

SequenceFileOutputFormat.java source code (project: hadoop-plus)

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
    Class<?> keyClass, Class<?> valueClass) throws IOException {
  Configuration conf = context.getConfiguration();
  CompressionCodec codec = null;
  CompressionType compressionType = CompressionType.NONE;
  if (getCompressOutput(context)) {
    // find the kind of compression to do
    compressionType = getOutputCompressionType(context);
    // find the right codec
    Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
    codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
  }
  // get the path of the temporary output file
  Path file = getDefaultWorkFile(context, "");
  FileSystem fs = file.getFileSystem(conf);
  return SequenceFile.createWriter(fs, conf, file,
      keyClass, valueClass, compressionType, codec, context);
}

TestIPCUtil.java source code (project: pbase)

/**
 * For running a few tests of methods herein.
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
  int count = 1024;
  int size = 10240;
  for (String arg : args) {
    if (arg.startsWith(COUNT)) {
      count = Integer.parseInt(arg.replace(COUNT, ""));
    } else if (arg.startsWith(SIZE)) {
      size = Integer.parseInt(arg.replace(SIZE, ""));
    } else {
      usage(1);
    }
  }
  IPCUtil util = new IPCUtil(HBaseConfiguration.create());
  ((Log4JLogger) IPCUtil.LOG).getLogger().setLevel(Level.ALL);
  timerTests(util, count, size, new KeyValueCodec(), null);
  timerTests(util, count, size, new KeyValueCodec(), new DefaultCodec());
  timerTests(util, count, size, new KeyValueCodec(), new GzipCodec());
}

CommonFileOutputFormat.java source code (project: tinkerpop)

protected DataOutputStream getDataOutputStream(final TaskAttemptContext job)
    throws IOException, InterruptedException {
  final Configuration conf = job.getConfiguration();
  boolean isCompressed = getCompressOutput(job);
  CompressionCodec codec = null;
  String extension = "";
  if (isCompressed) {
    final Class<? extends CompressionCodec> codecClass =
        getOutputCompressorClass(job, DefaultCodec.class);
    codec = ReflectionUtils.newInstance(codecClass, conf);
    extension = codec.getDefaultExtension();
  }
  final Path file = super.getDefaultWorkFile(job, extension);
  final FileSystem fs = file.getFileSystem(conf);
  if (!isCompressed) {
    return new DataOutputStream(fs.create(file, false));
  } else {
    return new DataOutputStream(codec.createOutputStream(fs.create(file, false)));
  }
}

TestIFile.java source code (project: FlexMap)

/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
@Test
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}

TestIFile.java source code (project: FlexMap)

/** Same as above but create a reader. */
@Test
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
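
Finally, when reading files produced by output formats like the ones above, the codec is usually inferred from the file extension. A hedged sketch using CompressionCodecFactory; the file name is an illustrative assumption.

import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class AutoDetectCodec {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("part-r-00000.deflate"); // hypothetical DefaultCodec output
    // Maps ".deflate" -> DefaultCodec, ".gz" -> GzipCodec, etc.; null if unknown.
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
    InputStream in = (codec == null)
        ? fs.open(path)
        : codec.createInputStream(fs.open(path));
    IOUtils.copyBytes(in, System.out, 4096, true);
  }
}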