Example source code for the Java class org.apache.hadoop.mapreduce.Mapper

DataDrivenImportJob.java (project: aliyun-maxcompute-data-collectors)

@Override
protected Class<? extends Mapper> getMapperClass() {
if (options.getHCatTableName() != null) {
return SqoopHCatUtilities.getImportMapperClass();
}
if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
return TextImportMapper.class;
} else if (options.getFileLayout()
== SqoopOptions.FileLayout.SequenceFile) {
return SequenceFileImportMapper.class;
} else if (options.getFileLayout()
== SqoopOptions.FileLayout.AvroDataFile) {
return AvroImportMapper.class;
} else if (options.getFileLayout()
== SqoopOptions.FileLayout.ParquetFile) {
return ParquetImportMapper.class;
}
return null;
}
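A hedged sketch of how a driver typically consumes the class returned by getMapperClass(); the wiring below is assumed for illustration and is not quoted from the Sqoop/DataDrivenImportJob source.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical wiring: the layout-specific class chosen above simply becomes
// the job's mapper. Not part of the original DataDrivenImportJob code.
public class MapperWiringSketch {
  static void configureMapper(Job job, Class<? extends Mapper> mapperClass) {
    if (mapperClass == null) {
      // getMapperClass() returns null when the file layout is unsupported.
      throw new IllegalStateException("No mapper for the requested file layout");
    }
    job.setMapperClass(mapperClass);
  }

  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "import-sketch");
    configureMapper(job, Mapper.class); // identity mapper as a stand-in
  }
}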
JdbcExportJob.java (project: aliyun-maxcompute-data-collectors)
@Override
protected Class<? extends Mapper> getMapperClass() {
if (isHCatJob) {
return SqoopHCatUtilities.getExportMapperClass();
}
if (options.getOdpsTable() != null) {
return OdpsExportMapper.class;
}
switch (fileType) {
case SEQUENCE_FILE:
return SequenceFileExportMapper.class;
case AVRO_DATA_FILE:
return AvroExportMapper.class;
case PARQUET_FILE:
return ParquetExportMapper.class;
case UNKNOWN:
default:
return TextExportMapper.class;
}
}
HdfsOdpsImportJob.java (project: aliyun-maxcompute-data-collectors)
@Override
protected Class<? extends Mapper> getMapperClass() {
if (isHCatJob) {
return SqoopHCatUtilities.getExportOdpsMapperClass();
}
switch (fileType) {
case SEQUENCE_FILE:
return SequenceFileExportMapper.class;
case AVRO_DATA_FILE:
return AvroExportMapper.class;
case PARQUET_FILE:
return ParquetExportMapper.class;
case UNKNOWN:
default:
return TextExportMapper.class;
}
}
TestMainframeImportJob.java (project: aliyun-maxcompute-data-collectors)
@Test
public void testGetMainframeDatasetImportMapperClass()
throws SecurityException, NoSuchMethodException,
IllegalArgumentException, IllegalAccessException,
InvocationTargetException {
String jarFile = "dummyJarFile";
String tableName = "dummyTableName";
Path path = new Path("dummyPath");
ImportJobContext context = new ImportJobContext(tableName, jarFile,
options, path);
mfImportJob = new MainframeImportJob(options, context);
// To access protected method by means of reflection
Class[] types = {};
Method m_getMapperClass = MainframeImportJob.class.getDeclaredMethod(
"getMapperClass", types);
m_getMapperClass.setAccessible(true);
Class<? extends Mapper> mapper = (Class<? extends Mapper>) m_getMapperClass
.invoke(mfImportJob);
assertEquals(mapper,
org.apache.sqoop.mapreduce.mainframe.MainframeDatasetImportMapper.class);
}
TestMainframeImportJob.java (project: aliyun-maxcompute-data-collectors)
@Test
public void testSuperMapperClass() throws SecurityException,
NoSuchMethodException, IllegalArgumentException, IllegalAccessException,
InvocationTargetException {
String jarFile = "dummyJarFile";
String tableName = "dummyTableName";
Path path = new Path("dummyPath");
options.setFileLayout(SqoopOptions.FileLayout.AvroDataFile);
ImportJobContext context = new ImportJobContext(tableName, jarFile,
options, path);
avroImportJob = new MainframeImportJob(options, context);
// To access protected method by means of reflection
Class[] types = {};
Method m_getMapperClass = MainframeImportJob.class.getDeclaredMethod(
"getMapperClass", types);
m_getMapperClass.setAccessible(true);
Class<? extends Mapper> mapper = (Class<? extends Mapper>) m_getMapperClass
.invoke(avroImportJob);
assertEquals(mapper, org.apache.sqoop.mapreduce.AvroImportMapper.class);
}
TestChainErrors.java (project: hadoop)
/**
* Tests one of the mappers throwing exception.
*
* @throws Exception
*/
public void testChainFail() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
IntWritable.class, Text.class, null);
ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job Not failed", !job.isSuccessful());
}
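For reference, the same ChainMapper/ChainReducer calls exercised in these tests can be wired into a standalone driver; the sketch below is illustrative (UpperCaseMap and the argument paths are assumptions, not part of the Hadoop tests), and the six class arguments of each call declare that stage's input and output key/value types, which must line up with the neighbouring stages.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ChainDriverSketch {

  // Hypothetical second stage: upper-cases each line; types match the first stage.
  public static class UpperCaseMap
      extends Mapper<LongWritable, Text, LongWritable, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      context.write(key, new Text(value.toString().toUpperCase()));
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "chain-sketch");
    job.setJarByClass(ChainDriverSketch.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Map stage 1: identity Mapper, LongWritable/Text in and out.
    ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
        LongWritable.class, Text.class, null);
    // Map stage 2: its input types must equal stage 1's output types.
    ChainMapper.addMapper(job, UpperCaseMap.class, LongWritable.class, Text.class,
        LongWritable.class, Text.class, null);
    // Identity reducer, then one more map stage applied to the reducer output.
    ChainReducer.setReducer(job, Reducer.class, LongWritable.class, Text.class,
        LongWritable.class, Text.class, null);
    ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
        LongWritable.class, Text.class, null);

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(Text.class);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}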
TestChainErrors.java (project: hadoop)
/**
* Tests Reducer throwing exception.
*
* @throws Exception
*/
public void testReducerFail() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
Text.class, LongWritable.class, Text.class, null);
ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job Not failed", !job.isSuccessful());
}
TestChainErrors.java (project: hadoop)
/**
* Tests one of the maps consuming output.
*
* @throws Exception
*/
public void testChainMapNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
job.setJobName("chain");
ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job failed", job.isSuccessful());
assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
.readOutput(outDir, conf));
}
TestChainErrors.java (project: hadoop)
/**
* Tests reducer consuming output.
*
* @throws Exception
*/
public void testChainReduceNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
Text.class, LongWritable.class, Text.class, null);
ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job failed", job.isSuccessful());
assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
.readOutput(outDir, conf));
}
TestMultipleInputs.java (project: hadoop)
@SuppressWarnings("unchecked")
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
MapClass.class);
MultipleInputs.addInputPath(conf, new Path("/bar"),
KeyValueTextInputFormat.class, KeyValueMapClass.class);
final Map<Path, InputFormat> inputs = MultipleInputs
.getInputFormatMap(conf);
final Map<Path, Class<? extends Mapper>> maps = MultipleInputs
.getMapperTypeMap(conf);
assertEquals(TextInputFormat.class, inputs.get(new Path("/foo")).getClass());
assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
.getClass());
assertEquals(MapClass.class, maps.get(new Path("/foo")));
assertEquals(KeyValueMapClass.class, maps.get(new Path("/bar")));
}
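Outside the test, the same MultipleInputs API is typically used in a driver so that each input path gets its own InputFormat and Mapper; the sketch below uses placeholder paths and mapper classes (not from the Hadoop test) purely to show the shape of the calls.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MultipleInputsDriverSketch {

  // Placeholder mapper for plain text lines: emits the whole line as the key.
  public static class LineMapper extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable offset, Text line, Context context)
        throws IOException, InterruptedException {
      context.write(line, new Text(""));
    }
  }

  // Placeholder mapper for "key<TAB>value" lines: identity pass-through.
  public static class KeyValueMapper extends Mapper<Text, Text, Text, Text> { }

  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "multiple-inputs-sketch");
    job.setJarByClass(MultipleInputsDriverSketch.class);

    MultipleInputs.addInputPath(job, new Path("/data/lines"),
        TextInputFormat.class, LineMapper.class);
    MultipleInputs.addInputPath(job, new Path("/data/keyvalues"),
        KeyValueTextInputFormat.class, KeyValueMapper.class);

    job.setNumReduceTasks(0); // map-only, so mapper output types are the job output types
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileOutputFormat.setOutputPath(job, new Path("/data/out"));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}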
Chain.java (project: hadoop)
/**
* Add mapper(the first mapper) that reads input from the input
* context and writes to queue
*/
@SuppressWarnings("unchecked")
void addMapper(TaskInputOutputContext inputContext,
ChainBlockingQueue<KeyValuePair<?, ?>> output, int index)
throws IOException, InterruptedException {
Configuration conf = getConf(index);
Class<?> keyOutClass = conf.getClass(MAPPER_OUTPUT_KEY_CLASS, Object.class);
Class<?> valueOutClass = conf.getClass(MAPPER_OUTPUT_VALUE_CLASS,
Object.class);
RecordReader rr = new ChainRecordReader(inputContext);
RecordWriter rw = new ChainRecordWriter(keyOutClass, valueOutClass, output,
conf);
Mapper.Context mapperContext = createMapContext(rr, rw,
(MapContext) inputContext, getConf(index));
MapRunner runner = new MapRunner(mappers.get(index), mapperContext, rr, rw);
threads.add(runner);
}
RetriableFileCopyCommand.java (project: hadoop)
private Path getTmpFile(Path target, Mapper.Context context) {
Path targetWorkPath = new Path(context.getConfiguration().
get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
Path root = target.equals(targetWorkPath)? targetWorkPath.getParent() : targetWorkPath;
LOG.info("Creating temp file: " +
new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString()));
return new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString());
}
TestCopyMapper.java (project: hadoop)
private void testCopyingExistingFiles(FileSystem fs, CopyMapper copyMapper,
Mapper<Text, CopyListingFileStatus, Text, Text>.Context context) {
try {
for (Path path : pathList) {
copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
new CopyListingFileStatus(fs.getFileStatus(path)), context);
}
Assert.assertEquals(nFiles,
context.getCounter(CopyMapper.Counter.SKIP).getValue());
}
catch (Exception exception) {
Assert.assertTrue("Caught unexpected exception:" + exception.getMessage(),
false);
}
}
IntegrationTestBigLinkedList.java (project: ditb)
public int runRandomInputGenerator(int numMappers, long numNodes, Path tmpOutput,
Integer width, Integer wrapMuplitplier) throws Exception {
LOG.info("Running RandomInputGenerator with numMappers=" + numMappers
+ ", numNodes=" + numNodes);
Job job = Job.getInstance(getConf());
job.setJobName("Random Input Generator");
job.setNumReduceTasks(0);
job.setJarByClass(getClass());
job.setInputFormatClass(GeneratorInputFormat.class);
job.setOutputKeyClass(BytesWritable.class);
job.setOutputValueClass(NullWritable.class);
setJobConf(job, numMappers, numNodes, width, wrapMuplitplier);
job.setMapperClass(Mapper.class); //identity mapper
FileOutputFormat.setOutputPath(job, tmpOutput);
job.setOutputFormatClass(SequenceFileOutputFormat.class);
boolean success = jobCompletion(job);
return success ? 0 : 1;
}
TestChainErrors.java (project: aliyun-oss-hadoop-fs)
/**
* Tests one of the mappers throwing exception.
*
* @throws Exception
*/
public void testChainFail() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
IntWritable.class, Text.class, null);
ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job Not failed", !job.isSuccessful());
}
TestChainErrors.java (project: aliyun-oss-hadoop-fs)
/**
* Tests Reducer throwing exception.
*
* @throws Exception
*/
public void testReducerFail() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
Text.class, LongWritable.class, Text.class, null);
ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job Not failed", !job.isSuccessful());
}
TestMultipleInputs.java (project: aliyun-oss-hadoop-fs)
@SuppressWarnings("unchecked")
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
MapClass.class);
MultipleInputs.addInputPath(conf, new Path("/bar"),
KeyValueTextInputFormat.class, KeyValueMapClass.class);
final Map<Path, InputFormat> inputs = MultipleInputs
.getInputFormatMap(conf);
final Map<Path, Class<? extends Mapper>> maps = MultipleInputs
.getMapperTypeMap(conf);
assertEquals(TextInputFormat.class, inputs.get(new Path("/foo")).getClass());
assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
.getClass());
assertEquals(MapClass.class, maps.get(new Path("/foo")));
assertEquals(KeyValueMapClass.class, maps.get(new Path("/bar")));
}
ReplicationJob.java (project: reair)
private void enumDirectories(FileSystem fs, URI rootUri, Path directory, boolean recursive,
Mapper.Context context) throws IOException, InterruptedException {
try {
for (FileStatus status : fs.listStatus(directory, hiddenFileFilter)) {
if (status.isDirectory()) {
if (recursive) {
if (directoryBlackList == null
|| !status.getPath().getName().matches(directoryBlackList)) {
enumDirectories(fs,rootUri, status.getPath(), recursive, context);
}
}
} else {
context.write(new Text(rootUri.relativize(directory.toUri()).getPath()),
new FileStatus(status));
}
}
context.progress();
} catch (FileNotFoundException e) {
return;
}
}
RetriableFileCopyCommand.java (project: aliyun-oss-hadoop-fs)
private long copyToFile(Path targetPath, FileSystem targetFS,
FileStatus sourceFileStatus, long sourceOffset, Mapper.Context context,
EnumSet<FileAttribute> fileAttributes, final FileChecksum sourceChecksum)
throws IOException {
FsPermission permission = FsPermission.getFileDefault().applyUMask(
FsPermission.getUMask(targetFS.getConf()));
final OutputStream outStream;
if (action == FileAction.OVERWRITE) {
final short repl = getReplicationFactor(fileAttributes, sourceFileStatus,
targetFS, targetPath);
final long blockSize = getBlockSize(fileAttributes, sourceFileStatus,
targetFS, targetPath);
FSDataOutputStream out = targetFS.create(targetPath, permission,
EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
BUFFER_SIZE, repl, blockSize, context,
getChecksumOpt(fileAttributes, sourceChecksum));
outStream = new BufferedOutputStream(out);
} else {
outStream = new BufferedOutputStream(targetFS.append(targetPath,
BUFFER_SIZE));
}
return copyBytes(sourceFileStatus, sourceOffset, outStream, BUFFER_SIZE,
context);
}
TestCopyMapper.java (project: aliyun-oss-hadoop-fs)
private void testCopyingExistingFiles(FileSystem fs, CopyMapper copyMapper,
Mapper<Text, CopyListingFileStatus, Text, Text>.Context context) {
try {
for (Path path : pathList) {
copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
new CopyListingFileStatus(fs.getFileStatus(path)), context);
}
Assert.assertEquals(nFiles,
context.getCounter(CopyMapper.Counter.SKIP).getValue());
}
catch (Exception exception) {
Assert.assertTrue("Caught unexpected exception:" + exception.getMessage(),
false);
}
}
CnTFIDFApp.java (project: HotTopicsApp)
@Override
protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, NullWritable>.Context context)
throws IOException, InterruptedException {
String line = value.toString();
String[] splited = line.split("\t");
String word = splited[0];
String[] fileTFs = splited[1].split(",");
int hasFilesCount = fileTFs.length;
double idf = Math.log((docCount+1.0)/hasFilesCount);
for (String fileTF : fileTFs) {
String[] fileTFArray = fileTF.split(":");
String fileName = fileTFArray[0];
double tf = Double.parseDouble(fileTFArray[1]);
double tfidf = idf*tf;
k2.set(fileName+"\t"+word+"\t"+tfidf);
context.write(k2, NullWritable.get());
}
}
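The map() above computes a smoothed inverse document frequency, idf = ln((docCount + 1) / hasFilesCount), and multiplies it by each file's stored term frequency. A tiny worked example with assumed numbers (not taken from the job's data):

public class TfIdfExample {
  public static void main(String[] args) {
    double docCount = 9.0;       // assumed corpus size
    double hasFilesCount = 4.0;  // assumed: the word occurs in 4 of the files
    double tf = 0.02;            // assumed term frequency in one file
    double idf = Math.log((docCount + 1.0) / hasFilesCount); // ln(2.5) ≈ 0.916
    System.out.println(idf * tf);                            // ≈ 0.0183
  }
}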
TokenizerMapper.java (project: arks-api)
@Override
protected void map(Object key, Text value,
Mapper<Object, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
/*
* We will use StringTokenizer to extract words from given line of input
* e.g 1 This is Hello World ( Here 1 is key and Statement is value )
* <This, 1> <is, 1> <Hello, 1> <World, 1>
*/
StringTokenizer stringTokenizer = new StringTokenizer(value.toString());
while (stringTokenizer.hasMoreElements()) {
String wordTemp = stringTokenizer.nextToken();
if(wordTemp.equals("Java")){
word.set(wordTemp);
context.write(word, one);
}
}
}
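This mapper only emits ("Java", 1) pairs; a minimal counterpart that sums those counts (a hypothetical JavaCountReducer, not part of the arks-api project) would look like the sketch below, wired up in a driver with job.setMapperClass(TokenizerMapper.class), job.setReducerClass(JavaCountReducer.class), job.setOutputKeyClass(Text.class) and job.setOutputValueClass(IntWritable.class).

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Sums the 1s emitted by TokenizerMapper for each word.
public class JavaCountReducer
    extends Reducer<Text, IntWritable, Text, IntWritable> {

  private final IntWritable total = new IntWritable();

  @Override
  protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
      throws IOException, InterruptedException {
    int sum = 0;
    for (IntWritable count : counts) {
      sum += count.get();
    }
    total.set(sum);
    context.write(word, total);
  }
}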
TestChainErrors.java (project: big-c)
/**
* Tests Reducer throwing exception.
*
* @throws Exception
*/
public void testReducerFail() throws Exception {
Configuration conf = createJobConf();
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
job.setJobName("chain");
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
Text.class, LongWritable.class, Text.class, null);
ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job Not failed", !job.isSuccessful());
}
TestChainErrors.java (project: big-c)
/**
* Tests one of the maps consuming output.
*
* @throws Exception
*/
public void testChainMapNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
job.setJobName("chain");
ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
LongWritable.class, Text.class, null);
ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
LongWritable.class, Text.class, null);
job.waitForCompletion(true);
assertTrue("Job failed", job.isSuccessful());
assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
.readOutput(outDir, conf));
}
TestMultipleInputs.java (project: big-c)
@SuppressWarnings("unchecked")
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
MapClass.class);
MultipleInputs.addInputPath(conf, new Path("/bar"),
KeyValueTextInputFormat.class, KeyValueMapClass.class);
final Map<Path, InputFormat> inputs = MultipleInputs
.getInputFormatMap(conf);
final Map<Path, Class<? extends Mapper>> maps = MultipleInputs
.getMapperTypeMap(conf);
assertEquals(TextInputFormat.class, inputs.get(new Path("/foo")).getClass());
assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
.getClass());
assertEquals(MapClass.class, maps.get(new Path("/foo")));
assertEquals(KeyValueMapClass.class, maps.get(new Path("/bar")));
}
HotTopicsApp.java (project: HotTopicsApp)
@Override
protected void setup(Mapper<LongWritable, Text, NullWritable, Text>.Context context)
throws IOException, InterruptedException {
Configuration conf = context.getConfiguration();
this.k = conf.getInt("topk", 1);
this.type = conf.get("type", "min");
if("min".equals(this.type)){
topkSet = new TreeSet<>();
}else {
topkSet = new TreeSet<>(new Comparator<TFIDFWord>() {
@Override
public int compare(TFIDFWord o1, TFIDFWord o2) {
return -o1.compareTo(o2);
}
});
}
}
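setup() only chooses the ordering of topkSet (natural order when type is "min", reversed otherwise); the bounded top-k pattern it supports usually looks like the self-contained sketch below, where Double stands in for TFIDFWord and the eviction logic is an assumption rather than a quote from HotTopicsApp's map().

import java.util.TreeSet;

public class TopKSketch {
  public static void main(String[] args) {
    int k = 3;
    TreeSet<Double> topk = new TreeSet<>(); // smallest element first
    double[] scores = {0.4, 0.9, 0.1, 0.7, 0.3, 0.8};
    for (double s : scores) {
      topk.add(s);
      if (topk.size() > k) {
        topk.pollFirst(); // evict the current minimum, keeping the k largest
      }
    }
    System.out.println(topk); // [0.7, 0.8, 0.9]
  }
}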
BWAMemInstance.java (project: halvade)
@Override
protected void startAligner(Mapper.Context context) throws IOException, InterruptedException {
// make command
String customArgs = HalvadeConf.getCustomArgs(context.getConfiguration(), "bwa", "mem");
String[] command = CommandGenerator.bwaMem(bin, ref, null, null, isPaired, true, threads, customArgs);
pbw = new ProcessBuilderWrapper(command, bin);
// run command
// needs to be streamed to output otherwise the process blocks ...
pbw.startProcess(null, System.err);
// check if alive.
if(!pbw.isAlive())
throw new ProcessException("BWA mem", pbw.getExitState());
pbw.getSTDINWriter();
// make a SAMstream handler
ssh = new SAMStreamHandler(instance, context, false);
ssh.start();
}
TestCopyMapper.java (project: big-c)
private void testCopyingExistingFiles(FileSystem fs, CopyMapper copyMapper,
Mapper<Text, CopyListingFileStatus, Text, Text>.Context context) {
try {
for (Path path : pathList) {
copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
new CopyListingFileStatus(fs.getFileStatus(path)), context);
}
Assert.assertEquals(nFiles,
context.getCounter(CopyMapper.Counter.SKIP).getValue());
}
catch (Exception exception) {
Assert.assertTrue("Caught unexpected exception:" + exception.getMessage(),
false);
}
}
STARInstance.java (project: halvade)
@Override
protected void startAligner(Mapper.Context context) throws IOException, InterruptedException {
File file1 = new File(getFileName(tmpdir, taskId, 1));
if (!file1.exists()) {
file1.createNewFile();
}
fastqFile1 = new BufferedWriter(new FileWriter(file1.getAbsoluteFile()));
if(isPaired) {
File file2 = new File(getFileName(tmpdir, taskId, 2));
if (!file2.exists()) {
file2.createNewFile();
}
fastqFile2 = new BufferedWriter(new FileWriter(file2.getAbsoluteFile()));
}
// make output dir!
File starOut = new File(starOutDir);
starOut.mkdirs();
}
AlignerInstance.java (project: halvade)
protected AlignerInstance(Mapper.Context context, String bin, int task) throws IOException, URISyntaxException {
AlignerInstance.context = context;
header = null;
containers = HalvadeConf.getMapContainerCount(context.getConfiguration());
tasksLeft = HalvadeConf.getMapTasksLeft(task, context.getConfiguration());
redistribute = HalvadeConf.getRedistribute(context.getConfiguration());
mergeBam = HalvadeConf.getMergeBam(context.getConfiguration());
writableRecord = new SAMRecordWritable();
writableRegion = new ChromosomeRegion();
writeableCompactRegion = new GenomeSJ();
stub = new Text();
minChrLength = HalvadeConf.getMinChrLength(context.getConfiguration());
chr = HalvadeConf.getChrList(context.getConfiguration());
tmpdir = HalvadeConf.getScratchTempDir(context.getConfiguration());
if(!tmpdir.endsWith("/")) tmpdir = tmpdir + "/";
File tmp = new File(tmpdir);
tmp.mkdirs();
this.bin = bin;
threads = HalvadeConf.getMapThreads(context.getConfiguration());
isPaired = HalvadeConf.getIsPaired(context.getConfiguration());
Logger.DEBUG("paired? " + isPaired);
splitter = new ChromosomeSplitter(HalvadeConf.getBedRegions(context.getConfiguration()), context.getConfiguration());
keepChrSplitPairs = HalvadeConf.getkeepChrSplitPairs(context.getConfiguration());
keep = HalvadeConf.getKeepFiles(context.getConfiguration());
}