Java code examples for class org.apache.hadoop.io.DefaultStringifier

JdbcExportJob.java (project: aliyun-maxcompute-data-collectors)
private void configureGenericRecordExportInputFormat(Job job, String tableName)
    throws IOException {
  ConnManager connManager = context.getConnManager();
  Map<String, Integer> columnTypeInts;
  if (options.getCall() == null) {
    columnTypeInts = connManager.getColumnTypes(
        tableName,
        options.getSqlQuery());
  } else {
    columnTypeInts = connManager.getColumnTypesForProcedure(
        options.getCall());
  }
  String[] specifiedColumns = options.getColumns();
  MapWritable columnTypes = new MapWritable();
  for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
    String column = e.getKey();
    column = (specifiedColumns == null) ? column : options.getColumnNameCaseInsensitive(column);
    if (column != null) {
      Text columnName = new Text(column);
      Text columnType = new Text(connManager.toJavaType(tableName, column, e.getValue()));
      columnTypes.put(columnName, columnType);
    }
  }
  DefaultStringifier.store(job.getConfiguration(), columnTypes,
      AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
}
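
The store() call above is one half of a round trip: AvroExportMapper later recovers the same MapWritable via DefaultStringifier.load() in its setup(). A minimal, self-contained sketch of that round trip (the key name example.column.types.map and the column values are invented for illustration; DefaultStringifier keeps the serialized Writable as an encoded string in the configuration):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ColumnTypeMapRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();

    // Build the column-name -> Java-type map, as the export job does.
    MapWritable columnTypes = new MapWritable();
    columnTypes.put(new Text("Age"), new Text("Integer"));
    columnTypes.put(new Text("Name"), new Text("String"));

    // store() serializes the Writable and saves it under the given key.
    DefaultStringifier.store(conf, columnTypes, "example.column.types.map");

    // load() reverses the process; this is what the export mappers do
    // in their setup() methods.
    MapWritable restored = DefaultStringifier.load(conf,
        "example.column.types.map", MapWritable.class);
    System.out.println(restored.keySet()); // [Age, Name] (order may vary)
  }
}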
ParquetExportMapper.java (project: aliyun-maxcompute-data-collectors)
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
Chain.java (project: aliyun-oss-hadoop-fs)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try (Stringifier<Configuration> stringifier =
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);) {
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
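
getChainElementConf() only implements the read side; the write side, where Chain serializes a chain element's Configuration under confKey, is not shown on this page. A hedged sketch of what it plausibly looks like, using the same Stringifier API in reverse (assumed counterpart, not copied from Chain itself):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.Stringifier;

public class ChainConfStoreSketch {
  // Assumed counterpart to getChainElementConf(): serialize one chain
  // element's Configuration into a single string property of the chain
  // job's Configuration. Configuration is Writable, so the default
  // WritableSerialization handles it.
  static void storeChainElementConf(Configuration jobConf, String confKey,
      Configuration elementConf) throws IOException {
    Stringifier<Configuration> stringifier =
        new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    jobConf.set(confKey, stringifier.toString(elementConf));
  }
}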
ContentOutputFormat.java (project: marklogic-contentpump)
protected LinkedMapWritable getForestStatusMap(Configuration conf) 
throws IOException {
    String forestHost = conf.get(OUTPUT_FOREST_HOST);
    if (forestHost != null) {
        //Restores the object from the configuration.
        LinkedMapWritable fhmap = DefaultStringifier.load(conf, OUTPUT_FOREST_HOST, 
            LinkedMapWritable.class);
        // must be in fast load mode, otherwise won't reach here
        String s = conf.get(ASSIGNMENT_POLICY);
        //EXECUTION_MODE must have a value in mlcp;
        //default is "distributed" in hadoop connector
        String mode = conf.get(EXECUTION_MODE, MODE_DISTRIBUTED);
        if (MODE_DISTRIBUTED.equals(mode)) {
            AssignmentPolicy.Kind policy =
                    AssignmentPolicy.Kind.forName(s);
            am.initialize(policy, fhmap, conf.getInt(BATCH_SIZE, 10));
        }
        return fhmap;
    } else {
        throw new IOException("Forest host map not found");
    }
}
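
The load() above assumes the client side already stored the forest map under OUTPUT_FOREST_HOST before job submission. A hedged fragment of what that store side plausibly looks like, written in the same context as the snippet (the host and forest values are invented; this assumes LinkedMapWritable supports the usual MapWritable-style put()):

// Assumed client-side counterpart: serialize the forest-host map into
// the job configuration before submission (values invented for
// illustration).
LinkedMapWritable fhmap = new LinkedMapWritable();
fhmap.put(new Text("forest-1"), new Text("host-a.example.com"));
fhmap.put(new Text("forest-2"), new Text("host-b.example.com"));
DefaultStringifier.store(conf, fhmap, OUTPUT_FOREST_HOST);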
KeyValueOutputFormat.java (project: marklogic-contentpump)
@Override
public void checkOutputSpecs(Configuration conf, ContentSource cs) 
throws IOException {
    // check for required configuration
    if (conf.get(OUTPUT_QUERY) == null) {
        throw new IllegalArgumentException(OUTPUT_QUERY + 
        " is not specified.");
    }
    // warn against unsupported configuration
    if (conf.get(BATCH_SIZE) != null) {
        LOG.warn("Config entry for " +
                "\"mapreduce.marklogic.output.batchsize\" is not " +
                "supported for " + this.getClass().getName() + 
                " and will be ignored.");
    }
    String queryLanguage = conf.get(OUTPUT_QUERY_LANGUAGE);
    if (queryLanguage != null) {
        InternalUtilities.checkQueryLanguage(queryLanguage);
    }
    // store hosts into config system
    DefaultStringifier.store(conf, queryHosts(cs), OUTPUT_FOREST_HOST);
}
Chain.java (project: big-c)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try (Stringifier<Configuration> stringifier =
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);) {
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hadoop-2.6.0-cdh5.4.3)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hadoop-2.6.0-cdh5.4.3)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Chain.java (project: hadoop-EAR)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Parameters.java (project: Chi-FRBCS-BigDataCS)
@Override
public String toString() {
  Configuration conf = new Configuration();
  conf.set("io.serializations",
           "org.apache.hadoop.io.serializer.JavaSerialization,"
           + "org.apache.hadoop.io.serializer.WritableSerialization");
  DefaultStringifier<Map<String,String>> mapStringifier = new DefaultStringifier<Map<String,String>>(conf,
      GenericsUtil.getClass(params));
  try {
    return mapStringifier.toString(params);
  } catch (IOException e) {
    log.info("Encountered IOException while deserializing returning empty string", e);
    return "";
  }

}
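
toString() here only serializes; the reverse trip uses fromString() on an identically configured stringifier. A self-contained sketch of the round trip (not Mahout's actual parsing code; the parameter values are invented):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.util.GenericsUtil;

public class ParamsRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // JavaSerialization is needed because a plain HashMap is not a Writable.
    conf.set("io.serializations",
        "org.apache.hadoop.io.serializer.JavaSerialization,"
        + "org.apache.hadoop.io.serializer.WritableSerialization");

    Map<String, String> params = new HashMap<String, String>();
    params.put("minSupport", "3");

    DefaultStringifier<Map<String, String>> mapStringifier =
        new DefaultStringifier<Map<String, String>>(conf,
            GenericsUtil.getClass(params));

    String serialized = mapStringifier.toString(params); // what toString() returns
    Map<String, String> restored = mapStringifier.fromString(serialized);
    System.out.println(restored.get("minSupport")); // 3
  }
}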
Chain.java (project: hadoop-plus)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: FlexMap)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Parameters.java (project: Chi-FRBCS-BigData-Ave)
@Override
public String toString() {
  Configuration conf = new Configuration();
  conf.set("io.serializations",
           "org.apache.hadoop.io.serializer.JavaSerialization,"
           + "org.apache.hadoop.io.serializer.WritableSerialization");
  DefaultStringifier<Map<String,String>> mapStringifier = new DefaultStringifier<Map<String,String>>(conf,
      GenericsUtil.getClass(params));
  try {
    return mapStringifier.toString(params);
  } catch (IOException e) {
    log.info("Encountered IOException while deserializing returning empty string", e);
    return "";
  }

}
Parameters.java (project: Chi-FRBCS-BigData-Max)
@Override
public String toString() {
  Configuration conf = new Configuration();
  conf.set("io.serializations",
           "org.apache.hadoop.io.serializer.JavaSerialization,"
           + "org.apache.hadoop.io.serializer.WritableSerialization");
  DefaultStringifier<Map<String,String>> mapStringifier = new DefaultStringifier<Map<String,String>>(conf,
      GenericsUtil.getClass(params));
  try {
    return mapStringifier.toString(params);
  } catch (IOException e) {
    log.info("Encountered IOException while deserializing returning empty string", e);
    return "";
  }

}
Chain.java (project: hops)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try (Stringifier<Configuration> stringifier =
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);) {
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hadoop-TCP)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hadoop-on-lustre)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Chain.java (project: hardfs)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hadoop-on-lustre2)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: RDFS)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Chain.java (project: hadoop-0.20)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Chain.java (project: hanoi-hadoop-2.0.0-cdh)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
JdbcExportJob.java (project: sqoop)
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol)
    throws ClassNotFoundException, IOException {

  fileType = getInputFileType();

  super.configureInputFormat(job, tableName, tableClassName, splitByCol);

  if (fileType == FileType.AVRO_DATA_FILE) {
    LOG.debug("Configuring for Avro export");
    ConnManager connManager = context.getConnManager();
    Map<String, Integer> columnTypeInts =
      connManager.getColumnTypes(tableName, options.getSqlQuery());
    MapWritable columnTypes = new MapWritable();
    for (Map.Entry<String, Integer> e : columnTypeInts.entrySet()) {
      Text columnName = new Text(e.getKey());
      Text columnText = new Text(
          connManager.toJavaType(tableName, e.getKey(), e.getValue()));
      columnTypes.put(columnName, columnText);
    }
    DefaultStringifier.store(job.getConfiguration(), columnTypes,
        AvroExportMapper.AVRO_COLUMN_TYPES_MAP);
  }

}
Chain.java (project: mapreduce-fork)
/**
 * Creates a {@link Configuration} for the Map or Reduce in the chain.
 * 
 * <p>
 * It creates a new Configuration using the chain job's Configuration as base
 * and adds to it the configuration properties for the chain element. The keys
 * of the chain element Configuration have precedence over the given
 * Configuration.
 * </p>
 * 
 * @param jobConf
 *          the chain job's Configuration.
 * @param confKey
 *          the key for chain element configuration serialized in the chain
 *          job's Configuration.
 * @return a new Configuration aggregating the chain job's Configuration with
 *         the chain element configuration properties.
 */
protected static Configuration getChainElementConf(Configuration jobConf,
    String confKey) {
  Configuration conf = null;
  try {
    Stringifier<Configuration> stringifier = 
      new DefaultStringifier<Configuration>(jobConf, Configuration.class);
    String confString = jobConf.get(confKey, null);
    if (confString != null) {
      conf = stringifier.fromString(jobConf.get(confKey, null));
    }
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a
  // new Configuration(jobConf) in the creation of the conf above
  jobConf = new Configuration(jobConf);

  if (conf != null) {
    for (Map.Entry<String, String> entry : conf) {
      jobConf.set(entry.getKey(), entry.getValue());
    }
  }
  return jobConf;
}
Chain.java (project: hortonworks-extension)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
Chain.java (project: hadoop-gpu)
/**
 * Creates a {@link JobConf} for one of the Maps or Reduce in the chain.
 * <p/>
 * It creates a new JobConf using the chain job's JobConf as base and adds to
 * it the configuration properties for the chain element. The keys of the
 * chain element jobConf have precedence over the given JobConf.
 *
 * @param jobConf the chain job's JobConf.
 * @param confKey the key for chain element configuration serialized in the
 *                chain job's JobConf.
 * @return a new JobConf aggregating the chain job's JobConf with the chain
 *         element configuration properties.
 */
private static JobConf getChainElementConf(JobConf jobConf, String confKey) {
  JobConf conf;
  try {
    Stringifier<JobConf> stringifier =
      new DefaultStringifier<JobConf>(jobConf, JobConf.class);
    conf = stringifier.fromString(jobConf.get(confKey, null));
  } catch (IOException ioex) {
    throw new RuntimeException(ioex);
  }
  // we have to do this because the Writable deserialization clears all
  // values set in the conf, making it impossible to do a new JobConf(jobConf)
  // in the creation of the conf above
  jobConf = new JobConf(jobConf);

  for(Map.Entry<String, String> entry : conf) {
    jobConf.set(entry.getKey(), entry.getValue());
  }
  return jobConf;
}
GenericRecordExportMapper.java (project: aliyun-maxcompute-data-collectors)
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);

  Configuration conf = context.getConfiguration();

  // Instantiate a copy of the user's class to hold and parse the record.
  String recordClassName = conf.get(
      ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY);
  if (null == recordClassName) {
    throw new IOException("Export table class name ("
        + ExportJobBase.SQOOP_EXPORT_TABLE_CLASS_KEY
        + ") is not set!");
  }

  try {
    Class cls = Class.forName(recordClassName, true,
        Thread.currentThread().getContextClassLoader());
    recordImpl = (SqoopRecord) ReflectionUtils.newInstance(cls, conf);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  }

  if (null == recordImpl) {
    throw new IOException("Could not instantiate object of type "
        + recordClassName);
  }

  columnTypes = DefaultStringifier.load(conf, AVRO_COLUMN_TYPES_MAP,
      MapWritable.class);
}
TestJdbcExportJob.java (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithNoColumnsSpecified() throws Exception {
  SqoopOptions opts = new SqoopOptions();
  opts.setExportDir("myexportdir");
  JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
  Job job = new Job();
  jdbcExportJob.configureInputFormat(job, null, null, null);
  assertEquals(asSetOfText("Age", "Name", "Gender"),
      DefaultStringifier.load(job.getConfiguration(),
          AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
TestJdbcExportJob.java (project: aliyun-maxcompute-data-collectors)
@Test
public void testAvroWithAllColumnsSpecified() throws Exception {
  SqoopOptions opts = new SqoopOptions();
  opts.setExportDir("myexportdir");
  String[] columns = { "Age", "Name", "Gender" };
  opts.setColumns(columns);
  JdbcExportJob jdbcExportJob = stubJdbcExportJob(opts, FileType.AVRO_DATA_FILE);
  Job job = new Job();
  jdbcExportJob.configureInputFormat(job, null, null, null);
  assertEquals(asSetOfText("Age", "Name", "Gender"),
      DefaultStringifier.load(job.getConfiguration(),
          AvroExportMapper.AVRO_COLUMN_TYPES_MAP, MapWritable.class).keySet());
}
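
Both tests compare against asSetOfText(...), a helper defined elsewhere in the test class. A hypothetical reconstruction of it (the project's actual helper may differ):

// Hypothetical reconstruction of the test helper referenced above;
// requires java.util.HashSet, java.util.Set and org.apache.hadoop.io.Text.
private static Set<Text> asSetOfText(String... strings) {
  Set<Text> set = new HashSet<Text>();
  for (String s : strings) {
    set.add(new Text(s));
  }
  return set;
}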

