Example source code for the Java class org.apache.log4j.BasicConfigurator

MergeTool.java 文件源码 项目:incubator-rya 阅读 24 收藏 0 点赞 0 评论 0
/**
 * Entry point for the Merge Tool. Initializes log4j, installs a last-resort
 * uncaught-exception logger, runs the tool, and exits with its return code.
 *
 * @param args command-line arguments forwarded to {@link #setupAndRun}
 */
public static void main(final String[] args) {
    // Prefer the XML config referenced by the "log4j.configuration" system
    // property (with any leading "file:" scheme stripped); fall back to the
    // basic console configuration otherwise.
    final String log4jConfiguration = System.getProperties().getProperty("log4j.configuration");
    boolean configured = false;
    if (StringUtils.isNotBlank(log4jConfiguration)) {
        final String parsedConfiguration = PathUtils.clean(StringUtils.removeStart(log4jConfiguration, "file:"));
        final File configFile = new File(parsedConfiguration);
        if (configFile.exists()) {
            DOMConfigurator.configure(parsedConfiguration);
            configured = true;
        }
    }
    if (!configured) {
        // Bug fix: previously the fallback only ran when the property was
        // non-blank but pointed at a missing file; a blank/absent property
        // left log4j unconfigured ("no appenders could be found" warning).
        BasicConfigurator.configure();
    }
    log.info("Starting Merge Tool");

    // Make sure failures on any thread end up in the log instead of only stderr.
    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            log.error("Uncaught exception in " + thread.getName(), throwable);
        }
    });

    final int returnCode = setupAndRun(args);

    log.info("Finished running Merge Tool");

    System.exit(returnCode);
}
AGHCourseDetailsProvider.java 文件源码 项目:unitime 阅读 18 收藏 0 点赞 0 评论 0
/**
 * Standalone smoke test for {@code AGHCourseDetailsProvider}: boots Hibernate,
 * points the course-detail URLs at the AGH syllabus service, then prints the
 * resolved course URL and detail page for one sample course.
 */
public static void main(String[] args) {
    try {
        // Default console logging so Hibernate/provider output is visible.
        BasicConfigurator.configure();
        Debug.info(" - Initializing Hibernate ... ");
        _RootDAO.initialize();

        // Template URLs: ":years" and ":courseNbr" are placeholders the
        // provider substitutes per academic session / course.
        ApplicationProperties.getConfigProperties().setProperty(
                ApplicationProperty.CustomizationDefaultCourseUrl.key(),
                "http://syllabuskrk.agh.edu.pl/:years/pl/magnesite/modules/:courseNbr");
        ApplicationProperties.getConfigProperties().setProperty("unitime.custom.default.course_api_url",
                "http://syllabuskrk.agh.edu.pl/api/:years/modules/:courseNbr");


        // Allow the provider to download detail pages rather than only linking.
        ApplicationProperties.getDefaultProperties()
                .setProperty(ApplicationProperty.CustomizationDefaultCourseDetailsDownload.key(), "true");

        // Sample lookup: session id 231379, winter semester 2015, subject BAND 101.
        System.out.println("URL:" + new AGHCourseDetailsProvider()
                .getCourseUrl(new AcademicSessionInfo(231379l, "2015", "Semestr zimowy", "AGH"), "BAND", "101"));

        System.out.println("Details:\n" + new AGHCourseDetailsProvider()
                .getDetails(new AcademicSessionInfo(231379l, "2015", "Semestr zimowy", "AGH"), "BAND", "101"));

    } catch (Exception e) {
        // Smoke-test tool: just dump any failure to the console.
        e.printStackTrace();
    }
}
SnoMedOboGenerator.java 文件源码 项目:datasource 阅读 18 收藏 0 点赞 0 评论 0
/**
 * Generates a restricted OBO ontology from a local SNOMED CT RF2 release,
 * keeping only the subtrees rooted at the configured SNOMED concept ids.
 */
public static void main(String[] args) {
    // Default console logging for the generator.
    BasicConfigurator.configure();

    // July 2013 international RF2 release, pre-filtered to active records.
    final File terminologyDir = new File(
            "/Users/bill/Documents/snomed/SnomedCT_Release_INT_20130731/RF2Release/Full/Terminology");
    final File conceptFile = new File(terminologyDir, "sct2_Concept_Full_INT_20130731.txt.activeOnly");
    final File descriptionFile = new File(terminologyDir, "sct2_Description_Full-en_INT_20130731.txt.activeOnly");
    final File relationshipFile = new File(terminologyDir, "sct2_Relationship_Full_INT_20130731.txt.activeOnly");
    final File oboOutputFile = new File(terminologyDir, "snomed-restricted.obo");

    // Only subtrees under these root concepts are emitted.
    final Set<String> roots = CollectionsUtil.createSet("123037004", "404684003");

    try {
        generateObo(conceptFile, descriptionFile, relationshipFile, oboOutputFile, roots);
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.exit(-1);
    }
}
RunConsumer.java 文件源码 项目:HelloKafka 阅读 20 收藏 0 点赞 0 评论 0
/**
 * Demo consumer loop: connects to Kafka via ZooKeeper and prints every
 * message received until the fetch returns null.
 */
public static void main(String[] args) {
        // Default console logging for log4j.
        BasicConfigurator.configure();

        // ZooKeeper address, consumer group id, topic, and socket timeout (ms).
        SimpleConsumer simpleConsumer = new SimpleConsumer("localhost:2181", "myGroupId", "HelloKafka", "10000");
        simpleConsumer.configure();
        simpleConsumer.start();

        String message;

        while ((message = simpleConsumer.fetchMessage()) != null) {

            System.out.println("Received from kafka: " + message);

            /**
             * If you wish to commit offsets on every message, uncomment the line
             * below. Best practice is to batch offset commits (for performance),
             * but then if the consumer recovers from a crash it may receive
             * messages it has already processed, because they were never committed.
             */
//            simpleConsumer.consumerConnector.commitOffsets();
        }
    }
LocalCluster.java 文件源码 项目:jstorm-0.9.6.3- 阅读 24 收藏 0 点赞 0 评论 0
/**
 * Installs a default log4j console configuration, but only when the root
 * logger has no appenders yet, so a user-supplied log4j configuration is
 * left untouched.
 */
protected void setLogger() {
    boolean needReset = true;
    Logger rootLogger = Logger.getRootLogger();
    if (rootLogger != null) {
        Enumeration appenders = rootLogger.getAllAppenders();
        if (appenders.hasMoreElements()) {
            needReset = false;
        }
    }

    if (needReset) {
        BasicConfigurator.configure();
        // Bug fix: the original dereferenced rootLogger unconditionally here
        // even though the code above treats it as possibly null. Re-fetch
        // after configure() and guard the dereference.
        if (rootLogger == null) {
            rootLogger = Logger.getRootLogger();
        }
        if (rootLogger != null) {
            rootLogger.setLevel(Level.INFO);
        }
    }

}
RdfSClassGenerator.java 文件源码 项目:semweb4j 阅读 25 收藏 0 点赞 0 评论 0
/**
 * Runs the RDFReactor code generator for this Maven mojo: routes RDFReactor's
 * log4j output to a dedicated log file, then generates Java classes from the
 * configured RDF schema file into the configured output directory.
 *
 * @throws MojoExecutionException if the RDFReactor log file cannot be opened
 * @throws MojoFailureException   if code generation itself fails
 */
private void generateCode() throws MojoExecutionException, MojoFailureException {
    try {
        // make sure that directory for log file exists.
        rdfReactorLogfile.getParentFile().mkdirs();

        // configure logging infrastructure for RDFReactor
        // NOTE(review): each invocation adds a fresh FileAppender to the root
        // logger without resetting prior configuration — presumably this mojo
        // runs once per build; verify if it can be invoked repeatedly.
        FileAppender logFileAppender = new FileAppender(new SimpleLayout(), rdfReactorLogfile.getAbsolutePath());
        BasicConfigurator.configure(logFileAppender);

    } catch (IOException ioe) {
        throw new MojoExecutionException("Cannot open log file for writing RDFReactor log messages", ioe);
    }

    getLog().info("Generating code from RDF schema file " + schemaFile + " into dir " + outputDirectory
            + ". Classes will be in package " + packageName + " and with method prefix " + methodPrefix +". skipBuiltins is " + skipBuiltins + ".");
    getLog().info("RDFReactor's log messages are written to " + rdfReactorLogfile);


    try {
        // RDFS reasoning is fixed; all other knobs come from mojo parameters.
        CodeGenerator.generate(schemaFile.getAbsolutePath(), outputDirectory.getAbsolutePath(), packageName, Reasoning.rdfs, skipBuiltins, methodPrefix);
    } catch (Exception e) {
        e.printStackTrace();
        throw new MojoFailureException(e, "RDFS processing error", "Could not generate code from the specified RDF schema file.");
    }
}
Examples.java 文件源码 项目:java-algebra-system 阅读 25 收藏 0 点赞 0 评论 0
/**
 * Command-line entry point: configures basic log4j console output, runs a
 * selection of the example computations, and shuts down the worker threads.
 */
public static void main(String[] args) {
    BasicConfigurator.configure();

    // Examples 1-4 run only when at least one argument was supplied.
    final boolean runOptionalExamples = args.length > 0;
    if (runOptionalExamples) {
        example1();
        example2();
        example3();
        example4();
    }

    example5();
    example6();
    example10();
    example11();
    example12();

    // Release the compute thread pool so the JVM can exit.
    ComputerThreads.terminate();
}
TestStarTreeSegmentCreator.java 文件源码 项目:Pinot 阅读 30 收藏 0 点赞 0 评论 0
/**
 * Builds a star-tree-indexed Pinot segment from the test Avro file and the
 * configured schema, verifying that segment creation completes without error.
 */
@Test(enabled = true)
  public void testCreation() throws Exception {
    // Default console logging so segment-creation progress is visible.
    BasicConfigurator.configure();

    // Segment spec derived from the Avro fixture; time column is day-granular.
    final SegmentGeneratorConfig config =
        SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(avroFile, indexDir, "daysSinceEpoch",
            TimeUnit.DAYS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");

    // Set the star tree index config
    StarTreeIndexSpec starTreeIndexSpec = new StarTreeIndexSpec();
//    starTreeIndexSpec.setSplitExcludes(Arrays.asList("D1", "daysSinceEpoch"));
    // Exclude the time column from star-tree splits; cap leaf size at 4
    // records to force tree depth on the small test data set.
    starTreeIndexSpec.setSplitExcludes(Arrays.asList("daysSinceEpoch"));
    starTreeIndexSpec.setMaxLeafRecords(4);
    config.getSchema().setStarTreeIndexSpec(starTreeIndexSpec);

    // Drive the standard creation pipeline end to end.
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
  }
Echo.java 文件源码 项目:rct-java 阅读 26 收藏 0 点赞 0 评论 0
/**
 * Command-line echo tool: looks up the current transform between the two
 * frames named on the command line and prints it. Exits non-zero on bad
 * arguments or lookup failure.
 */
public static void main(String[] args) {

        BasicConfigurator.configure();

        if (args.length != 2) {
            System.err.println("Required 2 arguments!");
            System.exit(1);
        }

        int exitCode = 0;
        try {
            final TransformReceiver receiver = TransformerFactory.getInstance()
                    .createTransformReceiver();

            // Give the receiver a moment to sync with the transform publishers.
            Thread.sleep(1000);

            final Transform transform = receiver.lookupTransform(args[0], args[1],
                    System.currentTimeMillis());

            System.out.println(transform);
        } catch (Exception e) {
            e.printStackTrace();
            exitCode = 1;
        }

        System.exit(exitCode);
    }


问题


面经


文章

微信
公众号

扫码关注公众号