__init__.py file source code

python

Project: dminer    Author: infosecanon
# Excerpt from dminer's dreammarket sink CLI module; it relies on a module-level
# `logger` plus the Display, DreammarketSink, DreammarketParser, STDOutInterface,
# and ElasticsearchInterface names imported elsewhere in this file.
def entry(arguments):
    """
    The entry point for the dreammarket sink CLI interface. This defines the
    logic around the usage of command line arguments and the dreammarket sink in
    order to perform scraping, ingestion, and storage related functions.
    """
    # Configure the module-level logger from the CLI verbosity flag.
    logger.setLevel(arguments.verbosity.upper())
    # Marketplace credentials and DeathByCaptcha API keys are all required;
    # exit early with a clear error if any of them is missing.
    if not arguments.dreammarket_username:
        logger.error("This sink requires a username to be specified through CLI or environment variable.")
        raise SystemExit()
    if not arguments.dreammarket_password:
        logger.error("This sink requires a password to be specified through CLI or environment variable.")
        raise SystemExit()

    if not arguments.dbc_access_key:
        logger.error("This sink requires a deathbycaptcha access key to be specified through CLI or environment variable.")
        raise SystemExit()
    if not arguments.dbc_secret_key:
        logger.error("This sink requires a deathbycaptcha secret key to be specified through CLI or environment variable.")
        raise SystemExit()


    # Run the scrape inside a headless virtual display (pyvirtualdisplay-style
    # API) so the sink can operate without a visible GUI.
    display = Display(visible=0, size=(1366, 768))
    display.start()
    sink = DreammarketSink(
        arguments.dreammarket_username, arguments.dreammarket_password,
        arguments.dbc_access_key, arguments.dbc_secret_key,
        url_file=arguments.url_file,
        save_to_directory=arguments.save_to_directory,
        onion_url=arguments.onion_url,
        request_interval=arguments.request_interval,
        request_retries=arguments.request_retries,
        request_timeout=arguments.request_timeout,
        category=arguments.category
    )
    sink.logger = logger

    if arguments.ingest:
        # Ingest mode: parse the scrape results and push them into the
        # selected datastore (stdout or Elasticsearch).
        if arguments.datastore == "stdout":
            store = STDOutInterface()

            parser = DreammarketParser(datastore=store)
            parser.parse(scrape_results=sink.scrape())

        elif arguments.datastore == "elasticsearch":
            store = ElasticsearchInterface(
                host=arguments.datastore_host,
                port=arguments.datastore_port
            )

            parser = DreammarketParser(datastore=store)
            parser.parse(
                scrape_results=sink.scrape(
                    daemon=arguments.daemonize
                )
            )
    else:
        # Scrape-only mode: consume the scrape results without parsing or
        # storing them.
        list(sink.scrape())
    display.stop()
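
To run the function above on its own, the `arguments` namespace must expose every attribute `entry()` reads. The sketch below is a minimal, hypothetical argparse harness that would satisfy it; the flag names, defaults, and environment-variable fallbacks are assumptions for illustration, not dminer's actual CLI definition.

import argparse
import os


def build_parser():
    # Hypothetical CLI definition: it only mirrors the attributes entry() reads;
    # dminer's real argument parser is defined elsewhere in the project.
    parser = argparse.ArgumentParser(description="dreammarket sink")
    parser.add_argument("--verbosity", default="info")
    parser.add_argument("--dreammarket-username",
                        default=os.environ.get("DM_DREAMMARKET_USERNAME"))
    parser.add_argument("--dreammarket-password",
                        default=os.environ.get("DM_DREAMMARKET_PASSWORD"))
    parser.add_argument("--dbc-access-key",
                        default=os.environ.get("DM_DBC_ACCESS_KEY"))
    parser.add_argument("--dbc-secret-key",
                        default=os.environ.get("DM_DBC_SECRET_KEY"))
    parser.add_argument("--url-file", default=None)
    parser.add_argument("--save-to-directory", default=None)
    parser.add_argument("--onion-url", default=None)
    parser.add_argument("--request-interval", type=int, default=30)
    parser.add_argument("--request-retries", type=int, default=5)
    parser.add_argument("--request-timeout", type=int, default=30)
    parser.add_argument("--category", default=None)
    parser.add_argument("--ingest", action="store_true")
    parser.add_argument("--daemonize", action="store_true")
    parser.add_argument("--datastore", choices=["stdout", "elasticsearch"],
                        default="stdout")
    parser.add_argument("--datastore-host", default="localhost")
    parser.add_argument("--datastore-port", type=int, default=9200)
    return parser


if __name__ == "__main__":
    entry(build_parser().parse_args())

Because argparse maps a flag such as --dreammarket-username to the attribute arguments.dreammarket_username, the namespace produced by parse_args() lines up with the attribute names used in entry().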