scraper.py source code

python

Project: feedlark    Author: CPSSD
import bson  # BSON codec used below (pymongo's bson module); log, key,
             # get_all_feed_docs, gather_updates and update_database are
             # defined elsewhere in scraper.py


def update_all_feeds(worker, job):
    log(0, "'update-all-feeds' initiated")

    if key is not None:
        log(0, "Checking secret key")
        request = bson.BSON(job.data).decode()
        if 'key' not in request or request['key'] != key:
            log(2, "Secret key mismatch")
            response = bson.BSON.encode({
                'status': 'error',
                'description': 'Secret key mismatch',
                })
            return str(response)

    log(0, "Retrieving data from feed db")
    feed_db_data = get_all_feed_docs()

    try:
        for doc in feed_db_data:
            updated_feeds = gather_updates(doc)
            update_database(doc, updated_feeds)
    except Exception as e:
        log(2, "'update-all-feeds' failed")
        return str(bson.BSON.encode({
            "status": "error",
            "error-description": str(e)
        }))

    log(0, "'update-all-feeds' finished")
    return str(bson.BSON.encode({
        "status": "ok",
        "updated_feeds": [x['_id'] for x in feed_db_data],
    }))
    return str(bson.BSON.encode({"status": "ok"}))

# Get secret key, must be global.
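
The function above follows the Gearman job-handler signature (worker, job): the BSON payload arrives in job.data, and the BSON-encoded status string it returns is sent back to the caller. A rough sketch of how such a handler might be wired up with the python-gearman library follows; the server address, environment variable name, and client snippet are illustrative assumptions, not taken from the feedlark source.

import os

import bson
import gearman  # python-gearman 2.x style API (an assumption for this sketch)

# Hypothetical: read the shared secret from the environment at module level,
# so the handler's key check above has something to compare against.
key = os.environ.get("SECRET_KEY")

worker = gearman.GearmanWorker(["localhost:4730"])  # server address is illustrative
worker.register_task("update-all-feeds", update_all_feeds)
worker.work()  # block and serve incoming jobs

# A client could then trigger a refresh, passing the same secret key and
# decoding the BSON response returned by the handler:
# client = gearman.GearmanClient(["localhost:4730"])
# request = client.submit_job("update-all-feeds",
#                             str(bson.BSON.encode({"key": key})))
# print(bson.BSON(request.result).decode())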