cache_utils.py 文件源码

python
阅读 26 收藏 0 点赞 0 评论 0

项目:conda-tools 作者: groutr 项目源码 文件源码
def verify_hashes(packages, archives, hash_alg='md5'):
    """
    Loop through all given package objects and compare the hash of every file
    on disk with the hash of the corresponding file inside the package archive.

    Any hash that is supported by Python's hashlib can be used for comparison.

    In the interest of speed, files are iterated in the order they appear in
    the archive. If they are large packages, they should be opened decompressed.

    packages and archives are assumed to be zippable (paired element-wise).

    Parameters
    ----------
    packages : iterable
        Package objects exposing a ``path`` attribute (the extracted root
        directory of the package).
    archives : iterable
        Archive objects that iterate over their members (tarinfo-like objects
        with a ``path`` attribute) and expose
        ``extract(member, destination=None)`` yielding file-like objects.
        NOTE(review): this is a project-local archive API — behavior assumed
        from usage here.
    hash_alg : str
        Name of a hashlib algorithm (default ``'md5'``).

    Returns
    -------
    bool
        True if every file's hash matches its archive counterpart; False on
        the first mismatch or missing on-disk file.

    Raises
    ------
    ValueError
        If ``hash_alg`` is not available in hashlib.
    """
    def chunked(stream, size=1024):
        # Yield successive blocks until read() returns b''.  The iter()
        # sentinel already stops on b'' (the only falsy bytes value a read
        # can produce), so no extra emptiness check is needed.
        for block in iter(lambda: stream.read(size), b''):
            yield block

    if hash_alg not in hashlib.algorithms_available:
        raise ValueError("{} hash algorithm not available in hashlib.".format(hash_alg))

    _new_hasher = lambda: hashlib.new(hash_alg)
    for pk, ar in zip(packages, archives):
        for tarinfo in ar:
            th, fh = _new_hasher(), _new_hasher()
            # extract(..., destination=None) yields an in-memory file object
            # rather than writing to disk.
            tfile = next(ar.extract(tarinfo, destination=None))

            fpath = os.path.join(pk.path, tarinfo.path)
            if not os.path.exists(fpath):
                # File present in the archive but missing on disk: cannot match.
                return False

            # Hash the archived copy.
            for block in chunked(tfile):
                th.update(block)

            # Hash the on-disk copy.
            with open(fpath, 'rb') as fi:
                for block in chunked(fi):
                    fh.update(block)

            if th.digest() != fh.digest():
                print("Mismatched hash: {}".format(tarinfo.path))
                return False
    return True
评论列表
文章目录


问题


面经


文章

微信
公众号

扫码关注公众号