python类listdir()的实例源码

test_performance.py 文件源码 项目:pyku 作者: dubvulture 项目源码 文件源码 阅读 44 收藏 0 点赞 0 评论 0
def test_performance(standard=True):
    """Run the sudoku digit classifier over the first 52 sample images.

    :param standard: when True use the raw-pixel model; otherwise use the
        zoning-feature model loaded from ``zoning_data.npz``.
    """
    folder = pyku.utils.FOLDER
    # Collect every regular file in the samples folder, sorted by name.
    pics = sorted(
        os.path.join(folder, name)
        for name in os.listdir(folder)
        if os.path.isfile(os.path.join(folder, name)))

    if standard:
        # Standard raw pixel data
        model = pyku.DigitClassifier()
    else:
        # Zoning features require a 28px digit size and a pre-trained model.
        pyku.utils.DSIZE = 28.
        model = pyku.DigitClassifier(
            saved_model=pyku.utils.TRAIN_DATA+'zoning_data.npz',
            feature=pyku.DigitClassifier._zoning)

    # Extract the grid from each sample; timing is observed externally.
    for pic in pics[:52]:
        puzzle = pyku.Sudoku(pic, classifier=model)
        puzzle.extract()

    return None
sftp_upload.py 文件源码 项目:facerecognition 作者: guoxiaolu 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
def sftp_upload(host, port, username, password, local, remote):
    """Upload a file, or every direct child of a directory, over SFTP.

    :param local: local file path, or a directory whose direct children
        are uploaded (subdirectories are not recursed into)
    :param remote: remote target path (directory when *local* is one)
    """
    sf = paramiko.Transport((host, port))
    sf.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(sf)
    try:
        if os.path.isdir(local):
            for f in os.listdir(local):
                # BUG FIX: was os.path.join(local+f), which silently glued
                # the names together and only worked when 'local' happened
                # to end with a separator.  Remote paths use '/' explicitly
                # since SFTP servers are POSIX regardless of the client OS.
                sftp.put(os.path.join(local, f),
                         '%s/%s' % (remote.rstrip('/'), f))
        else:
            sftp.put(local, remote)
    # BUG FIX: 'except Exception,e:' is Python-2-only syntax.
    except Exception as e:
        print('upload exception:', e)
    finally:
        # Always release the transport, even when the upload fails.
        sf.close()

#if __name__ == '__main__':
    # host = '121.69.75.194'#??
    # port = 22 #??
    # username = 'wac' #???
    # password = '8112whz' #??
    # local = '/Users/ngxin/Documents/xin/face_recognition/my_faces/'
    # remote = '/home/wac/ngxin/ftp_upload/'
    # local = 'F:\\sftptest\\'#?????????????????windows?????window???????????
    # remote = '/opt/tianpy5/python/test/'#?????????????????linux????
    #sftp_upload(host,port,username,password,local,remote)#??
    #sftp_download(host,port,username,password,local,remote)#??
build_meta.py 文件源码 项目:my-first-blog 作者: AnkurBegining 项目源码 文件源码 阅读 28 收藏 0 点赞 0 评论 0
def build_wheel(wheel_directory, config_settings=None,
                metadata_directory=None):
    """PEP 517 hook: build a wheel into *wheel_directory*.

    :return: the basename of the single wheel that was built.
    """
    config_settings = _fix_config(config_settings)
    wheel_directory = os.path.abspath(wheel_directory)
    sys.argv = sys.argv[:1] + ['bdist_wheel'] + \
        config_settings["--global-option"]
    _run_setup()
    # bdist_wheel always writes into ./dist.  If the caller asked for a
    # different directory, replace its contents with a copy of ./dist.
    # BUG FIX: the original compared the *absolute* path against the bare
    # string 'dist', which is always unequal; also guard rmtree so a
    # not-yet-existing target directory does not raise.
    if wheel_directory != os.path.abspath('dist'):
        if os.path.isdir(wheel_directory):
            shutil.rmtree(wheel_directory)
        shutil.copytree('dist', wheel_directory)

    wheels = [f for f in os.listdir(wheel_directory)
              if f.endswith('.whl')]

    # A single bdist_wheel run must produce exactly one wheel.
    assert len(wheels) == 1
    return wheels[0]
models_sqlite.py 文件源码 项目:nweb 作者: pierce403 项目源码 文件源码 阅读 36 收藏 0 点赞 0 评论 0
def add_corpus():
    """Add files in corpus to database, seeding randomized sightings."""
    db = get_db()  # ensures the database connection/schema exists
    files = os.listdir("corpus")
    # Group the corpus files (.nmap/.xml/.gnmap) by shared basename.
    basenames = set()
    for filename in files:
        basenames.add(filename.split('.')[0])
    for basename in basenames:
        basepath = os.path.join('corpus', basename)
        # The .nmap file is mandatory; .xml and .gnmap are optional.
        with open(basepath + '.nmap', "r") as f:
            nmap = f.read()
        try:
            with open(basepath + '.xml', "r") as f:
                xml = f.read()
        except IOError:
            xml = ""
        try:
            with open(basepath + '.gnmap', "r") as f:
                gnmap = f.read()
        except IOError:
            # BUG FIX: was misspelled 'gnamp', which left 'gnmap' undefined
            # and raised NameError at create_sighting() below.
            gnmap = ""
        # Fabricate 100 random source IPs, each with 1-9 back-dated sightings.
        for _ in range(0, 100):
            rando_ip = "%d.%d.%d.%d" % (random.randrange(1, 254),
                                        random.randrange(1, 254),
                                        random.randrange(1, 254),
                                        random.randrange(1, 254))
            (ip, real_ctime) = nmap_to_ip_ctime(nmap)
            for _ in range(0, random.randrange(1, 10)):
                # Shift the real scan time back by 1 hour .. 1 year.
                rando_ctime = real_ctime - random.randrange(3600, 3600*24*365)
                create_sighting(nmap, xml, gnmap, rando_ctime, rando_ip)
checkpoint.py 文件源码 项目:deep-summarization 作者: harpribot 项目源码 文件源码 阅读 38 收藏 0 点赞 0 评论 0
def __init__(self, model_nm, cell_nm, attention_type):
        """

        :param model_nm:
        :param cell_nm:
        :param attention_type:
        """
        self.model_nm = model_nm
        self.cell_nm = cell_nm
        self.attention_type = attention_type
        self.last_ckpt = None
        self.last_id = 0
        self.step_save_location = 'steps.p'
        self.data_save_location = 'data'
        self.mapper_save_location = 'mapper.p'
        self.steps_per_ckpt = None
        self.num_steps_per_prediction = None
        self.present_checkpoints = None
        self.outfile = None
        # initialize the steps if not initialized
        if self.step_save_location not in os.listdir(self.get_checkpoint_location()):
            pickle.dump(0,open(self.get_step_file(), 'wb'))
util.py 文件源码 项目:lang-reps 作者: chaitanyamalaviya 项目源码 文件源码 阅读 50 收藏 0 点赞 0 评论 0
def __iter__(self):
        """
        Read a file where each line is of the form "word1 word2 ..."
        Yields lists of the form [begin, word1, word2, ..., end]
        """
        # A directory yields every file inside it; a plain path yields itself.
        if os.path.isdir(self.fname):
            paths = [os.path.join(self.fname, name)
                     for name in os.listdir(self.fname)]
        else:
            paths = [self.fname]
        for path in paths:
            # with io.open(filename, encoding='utf-8') as f:
            with open(path) as handle:
                text = handle.read()
                for line in text.split("\n"):
                    # Lowercase, strip punctuation, then tokenize on whitespace.
                    cleaned = "".join(ch for ch in line.lower()
                                      if ch not in string.punctuation)
                    tokens = cleaned.strip().split()
                    yield [self.begin] + tokens + [self.end]
util.py 文件源码 项目:lang-reps 作者: chaitanyamalaviya 项目源码 文件源码 阅读 46 收藏 0 点赞 0 评论 0
def __iter__(self):
        """
        Read a file where each line is of the form "word1 word2 ..."
        Yields lists of the form [begin, word1, word2, ..., end]
        """
        # BUG FIX: the original had inconsistent indentation (SyntaxError),
        # a commented-out else-branch that left 'filenames' unbound for a
        # plain file path, and the loop read an undefined 'filename'
        # instead of the loop variable 'langpath'.
        if os.path.isdir(self.fname):
            filenames = [os.path.join(self.fname, f) for f in os.listdir(self.fname)]
        else:
            filenames = [self.fname]

        for langpath in filenames:
            with open(langpath) as f:
                doc = f.read()
                for line in doc.split("\n"):
                    # Lowercase, drop punctuation, split on whitespace.
                    sent = "".join([ch for ch in line.lower() if ch not in string.punctuation]).strip().split()
                    sent = [self.begin] + sent + [self.end]
                    yield sent
ez_setup.py 文件源码 项目:Adafruit_Python_PureIO 作者: adafruit 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def archive_context(filename):
    """Extract *filename* into a scratch dir, chdir into its single
    top-level entry for the duration of the context, then clean up."""
    scratch = tempfile.mkdtemp()
    log.warn('Extracting in %s', scratch)
    prev_cwd = os.getcwd()
    try:
        os.chdir(scratch)
        with get_zip_class()(filename) as archive:
            archive.extractall()

        # The archive is expected to hold exactly one top-level directory;
        # do all further work from inside it.
        inner = os.path.join(scratch, os.listdir(scratch)[0])
        os.chdir(inner)
        log.warn('Now working in %s', inner)
        yield

    finally:
        # Restore the caller's cwd and remove the scratch tree.
        os.chdir(prev_cwd)
        shutil.rmtree(scratch)
workflow.py 文件源码 项目:alfred-mpd 作者: deanishe 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def _delete_directory_contents(self, dirpath, filter_func):
        """Delete all files in a directory.

        :param dirpath: path to directory to clear
        :type dirpath: ``unicode`` or ``str``
        :param filter_func function to determine whether a file shall be
            deleted or not.
        :type filter_func ``callable``

        """
        if os.path.exists(dirpath):
            for filename in os.listdir(dirpath):
                if not filter_func(filename):
                    continue
                path = os.path.join(dirpath, filename)
                if os.path.isdir(path):
                    shutil.rmtree(path)
                else:
                    os.unlink(path)
                self.logger.debug('Deleted : %r', path)
audio_converter.py 文件源码 项目:subtitle-synchronization 作者: AlbertoSabater 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def getAudio(freq, audio_files=None):
    """Extract a stereo 160k WAV at *freq* Hz from each video in DATA_DIR.

    :param freq: target sample rate passed to ffmpeg's -ar option
    :param audio_files: optional collection of basenames; when given, only
        videos whose basename is in it are converted
    :return: list of paths of the WAV files that were requested
    """
    files = os.listdir(DATA_DIR)
    # BUG FIX: '.*\.[mkv|avi]' is a CHARACTER CLASS — it matched any file
    # whose extension merely starts with one of m,k,v,a,i or '|'
    # (e.g. '.mp4', '.iso').  Use an anchored alternation group instead.
    p = re.compile(r'.*\.(mkv|avi)$')
    files = [f for f in files if p.match(f)]

    if audio_files:
        files = [f for f in files if os.path.splitext(f)[0] in audio_files]

    audio_dirs = []
    for f in files:
        name, extension = os.path.splitext(f)
        # NOTE(review): shell=True with interpolated filenames breaks on
        # spaces/quotes and is unsafe with untrusted names — consider
        # subprocess.call([...], shell=False) with an argument list.
        command = "ffmpeg -i {0}{1}{2} -ab 160k -ac 2 -ar {3} -vn {0}{1}_{3}.wav".format(DATA_DIR, name, extension, freq)
        audio_dirs.append(DATA_DIR + name + '_' + str(freq) + '.wav')
        subprocess.call(command, shell=True)

    return audio_dirs

# Convert timestamp to seconds
firehol_blocklists.py 文件源码 项目:Cortex-Analyzers 作者: CERT-BDF 项目源码 文件源码 阅读 43 收藏 0 点赞 0 评论 0
def __init__(self):
        """Initialise the analyzer and index the local firehol blocklists."""
        Analyzer.__init__(self)

        # Get config parameters
        self.path = self.getParam('config.blocklistpath', None, 'No path to blocklists provided.')
        self.ignoreolderthandays = self.getParam('config.ignoreolderthandays', 365)
        self.utc = pytz.UTC
        self.now = dt.datetime.now(tz=self.utc)

        # Check if directory exists
        if not os.path.exists(self.path):
            # BUG FIX: '0700' is Python-2 octal syntax and a SyntaxError on
            # Python 3; '0o700' is accepted by both Python 2.6+ and 3.
            os.mkdir(self.path, 0o700)
            # Downloading/updating the list is implemented with an external cronjob which git pulls the repo

        # Read files in the given path and prepare file lists for ip- and netsets
        files = os.listdir(self.path)
        self.ipsets = []
        self.netsets = []
        for file in files:
            if '.ipset' in file:
                self.ipsets.append(file)
            elif '.netset' in file:
                self.netsets.append(file)
index_data.py 文件源码 项目:onto-lstm 作者: pdasigi 项目源码 文件源码 阅读 31 收藏 0 点赞 0 评论 0
def read_preposition_senses(self):
        """Populate self.prep_senses from the '<prep>.defs.xml' files in
        self.prep_senses_dir, mapping each preposition to a list of strings
        like 'into-1(1)', and report coverage statistics on stderr."""
        num_senses_per_prep = []
        for filename in os.listdir(self.prep_senses_dir):
            if '.defs.xml' in filename:
                prep_str = filename.replace('.defs.xml', '')
                xml_root = ElementTree.parse("%s/%s" % (self.prep_senses_dir, filename)).getroot()
                senses = []
                # BUG FIX: getchildren() was deprecated and removed in
                # Python 3.9; iterating the element yields the same children.
                for child_el in xml_root:
                    sense_id = child_el.findtext('senseid')
                    if sense_id is not None:
                        # This will add strings like 'into-1(1)'
                        senses.append("%s-%s" % (prep_str, sense_id))
                num_senses_per_prep.append(len(senses))
                self.prep_senses[prep_str] = senses
        num_preps = len(self.prep_senses)
        # BUG FIX: 'print >>sys.stderr' is Python-2-only syntax (a
        # SyntaxError on Python 3); sys.stderr.write works on both.
        sys.stderr.write("Read senses for %d prepositions.\n" % num_preps)
        sys.stderr.write("Senses per preposition: %f\n" % (float(sum(num_senses_per_prep)) / num_preps))

    # TODO: Take a coarse-grained mapping file and implement the following function.
db.py 文件源码 项目:newsreap 作者: caronc 项目源码 文件源码 阅读 37 收藏 0 点赞 0 评论 0
def database_reset(ctx):
    """
    Resets the database based on the current configuration, then removes
    any cached search databases left on disk.
    """
    logger.info('Resetting database ...')
    ctx['NNTPSettings'].open(reset=True)
    __db_prep(ctx)

    db_path = join(ctx['NNTPSettings'].base_dir, 'cache', 'search')
    logger.debug('Scanning %s for databases...' % db_path)
    with pushd(db_path, create_if_missing=True):
        for entry in listdir(db_path):
            db_file = join(db_path, entry)
            if not isfile(db_file):
                continue

            try:
                unlink(db_file)
                logger.info('Removed %s ...' % entry)
            # BUG FIX: a bare 'except:' also swallowed SystemExit and
            # KeyboardInterrupt; only filesystem errors are expected here.
            except OSError:
                logger.warning('Failed to remove %s ...' % entry)
Utils.py 文件源码 项目:newsreap 作者: caronc 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def dirsize(src):
    """
    Return the total size in bytes of the regular files directly inside
    the directory *src* (non-recursive).

    Returns 0 when *src* is not a directory, and None when the size
    cannot be calculated (e.g. permission problems).
    """
    if not isdir(src):
        # Nothing to measure.
        return 0

    try:
        with pushd(src, create_if_missing=False):
            total = 0
            for entry in listdir('.'):
                if isfile(entry):
                    total += getsize(entry)

    except (OSError, IOError):
        # Races with deletions, unreadable entries, etc.
        return None

    # Return our total size
    return total
site.py 文件源码 项目:python- 作者: secondtonone1 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def addsitedir(sitedir, known_paths=None):
    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
    'sitedir'"""
    if known_paths is None:
        # Called standalone (not during site initialization): compute the
        # currently-known paths ourselves and reset them before returning.
        known_paths = _init_pathinfo()
        reset = 1
    else:
        reset = 0
    # Normalize to (absolute path, comparable/case-folded form).
    sitedir, sitedircase = makepath(sitedir)
    if not sitedircase in known_paths:
        sys.path.append(sitedir)        # Add path component
    try:
        names = os.listdir(sitedir)
    except os.error:
        # Directory missing or unreadable: nothing to process.
        return
    # Process .pth files in sorted order for deterministic sys.path layout.
    names.sort()
    for name in names:
        if name.endswith(os.extsep + "pth"):
            addpackage(sitedir, name, known_paths)
    if reset:
        # We built known_paths locally; do not leak it to the caller.
        known_paths = None
    return known_paths
wheel.py 文件源码 项目:python- 作者: secondtonone1 项目源码 文件源码 阅读 26 收藏 0 点赞 0 评论 0
def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :param req: the requirement whose wheel should be built
        :param output_dir: directory the finished wheel is moved into
        :param python_tag: optional python tag override for the wheel name
        :return: The filename of the built wheel, or None if the build failed.
        """
        # Build into a private temp dir so a failed build leaves no litter.
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                # BUG FIX: the bare 'except:' also caught SystemExit and
                # KeyboardInterrupt; 'except Exception' keeps the
                # best-effort intent without swallowing exit signals.
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
        finally:
            rmtree(tempd)
test_install.py 文件源码 项目:python- 作者: secondtonone1 项目源码 文件源码 阅读 31 收藏 0 点赞 0 评论 0
def test_install():
    """Install TESTWHEEL into per-scheme temp directories and verify the
    resulting file layout."""
    tempdir = mkdtemp()
    def get_supported():
        # Pretend the running interpreter also supports the py3/win32 tag
        # carried by the test wheel.
        return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
    whl = WheelFile(TESTWHEEL, context=get_supported)
    assert whl.supports_current_python(get_supported)
    try:
        # One target directory per install-scheme key.
        locs = {}
        for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
            locs[key] = os.path.join(tempdir, key)
            os.mkdir(locs[key])
        whl.install(overrides=locs)
        # The test wheel is platlib-only: purelib must stay empty and every
        # payload file must land under its scheme directory.
        assert len(os.listdir(locs['purelib'])) == 0
        assert check(locs['platlib'], 'hello.pyd')
        assert check(locs['platlib'], 'hello', 'hello.py')
        assert check(locs['platlib'], 'hello', '__init__.py')
        assert check(locs['data'], 'hello.dat')
        assert check(locs['headers'], 'hello.dat')
        assert check(locs['scripts'], 'hello.sh')
        assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
    finally:
        # Always remove the temp tree, pass or fail.
        shutil.rmtree(tempdir)
easy_install.py 文件源码 项目:python- 作者: secondtonone1 项目源码 文件源码 阅读 30 收藏 0 点赞 0 评论 0
def maybe_move(self, spec, dist_filename, setup_base):
        """Move the unpacked source for *spec* into the build directory.

        Returns the directory callers should continue working from: the
        final destination on success, or *setup_base* unchanged when the
        destination already exists.
        """
        dst = os.path.join(self.build_directory, spec.key)
        if os.path.exists(dst):
            msg = (
                "%r already exists in %s; build directory %s will not be kept"
            )
            log.warn(msg, spec.key, self.build_directory, setup_base)
            return setup_base

        if os.path.isdir(dist_filename):
            # The "download" is already an unpacked source tree.
            setup_base = dist_filename
        else:
            if os.path.dirname(dist_filename) == setup_base:
                # Drop the archive from the tmp dir so it isn't moved too.
                os.unlink(dist_filename)
            entries = os.listdir(setup_base)
            if len(entries) == 1:
                candidate = os.path.join(setup_base, entries[0])
                if os.path.isdir(candidate):
                    # if the only thing there is a directory, move it instead
                    setup_base = candidate

        ensure_directory(dst)
        shutil.move(setup_base, dst)
        return dst
msvc.py 文件源码 项目:python- 作者: secondtonone1 项目源码 文件源码 阅读 40 收藏 0 点赞 0 评论 0
def _use_last_dir_name(self, path, prefix=''):
        """
        Return name of the last dir in path or '' if no dir found.

        Parameters
        ----------
        path: str
            Use dirs in this path
        prefix: str
            Use only dirs startings by this prefix
        """
        matching_dirs = (
            dir_name
            for dir_name in reversed(os.listdir(path))
            if os.path.isdir(os.path.join(path, dir_name)) and
            dir_name.startswith(prefix)
        )
        return next(matching_dirs, None) or ''
ez_setup.py 文件源码 项目:py_find_1st 作者: roebel 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def archive_context(filename):
    """Extract *filename* into a scratch dir, chdir into its single
    top-level entry for the duration of the context, then clean up."""
    scratch = tempfile.mkdtemp()
    log.warn('Extracting in %s', scratch)
    prev_cwd = os.getcwd()
    try:
        os.chdir(scratch)
        with get_zip_class()(filename) as archive:
            archive.extractall()

        # The archive is expected to hold exactly one top-level directory;
        # do all further work from inside it.
        inner = os.path.join(scratch, os.listdir(scratch)[0])
        os.chdir(inner)
        log.warn('Now working in %s', inner)
        yield

    finally:
        # Restore the caller's cwd and remove the scratch tree.
        os.chdir(prev_cwd)
        shutil.rmtree(scratch)
facenet.py 文件源码 项目:facerecognition 作者: guoxiaolu 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
def get_model_filenames(model_dir):
    """Locate the meta-graph file and the latest checkpoint in *model_dir*.

    :param model_dir: directory containing one '*.meta' file and zero or
        more 'model-*.ckpt-<step>' checkpoint files
    :return: (meta_file, ckpt_file) basenames; ckpt_file is None when no
        checkpoint matches
    :raises ValueError: when there is not exactly one '*.meta' file
    """
    files = os.listdir(model_dir)
    meta_files = [s for s in files if s.endswith('.meta')]
    if len(meta_files) == 0:
        raise ValueError('No meta file found in the model directory (%s)' % model_dir)
    elif len(meta_files) > 1:
        raise ValueError('There should not be more than one meta file in the model directory (%s)' % model_dir)
    meta_file = meta_files[0]
    # BUG FIX: 'ckpt_file' was unbound (UnboundLocalError at return) when
    # no checkpoint matched; also dropped a dead reassignment of
    # 'meta_files' whose value was never read.
    max_step = -1
    ckpt_file = None
    for f in files:
        step_str = re.match(r'(^model-[\w\- ]+.ckpt-(\d+))', f)
        if step_str is not None and len(step_str.groups()) >= 2:
            step = int(step_str.groups()[1])
            if step > max_step:
                # Keep the checkpoint with the highest global step.
                max_step = step
                ckpt_file = step_str.groups()[0]
    return meta_file, ckpt_file
ftp_upload.py 文件源码 项目:facerecognition 作者: guoxiaolu 项目源码 文件源码 阅读 32 收藏 0 点赞 0 评论 0
def ftp_upload(ftp, remotefile, localfile):
    """Upload *localfile* to *remotefile* over an existing FTP session.

    :param ftp: an already-connected FTP client exposing storbinary /
        set_debuglevel (presumably an ftplib.FTP — TODO confirm)
    :param remotefile: remote target path for the STOR command
    :param localfile: local file to read and send
    """
    try:
        # BUG FIX: 'with' guarantees the local file handle is closed even
        # when storbinary raises (the original leaked it on error).
        with open(localfile, 'rb') as fp:
            ftp.storbinary('STOR ' + remotefile, fp)
        ftp.set_debuglevel(0)
        print('????')

    except Exception as e:
        traceback.print_exc()
ez_setup.py 文件源码 项目:Adafruit_Python_PCA9685 作者: adafruit 项目源码 文件源码 阅读 27 收藏 0 点赞 0 评论 0
def archive_context(filename):
    """Extract *filename* into a scratch dir, chdir into its single
    top-level entry for the duration of the context, then clean up."""
    scratch = tempfile.mkdtemp()
    log.warn('Extracting in %s', scratch)
    prev_cwd = os.getcwd()
    try:
        os.chdir(scratch)
        with get_zip_class()(filename) as archive:
            archive.extractall()

        # The archive is expected to hold exactly one top-level directory;
        # do all further work from inside it.
        inner = os.path.join(scratch, os.listdir(scratch)[0])
        os.chdir(inner)
        log.warn('Now working in %s', inner)
        yield

    finally:
        # Restore the caller's cwd and remove the scratch tree.
        os.chdir(prev_cwd)
        shutil.rmtree(scratch)
dnf_backend.py 文件源码 项目:dnfdragora 作者: manatools 项目源码 文件源码 阅读 23 收藏 0 点赞 0 评论 0
def _cacheProtected(self):
        '''
        Cache the ids of all protected packages into self._protected.

        Reads every file under /etc/dnf/protected.d; each non-empty line
        is a package name whose matching package ids are collected.
        '''
        self._protected = []
        protected_conf_path = '/etc/dnf/protected.d'
        conf_files = listdir(protected_conf_path)
        for f in conf_files:
            file_path = protected_conf_path + '/' + f
            with open(file_path, 'r') as content_file:
                for line in content_file:
                    if line.strip():
                        # Match every version, not only the newest one.
                        # (Removed unused locals 'match_all' and 'tags'
                        # that were assigned but never read.)
                        newest_only = False
                        pkgs = self.get_packages_by_name(line.strip(), newest_only)

                        for pkg in pkgs:
                            pkg_id = pkg.pkg_id
                            if pkg_id not in self._protected:
                                self._protected.append(pkg_id)
                        # TODO it would be better to get recursive require
                        #for pkg_id in self._protected:
                            #recursive_id = self.GetAttribute(pkg_id,'requires')
def get_config_hash(file_dir, resource_mapping, exts=('conf',)):
    """Build a merged config dict from the mapped config files in *file_dir*.

    :param file_dir: directory holding the config files
    :param resource_mapping: dict mapping config filename -> {'driver': ...,
        'resource': ...}; files without a mapping are skipped
    :param exts: iterable of accepted filename extensions
        (BUG FIX: was a mutable list default, now an immutable tuple)
    :return: deep-merged dict of all parsed configs (empty when the
        directory does not exist)
    """
    res = {}
    if not os.path.isdir(file_dir):
        # Typo fix in the log message: 'emty' -> 'empty'.
        logger.debug(
            "Directory {} not found. Returning empty dict".format(file_dir))
        return {}

    conf_files = [conf for conf in os.listdir(file_dir)
                  if conf.split('.')[-1] in exts]

    for conf_file in conf_files:
        if conf_file in resource_mapping:
            # Fall back to the OpenStackConfig driver unless one is mapped.
            drv = resource_mapping[conf_file].get(
                'driver',
                'fuel_external_git.drivers.openstack_config.OpenStackConfig'
            )
            drv_class = importutils.import_class(drv)
            config = drv_class(
                os.path.join(file_dir, conf_file),
                resource_mapping[conf_file]['resource']
            )
            deep_merge(res, config.to_config_dict())
    return res
vclustermgr.py 文件源码 项目:docklet 作者: unias 项目源码 文件源码 阅读 33 收藏 0 点赞 0 评论 0
def recover_allclusters(self):
        """Recover every vcluster of every user found on the filesystem,
        applying each owner group's rate-limit quotas."""
        logger.info("recovering all vclusters for all users...")
        usersdir = self.fspath+"/global/users/"
        auth_key = env.getenv('AUTH_KEY')
        # Fetch all group quota definitions once, up front.
        res = post_to_user("/master/user/groupinfo/", {'auth_key':auth_key})
        #logger.info(res)
        groups = json.loads(res['groups'])
        # Index quotas by group name for the per-user lookups below.
        quotas = {}
        for group in groups:
            #logger.info(group)
            quotas[group['name']] = group['quotas']
        # Each directory under usersdir is assumed to be a username —
        # TODO confirm no stray files ever appear there.
        for user in os.listdir(usersdir):
            for cluster in self.list_clusters(user)[1]:
                logger.info ("recovering cluster:%s for user:%s ..." % (cluster, user))
                #res = post_to_user('/user/uid/',{'username':user,'auth_key':auth_key})
                # Resolve the user's uid and group to apply the right quotas.
                recover_info = post_to_user("/master/user/recoverinfo/", {'username':user,'auth_key':auth_key})
                uid = recover_info['uid']
                groupname = recover_info['groupname']
                input_rate_limit = quotas[groupname]['input_rate_limit']
                output_rate_limit = quotas[groupname]['output_rate_limit']
                self.recover_cluster(cluster, user, uid, input_rate_limit, output_rate_limit)
        logger.info("recovered all vclusters for all users")
container.py 文件源码 项目:docklet 作者: unias 项目源码 文件源码 阅读 29 收藏 0 点赞 0 评论 0
def diff_containers(self):
        """Compare containers on this host with the global registry.

        :return: [both, onlylocal, onlyglobal] — container names present in
            both views, only locally, or only in the global registry.
        """
        [status, localcontainers] = self.list_containers()
        globalpath = self.fspath+"/global/users/"
        users = os.listdir(globalpath)
        # Gather, from every user's cluster files, the containers that the
        # registry says should live on this host.
        globalcontainers = []
        for user in users:
            clusters = os.listdir(globalpath+user+"/clusters")
            for cluster in clusters:
                # BUG FIX: the cluster file was opened but never closed;
                # 'with' releases the handle even if the JSON is invalid.
                with open(globalpath+user+"/clusters/"+cluster, 'r') as clusterfile:
                    clusterinfo = json.loads(clusterfile.read())
                for container in clusterinfo['containers']:
                    if container['host'] == self.addr:
                        globalcontainers.append(container['containername'])
        both = []
        onlylocal = []
        onlyglobal = []
        for container in localcontainers:
            if container in globalcontainers:
                both.append(container)
            else:
                onlylocal.append(container)
        for container in globalcontainers:
            if container not in localcontainers:
                onlyglobal.append(container)
        return [both, onlylocal, onlyglobal]
monitor.py 文件源码 项目:docklet 作者: unias 项目源码 文件源码 阅读 41 收藏 0 点赞 0 评论 0
def save_billing_history(vnode_name, billing_history):
    """Persist *billing_history* for *vnode_name* into its cluster file.

    Scans the owner's cluster files for the one whose 'clusterid' matches
    the vnode's cluster and rewrites it with the updated history.  Returns
    silently when the owner has no clusters directory.
    """
    clusters_dir = env.getenv("FS_PREFIX")+"/global/users/"+get_owner(vnode_name)+"/clusters/"
    if not os.path.exists(clusters_dir):
        return
    clusters = os.listdir(clusters_dir)
    vnode_cluster_id = get_cluster(vnode_name)
    for cluster in clusters:
        clusterpath = clusters_dir + cluster
        if not os.path.isfile(clusterpath):
            continue
        # BUG FIX: 'with' guarantees the handles are closed even when
        # json.loads raises (the original leaked them on a parse error).
        with open(clusterpath, 'r') as infofile:
            info = json.loads(infofile.read())
        if vnode_cluster_id != str(info['clusterid']):
            continue
        if 'billing_history' not in info:
            info['billing_history'] = {}
        info['billing_history'][vnode_name] = billing_history
        with open(clusterpath, 'w') as infofile:
            infofile.write(json.dumps(info))
        break
    return
monitor.py 文件源码 项目:docklet 作者: unias 项目源码 文件源码 阅读 41 收藏 0 点赞 0 评论 0
def get_billing_history(vnode_name):
    """Fetch the stored billing history for *vnode_name*.

    Scans the owner's cluster files and returns the first matching record;
    falls back to a zeroed default when nothing is stored.
    """
    clusters_dir = env.getenv("FS_PREFIX")+"/global/users/"+get_owner(vnode_name)+"/clusters/"
    if os.path.exists(clusters_dir):
        clusters = os.listdir(clusters_dir)
        for cluster in clusters:
            clusterpath = clusters_dir + cluster
            if not os.path.isfile(clusterpath):
                continue
            # BUG FIX: close the file deterministically even when
            # json.loads raises (the original leaked the handle).
            with open(clusterpath, 'r') as infofile:
                info = json.loads(infofile.read())
            if 'billing_history' not in info or vnode_name not in info['billing_history']:
                continue
            return info['billing_history'][vnode_name]
    # No record found: every metric defaults to zero.
    default = {}
    default['cpu'] = 0
    default['mem'] = 0
    default['disk'] = 0
    default['port'] = 0
    return default

# the thread to collect data from each worker and store them in monitor_hosts and monitor_vnodes
backuper.py 文件源码 项目:Telebackup 作者: LonamiWebs 项目源码 文件源码 阅读 48 收藏 0 点赞 0 评论 0
def enumerate_backups_entities():
        """Enumerate the entities of all the available backups.

        Yields one deserialized entity object per backup subdirectory that
        contains an 'entity.tlo' file; yields nothing when the backups
        directory does not exist.
        """
        if isdir(Backuper.backups_dir):

            # Look for subdirectories; each one is presumed to be a single
            # backup — TODO confirm no stray files appear at this level.
            for directory in listdir(Backuper.backups_dir):
                entity_file = path.join(Backuper.backups_dir, directory, 'entity.tlo')

                # Ensure the entity.pickle file exists
                if isfile(entity_file):

                    # Load and yield it
                    with open(entity_file, 'rb') as file:
                        with BinaryReader(stream=file) as reader:
                            try:
                                yield reader.tgread_object()
                            except TypeNotFoundError:
                                # Old user, scheme got updated, don't care.
                                pass

    #endregion

    #region Backup exists and deletion


问题


面经


文章

微信
公众号

扫码关注公众号