Example source code for the Python Log() class
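The snippets below are collected verbatim from real projects. Two calling conventions appear: rdiff-backup passes a message (or a zero-argument callable producing one) plus a numeric verbosity level to log.Log(), foreman-yml passes a level constant and a message to log.log(), and MusicNow's log module exposes log.log() and log.log_indented() with a single message argument. To exercise an rdiff-backup-style snippet in isolation, a minimal stand-in such as the sketch below can be used; it is an assumption for illustration only, not the API of any of the quoted projects.

# Minimal stand-in for rdiff-backup's log.Log, for experimenting with the
# snippets below.  The verbosity threshold and output format are assumptions;
# the real Logger lives in rdiff-backup's own log module.
import sys

verbosity = 9  # assumed: keep everything up to the most detailed level

def Log(message, level):
    # rdiff-backup passes either a string or a zero-argument callable
    # (see the lambda in the first setmtime example below).
    if level <= verbosity:
        if callable(message):
            message = message()
        sys.stderr.write("[%d] %s\n" % (level, message))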

rpath.py source (project: rdiff-backup, author: sol1)
def setmtime(self, modtime):
        """Set only modtime (access time to present)"""
        log.Log(lambda: "Setting time of %s to %d" % (self.path, modtime), 7)
        if modtime < 0: log.Log("Warning: modification time of %s is"
                                "before 1970" % self.path, 2)
        try: self.conn.os.utime(self.path, (long(time.time()), modtime))
        except OverflowError:
            log.Log("Cannot change mtime of %s to %s - problem is probably"
                    "64->32bit conversion" % (self.path, modtime), 2)
        except OSError:
            # It's not possible to set a modification time for
            # directories on Windows.
            if self.conn.os.name != 'nt' or not self.isdir():
                raise
        else: self.data['mtime'] = modtime
rpath.py source (project: rdiff-backup, author: sol1)
def mkdir(self):
        log.Log("Making directory " + self.path, 6)
        self.conn.os.mkdir(self.path)
        self.setdata()
rpath.py source (project: rdiff-backup, author: sol1)
def makedirs(self):
        log.Log("Making directory path " + self.path, 6)
        self.conn.os.makedirs(self.path)
        self.setdata()
rpath.py source (project: rdiff-backup, author: sol1)
def rmdir(self):
        log.Log("Removing directory " + self.path, 6)
        self.conn.os.rmdir(self.path)
        self.data = {'type': None}
rpath.py source (project: rdiff-backup, author: sol1)
def hardlink(self, linkpath):
        """Make self into a hardlink joined to linkpath"""
        log.Log("Hard linking %s to %s" % (self.path, linkpath), 6)
        self.conn.os.link(linkpath, self.path)
        self.setdata()
rpath.py source (project: rdiff-backup, author: sol1)
def touch(self):
        """Make sure file at self.path exists"""
        log.Log("Touching " + self.path, 7)
        self.conn.open(self.path, "w").close()
        self.setdata()
        assert self.isreg(), self.path
rpath.py source (project: rdiff-backup, author: sol1)
def contains_files(self):
        """Returns true if self (or subdir) contains any regular files."""
        log.Log("Determining if directory contains files: %s" % self.path, 7)
        if not self.isdir():
            return False
        dir_entries = self.listdir()
        for entry in dir_entries:
            child_rp = self.append(entry)
            if not child_rp.isdir():
                return True
            else:
                if child_rp.contains_files():
                    return True
        return False
rpath.py source (project: rdiff-backup, author: sol1)
def write_from_fileobj(self, fp, compress = None):
        """Reads fp and writes to self.path.  Closes both when done

        If compress is true, fp will be gzip compressed before being
        written to self.  Returns closing value of fp.

        """
        log.Log("Writing file object to " + self.path, 7)
        assert not self.lstat(), "File %s already exists" % self.path
        outfp = self.open("wb", compress = compress)
        copyfileobj(fp, outfp)
        if outfp.close(): raise RPathException("Error closing file")
        self.setdata()
        return fp.close()
rpath.py source (project: rdiff-backup, author: sol1)
def write_resource_fork(self, rfork_data):
        """Write new resource fork to self"""
        log.Log("Writing resource fork to %s" % (self.index,), 7)
        fp = self.conn.open(os.path.join(self.path, '..namedfork', 'rsrc'), 'wb')
        fp.write(rfork_data)
        assert not fp.close()
        self.set_resource_fork(rfork_data)
metadata.py source (project: rdiff-backup, author: sol1)
def carbonfile2string(cfile):
    """Convert CarbonFile data to a string suitable for storing."""
    if not cfile: return "None"
    retvalparts = []
    retvalparts.append('creator:%s' % binascii.hexlify(cfile['creator']))
    retvalparts.append('type:%s' % binascii.hexlify(cfile['type']))
    retvalparts.append('location:%d,%d' % cfile['location'])
    retvalparts.append('flags:%d' % cfile['flags'])
    try: retvalparts.append('createDate:%d' % cfile['createDate'])
    except KeyError: log.Log("Writing pre-1.1.6 style metadata, without creation date", 9)
    return '|'.join(retvalparts)
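
As a reference for the format, here is a made-up CarbonFile dict and the string the function above would produce (the values are placeholders; creator and type are hex-encoded, location is a two-integer pair, and the module's binascii import is assumed, Python 2 style as in the surrounding code):

# Hypothetical input for carbonfile2string; requires the function above.
example_cfile = {'creator': 'R*ch', 'type': 'TEXT',
                 'location': (10, 20), 'flags': 0,
                 'createDate': 1200000000}
print(carbonfile2string(example_cfile))
# creator:522a6368|type:54455854|location:10,20|flags:0|createDate:1200000000
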
metadata.py source (project: rdiff-backup, author: sol1)
def unquote_path(quoted_string):
    """Reverse what was done by quote_path"""
    def replacement_func(match_obj):
        """Unquote match obj of two character sequence"""
        two_chars = match_obj.group(0)
        if two_chars == "\\n": return "\n"
        elif two_chars == "\\\\": return "\\"
        log.Log("Warning, unknown quoted sequence %s found" % two_chars, 2)
        return two_chars
    return re.sub("\\\\n|\\\\\\\\", replacement_func, quoted_string)
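
A quick sanity check of the unquoting (the input is made up; a literal backslash-n turns into a real newline and a doubled backslash into a single one):

# Illustrative only; assumes unquote_path from above is importable.
quoted = "dir\\nsub\\\\file"       # the characters: dir\nsub\\file
print(repr(unquote_path(quoted)))  # -> 'dir\nsub\\file' (a newline, then one backslash)
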
metadata.py source (project: rdiff-backup, author: sol1)
def iterate(self):
        """Return iterator that yields all objects with records"""
        for record in self.iterate_records():
            try: yield self.record_to_object(record)
            except (ParsingError, ValueError), e:
                if self.at_end: break # Ignore whitespace/bad records at end
                log.Log("Error parsing flat file: %s" % (e,), 2)
metadata.py source (project: rdiff-backup, author: sol1)
def iterate_starting_with(self, index):
        """Iterate objects whose index starts with given index"""
        self.skip_to_index(index)
        if self.at_end: return
        while 1:
            next_pos = self.get_next_pos()
            try: obj = self.record_to_object(self.buf[:next_pos])
            except (ParsingError, ValueError), e:
                log.Log("Error parsing metadata file: %s" % (e,), 2)
            else:
                if obj.index[:len(index)] != index: break
                yield obj
            if self.at_end: break
            self.buf = self.buf[next_pos:]
        assert not self.fileobj.close()
metadata.py source (project: rdiff-backup, author: sol1)
def ConvertMetaToDiff(self):
        """Replace a mirror snapshot with a diff if it's appropriate"""
        newrp, oldrp = self.check_needs_diff()
        if not newrp: return
        log.Log("Writing mirror_metadata diff", 6)

        diff_writer = self.get_meta_writer('diff', oldrp.getinctime())
        new_iter = MetadataFile(newrp, 'r').get_objects()
        old_iter = MetadataFile(oldrp, 'r').get_objects()
        for diff_rorp in self.get_diffiter(new_iter, old_iter):
            diff_writer.write_object(diff_rorp)
        diff_writer.close() # includes sync
        oldrp.delete()
journal.py source (project: rdiff-backup, author: sol1)
def open_journal():
    """Make sure the journal dir exists (creating it if necessary)"""
    global journal_dir_rp, journal_dir_fp
    assert journal_dir_rp is journal_dir_fp is None
    journal_dir_rp = Globals.rbdir.append("journal")
    if not journal_dir_rp.lstat():
        log.Log("Creating journal directory %s" % (journal_dir_rp.path,), 5)
        journal_dir_rp.mkdir()
    assert journal_dir_rp.isdir()
    journal_dir_fp = journal_dir_rp.open("rb")
selection.py source (project: rdiff-backup, author: sol1)
def parse_catch_error(self, exc):
        """Deal with selection error exc"""
        if isinstance(exc, FilePrefixError):
            log.Log.FatalError(
"""Fatal Error: The file specification
    '%s'
cannot match any files in the base directory
    '%s'
Useful file specifications begin with the base directory or some
pattern (such as '**') which matches the base directory.""" %
            (exc, self.prefix))
        elif isinstance(exc, GlobbingError):
            log.Log.FatalError("Fatal Error while processing expression\n"
                               "%s" % exc)
        else: raise
selection.py source (project: rdiff-backup, author: sol1)
def parse_last_excludes(self):
        """Exit with error if last selection function isn't an exclude"""
        if (self.selection_functions and
            not self.selection_functions[-1].exclude):
            log.Log.FatalError(
"""Last selection expression:
    %s
only specifies that files be included.  Because the default is to
include all files, the expression is redundant.  Exiting because this
probably isn't what you meant.""" %
            (self.selection_functions[-1].name,))
selection.py source (project: rdiff-backup, author: sol1)
def filelist_read(self, filelist_fp, include, filelist_name):
        """Read filelist from fp, return (tuplelist, something_excluded)"""
        prefix_warnings = [0]
        def incr_warnings(exc):
            """Warn if prefix is incorrect"""
            prefix_warnings[0] += 1
            if prefix_warnings[0] < 6:
                log.Log("Warning: file specification '%s' in filelist %s\n"
                        "doesn't start with correct prefix %s.  Ignoring." %
                        (exc, filelist_name, self.prefix), 2)
                if prefix_warnings[0] == 5:
                    log.Log("Future prefix errors will not be logged.", 2)

        something_excluded, tuple_list = None, []
        separator = Globals.null_separator and "\0" or "\n"
        for line in filelist_fp.read().split(separator):
            if not line: continue # skip blanks
            try: tuple = self.filelist_parse_line(line, include)
            except FilePrefixError, exc:
                incr_warnings(exc)
                continue
            tuple_list.append(tuple)
            if not tuple[1]: something_excluded = 1
        if filelist_fp.close():
            log.Log("Error closing filelist %s" % filelist_name, 2)
        return (tuple_list, something_excluded)
selection.py source (project: rdiff-backup, author: sol1)
def regexp_get_sf(self, regexp_string, include):
        """Return selection function given by regexp_string"""
        assert include == 0 or include == 1
        try: regexp = re.compile(regexp_string)
        except:
            log.Log("Error compiling regular expression %s" % regexp_string, 1)
            raise

        def sel_func(rp):
            if regexp.search(rp.path): return include
            else: return None

        sel_func.exclude = not include
        sel_func.name = "Regular expression: %s" % regexp_string
        return sel_func
longname.py source (project: rdiff-backup, author: sol1)
def get_next_free():
    """Return next free filename available in the long filename directory"""
    global free_name_counter
    def scan_next_free():
        """Return value of free_name_counter by listing long filename dir"""
        log.Log("Setting next free from long filenames dir", 5)
        cur_high = 0
        for filename in get_long_rp().listdir():
            try: i = int(filename.split('.')[0])
            except ValueError: continue
            if i > cur_high: cur_high = i
        return cur_high + 1

    def read_next_free():
        """Return next int free by reading the next_free file, or None"""
        rp = get_long_rp(counter_filename)
        if not rp.lstat(): return None
        return int(rp.get_data())

    def write_next_free(i):
        """Write value i into the counter file"""
        rp = get_long_rp(counter_filename)
        if rp.lstat(): rp.delete()
        rp.write_string(str(free_name_counter))
        rp.fsync_with_dir()

    if not free_name_counter: free_name_counter = read_next_free()
    if not free_name_counter: free_name_counter = scan_next_free()
    filename = str(free_name_counter)
    rp = get_long_rp(filename)
    assert not rp.lstat(), "Unexpected file at %s found" % (rp.path,)
    free_name_counter += 1
    write_next_free(free_name_counter)
    return filename
cleanup.py source (project: foreman-yml, author: adfinis-sygroup)
def process_cleanup_arch(self):
        log.log(log.LOG_INFO, "Processing Cleanup of Architectures")
        for arch in self.get_config_section('cleanup-architecture'):
            try:
                self.validator.cleanup_arch(arch)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot delete Architecture '{0}': YAML validation Error: {1}".format(arch['name'], e))
                continue

            try:
                self.fm.architectures.show(arch['name'])['id']
                log.log(log.LOG_INFO, "Delete Architecture '{0}'".format(arch['name']))

                self.fm.architectures.destroy( arch['name'] )
            except:
                log.log(log.LOG_WARN, "Architecture '{0}' already absent.".format(arch['name']))
cleanup.py source (project: foreman-yml, author: adfinis-sygroup)
def process_cleanup_computeprfl(self):
        log.log(log.LOG_INFO, "Processing Cleanup of Compute profiles")
        for computeprfl in self.get_config_section('cleanup-compute-profile'):
            try:
                self.validator.cleanup_computeprfl(computeprfl)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot delete Compute profile '{0}': YAML validation Error: {1}".format(computeprfl['name'], e))
                continue

            try:
                self.fm.compute_profiles.show(computeprfl['name'])['id']
                log.log(log.LOG_INFO, "Delete Compute profile '{0}'".format(computeprfl['name']))

                self.fm.compute_profiles.destroy( computeprfl['name'] )
            except:
                log.log(log.LOG_WARN, "Compute profile '{0}' already absent.".format(computeprfl['name']))
cleanup.py source (project: foreman-yml, author: adfinis-sygroup)
def process_cleanup_medium(self):
        log.log(log.LOG_INFO, "Processing Cleanup of Media")
        medialist = self.fm.media.index(per_page=99999)['results']
        for medium in self.get_config_section('cleanup-medium'):
            try:
                self.validator.cleanup_medium(medium)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot delete Medium '{0}': YAML validation Error: {1}".format(medium['name'], e))
                continue

            medium_deleted = False
            # fm.media.show(name) does not work, we need to iterate over fm.media.index()
            for mediac in medialist:
                if (mediac['name'] == medium['name']):
                    medium_deleted = True
                    log.log(log.LOG_INFO, "Delete Medium '{0}'".format(medium['name']))

                    self.fm.media.destroy( medium['name'] )
                    continue
            if not medium_deleted:
                log.log(log.LOG_WARN, "Medium '{0}' already absent.".format(medium['name']))
cleanup.py source (project: foreman-yml, author: adfinis-sygroup)
def process_cleanup_ptable(self):
        log.log(log.LOG_INFO, "Processing Cleanup of Partition Tables")
        for ptable in self.get_config_section('cleanup-partition-table'):
            try:
                self.validator.cleanup_ptable(ptable)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot delete Partition Table '{0}': YAML validation Error: {1}".format(ptable['name'], e))
                continue

            try:
                self.fm.ptables.show(ptable['name'])['id']
                log.log(log.LOG_INFO, "Delete Partition Table '{0}'".format(ptable['name']))

                self.fm.ptables.destroy( ptable['name'] )
            except:
                log.log(log.LOG_WARN, "Partition Table '{0}' already absent.".format(ptable['name']))
importer.py source (project: foreman-yml, author: adfinis-sygroup)
def process_config_enviroment(self):
        log.log(log.LOG_INFO, "Processing Environments")
        envlist = self.fm.environments.index(per_page=99999)['results']
        for env in self.get_config_section('environment'):
            try:
                self.validator.enviroment(env)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot create Environment '{0}': YAML validation Error: {1}".format(env['name'], e))
                continue

            env_id = False
            # the environment is matched by iterating over fm.environments.index() rather than by a show(name) lookup
            for envc in envlist:
                if (env['name'] == envc['name']):
                    env_id = envc['id']
                    log.log(log.LOG_DEBUG, "Environment '{0}' (id={1}) already present.".format(env['name'], env_id))
                    continue
            if not env_id:
                log.log(log.LOG_INFO, "Create Environment '{0}'".format(env['name']))
                self.fm.environments.create( environment = { 'name': env['name'] } )
importer.py source (project: foreman-yml, author: adfinis-sygroup)
def process_config_model(self):
        log.log(log.LOG_INFO, "Processing Models")
        for model in self.get_config_section('model'):
            try:
                self.validator.model(model)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot create Model '{0}': YAML validation Error: {1}".format(model['name'], e))
                continue
            try:
                model_id = self.fm.models.show(model['name'])['id']
                log.log(log.LOG_DEBUG, "Model '{0}' (id={1}) already present.".format(model['name'], model_id))
            except:
                log.log(log.LOG_INFO, "Create Model '{0}'".format(model['name']))
                model_tpl = {
                    'name':             model['name'],
                    'info':             model['info'],
                    'vendor_class':     model['vendor-class'],
                    'hardware_model':   model['hardware-model']
                }
                self.fm.models.create( model = model_tpl )
importer.py source (project: foreman-yml, author: adfinis-sygroup)
def process_config_medium(self):
        log.log(log.LOG_INFO, "Processing Media")
        medialist = self.fm.media.index(per_page=99999)['results']
        for medium in self.get_config_section('medium'):
            try:
                self.validator.medium(medium)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot create Media '{0}': YAML validation Error: {1}".format(medium['name'], e))
                continue

            medium_id = False
            # fm.media.show(name) does not work, we need to iterate over fm.media.index()
            for mediac in medialist:
                if (mediac['name'] == medium['name']):
                    medium_id = mediac['id']
                    log.log(log.LOG_DEBUG, "Medium '{0}' (id={1}) already present.".format(medium['name'], medium_id))
            if not medium_id:
                log.log(log.LOG_INFO, "Create Medium '{0}'".format(medium['name']))
                medium_tpl = {
                    'name':        medium['name'],
                    'path':        medium['path'],
                    'os_family':   medium['os-family']
                }
                self.fm.media.create( medium = medium_tpl )
importer.py source (project: foreman-yml, author: adfinis-sygroup)
def process_config_smartproxy(self):
        log.log(log.LOG_INFO, "Processing Smart Proxies")
        for proxy in self.get_config_section('smart-proxy'):
            try:
                proxy_id = self.fm.smart_proxies.show(proxy['name'])['id']
                log.log(log.LOG_DEBUG, "Proxy '{0}' (id={1}) already present.".format(proxy['name'], proxy_id))
            except:
                log.log(log.LOG_INFO, "Create Smart Proxy '{0}'".format(proxy['name']))
                proxy_tpl = {
                    'name': proxy['name'],
                    'url': proxy['url'],
                }
                try:
                    self.fm.smart_proxies.create( smart_proxy = proxy_tpl )
                except:
                    log.log(log.LOG_WARN, "Cannot create Smart Proxy '{0}'. Is the Proxy online? ".format(proxy['name']))
importer.py source (project: foreman-yml, author: adfinis-sygroup)
def process_config_ptable(self):
        log.log(log.LOG_INFO, "Processing Partition Tables")
        for ptable in self.get_config_section('partition-table'):
            try:
                self.validator.ptable(ptable)
            except MultipleInvalid as e:
                log.log(log.LOG_WARN, "Cannot create Partition Table '{0}': YAML validation Error: {1}".format(ptable['name'], e))
                continue
            try:
                ptable_id = self.fm.ptables.show(ptable['name'])['id']
                log.log(log.LOG_DEBUG, "Partition Table '{0}' (id={1}) already present.".format(ptable['name'], ptable_id))
            except:
                log.log(log.LOG_INFO, "Create Partition Table '{0}'".format(ptable['name']))
                ptable_tpl = {
                    'name':             ptable['name'],
                    'layout':           ptable['layout'],
                    'snippet':          ptable['snippet'],
                    'audit_comment':    ptable['audit-comment'],
                    'locked':           ptable['locked'],
                    'os_family':        ptable['os-family']
                }
                self.fm.ptables.create( ptable = ptable_tpl )
repair.py source (project: MusicNow, author: kalbhor)
def add_details(file_name, title, artist, album, lyrics=""):
    '''
    Adds the details to song
    '''

    tags = EasyMP3(file_name)
    tags["title"] = title
    tags["artist"] = artist
    tags["album"] = album
    tags.save()

    tags = ID3(file_name)
    uslt_output = USLT(encoding=3, lang=u'eng', desc=u'desc', text=lyrics)
    tags["USLT::'eng'"] = uslt_output

    tags.save(file_name)

    log.log("> Adding properties")
    log.log_indented("[*] Title: %s" % title)
    log.log_indented("[*] Artist: %s" % artist)
    log.log_indented("[*] Album: %s " % album)

