def do_quit(self, arg):
    """Quit fuzzbunch"""
    try:
        # Collect the names of sessions that still hold an open contract.
        open_names = []
        for item in self.session.get_itemlist():
            if item.value.has_opencontract():
                open_names.append(item.name)
        if open_names:
            # Warn the user and ask for confirmation before quitting.
            self.io.print_opensessions({'sessions' : open_names})
            answer = self.io.get_input("Really quit [n] ? ")
            if answer.lower() not in ("yes", "y", "q", "quit"):
                return
        if self.log:
            self.log.close()
        return True
    except:
        # Best-effort: any failure during shutdown still quits.
        pass
    return True
# Example source snippets using a Python Log() class (collected from multiple projects)
def update_dist(self, indexcomp, comp, norm, last_comp=False, classifier=None):
    """
    Calculate the distance from the supplied pattern to the stored pattern
    """
    # Called from CM1KEmulator.update_all_neuron_dists(), i.e., whenever COMP or LCOMP is updated
    log.trace("Neuron.update_dist()")
    delta = comp - self.pattern[indexcomp]
    if norm == cm1k.CM1KDistNorm.l1:
        # L1: running sum of absolute component differences.
        self.dist += abs(delta)
    elif norm == cm1k.CM1KDistNorm.lsup:
        # Lsup: running maximum of absolute component differences.
        if abs(delta) > self.dist:
            self.dist = abs(delta)
    elif norm == cm1k.CM1KDistNorm.euc:
        # Euclidean: accumulate squares; the sqrt happens on the last component.
        self.dist += delta * delta
    if last_comp:
        if norm == cm1k.CM1KDistNorm.euc:
            self.dist = int(round(math.sqrt(self.dist)))
        rbf_fired = classifier == cm1k.CM1KClassifier.rbf and self.dist < self.aif
        knn_fired = classifier == cm1k.CM1KClassifier.knn
        if rbf_fired or knn_fired:
            # The neuron has fired
            self.chip.store_firing_neuron(self)
def store_firing_neuron(self, neuron):
    """Record a fired neuron, keeping firing_neurons sorted descending by dist."""
    # Called from Neuron.update_dist(), i.e., whenever COMP or LCOMP is updated
    # Called from Neuron.broadcast(), i.e., by individual neurons whenever they fire
    log.trace("CM1KEmulator.store_firing_neuron()")
    # NOTE: firing_neurons won't be sorted until all neurons are added (see update_all_neuron_dists())
    # Only store a firing neuron if its dist-and-cat combination is unique (CM1K Hardware Manual, p. 17)
    duplicate = any(
        other.dist == neuron.dist and other.cat == neuron.cat
        for other in self.firing_neurons)
    if duplicate:
        return
    # Insert before the first neuron whose dist is <= the new neuron's dist.
    insert_pos = len(self.firing_neurons)
    for idx, other in enumerate(self.firing_neurons):
        if other.dist <= neuron.dist:  # Must be <=, not <, so that earlier neurons win, ala CM1K spec
            insert_pos = idx
            break
    self.firing_neurons.insert(insert_pos, neuron)
def do_quit(self, arg):
    """Quit fuzzbunch"""
    try:
        still_open = [i.name for i in self.session.get_itemlist()
                      if i.value.has_opencontract()]
        if still_open:
            # Sessions with open contracts: confirm before quitting.
            self.io.print_opensessions({'sessions' : still_open})
            reply = self.io.get_input("Really quit [n] ? ").lower()
            if reply not in ("yes", "y", "q", "quit"):
                return
        if self.log:
            self.log.close()
    except:
        # Best-effort: quit regardless of any shutdown failure.
        pass
    return True
def get(db, table, field, matchfield, matchvalue):
    """Return the first `field` value from `table` where `matchfield` equals
    `matchvalue` (lowercased, UTF-8 encoded), or False when there is no match.

    Returns None implicitly when the query fails; the failure is logged.
    """
    init(db)
    matchvalue = matchvalue.encode('utf-8').lower()
    # SECURITY FIX: the match value was interpolated directly into the SQL
    # string (injection risk). Identifiers (field/table/column) cannot be
    # bound, but the value is now passed as a bound parameter.
    # NOTE(review): '?' is sqlite3-style qmark binding; the .execute/.fetchone
    # pattern here matches sqlite3 -- confirm the DB driver.
    query = "SELECT {} FROM {} WHERE {}=?;".format(field, table, matchfield)
    try:
        result = db.execute(query, (matchvalue,)).fetchone()
        if result:
            return result[0].encode('utf-8')
        else:
            return False
    except:
        log.log("***ERROR: SELECT {} FROM {} WHERE {}='{}';".format(field, table, matchfield, matchvalue))
def process_config_arch(self):
    """Create any Architectures from the 'architecture' config section that
    Foreman does not already know about."""
    log.log(log.LOG_INFO, "Processing Architectures")
    for arch in self.get_config_section('architecture'):
        # Validate the YAML stanza first; skip invalid entries.
        try:
            self.validator.arch(arch)
        except MultipleInvalid as e:
            log.log(log.LOG_WARN, "Cannot create Architecture '{0}': YAML validation Error: {1}".format(arch['name'], e))
            continue
        try:
            arch_id = self.fm.architectures.show(arch['name'])['id']
        except:
            # Lookup failed -> architecture not present yet; create it.
            log.log(log.LOG_INFO, "Create Architecture '{0}'".format(arch['name']))
            self.fm.architectures.create( architecture = { 'name': arch['name'] } )
        else:
            log.log(log.LOG_DEBUG, "Architecture '{0}' (id={1}) already present.".format(arch['name'], arch_id))
def process_config_domain(self):
    """Create Domains from the 'domain' config section, resolving an optional
    DNS smart proxy and attaching any domain parameters."""
    log.log(log.LOG_INFO, "Processing Domains")
    for domain in self.get_config_section('domain'):
        # Validate the YAML stanza; skip entries that fail.
        try:
            self.validator.domain(domain)
        except MultipleInvalid as e:
            log.log(log.LOG_WARN, "Cannot create Domain '{0}': YAML validation Error: {1}".format(domain['name'], e))
            continue
        try:
            dom_id = self.fm.domains.show(domain['name'])['id']
            log.log(log.LOG_DEBUG, "Domain '{0}' (id={1}) already present.".format(domain['name'], dom_id))
        except:
            # Lookup failed -> domain not present yet; create it below.
            dns_proxy_id = False
            try:
                dns_proxy_id = self.fm.smart_proxies.show(domain['dns-proxy'])['id']
            except:
                # Best-effort: the domain is still created, just without dns_id.
                log.log(log.LOG_WARN, "Cannot get ID of DNS Smart Proxy '{0}', skipping".format(domain['dns-proxy']))
            log.log(log.LOG_INFO, "Create Domain '{0}'".format(domain['name']))
            dom_params = []
            if (domain['parameters']):
                for name,value in domain['parameters'].iteritems():
                    p = {
                        'name': name,
                        'value': value
                    }
                    dom_params.append(p)
            dom_tpl = {
                'name': domain['name'],
                'fullname': domain['fullname'],
            }
            # Parameters are attached via a follow-up update() call below.
            fixdom = {
                'domain_parameters_attributes': dom_params
            }
            if dns_proxy_id: dom_tpl['dns_id'] = dns_proxy_id
            domo = self.fm.domains.create( domain = dom_tpl )
            if dom_params:
                self.fm.domains.update(fixdom, domo['id'])
def process_config_os(self):
    """Create Operating Systems from the 'os' config section, including any
    per-OS host parameters."""
    log.log(log.LOG_INFO, "Processing Operating Systems")
    for operatingsystem in self.get_config_section('os'):
        # Validate the YAML stanza; skip entries that fail.
        try:
            self.validator.os(operatingsystem)
        except MultipleInvalid as e:
            log.log(log.LOG_WARN, "Cannot create Operating System '{0}': YAML validation Error: {1}".format(operatingsystem['name'], e))
            continue
        try:
            # NOTE: existence lookup is by 'description', while log lines use 'name'.
            os_id = self.fm.operatingsystems.show(operatingsystem['description'])['id']
            log.log(log.LOG_DEBUG, "Operating System '{0}' (id={1}) already present.".format(operatingsystem['name'], os_id))
        except:
            # Lookup failed -> OS not present yet; create it.
            log.log(log.LOG_INFO, "Create Operating System '{0}'".format(operatingsystem['name']))
            os_tpl = {
                'name': operatingsystem['name'],
                'description': operatingsystem['description'],
                'major': operatingsystem['major'],
                'minor': operatingsystem['minor'],
                'family': operatingsystem['family'],
                'release_name': operatingsystem['release-name'],
                'password_hash': operatingsystem['password-hash']
            }
            os_obj = self.fm.operatingsystems.create(operatingsystem=os_tpl)
            # host_params
            if operatingsystem['parameters'] is not None:
                for name,value in operatingsystem['parameters'].iteritems():
                    p = {
                        'name': name,
                        'value': value
                    }
                    try:
                        self.fm.operatingsystems.parameters_create(os_obj['id'], p )
                    except:
                        # Best-effort: one failed parameter doesn't abort the OS.
                        log.log(log.LOG_WARN, "Error adding host parameter '{0}'".format(name))
def process_config_user(self):
    """Create Foreman users from the 'users' config section.

    Skips entries that fail YAML validation, already exist, or reference an
    unresolvable LDAP auth source. 'INTERNAL' maps to auth source id 1.
    """
    log.log(log.LOG_INFO, "Processing users")
    for user in self.get_config_section('users'):
        # validate yaml
        try:
            self.validator.user(user)
        except MultipleInvalid as e:
            log.log(log.LOG_WARN, "Cannot create User '{0}': YAML validation Error: {1}".format(user['login'], e))
            continue
        # Skip users that already exist; subscripting a None result from
        # show() raises TypeError, which means "not found".
        try:
            self.fm.users.show(user['login'])['id']
            log.log(log.LOG_WARN, "User {0} allready exists".format(user['login']))
            continue
        except TypeError:
            pass
        # resolve auth source
        # BUGFIX: was `is not 'INTERNAL'` -- identity comparison against a
        # string literal is unreliable; use inequality.
        if user['auth-source'] != 'INTERNAL':
            try:
                as_id = self.fm.auth_source_ldaps.show(user['auth-source'])['id']
            except TypeError:
                # BUGFIX: format arguments were swapped (login filled the
                # auth-source slot and vice versa).
                log.log(log.LOG_ERROR, "Cannot resolve auth source '{0}' for user '{1}', skipping creation".format(user['auth-source'], user['login']))
                continue
            del(user['auth-source'])
            user['auth_source_id'] = as_id
        else:
            del(user['auth-source'])
            user['auth_source_id'] = 1  # id 1 == built-in INTERNAL auth source
        try:
            self.fm.users.create(user=user)
        except ForemanException as e:
            msg = self.get_api_error_msg(e)
            log.log(log.LOG_ERROR, "Cannot create user '{0}', api says: '{1}'".format(user['login'], msg) )
            continue
def __init__(self, config, loglevel=logging.INFO):
    """Remember the 'foreman' config section, configure logging, and build a validator."""
    logging.basicConfig(level=loglevel)
    log.LOGLEVEL = loglevel
    self.loglevel = loglevel
    self.config = config['foreman']
    self.validator = Validator()
def connect(self):
    """Open the Foreman API connection and verify the credentials work."""
    try:
        # Temporarily silence warnings emitted while connecting.
        logging.disable(logging.WARNING)
        auth = (self.config['auth']['user'], self.config['auth']['pass'])
        self.fm = Foreman(self.config['auth']['url'], auth, api_version=2, use_cache=False, strict_cache=False)
        # A throwaway index call detects faulty credentials in the yaml early.
        self.fm.architectures.index()
        logging.disable(self.loglevel-1)
    except:
        log.log(log.LOG_ERROR, "Cannot connect to Foreman-API")
        sys.exit(1)
def get_details_spotify(song_name):
    '''
    Tries finding metadata through Spotify.

    Returns (artist, album, song_title, lyrics, match_bool, score) when the
    top Spotify result matches well enough, otherwise None.
    '''
    song_name = improvename.songname(song_name)
    spotify = spotipy.Spotify()
    results = spotify.search(song_name, limit=1)  # Find top result
    log.log_indented('* Finding metadata from Spotify.')
    try:
        album = (results['tracks']['items'][0]['album']
                 ['name'])  # Parse json dictionary
        artist = (results['tracks']['items'][0]['album']['artists'][0]['name'])
        song_title = (results['tracks']['items'][0]['name'])
        try:
            # BUGFIX: these two helpers were called unqualified (log_indented /
            # log_error), unlike every other call in this function which goes
            # through the log module -- that raises NameError at runtime.
            log.log_indented("* Finding lyrics from Genius.com")
            lyrics = get_lyrics_genius(song_title)
        except:
            log.log_error("* Could not find lyrics from Genius.com, trying something else")
            lyrics = get_lyrics_letssingit(song_title)
        match_bool, score = matching_details(song_name, song_title, artist)
        if match_bool:
            return artist, album, song_title, lyrics, match_bool, score
        else:
            return None
    except IndexError:
        # No items in the search result -> no metadata available here.
        log.log_error(
            '* Could not find metadata from spotify, trying something else.', indented=True)
        return None
def add_albumart(albumart, song_title):
    '''
    Adds the album art to the song.

    albumart: URL of the cover image.
    song_title: path of the MP3 file to tag.
    Returns None on download failure (logged); otherwise tags and saves the file.
    '''
    try:
        img = urlopen(albumart)  # Gets album art from url
    except Exception:
        log.log_error("* Could not add album art", indented=True)
        return None
    try:
        artwork = img.read()  # Read the image bytes before tagging
    finally:
        img.close()  # BUGFIX: the HTTP response was never closed (leak)
    audio = EasyMP3(song_title, ID3=ID3)
    try:
        audio.add_tags()
    except _util.error:
        # Tags already exist -- fine, we just add the APIC frame below.
        pass
    audio.tags.add(
        APIC(
            encoding=3,  # UTF-8
            mime='image/png',
            type=3,  # 3 is for album art
            desc='Cover',
            data=artwork  # Adds album art
        )
    )
    audio.save()
    log.log("> Added album art")
def get_logdir(self):
    """Retrieve the current log directory"""
    dirs = self.session.get_dirs()  # (base_dir, log_dir)
    return dirs[1]
def set_logdir(self, log_dir=None):
    """Set the current log directory and create a new log file"""
    # Fall back to the default when no directory was given or it doesn't exist.
    if not (log_dir and os.path.exists(log_dir)):
        log_dir = os.path.normpath(self.default_logdir)
    self.session.set_dirs(self.get_basedir(), log_dir)
    logname = "ISF-%s.log" % util.formattime()
    self.io.setlogfile(os.path.join(log_dir, logname))
def _prompt_for_logging(self, target, oldproject):
    """Interactively choose a project and per-target log directory.

    Returns (project, log_dir) on success, or (None, None) when access to
    the chosen directory is denied.
    """
    try:
        if oldproject is None:
            oldproject = ''
        # Strip the previous project name (and everything after it) off the
        # current log dir to recover the base directory; fall back to the
        # full log dir when the strip produces an empty string.
        base_logdir = self.get_logdir()
        base_logdir = ''.join(base_logdir[:base_logdir.find(oldproject)])
        if len(base_logdir) == 0:
            base_logdir = self.get_logdir()
        # Request #1699: Change to include compatible logging structure
        self.io.newline()
        log_dir = self.io.prompt_user("Base Log directory", base_logdir, gvars=self.fbglobalvars)
        log_dir = os.path.abspath(log_dir)
        # Get the list of projects
        self.io.print_msg("Checking %s for projects" % (log_dir))
        projects = self._get_projectlist(log_dir)
        # Give the user the choice to use an existing project or create a new one
        project = None
        while project is None:
            project = self._prompt_for_project(projects)
        log_dir = os.path.join(log_dir, project, 'z'+target.replace(":", "_")) # To support IPv6 address in log files
        if not self.io.prompt_yn("Set target log directory to '%s'?" % (log_dir)):
            log_dir = self.io.prompt_user("Target log directory?", log_dir, gvars=self.fbglobalvars)
        try:
            os.makedirs(log_dir) # Fix from 3.2.0 - Don't reinvent the wheel
        except:
            # makedirs may fail because the directory already exists; only
            # re-raise when it truly could not be created.
            if not os.path.exists(log_dir):
                raise
        self.set_logdir(log_dir)
        return (project, log_dir)
    except OSError:
        # NOTE(review): if the OSError fires before log_dir is assigned, this
        # message raises UnboundLocalError -- confirm that is acceptable.
        self.io.print_warning("Access Denied to '%s'! Choose a different log directory." %(log_dir))
        return (None, None)
def __init__(self, id_, chip):
    """Create an idle neuron belonging to `chip`.

    Register semantics (per the CM1K spec):
      cxt: context in 1-127 (context 0 trains all neurons regardless of context).
      cat: category in 1-32767 (category 0 presents counterexamples during
           training to shrink AIFs; bit 15 is set, i.e. +32768, when the
           neuron degenerates).
      aif: active influence field, expressed on the same scale as dist.
      dist: range depends on the norm -- L1 spans 0-65280 (255 x 256),
            Lsup (max) spans 0-255.
      pattern: byte array compared component-by-component (byte-by-byte, not
               bit-by-bit, so this is not a Hamming distance).
    """
    log.trace("Neuron.init()")
    self.id_ = id_
    self.chip = chip
    self.state = NeuronState.idle
    self.cxt = 0             # Context
    self.cat = 0             # Category
    self.aif = 0             # Active influence field
    self.dist = 0            # Running distance accumulator
    self.degenerate = False  # True when aif shrinks to minif
    self.pattern = []        # Components (the pattern or rbf "center" stored in this neuron)
def reset_dist(self):
    """Zero the running distance accumulator."""
    # Called from CM1KEmulator.reset_indexcomp(), i.e., whenever LCOMP is updated
    log.trace("Neuron.reset_dist()")
    self.dist = 0
def broadcast(self, input_, norm, classifier=None, aif_scale=1):
    """
    Used for high level broadcast, in which the input is processed in bulk instead of per-component, i.e., per byte.
    input_ of len 1-256 (for proper CM1K emulation, otherwise unlimited)
    norm: A DistNorm enum
    classifier: A Classifier enum
    aif_scale: Modify the aif when determining whether the fire. The aif can also be permanently scaled via
    CM1KEmulator.scale_all_aifs(), but this parameter enables the same behavior without altering the neuron.
    """
    # Called from CM1KEmulator.broadcast()
    log.trace("Neuron.broadcast()")
    # This shouldn't be necessary. This function should only be called on committed and the rtl neurons.
    if self.state == NeuronState.idle:
        log.error("Neuron.broadcast() called on idle neuron")
        return
    self.dist = 0  # NOTE: Not sure this is necessary. Also, undecided whether this should simply call reset_dist().
    # Accumulate the distance between input_ and the stored pattern under the
    # requested norm: L1 = sum of |diffs|, Lsup = max |diff|, Euc = rounded
    # integer Euclidean distance.
    if norm == cm1k.CM1KDistNorm.l1:
        for i, comp in enumerate(input_):
            self.dist += abs(comp - self.pattern[i])
    elif norm == cm1k.CM1KDistNorm.lsup:
        for i, comp in enumerate(input_):
            self.dist = max(abs(comp - self.pattern[i]), self.dist)
    elif norm == cm1k.CM1KDistNorm.euc:
        for i, comp in enumerate(input_):
            self.dist += (comp - self.pattern[i]) * (comp - self.pattern[i])
        self.dist = int(round(math.sqrt(self.dist)))
    log.log("Single neuron cat{} dist: {:>5} < {:>5} ?".format(self.cat, self.dist, self.aif))
    # TODO: Use the minimum and maximum AIFs of each neuron (i.e., of each context)
    # Scale the AIF transiently for this broadcast only, clamped to [0, 0xFFFF].
    aif = self.aif if aif_scale == 1 else min(max(int(round(self.aif * aif_scale)), 0), 0xFFFF)
    # RBF fires only within the (scaled) influence field; KNN always fires.
    if (classifier == cm1k.CM1KClassifier.rbf and self.dist < aif) or classifier == cm1k.CM1KClassifier.knn:
        # The neuron has fired
        log.log("Fire with dist{} aif{} cat{}".format(self.dist, aif, self.cat))
        self.chip.store_firing_neuron(self)
def commit(self, cxt, cat, aif, pattern):
    """Commit this neuron to the network with the given context, category,
    influence field, and pattern. The pattern will normally already be in
    place from the immediately preceding broadcast."""
    log.trace("Neuron.commit() cxt{} cat{} aif{}".format(cxt, cat, aif))
    self.state = NeuronState.com
    self.cxt, self.cat, self.aif = cxt, cat, aif
    self.dist = 0
    # Reassigning the pattern is redundant when broadcast already stored it,
    # but harmless.
    self.pattern = pattern
def shrink_if_necessary(self, cat, new_aif, minif):
    """Shrink the AIF when the categories disagree and the error-compensating
    AIF is smaller than the currently held one."""
    log.trace("Neuron.shrink_if_necessary()")
    # TODO: create unit test where misfiring neuron has exactly the same distance as the best neuron.
    category_mismatch = cat != self.cat
    if category_mismatch and new_aif < self.aif:
        self.shrink_aif(new_aif, minif)
def dump_registers(self):
    """Log every register's name, index, and value (decimal and hex)."""
    for reg_name, legend in self.register_legend.iteritems():
        idx = legend[0]
        val = self.registers[idx]
        log.log("{:12} {:>2}: {:>10} {:>10}".format(reg_name, idx, val, "0x{:X}".format(val)))
# =========================================================================================================
def update_firing_dist_and_cat(self):
    """Expose the best remaining firing neuron's dist/cat through the
    registers and pop it from the list; report 0xFFFF when none fired."""
    # Called from update_all_neuron_dists() when LCOMP is updated to seed DIST with best neuron's distance
    # Called whenever DIST is read
    log.trace("CM1KEmulator.update_firing_dist_and_cat()")
    if not self.firing_neurons:
        # No firing neurons: both registers read as the sentinel 0xFFFF.
        self.write_dist_non_ui(0xFFFF)
        self.write_cat(0xFFFF)
        return
    # The list is sorted descending by dist, so the last entry is the best.
    best = self.firing_neurons[-1]
    self.write_dist_non_ui(best.dist)
    self.write_cat(best.cat)
    if best.degenerate:
        self.write_cat_degenerate(True)
    self.firing_neurons.pop()
def update_all_neuron_dists(self, last_comp=False):
    """Feed the current input component to every committed neuron in the
    active context and every ready-to-learn neuron; after the last component,
    sort the firing neurons."""
    # Called whenever COMP or LCOMP is updated
    log.trace("CM1KEmulator.update_all_neuron_dists()")
    gcr = self.read_gcr_context()
    comp = self.input_[self.indexcomp]
    for neuron in self.neurons:
        committed_in_cxt = neuron.state == nrn.NeuronState.com and neuron.cxt == gcr
        if committed_in_cxt or neuron.state == nrn.NeuronState.rtl:
            neuron.update_dist(
                self.indexcomp, comp, self.read_ncr_norm(), last_comp, self.read_nsr_classifier_enum())
    if last_comp:
        # After writing the last component, sort the firing neurons by distance
        self.firing_neurons.sort(key=lambda x: x.dist, reverse=True)
def listen(self):
    """Accept clients forever, optionally wrapping each in TLS, and hand
    every connection to a Tunnel."""
    self.socket.listen()
    log.log('Server is listening at %s:%d' % (self.config['addr'], self.config['port']))
    while True:
        client, _ = self.socket.accept()
        if self.isTLS:
            try:
                client = self.context.wrap_socket(client, server_side = True)
            except:
                # TLS handshake failed -- drop this client, keep serving.
                client.close()
                continue
        tunnel.Tunnel(client).start()
def get_logdir(self):
    """Retrieve the current log directory"""
    _, log_dir = self.session.get_dirs()  # (base_dir, log_dir)
    return log_dir
def set_logdir(self, log_dir=None):
    """Set the current log directory and create a new log file"""
    # Fall back to the default directory when none was supplied.
    chosen = log_dir if log_dir else os.path.normpath(self.default_logdir)
    self.session.set_dirs(self.get_basedir(), chosen)
    logname = "fuzzbunch-%s.log" % util.formattime()
    self.io.setlogfile(os.path.join(chosen, logname))
def _prompt_for_logging(self, target, oldproject):
    """Interactively choose a project and per-target log directory.

    Returns (project, log_dir) on success, or (None, None) when access to
    the chosen directory is denied.
    """
    try:
        if oldproject is None:
            oldproject = ''
        # Strip the previous project name (and everything after it) off the
        # current log dir to recover the base directory; fall back to the
        # full log dir when the strip produces an empty string.
        base_logdir = self.get_logdir()
        base_logdir = ''.join(base_logdir[:base_logdir.find(oldproject)])
        if len(base_logdir) == 0:
            base_logdir = self.get_logdir()
        # Request #1699: Change to include compatible logging structure
        self.io.newline()
        log_dir = self.io.prompt_user("Base Log directory", base_logdir, gvars=self.fbglobalvars)
        log_dir = os.path.abspath(log_dir)
        # Get the list of projects
        self.io.print_msg("Checking %s for projects" % (log_dir))
        projects = self._get_projectlist(log_dir)
        # Give the user the choice to use an existing project or create a new one
        project = None
        while project is None:
            project = self._prompt_for_project(projects)
        log_dir = os.path.join(log_dir, project, 'z'+target.replace(":", "_")) # To support IPv6 address in log files
        if not self.io.prompt_yn("Set target log directory to '%s'?" % (log_dir)):
            log_dir = self.io.prompt_user("Target log directory?", log_dir, gvars=self.fbglobalvars)
        try:
            os.makedirs(log_dir) # Fix from 3.2.0 - Don't reinvent the wheel
        except:
            # makedirs may fail because the directory already exists; only
            # re-raise when it truly could not be created.
            if not os.path.exists(log_dir):
                raise
        self.set_logdir(log_dir)
        return (project, log_dir)
    except OSError:
        # NOTE(review): if the OSError fires before log_dir is assigned, this
        # message raises UnboundLocalError -- confirm that is acceptable.
        self.io.print_warning("Access Denied to '%s'! Choose a different log directory." %(log_dir))
        return (None, None)
def main():
    """Entry point: dispatch to dump/import/cleanup (or the legacy combined
    mode when argv[1] is a file) after loading the YAML config."""
    try:
        function = sys.argv[1]
    except IndexError:  # BUGFIX: was a bare except; only a missing argument can occur here
        log.log(log.LOG_ERROR, "No action defined (Valid: dump, import, cleanup)")
        sys.exit(1)
    if os.path.isfile(sys.argv[1]):
        # Old invocation style: the first argument is the config file itself.
        config_file = sys.argv[1]
        function = "legacy"
    else:
        try:
            config_file = sys.argv[2]
        except IndexError:
            log.log(log.LOG_ERROR, "No YAML provided")
            sys.exit(1)
    try:
        # BUGFIX: 'with' guarantees the handle is closed even when parsing fails
        # (the old open/close pair leaked the handle on a parse error).
        with open(config_file, 'r') as fh:
            # SECURITY NOTE: yaml.load can construct arbitrary Python objects;
            # prefer yaml.safe_load if the config never uses custom tags.
            config = yaml.load(fh)
    except:
        log.log(log.LOG_ERROR, "Failed to load/parse config")
        sys.exit(1)
    if (function == "import"):
        fm = ForemanImport(config)
        fm.connect()
        fm_import(fm)
    if (function == "dump"):
        fm = ForemanDump(config)
        fm.connect()
        fm_dump(fm)
    if (function == "cleanup"):
        fm = ForemanCleanup(config)
        fm_cleanup(fm)
    if (function == "legacy"):
        # Legacy mode: cleanup then import, each with its own connection.
        fm_cls = ForemanCleanup(config)
        fm_cls.connect()
        fm_cleanup(fm_cls)
        fm_imp = ForemanImport(config)
        fm_imp.connect()
        fm_import(fm_imp)
def process_config_provisioningtpl(self):
    """Create Provisioning Templates from the 'provisioning-template' config
    section, linking each to the operating systems it lists."""
    log.log(log.LOG_INFO, "Processing Provisioning Templates")
    # fm.provisioning_templates.show(name) does not work as expected, we need to iterate over fm.provisioning_templates.index()
    ptlist = self.fm.provisioning_templates.index(per_page=99999)['results']
    for pt in self.get_config_section('provisioning-template'):
        # Validate the YAML stanza; skip entries that fail.
        try:
            self.validator.provt(pt)
        except MultipleInvalid as e:
            log.log(log.LOG_WARN, "Cannot create Provisioning Template '{0}': YAML validation Error: {1}".format(pt['name'], e))
            continue
        pt_id = False
        for ptc in ptlist:
            if (ptc['name'] == pt['name']):
                pt_id = ptc['id']
                log.log(log.LOG_DEBUG, "Provisioning Template '{0}' (id={1}) already present.".format(pt['name'], pt_id))
        if not pt_id:
            log.log(log.LOG_INFO, "Create Provisioning Template '{0}'".format(pt['name']))
            # Resolve the IDs of the operating systems this template links to;
            # a failed lookup only skips that one link.
            os_ids = []
            for osc in pt['os']:
                try:
                    os_id = self.fm.operatingsystems.show(osc['name'])['id']
                    os_ids.append(os_id)
                except:
                    log.log(log.LOG_WARN, "Cannot link OS '{0}' to Provisioning Template '{1}'".format(osc['name'],pt['name']))
            # BUGFIX: the template dict used to be built twice; the first copy
            # (without operatingsystem_ids) was dead code, unconditionally
            # overwritten before use, and has been removed.
            pt_tpl = {
                'name': pt['name'],
                'template': pt['template'],
                'snippet': pt['snippet'],
                'audit_comment': pt['audit-comment'],
                'template_kind_id': pt['template-kind-id'],
                'locked': pt['locked'],
                'operatingsystem_ids': os_ids
            }
            prtes = self.fm.provisioning_templates.create(provisioning_template=pt_tpl)