def from_parser(body):
    node = Node()
    node.name = "from"
    node.value = body
    # print("FROM:" + body)
    items = body.split(":")
    system = str(items[0]).strip()
    # Default to "latest" when the FROM line carries no explicit tag.
    tag = str(items[1]).strip() if len(items) > 1 else "latest"
    if system not in constants.systems:
        # Fetch the base image's Dockerfile and parse it recursively.
        base_dockerfile = docker_fetcher.fetch(system, tag, 1)
        if len(base_dockerfile) <= 0:
            return None
        base_parser = parser.Parser(base_dockerfile)
        return base_parser.root
    else:
        if system not in system_count:
            system_count[system] = 1
        else:
            system_count[system] += 1
        print(system)
        return node
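A hedged sketch of a call site for from_parser, assuming the enclosing module supplies Node, constants.systems, system_count, docker_fetcher, and parser.Parser (none of which are shown in the snippet above):

# Hypothetical: pass the argument of a Dockerfile FROM instruction.
node = from_parser("ubuntu:16.04")
if node is not None:
    print(node.name, node.value)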
def GenerateData(argv):
    # Temporarily redirect stdout to a log file while generating.
    savedStdout = sys.stdout
    log_file = open('.datagon.log', 'w')
    sys.stdout = log_file
    print('------ Starting Generating Data. ------')
    with open(os.path.join(os.getcwd(), argv[0]), 'r') as f:
        tokens = tokenizer.Tokenizer(f.read())
    ast = parser.Parser(tokens)
    result = translator.Translator(ast)
    sys.stdout = savedStdout
    log_file.close()
    if len(argv) > 1:
        with open(argv[1], 'w') as out:
            out.write(result)
    else:
        print(result)
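A usage sketch for GenerateData, assuming it is the entry point of a datagon-style generator and that argv mirrors sys.argv[1:] (input script first, optional output path second; both file names below are hypothetical):

import sys

if __name__ == '__main__':
    # e.g. python datagon.py input.dg output.txt
    GenerateData(sys.argv[1:])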
def interpret(query):
    query_dict = Parser().parse(query)
    if query_dict["columns"] == "*":
        columns = list(PROPERTIES)
    else:
        columns = query_dict["columns"]
    result_set = []
    if "processes" in query_dict["tables"]:
        result_set = _select_processes_infos(columns)
    else:
        raise Exception("Table not found")
    if "order by" in query_dict:
        column_idx = columns.index(query_dict["order by"])
        result_set = sorted(result_set, key=lambda x: x[column_idx])
    return columns, result_set
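A hedged example of calling interpret(); the exact query dialect accepted by Parser().parse() is not shown, so the query string below is an assumption:

# Hypothetical query over the only supported table, "processes".
columns, rows = interpret("select * from processes order by pid")
for row in rows:
    print(dict(zip(columns, row)))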
def render(self):
    print('Rendering {}'.format(self._name))
    for include in self._include_list:
        parsed_ruleset = Parser()
        parsed_ruleset.parse_file(join(self._config.policies, include))
        self._rules.append(parsed_ruleset)
    for ruleset in self._rules:
        self.resolve_hostgroups(ruleset)
        self.resolve_portgroups(ruleset)
    if self._vendor == 'junos':
        renderer = JunosRenderer(self)
        renderer.render()
    if self._vendor == 'ios':
        renderer = IOSRenderer(self)
        renderer.render()
def __init__(self):
    self.parser = Parser()
def run(self):
    # Get info port of ASD.
    node_init_path = self.options['config-file']
    configParser = Parser(node_init_path)
    if configParser.parse() != 0:
        logging.critical("Port not found in config file.")
        sys.exit(-1)
    logging.info("Leaf Node port: %s. Home server: %s ", configParser.port, self.options['home-url'])
    # Get leaf connection.
    self.leafConnection = LeafLine(configParser.port, self.leafAddress)
    if self.options['sample']:
        infoMap = self.get_info()
        if infoMap:
            logging.info("infoMap = %s", pprint.pformat(infoMap, indent=2))
        else:
            logging.warning("No info. returned from ASD!")
        return
    self.wait_for_leaf_connection(True)
    # Collect data.
    while True:
        infoMap = self.get_info()
        if infoMap:
            self.phonehome(infoMap)
        else:
            # Report ASD is down.
            logging.info("Sending back status: ASD is down.")
            self.phonehome({"telemetry-agent-status": "ASD is down."})
            # Attempt to reconnect.
            self.leafConnection = LeafLine(configParser.port, self.leafAddress)
            self.wait_for_leaf_connection()
        time.sleep(self.options['interval'])
def load(self, config_path):
    """Have the parser construct the configuration."""
    log.info("Loading configuration at '%s'", config_path)
    self.config_path = config_path
    p = parser.Parser(self)
    p.parse_file(config_path)
    log.info("------------------------ BEGINNING NEW RUN -------------------------------")
def __init__(self, kb):
    ExprGenerator.setup()
    State.ExtraInfoGen = ExprGenerator
    ForcedDecoder.model = Model()
    ForcedDecoder.indepKB = IndepKnowledgeBase()
    ForcedDecoder.KB = kb
    kb.postedit_indepkb(ForcedDecoder.indepKB)
    ForcedDecoder.parser = Parser(ForcedDecoder.indepKB, ForcedDecoder.KB, ForcedDecoder.model, State)
    ForcedDecoder.questions = kb.questions
    ForcedDecoder.logicalexprs = kb.logicalexprs
    ForcedDecoder.verbose = False
def __init__(self, knowledgebase):
    if _sanity_check:
        ExprGenerator.setup()
    State.ExtraInfoGen = ExprGenerator
    Perceptron.model = Model()
    Perceptron.indepKB = IndepKnowledgeBase()
    Perceptron.KB = knowledgebase
    knowledgebase.postedit_indepkb(Perceptron.indepKB)
    Perceptron.c = 0
    Perceptron.iter = FLAGS.iter
    Perceptron.beamsize = FLAGS.beam
    Perceptron.parser = Parser(Perceptron.indepKB, Perceptron.KB, Perceptron.model, State)
    Perceptron.ncpus = FLAGS.ncpus
    Perceptron.ontheflyfd = FLAGS.ontheflyfd
    Perceptron.single_gold = FLAGS.singlegold
    Perceptron.output_prefix = FLAGS.outputprefix
    Perceptron.fdbeamsize = FLAGS.fdbeam
    if Perceptron.ncpus > 0:
        Perceptron.shared_memo_size = int(1024 * 1024 * 1024)  # 1 GB shared memory
        Perceptron.shared_memo = mmap.mmap(-1, Perceptron.shared_memo_size,
                                           mmap.MAP_SHARED, mmap.PROT_READ | mmap.PROT_WRITE)
    Perceptron.ref_beams = {}
    if FLAGS.ref:
        print >> LOGS, "loading refs",
        hgs = pickle.load(open(FLAGS.ref))
        self.load_hgs(hgs)
    if FLAGS.extraref:
        print >> LOGS, "loading extra refs",
        hgs = pickle.load(open(FLAGS.extraref))
        self.load_hgs(hgs)
def importMHT():
    # Ask for the .mht file.
    file_path = getFile(mw, _("Import mht file"), None, key="import")
    if not file_path:
        return
    file_path = unicode(file_path)
    # Convert the mht file.
    parser = Parser(file_path)
    output = parser.run()
    # Create a temp dir instead of a temp file, since Windows
    # won't allow subprocesses to access it otherwise.
    # https://stackoverflow.com/questions/15169101/how-to-create-a-temporary-file-that-can-be-read-by-a-subprocess
    temp_dir = mkdtemp()  # created before the try so the finally clause can always see it
    try:
        path = os.path.join(temp_dir, 'import.html')
        with open(path, 'w+') as html:
            html.write(output)
        # Move temp images to collection.media.
        media_dir = os.path.join(mw.pm.profileFolder(), "collection.media")
        for meta in parser.file_map.values():
            temp_path = meta.get('path')
            new_path = os.path.join(media_dir, meta.get('filename'))
            shutil.move(temp_path, new_path)
        # Import into the collection.
        ti = TextImporter(mw.col, path)
        ti.delimiter = '\t'
        ti.allowHTML = True
        ti.initMapping()
        MHTImportDialog(mw, ti)
        # Remove the file so the directory is empty for rmdir.
        os.remove(path)
    finally:
        os.rmdir(temp_dir)
def execute(text, print_result, ctx):
    tokens = l.lex(text)
    parser = p.Parser(tokens)
    program = parser.parse_program()
    if len(parser.errors) > 0:
        parser.print_errors()
    else:
        result = e.evaluate(program, ctx)
        if (print_result and type(result) != o.Null) or e.is_err(result):
            print(result)
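A minimal read-eval-print loop built on execute(); the surrounding project defines its own repl() (referenced in the Pluto main() further down), so this version is only an illustrative sketch:

def simple_repl(ctx):
    # Evaluate one line at a time, echoing results, until EOF.
    while True:
        try:
            line = input('>> ')
        except EOFError:
            break
        execute(line, True, ctx)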
def trigger_accountnumber(self, msg):
    if not self.asked_account:
        self.sender.sendMessage('What is your account number and sort code?')
        self.asked_account = True
    else:
        parser = Parser(msg['text'])
        parsed = parser.parse_text()
        self._db.put(msg, 'accountnumber', parsed['recepient']['acc_number'])
        self._db.put(msg, 'sortcode', parsed['recepient']['sort_code'])
        self.trigger_secretnumber(msg)
def on_chat_message(self, msg):
    # if chat_id not in self._seen:
    #     return
    print(msg)
    parser = Parser(msg['text'])
    parsed = parser.parse_text()
    # Have another id to transfer to and send a message.
    if self.current_thread:
        print("current thread, {}".format(self.current_thread))
    elif parsed['action'] != 'transfer':
        print('action isnt transfer')
        return
    elif parsed['action'] == 'transfer':
        print('action is transfer')
        self.current_thread = True
    if not self.recipient:
        print('no recipient')
        self.recipient = parsed['recepient']['user_alias']
        self.amount = int(parsed['amount'])
        try:
            self.alias_id = self._db.fetch_alias(self.recipient)
        except KeyError:
            self.sender.sendMessage('Oh oh, looks like your friend hasn\'t registered yet. Send him a message and let him know that he should register right away.')
            return
    print(self.current_thread)
    print(self.asked_password)
    if self.asked_confirmation:
        print('going to send money')
        self.trigger_confirmation(msg)
    elif self.asked_password:
        print('going to ask confirmation')
        self.trigger_confirmation(msg)
    elif self.recipient and self.alias_id:
        print('going to ask password')
        self.trigger_password_question(msg, parsed)
def __init__(self, database, dir_path, debug=False):
    self._parser = Parser()
    self._wrapper = Wrapper()
    self._writer = Writer(database)
    self._sitemap_generator = SitemapGenerator(database)
    self._feeds_generator = FeedsGenerator(database)
    self._debug = debug
    self._dir_path = dir_path
    self._database = database
    self._file_path = ""
def __init__(self):
    self.parser = Parser()
def __init__(self, analytical=False, tag=None):
    # Custom function mappings used for lambdify.
    # TODO: this should be handled by addFuncs now.
    if analytical:
        self.sym2func = {"ceiling": a_umath.ceil, "Max": uwrap(SoftMaximum)}
        self.conv2analytical = self.conv2analytical_simple_compression
        #self.conv2analytical = self.conv2analytical_GMM
    else:
        self.sym2func = {"ceiling": umath.ceil}
    self.tag = tag
    self.analytical = analytical  # Use analytical methods or MC.
    self.idx_bounds = {}          # Bounds for index symbols, key type: string
    self.syms = {}                # Symbols used in system modeling, key type: string
    self.exprs_str = []           # Original string representation of expressions.
    self.exprs = []               # SymPy-understandable parsed expressions.
    self.given = {}               # Inputs to model evaluation, key type: symbol
    self.intermediates = {}       # Uncertain dependent variables, key type: symbol
    self.response = set()         # Set of response symbols.
    self.ordered_given = []       # List of symbols.
    self.sol_inter_set = {}       # Intermediate solution set, key type: symbol
    self.inter_funcs = {}         # Lambdified callables for intermediate vars, key type: symbol
    self.sol_final_set = {}       # Final solution set, key type: symbol
    self.target_funcs = {}        # Lambdified callables for response vars, key type: symbol
    self.opts = []                # Variables left to optimize.
    self.parser = Parser()

npts = 100000
def __init__(self):
    self.parser = Parser()
Source: __main__.py, project Neural-Architecture-Search-with-RL, author dhruvramani.
def main():
    args = Parser().get_parser().parse_args()
    config = Config(args)
    loss_dict = train_model(config)
    output = "=> Test Loss : {}, Test Accuracy : {}".format(loss_dict["test_loss"], loss_dict["test_accuracy"])
    with open("../stdout/test_log.log", "a+") as f:
        f.write(output)
    print("\033[1m\033[92m{}\033[0m\033[0m".format(output))
def main(args):
    # Note: this snippet is Python 2 (file(), print >> stream, "except X, e" syntax).
    try:
        (opts, args) = getopt(args, 'o:TPX')
    except GetoptError:
        usage()
    if len(args) != 1:
        usage()
    from tokenizer import Tokenizer
    from parser import Parser
    from error import JtError
    import context
    from os.path import abspath
    filename = abspath(args[0])
    stdin = file(filename, 'r')
    target = 'P'
    stdout = sys.stdout
    for (ok, ov) in opts:
        if ok in ('-T', '-P', '-X'):
            target = ok[1]
        elif ok == '-o':
            stdout = file(ov, 'w')
    contents = stdin.read()
    tokenizer = Tokenizer()
    tokenizer.build()
    tokenizer.input(contents)
    parser = Parser(tokenizer)
    result_tree = None
    try:
        result_tree = parser.parse()
    except JtError, error:
        failure(error)
    context.add_pdf(result_tree)
    ok = context.inspect(result_tree)
    ok &= context.validate(result_tree)
    if target == 'T':
        print >>stdout, result_tree
    if not ok:
        failure()
    result_tree.filename = filename
    if target != 'T':
        if stdout.isatty():
            failure('Prevented from printing binary garbage to the terminal.')
        if target == 'P':
            result_tree.compile_pyc(stdout)
        elif target == 'X':
            result_tree.compile_x86(stdout)
        else:
            raise NotImplementedError()
# vim:ts=4 sts=4 sw=4 et
def main():
    parser = argparse.ArgumentParser(description="The interpreter for Pluto")
    parser.add_argument("-f", "--file", action="store", dest="file", type=str, help="the file to execute")
    parser.add_argument("-p", "--parse", action="store_true", default=False, help="just parse the file - don't execute it")
    parser.add_argument("-t", "--tree", action="store_true", default=False, help="print the parse tree")
    parser.add_argument("-i", "--interactive", action="store_true", default=False, help="enter interactive mode after the file has been run")
    parser.add_argument("-n", "--no-prelude", action="store_true", dest="no_prelude", help="don't load the prelude")
    parser.add_argument("-v", "--version", action="version", version="Pluto, early beta version")
    args = parser.parse_args()
    if args.file is None:
        ctx = c.Context()
        if not args.no_prelude:
            import_prelude(ctx)
        repl(ctx)
    else:
        try:
            text = open(args.file).read()
            if args.parse or args.tree:
                tokens = l.lex(text)
                parse = p.Parser(tokens)
                program = parse.parse_program()
                if len(parse.errors) > 0:
                    parse.print_errors()
                elif args.tree:
                    print(program)
                return
            ctx = c.Context()
            if not args.no_prelude:
                import_prelude(ctx)
            execute(text, False, ctx)
            if args.interactive:
                print()
                repl(ctx)
        except FileNotFoundError:
            print("File not found: %s" % args.file)
            return
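Assuming this main() is wired up as the interpreter's entry point, typical invocations would look like the following (the script name pluto.py is a guess; the flags come from the argparse definitions above):

# python pluto.py -f program.pto        # run a file, then exit
# python pluto.py -f program.pto -p -t  # parse only and print the parse tree
# python pluto.py -i -f program.pto     # run the file, then drop into the REPL
# python pluto.py                       # no file: start the REPL directly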