def parse(self, text, filename='', debuglevel=0):
""" Parses C code and returns an AST.
text:
A string containing the C source code
filename:
Name of the file being parsed (for meaningful
error messages)
debuglevel:
Debug level to yacc
"""
self.clex.filename = filename
self.clex.reset_lineno()
self._scope_stack = [dict()]
self._last_yielded_token = None
return self.cparser.parse(
input=text,
lexer=self.clex,
debug=debuglevel)
######################-- PRIVATE --######################
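This looks like pycparser's CParser.parse(); a minimal usage sketch, assuming the standard pycparser package is installed (the sketch is not part of the snippet above):

# Hedged usage sketch, assuming pycparser's public API.
from pycparser import c_parser

parser = c_parser.CParser()
ast = parser.parse("int main(void) { return 0; }", filename='<stdin>')
ast.show()  # dump the parsed AST to stdout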
def parse(input, eof=False, debug=False):
"""Parse a whole script at once and return the generated AST and unconsumed
data in a tuple.
NOTE: eof is currently meaningless because the parser cannot work in pull
mode; it should be set to True.
"""
lexer = pyshlex.PLYLexer()
remaining = lexer.add(input, eof)
if lexer.is_empty():
return [], remaining
if debug:
debug = 2
return yacc.parse(lexer=lexer, debug=debug), remaining
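A hedged usage sketch of this helper; it treats the returned AST as a list, matching the empty-input branch above, and the shell snippet is a placeholder:

# Parse one complete script in a single call (eof=True, as the docstring advises).
ast, remaining = parse("echo hello && echo world\n", eof=True)
for command in ast:
    print(command)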
#-------------------------------------------------------------------------------
# AST rendering helpers
#-------------------------------------------------------------------------------
def __init__(self, filename):
"""Create the parser."""
if filename is None:
filename = ""
self.lexer = QasmLexer(filename)
self.tokens = self.lexer.tokens
self.parse_dir = tempfile.mkdtemp(prefix='qiskit')
self.precedence = (
('left', '+', '-'),
('left', '*', '/'),
('left', 'negative', 'positive'),
('right', '^'))
# yacc.yacc() also accepts write_tables=<bool> and optimize=<bool>
self.parser = yacc.yacc(module=self, debug=False,
outputdir=self.parse_dir)
self.qasm = None
self.parse_deb = False
self.global_symtab = {} # global symtab
self.current_symtab = self.global_symtab # top of symbol stack
self.symbols = [] # symbol stack
self.external_functions = ['sin', 'cos', 'tan', 'exp', 'ln', 'sqrt', 'acos', 'atan', 'asin']
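The precedence table above follows PLY's convention: each tuple gives an associativity followed by token names or literal characters, and later tuples bind more tightly. A self-contained sketch of the same mechanism, independent of the QASM parser (the 'uminus' pseudo-token is made up and attached to a rule with %prec):

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER',)
literals = ['+', '-', '*', '/', '(', ')']
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

precedence = (
    ('left', '+', '-'),
    ('left', '*', '/'),
    ('right', 'uminus'),   # pseudo-token, resolved via %prec below
)

def p_expr_binop(p):
    """expr : expr '+' expr
            | expr '-' expr
            | expr '*' expr
            | expr '/' expr"""
    ops = {'+': lambda a, b: a + b, '-': lambda a, b: a - b,
           '*': lambda a, b: a * b, '/': lambda a, b: a / b}
    p[0] = ops[p[2]](p[1], p[3])

def p_expr_uminus(p):
    "expr : '-' expr %prec uminus"
    p[0] = -p[2]

def p_expr_group(p):
    "expr : '(' expr ')'"
    p[0] = p[2]

def p_expr_number(p):
    "expr : NUMBER"
    p[0] = p[1]

def p_error(p):
    raise SyntaxError("syntax error at %r" % (p,))

lexer = lex.lex()
parser = yacc.yacc(write_tables=False, debug=False)
print(parser.parse("1 + -2 * 3", lexer=lexer))  # prints -5: unary minus binds tightest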
def __init__(self, the_lexer=None, **kwargs):
"""
Constructor.
:param the_lexer: a lexer object to use; a new franca_lexer.Lexer() is created if omitted.
"""
if not the_lexer:
the_lexer = franca_lexer.Lexer()
self._lexer = the_lexer
self.tokens = self._lexer.tokens
# Disable debugging, by default.
if "debug" not in kwargs:
kwargs["debug"] = False
if "write_tables" not in kwargs:
kwargs["write_tables"] = False
self._parser = yacc.yacc(module=self, **kwargs)
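A hedged usage sketch of the constructor above (the class and module names are as implied by the snippet; anything else is illustrative):

# Defaults keep yacc quiet and avoid writing parser.out / parsetab.py.
parser = Parser()
# Any other yacc.yacc() keyword argument is passed straight through.
verbose_parser = Parser(debug=True, write_tables=True)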
def parse(self, qpath_string ):
'''Parse a QPath string.
:param qpath_string: the QPath locator string
:type qpath_string: string
:returns: list, list - the parsed locator structures and their lexical position info
'''
self._last_locator = None
qpath_string = qpath_string.strip()
self._lexer = QPathLexer()
self.tokens = self._lexer.tokens
self._parser = yacc.yacc(module=self, debuglog=self._logger, errorlog=self._logger, write_tables=0)
self._qpath_string = qpath_string
parsed_structs = []
lex_structs = []
for locator in self._parser.parse(qpath_string, self._lexer):
parsed_structs.append(locator.dumps())
lex_struct = {}
for propname in locator:
prop = locator[propname]
lex_struct[prop.name.value] = [prop.name.lexpos, prop.operator.lexpos, prop.value.lexpos]
lex_structs.append(lex_struct)
return parsed_structs, lex_structs
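A hedged usage sketch; both the enclosing class name and the QPath locator string below are placeholders for illustration:

qpath_parser = QPathParser()   # hypothetical name for the class holding parse()
parsed, lex_positions = qpath_parser.parse("/ ClassName='Window' && Visible='True'")
print(parsed)          # re-serialized locators (locator.dumps())
print(lex_positions)   # {property_name: [name_pos, operator_pos, value_pos], ...} per locator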
def __init__(self, **kw):
self.debug = kw.get('debug', 0)
self.start = kw.get('start', 'topLevelStatementsOpt')
self.lexer = lex.lex(module=self, debug=self.debug)
self.parser = yacc.yacc(module=self,
debug=self.debug,
write_tables=0,
start=self.start,
)
self.si = units.Si()
self.connections = Connections()
self.scopeStack = []
self.tempCount = 0
self.enumerations = {}
self.encapsulationStack = [Encapsulation()]
self.timeUnitFromEncapName = {}
self.clearEnvironment()
def parse(path, module_name=None, lexer=None, parser=None):
if lexer is None:
lexer = lex.lex()
if parser is None:
parser = yacc.yacc(debug=False, write_tables=0)
with open(path) as f:
data = f.read()
if module_name is None:
basename = os.path.basename(path)
module_name = os.path.splitext(basename)[0]
thrift = types.ModuleType(module_name)
setattr(thrift, '__thrift_file__', path)
thrift_stack.append(thrift)
lexer.lineno = 1
parser.parse(data)
thrift_stack.pop()
return thrift
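A hedged usage sketch (the .thrift path is a placeholder):

thrift_module = parse('example.thrift')   # returns a types.ModuleType named 'example'
print(thrift_module.__thrift_file__)      # 'example.thrift'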
def parse(self, data, path=None):
"""
Args:
data (str): Raw specification text.
path (Optional[str]): Path to specification on filesystem. Only
used to tag tokens with the file they originated from.
"""
assert not self.exhausted, 'Must call get_parser() to reset state.'
self.path = path
parsed_data = self.yacc.parse(data, lexer=self.lexer, debug=self.debug)
# It generally makes sense for lexer errors to come first, because
# those can be the root of parser errors. Also, since we only show one
# error max right now, it's best to show the lexing one.
for err_msg, lineno in self.lexer.errors[::-1]:
self.errors.insert(0, (err_msg, lineno, self.path))
parsed_data.extend(self.anony_defs)
self.exhausted = True
return parsed_data
def __init__(self, **kw):
super(PlyParser, self).__init__()
self.debug = kw.get('debug', 0)
self.names = {}
try:
modname = os.path.split(os.path.splitext(__file__)[0])[
1] + "_" + self.__class__.__name__
except Exception:
modname = "parser" + "_" + self.__class__.__name__
self.debugfile = modname + ".dbg"
self.tabmodule = modname + "_" + "parsetab"
# print self.debugfile, self.tabmodule
# Builds the lexer and parser
lex.lex(module=self, debug=self.debug)
yacc.yacc(module=self,
debug=self.debug,
debugfile=self.debugfile,
tabmodule=self.tabmodule)
def init(outputdir=None):
outputdir = outputdir or os.path.dirname(__file__) # os.getcwd()
current_module = sys.modules[__name__]
#print (outputdir, current_module)
debug = 0
optimize = 0
lexer = lex.lex(optimize=0, debug=debug)
# lexer.input('on init\n declare shared parameter cutoff')
# while True:
# tok = lexer.token()
# if tok is None:
# break
# print (tok)
return yacc.yacc(method="LALR", optimize=optimize, debug=debug,
write_tables=0, module=current_module, start='script',
outputdir=outputdir, tabmodule='ksp_parser_tab')
def __init__(self):
self.lex = TDXLex()
self.lex.build()
self.tokens = self.lex.tokens
self.literals = self.lex.literals
self.parser = yacc(module = self,
start = 'tu_e',
debug = False
)
self.attr_re = re.compile(r'(LINETHICK[1-9])|STICK|VOLSTICK|LINESTICK|CROSSDOT|CIRCLEDOT|POINTDOT|DRAWNULL|DOTLINE|NODRAW|COLORSTICK')
self.color_re = re.compile(r'COLOR[0-9A-Z]+')
def __init__(self):
self.lexer = FrancaLexer(self.on_lexer_error)
self.lexer.build()
self.tokens = self.lexer.tokens
self.parser = yacc.yacc(module=self)
def generate_code(program: str):
parser = yacc.yacc()
result = parser.parse(program, debug=log)
print("the JSON format is:")
print(SyntaxTreeJSONEncoder(indent=4, separators=(',', ': ')).encode(result))
code = result.emit_code()
return CodeObj(code, scope_manager.current_const_list, scope_manager.current_name_list)
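A hedged usage sketch; the source text is a placeholder, and it assumes the module-level log and scope_manager objects referenced above are already set up:

code_obj = generate_code("a = 1 + 2")   # prints the AST as JSON, then returns a CodeObj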
def __compile(self, code):
lex.lex()
parser = yacc.yacc(start = 'statements')
statements = parser.parse(code)
#self.__dump(statements)
return statements
def __init__(self, lex_options=None, yacc_options=None):
super(Parser, self).__init__()
lex_options = lex_options or {}
lex_options.setdefault('debug', False)
lex_options.setdefault('optimize', True)
self.lexer = Lexer(**lex_options)
self.tokens = self.lexer.tokens
yacc_options = yacc_options or {}
yacc_options.setdefault('debug', False)
yacc_options.setdefault('optimize', True)
self.parser = yacc.yacc(module=self, **yacc_options)
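A hedged sketch of how the option plumbing above behaves:

parser = Parser()                                     # quiet, optimized defaults
debug_parser = Parser(yacc_options={'debug': True})   # setdefault() still keeps optimize=True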
def __init__(self, lexer):
self.lexer = lexer
self.tokens = lexer.tokens
self.precedence = [('right', 'ELSE')]
self.start = 'program'
self.yacc = yacc.yacc(module=self, debug=0)
def parse(self):
'''Build the syntax tree.'''
return self.yacc.parse(lexer=self.lexer)
def __init__(self):
self.lexer = lex.lex(module=self)
self.parser = yacc.yacc(module=self, debug=False, write_tables=False)
def build(self, **kwargs):
""" the start attribute is mandatory !
When calling the method with a start distinct from expression you may get the following message
WARNING: Symbol 'expression' is unreachable
Nothing to be aware of
"""
# keep track of
# # start the parser
# start = 'expression'
# if 'start' in kwargs.keys(): # MANDATORY
# start = kwargs['start']
# kwargs.pop('start', None)
# debugging and logging http://www.dabeaz.com/ply/ply.html#ply_nn44
#self.parser = yacc.yacc(module=self, start=start, errorlog=yacc.NullLogger(), debug = False, **kwargs)
self.parser = yacc.yacc(module=self, start='step', errorlog=yacc.NullLogger(), debug = False, **kwargs)
# https://github.com/dabeaz/ply/blob/master/ply/yacc.py
# debug: if yaccdebug is True, yacc generates a 'parser.out' file in the current directory
# """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
# MAIN
# """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
# example use:
def p_init_declarator_list_1(self, p):
""" init_declarator_list : init_declarator
| init_declarator_list COMMA init_declarator
"""
p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
# If the code is declaring a variable that was declared a typedef in an
# outer scope, yacc will think the name is part of declaration_specifiers,
# not init_declarator, and will then get confused by EQUALS. Pass None
# up in place of declarator, and handle this at a higher level.
#
def p_direct_declarator_5(self, p):
""" direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN
| direct_declarator LPAREN identifier_list_opt RPAREN
"""
func = c_ast.FuncDecl(
args=p[3],
type=None,
coord=p[1].coord)
# To see why _get_yacc_lookahead_token is needed, consider:
# typedef char TT;
# void foo(int TT) { TT = 10; }
# Outside the function, TT is a typedef, but inside (starting and
# ending with the braces) it's a parameter. The trouble begins with
# yacc's lookahead token. We don't know if we're declaring or
# defining a function until we see LBRACE, but if we wait for yacc to
# trigger a rule on that token, then TT will have already been read
# and incorrectly interpreted as TYPEID. We need to add the
# parameters to the scope the moment the lexer sees LBRACE.
#
if self._get_yacc_lookahead_token().type == "LBRACE":
if func.args is not None:
for param in func.args.params:
if isinstance(param, c_ast.EllipsisParam): break
self._add_identifier(param.name, param.coord)
p[0] = self._type_modify_decl(decl=p[1], modifier=func)
path_template.py, from the endpoints-management-python project (author: cloudendpoints)
def __init__(self):
self.lexer = lex.lex(module=self)
self.parser = yacc.yacc(module=self, debug=False, write_tables=False)
self.verb = u''
self.binding_var_count = 0
self.segment_count = 0
def __init__(self, **kwargs):
if kwargs.pop('silent', False):
kwargs['errorlog'] = yacc.NullLogger()
kwargs.setdefault('debug', False)
kwargs.setdefault('write_tables', False)
self._parser = yacc.yacc(module=self, **kwargs)
self._lexer = Lexer()
def main(infile, outfile, loghandle):
inhandle = codecs.open(infile, 'r', 'utf8')
outhandle = codecs.open(outfile, 'w', 'utf8')
text = inhandle.read()
inhandle.close()
tokenizer = lex.lex(optimize=0, module = actrules)
#lex.input(text)
parser = yacc.yacc(module = actrules)
xmlact = parser.parse(input = text, lexer = tokenizer)
final_xml = arrange_sections.main(xmlact, loghandle)
outhandle.write(final_xml)
outhandle.close()
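A hedged usage sketch (the file names are placeholders; main() reads UTF-8 text, parses it with the actrules grammar, and writes the arranged XML):

with open('run.log', 'w', encoding='utf8') as loghandle:
    main('script.act', 'script.xml', loghandle)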
def build(self, **kwargs):
"""
Builds the PyCoolParser instance with yacc.yacc() by binding the lexer object and its tokens list in the
current instance scope.
:param kwargs: yacc.yacc() config parameters, complete list:
* debug: Debug mode flag.
* optimize: Optimize mode flag.
* debuglog: Debug log file path; by default the parser prints to stderr.
* errorlog: Error log file path; by default the parser prints to stderr.
* outputdir: Output directory of parsing output; by default the .out file goes in the same directory.
:return: None
"""
# Parse the parameters
if kwargs is None or len(kwargs) == 0:
debug, write_tables, optimize, outputdir, yacctab, debuglog, errorlog = \
self._debug, self._write_tables, self._optimize, self._outputdir, self._yacctab, self._debuglog, \
self._errorlog
else:
debug = kwargs.get("debug", self._debug)
write_tables = kwargs.get("write_tables", self._write_tables)
optimize = kwargs.get("optimize", self._optimize)
outputdir = kwargs.get("outputdir", self._outputdir)
yacctab = kwargs.get("yacctab", self._yacctab)
debuglog = kwargs.get("debuglog", self._debuglog)
errorlog = kwargs.get("errorlog", self._errorlog)
# Build PyCoolLexer
self.lexer = make_lexer(debug=debug, optimize=optimize, outputdir=outputdir, debuglog=debuglog,
errorlog=errorlog)
# Expose tokens collections to this instance scope
self.tokens = self.lexer.tokens
# Build yacc parser
self.parser = yacc.yacc(module=self, write_tables=write_tables, debug=debug, optimize=optimize,
outputdir=outputdir, tabmodule=yacctab, debuglog=debuglog, errorlog=errorlog)
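A hedged usage sketch (it assumes the constructor sets the self._debug, self._write_tables, etc. defaults that the fallback branch above reads):

cool_parser = PyCoolParser()
cool_parser.build()                                  # use the instance defaults
cool_parser.build(debug=True, write_tables=False)    # or override selectively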