
Commit

Minor code cleanups
dabeaz committed May 29, 2013
1 parent 7aa4bdc commit 6513f35
Showing 1 changed file with 34 additions and 34 deletions.
68 changes: 34 additions & 34 deletions ply/yacc.py
@@ -1424,7 +1424,7 @@ def __getitem__(self,index):
def set_precedence(self,term,assoc,level):
assert self.Productions == [None],"Must call set_precedence() before add_production()"
if term in self.Precedence:
raise GrammarError("Precedence already specified for terminal '%s'" % term)
raise GrammarError("Precedence already specified for terminal %r" % term)
if assoc not in ['left','right','nonassoc']:
raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
self.Precedence[term] = (assoc,level)
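
Note on the '%s' -> %r change (an illustrative sketch, not part of the commit): %r formats the value with repr(), which supplies its own quotes and escapes unprintable characters, so the hand-written quotes around %s become redundant.

    term = "NUMBER"
    print("terminal '%s'" % term)                 # terminal 'NUMBER'
    print("terminal %r" % term)                   # terminal 'NUMBER' (repr adds the quotes)
    print("name %r in rule %r" % ("\t", "expr"))  # name '\t' in rule 'expr' (escapes are shown)
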
@@ -1449,27 +1449,27 @@ def set_precedence(self,term,assoc,level):
def add_production(self,prodname,syms,func=None,file='',line=0):

if prodname in self.Terminals:
raise GrammarError("%s:%d: Illegal rule name '%s'. Already defined as a token" % (file,line,prodname))
raise GrammarError("%s:%d: Illegal rule name %r. Already defined as a token" % (file,line,prodname))
if prodname == 'error':
raise GrammarError("%s:%d: Illegal rule name '%s'. error is a reserved word" % (file,line,prodname))
raise GrammarError("%s:%d: Illegal rule name %r. error is a reserved word" % (file,line,prodname))
if not _is_identifier.match(prodname):
raise GrammarError("%s:%d: Illegal rule name '%s'" % (file,line,prodname))
raise GrammarError("%s:%d: Illegal rule name %r" % (file,line,prodname))

# Look for literal tokens
for n,s in enumerate(syms):
if s[0] in "'\"":
try:
c = eval(s)
if (len(c) > 1):
raise GrammarError("%s:%d: Literal token %s in rule '%s' may only be a single character" % (file,line,s, prodname))
raise GrammarError("%s:%d: Literal token %s in rule %r may only be a single character" % (file,line,s, prodname))
if not c in self.Terminals:
self.Terminals[c] = []
syms[n] = c
continue
except SyntaxError:
pass
if not _is_identifier.match(s) and s != '%prec':
raise GrammarError("%s:%d: Illegal name '%s' in rule '%s'" % (file,line,s, prodname))
raise GrammarError("%s:%d: Illegal name %r in rule %r" % (file,line,s, prodname))

# Determine the precedence level
if '%prec' in syms:
@@ -1478,9 +1478,9 @@ def add_production(self,prodname,syms,func=None,file='',line=0):
if syms[-2] != '%prec':
raise GrammarError("%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule" % (file,line))
precname = syms[-1]
prodprec = self.Precedence.get(precname,None)
prodprec = self.Precedence.get(precname)
if not prodprec:
raise GrammarError("%s:%d: Nothing known about the precedence of '%s'" % (file,line,precname))
raise GrammarError("%s:%d: Nothing known about the precedence of %r" % (file,line,precname))
else:
self.UsedPrecedence[precname] = 1
del syms[-2:] # Drop %prec from the rule
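
Note on the .get(key, None) -> .get(key) change (a minimal sketch, not part of the commit): dict.get() already returns None when the key is missing, so the explicit second argument is redundant.

    precedence = {'PLUS': ('left', 1)}
    print(precedence.get('PLUS'))    # ('left', 1)
    print(precedence.get('UMINUS'))  # None, same as precedence.get('UMINUS', None)
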
@@ -2060,13 +2060,13 @@ def lr0_closure(self,I):

def lr0_goto(self,I,x):
# First we look for a previously cached entry
g = self.lr_goto_cache.get((id(I),x),None)
g = self.lr_goto_cache.get((id(I),x))
if g: return g

# Now we generate the goto set in a way that guarantees uniqueness
# of the result

s = self.lr_goto_cache.get(x,None)
s = self.lr_goto_cache.get(x)
if not s:
s = { }
self.lr_goto_cache[x] = s
@@ -2075,13 +2075,13 @@ def lr0_goto(self,I,x):
for p in I:
n = p.lr_next
if n and n.lr_before == x:
s1 = s.get(id(n),None)
s1 = s.get(id(n))
if not s1:
s1 = { }
s[id(n)] = s1
gs.append(n)
s = s1
g = s.get('$end',None)
g = s.get('$end')
if not g:
if gs:
g = self.lr0_closure(gs)
@@ -2466,7 +2466,7 @@ def lr_parse_table(self):
laheads = self.grammar.Follow[p.name]
for a in laheads:
actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p)))
r = st_action.get(a,None)
r = st_action.get(a)
if r is not None:
# Whoa. Have a shift/reduce or reduce/reduce conflict
if r > 0:
@@ -2520,7 +2520,7 @@ def lr_parse_table(self):
if j >= 0:
# We are in a shift state
actlist.append((a,p,"shift and go to state %d" % j))
r = st_action.get(a,None)
r = st_action.get(a)
if r is not None:
# Whoa have a shift/reduce or shift/shift conflict
if r > 0:
@@ -2711,7 +2711,7 @@ def write_table(self,modulename,outputdir='',signature=""):

except IOError:
e = sys.exc_info()[1]
sys.stderr.write("Unable to create '%s'\n" % filename)
sys.stderr.write("Unable to create %r\n" % filename)
sys.stderr.write(str(e)+"\n")
return

@@ -2807,7 +2807,7 @@ def parse_grammar(doc,file,line):
except SyntaxError:
raise
except Exception:
raise SyntaxError("%s:%d: Syntax error in rule '%s'" % (file,dline,ps.strip()))
raise SyntaxError("%s:%d: Syntax error in rule %r" % (file,dline,ps.strip()))

return grammar

@@ -2941,7 +2941,7 @@ def validate_error_func(self):

# Get the tokens map
def get_tokens(self):
tokens = self.pdict.get("tokens",None)
tokens = self.pdict.get("tokens")
if not tokens:
self.log.error("No token list is defined")
self.error = 1
@@ -2970,12 +2970,12 @@ def validate_tokens(self):
terminals = {}
for n in self.tokens:
if n in terminals:
self.log.warning("Token '%s' multiply defined", n)
self.log.warning("Token %r multiply defined", n)
terminals[n] = 1

# Get the precedence map (if any)
def get_precedence(self):
self.prec = self.pdict.get("precedence",None)
self.prec = self.pdict.get("precedence")

# Validate and parse the precedence map
def validate_precedence(self):
@@ -3012,7 +3012,7 @@ def validate_precedence(self):
def get_pfunctions(self):
p_functions = []
for name, item in self.pdict.items():
if name[:2] != 'p_': continue
if not name.startswith('p_'): continue
if name == 'p_error': continue
if isinstance(item,(types.FunctionType,types.MethodType)):
line = func_code(item).co_firstlineno
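
Note on the name[:2] == 'p_' -> name.startswith('p_') change (a small sketch, not part of the commit): startswith() states the intent directly and does not hard-code the prefix length in a slice.

    name = 'p_expression_binop'
    print(name[:2] == 'p_')       # True, but the slice length must track the prefix
    print(name.startswith('p_'))  # True, and reads as what it means
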
@@ -3041,13 +3041,13 @@ def validate_pfunctions(self):
else:
reqargs = 1
if func_code(func).co_argcount > reqargs:
self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,func.__name__)
self.log.error("%s:%d: Rule %r has too many arguments",file,line,func.__name__)
self.error = 1
elif func_code(func).co_argcount < reqargs:
self.log.error("%s:%d: Rule '%s' requires an argument",file,line,func.__name__)
self.log.error("%s:%d: Rule %r requires an argument",file,line,func.__name__)
self.error = 1
elif not func.__doc__:
self.log.warning("%s:%d: No documentation string specified in function '%s' (ignored)",file,line,func.__name__)
self.log.warning("%s:%d: No documentation string specified in function %r (ignored)",file,line,func.__name__)
else:
try:
parsed_g = parse_grammar(doc,file,line)
@@ -3066,16 +3066,16 @@
# or functions that look like they might be grammar rules.

for n,v in self.pdict.items():
if n[0:2] == 'p_' and isinstance(v, (types.FunctionType, types.MethodType)): continue
if n[0:2] == 't_': continue
if n[0:2] == 'p_' and n != 'p_error':
self.log.warning("'%s' not defined as a function", n)
if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)): continue
if n.startswith('t_'): continue
if n.startswith('p_') and n != 'p_error':
self.log.warning("%r not defined as a function", n)
if ((isinstance(v,types.FunctionType) and func_code(v).co_argcount == 1) or
(isinstance(v,types.MethodType) and func_code(v).co_argcount == 2)):
try:
doc = v.__doc__.split(" ")
if doc[1] == ':':
self.log.warning("%s:%d: Possible grammar rule '%s' defined without p_ prefix",
self.log.warning("%s:%d: Possible grammar rule %r defined without p_ prefix",
func_code(v).co_filename, func_code(v).co_firstlineno,n)
except Exception:
pass
@@ -3197,7 +3197,7 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
# Verify the grammar structure
undefined_symbols = grammar.undefined_symbols()
for sym, prod in undefined_symbols:
errorlog.error("%s:%d: Symbol '%s' used, but not defined as a token or a rule",prod.file,prod.line,sym)
errorlog.error("%s:%d: Symbol %r used, but not defined as a token or a rule",prod.file,prod.line,sym)
errors = 1

unused_terminals = grammar.unused_terminals()
Expand All @@ -3206,7 +3206,7 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
debuglog.info("Unused terminals:")
debuglog.info("")
for term in unused_terminals:
errorlog.warning("Token '%s' defined, but not used", term)
errorlog.warning("Token %r defined, but not used", term)
debuglog.info(" %s", term)

# Print out all productions to the debug log
@@ -3220,7 +3220,7 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
# Find unused non-terminals
unused_rules = grammar.unused_rules()
for prod in unused_rules:
errorlog.warning("%s:%d: Rule '%s' defined, but not used", prod.file, prod.line, prod.name)
errorlog.warning("%s:%d: Rule %r defined, but not used", prod.file, prod.line, prod.name)

if len(unused_terminals) == 1:
errorlog.warning("There is 1 unused token")
@@ -3253,16 +3253,16 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
if check_recursion:
unreachable = grammar.find_unreachable()
for u in unreachable:
errorlog.warning("Symbol '%s' is unreachable",u)
errorlog.warning("Symbol %r is unreachable",u)

infinite = grammar.infinite_cycles()
for inf in infinite:
errorlog.error("Infinite recursion detected for symbol '%s'", inf)
errorlog.error("Infinite recursion detected for symbol %r", inf)
errors = 1

unused_prec = grammar.unused_precedence()
for term, assoc in unused_prec:
errorlog.error("Precedence rule '%s' defined for unknown symbol '%s'", assoc, term)
errorlog.error("Precedence rule %r defined for unknown symbol %r", assoc, term)
errors = 1

if errors:

