@@ -5,7 +5,7 @@
 # Copyright (C) 2007
 # All rights reserved
 #
-# This module implements an ANSI-C style lexical preprocessor for PLY.
+# This module implements an ANSI-C style lexical preprocessor for PLY.
 # -----------------------------------------------------------------------------
 from __future__ import generators
 
@@ -77,7 +77,8 @@ def t_CPP_COMMENT2(t):
     r'(//.*?(\n|$))'
     # replace with '/n'
     t.type = 'CPP_WS'; t.value = '\n'
-
+    return t
+
 def t_error(t):
     t.type = t.value[0]
     t.value = t.value[0]
@@ -91,8 +92,8 @@ import os.path
 
 # -----------------------------------------------------------------------------
 # trigraph()
-#
-# Given an input string, this function replaces all trigraph sequences.
+#
+# Given an input string, this function replaces all trigraph sequences.
 # The following mapping is used:
 #
 #     ??=    #
@@ -262,7 +263,7 @@ class Preprocessor(object):
     # ----------------------------------------------------------------------
     # add_path()
     #
-    # Adds a search path to the preprocessor.
+    # Adds a search path to the preprocessor.
     # ----------------------------------------------------------------------
 
     def add_path(self,path):
@@ -306,7 +307,7 @@ class Preprocessor(object):
 
     # ----------------------------------------------------------------------
     # tokenstrip()
-    #
+    #
     # Remove leading/trailing whitespace tokens from a token list
     # ----------------------------------------------------------------------
 
@@ -332,7 +333,7 @@ class Preprocessor(object):
     # argument. Each argument is represented by a list of tokens.
     #
     # When collecting arguments, leading and trailing whitespace is removed
-    # from each argument.
+    # from each argument.
     #
     # This function properly handles nested parenthesis and commas---these do not
     # define new arguments.
@@ -344,7 +345,7 @@ class Preprocessor(object):
         current_arg = []
         nesting = 1
         tokenlen = len(tokenlist)
-
+
         # Search for the opening '('.
         i = 0
         while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
@@ -378,7 +379,7 @@ class Preprocessor(object):
             else:
                 current_arg.append(t)
             i += 1
-
+
         # Missing end argument
         self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
         return 0, [],[]
@@ -390,9 +391,9 @@ class Preprocessor(object):
     # This is used to speed up macro expansion later on---we'll know
     # right away where to apply patches to the value to form the expansion
     # ----------------------------------------------------------------------
-
+
     def macro_prescan(self,macro):
-        macro.patch     = []             # Standard macro arguments
+        macro.patch     = []             # Standard macro arguments
         macro.str_patch = []             # String conversion expansion
         macro.var_comma_patch = []       # Variadic macro comma patch
         i = 0
@@ -410,10 +411,11 @@ class Preprocessor(object):
                 elif (i > 0 and macro.value[i-1].value == '##'):
                     macro.patch.append(('c',argnum,i-1))
                     del macro.value[i-1]
+                    i -= 1
                     continue
                 elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                     macro.patch.append(('c',argnum,i))
-                    i += 1
+                    del macro.value[i + 1]
                     continue
                 # Standard expansion
                 else:
@@ -439,7 +441,7 @@ class Preprocessor(object):
         rep = [copy.copy(_x) for _x in macro.value]
 
         # Make string expansion patches. These do not alter the length of the replacement sequence
-
+
         str_expansion = {}
         for argnum, i in macro.str_patch:
             if argnum not in str_expansion:
@@ -457,7 +459,7 @@ class Preprocessor(object):
         # Make all other patches. The order of these matters. It is assumed that the patch list
         # has been sorted in reverse order of patch location since replacements will cause the
         # size of the replacement sequence to expand from the patch point.
-
+
         expanded = { }
         for ptype, argnum, i in macro.patch:
             # Concatenation. Argument is left unexpanded
@@ -494,7 +496,7 @@ class Preprocessor(object):
                 if t.value in self.macros and t.value not in expanded:
                     # Yes, we found a macro match
                     expanded[t.value] = True
-
+
                     m = self.macros[t.value]
                     if not m.arglist:
                         # A simple macro
@@ -508,7 +510,7 @@ class Preprocessor(object):
                         j = i + 1
                         while j < len(tokens) and tokens[j].type in self.t_WS:
                             j += 1
-                        if tokens[j].value == '(':
+                        if j < len(tokens) and tokens[j].value == '(':
                             tokcount,args,positions = self.collect_args(tokens[j:])
                             if not m.variadic and len(args) != len(m.arglist):
                                 self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
@@ -526,7 +528,7 @@ class Preprocessor(object):
                                     else:
                                         args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
                                         del args[len(m.arglist):]
-
+
                                 # Get macro replacement text
                                 rep = self.macro_expand_args(m,args)
                                 rep = self.expand_macros(rep,expanded)
@@ -534,18 +536,24 @@ class Preprocessor(object):
                                     r.lineno = t.lineno
                                 tokens[i:j+tokcount] = rep
                                 i += len(rep)
+                        else:
+                            # This is not a macro. It is just a word which
+                            # equals to name of the macro. Hence, go to the
+                            # next token.
+                            i += 1
+
                     del expanded[t.value]
                     continue
                 elif t.value == '__LINE__':
                     t.type = self.t_INTEGER
                     t.value = self.t_INTEGER_TYPE(t.lineno)
-
+
             i += 1
         return tokens
 
-    # ----------------------------------------------------------------------
+    # ----------------------------------------------------------------------
     # evalexpr()
-    #
+    #
     # Evaluate an expression token sequence for the purposes of evaluating
     # integral expressions.
     # ----------------------------------------------------------------------
@@ -592,7 +600,7 @@ class Preprocessor(object):
                 tokens[i].value = str(tokens[i].value)
                 while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
                     tokens[i].value = tokens[i].value[:-1]
-
+
         expr = "".join([str(x.value) for x in tokens])
         expr = expr.replace("&&"," and ")
         expr = expr.replace("||"," or ")
@@ -617,7 +625,7 @@ class Preprocessor(object):
 
         if not source:
             source = ""
-
+
         self.define("__FILE__ \"%s\"" % source)
 
         self.source = source
@@ -636,7 +644,7 @@ class Preprocessor(object):
                 for tok in x:
                     if tok.type in self.t_WS and '\n' in tok.value:
                         chunk.append(tok)
-
+
                 dirtokens = self.tokenstrip(x[i+1:])
                 if dirtokens:
                     name = dirtokens[0].value
@@ -644,7 +652,7 @@ class Preprocessor(object):
                 else:
                     name = ""
                     args = []
-
+
                 if name == 'define':
                     if enable:
                         for tok in self.expand_macros(chunk):
@@ -704,7 +712,7 @@ class Preprocessor(object):
                             iftrigger = True
                     else:
                         self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
-
+
                 elif name == 'else':
                     if ifstack:
                         if ifstack[-1][0]:
@@ -874,7 +882,7 @@ class Preprocessor(object):
     def parse(self,input,source=None,ignore={}):
         self.ignore = ignore
         self.parser = self.parsegen(input,source)
-
+
     # ----------------------------------------------------------------------
     # token()
     #
@@ -904,14 +912,3 @@ if __name__ == '__main__':
         tok = p.token()
         if not tok: break
         print(p.source, tok)
-
-
-
-
-
-
-
-
-
-
-
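
Usage sketch (not part of the patch above): one way to drive the patched preprocessor, assuming the fixed module sits next to the script as cpp.py and PLY's ply.lex is installed. The macro name PASTE, the source snippet, and the label example.c are illustrative only. The token-pasting define goes through the macro_prescan()/expand_macros() paths changed above, and the "return t" added to t_CPP_COMMENT2() is what keeps the leading // comment from swallowing the newline that ends its line.

import ply.lex as lex
import cpp                        # the patched module from this diff, assumed importable as cpp.py

# Build a lexer from the token rules defined at module level in cpp.py,
# then hand it to the Preprocessor class patched above.
lexer = lex.lex(module=cpp)
p = cpp.Preprocessor(lexer)

source = (
    '// token pasting test\n'     # with "return t", this comment becomes a newline token
    '#define PASTE(a, b) a ## b\n'
    'int PASTE(foo, bar) = 1;\n'
)
p.parse(source, 'example.c')      # 'example.c' is only a label used in error messages

out = []
while True:
    tok = p.token()
    if not tok:
        break
    out.append(str(tok.value))

# The joined token values should end with roughly "int foobar = 1;",
# showing that "a ## b" was pasted via the patched macro_prescan() bookkeeping.
print(''.join(out))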