18"""Generate an Abstract Syntax Tree (AST) for C++."""
37 import __builtin__
as builtins
43from cpp
import keywords
44from cpp
import tokenize
48if not hasattr(builtins,
'reversed'):
51 for i
in range(len(seq)-1, -1, -1):
54if not hasattr(builtins,
'next'):
# Member-access levels tracked while parsing a class body
# (see handle_public and friends in AstBuilder).
VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3)
# Bit flags OR-ed together into a function/method's `modifiers` field
# (combined with |= in _GetMethod).  NOTE(review): other flags in this
# series (e.g. 0x01, 0x08, 0x10, 0x80) are elided from this excerpt —
# confirm the full set against the complete file.
FUNCTION_VIRTUAL = 0x02
FUNCTION_PURE_VIRTUAL = 0x04
FUNCTION_ATTRIBUTE = 0x20            # set when __attribute__(...) is parsed
FUNCTION_UNKNOWN_ANNOTATION = 0x40   # ALL-CAPS annotation token not otherwise recognized
FUNCTION_OVERRIDE = 0x100            # set when the 'override' specifier is parsed
74These are currently unused. Should really handle these properly at some point.
# Bit flags for C++ declaration-specifier keywords.  Per the note above,
# these are currently unused; the keyword strings themselves are kept in
# the modifiers lists instead.
TYPE_MODIFIER_INLINE = 0x010000
TYPE_MODIFIER_EXTERN = 0x020000
TYPE_MODIFIER_STATIC = 0x040000
TYPE_MODIFIER_CONST = 0x080000
TYPE_MODIFIER_REGISTER = 0x100000
TYPE_MODIFIER_VOLATILE = 0x200000
TYPE_MODIFIER_MUTABLE = 0x400000
85 'inline': TYPE_MODIFIER_INLINE,
86 'extern': TYPE_MODIFIER_EXTERN,
87 'static': TYPE_MODIFIER_STATIC,
88 'const': TYPE_MODIFIER_CONST,
89 'register': TYPE_MODIFIER_REGISTER,
90 'volatile': TYPE_MODIFIER_VOLATILE,
91 'mutable': TYPE_MODIFIER_MUTABLE,
# Sentinel token type/name injected into the token stream so the
# generator knows when a namespace scope ends (checked in Generate:
# token.token_type == _INTERNAL_TOKEN and token.name == _NAMESPACE_POP).
_INTERNAL_TOKEN = 'internal'
_NAMESPACE_POP = 'ns-pop'
99# TODO(nnorwitz): use this as a singleton for templated_types, etc
100# where we don't want to create a new empty dict each time. It is also const.
102 __contains__ =
lambda self:
False
103 keys = values = items = iterkeys = itervalues = iteritems =
lambda self: ()
115 """Returns bool if this node is a declaration."""
119 """Returns bool if this node is a definition."""
123 """Returns bool if this node exportable from a header file."""
127 """Does this AST node require the definition of the node passed in?"""
133 def _StringHelper(self, name, suffix):
135 return '%s(%s)' % (name, suffix)
136 return '%s(%d, %d, %s)' % (name, self.
start, self.
end, suffix)
144 Node.__init__(self, start, end)
155 Node.__init__(self, start, end)
168 Node.__init__(self, start, end)
177 Node.__init__(self, start, end)
198 Expr.__init__(self, start, end, expr)
204 Node.__init__(self, start, end)
212 def __init__(self, start, end, name, parameter_type, default):
213 Node.__init__(self, start, end)
220 return self.
type.name == node.name
223 name = str(self.
type)
224 suffix =
'%s %s' % (name, self.
name)
226 suffix +=
' = ' +
''.join([d.name
for d
in self.
default])
232 Node.__init__(self, start, end)
239 prefix =
'::'.join(self.
namespace) +
'::'
240 return prefix + self.
name
242 def _TypeStringHelper(self, suffix):
244 names = [n
or '<anonymous>' for n
in self.
namespace]
245 suffix +=
' in ' +
'::'.join(names)
251 def __init__(self, start, end, name, var_type, initial_value, namespace):
252 _GenericDeclaration.__init__(self, start, end, name, namespace)
258 return self.
type.name == node.name
261 """Return a string that tries to reconstitute the variable decl."""
262 suffix =
'%s %s' % (self.
type, self.
name)
272 def __init__(self, start, end, name, alias, namespace):
273 _GenericDeclaration.__init__(self, start, end, name, namespace)
285 for token
in self.
alias:
286 if token
is not None and name == token.name:
291 suffix =
'%s, %s' % (self.
name, self.
alias)
296 def __init__(self, start, end, name, fields, namespace):
297 _GenericDeclaration.__init__(self, start, end, name, namespace)
307 suffix =
'%s, {%s}' % (self.
name, self.
fields)
315class Enum(_NestedType):
320 def __init__(self, start, end, name, bases, templated_types, body, namespace):
321 _GenericDeclaration.__init__(self, start, end, name, namespace)
327 return self.
bases is None and self.
body is None
338 for token_list
in self.
bases:
340 for token
in token_list:
341 if token.name == node.name:
350 suffix =
'%s, %s, %s' % (name, self.
bases, self.
body)
358class Function(_GenericDeclaration):
359 def __init__(self, start, end, name, return_type, parameters,
360 modifiers, templated_types, body, namespace):
361 _GenericDeclaration.__init__(self, start, end, name, namespace)
370 return self.
body is None
373 return self.
body is not None
384 if p.name == node.name:
391 suffix = (
'%s %s(%s), 0x%02x, %s' %
398 def __init__(self, start, end, name, in_class, return_type, parameters,
399 modifiers, templated_types, body, namespace):
400 Function.__init__(self, start, end, name, return_type, parameters,
401 modifiers, templated_types, body, namespace)
408 """Type used for any variable (eg class, primitive, struct, etc)."""
410 def __init__(self, start, end, name, templated_types, modifiers,
411 reference, pointer, array):
414 name: str name of main type
415 templated_types: [Class (Type?)] template type info between <>
416 modifiers: [str] type modifiers (keywords) eg, const, mutable, etc.
417 reference, pointer, array: bools
419 _GenericDeclaration.__init__(self, start, end, name, [])
421 if not name
and modifiers:
435 suffix = prefix + name
461 def _GetTemplateEnd(self, tokens, start):
467 if token.name ==
'<':
469 elif token.name ==
'>':
473 return tokens[start:end-1], end
476 """Convert [Token,...] to [Class(...), ] useful for base classes.
477 For example, code like class Foo : public Bar<x, y> { ... };
478 the
"Bar<x, y>" portion gets converted to an AST.
485 reference = pointer = array = False
487 def AddType(templated_types):
491 for t
in name_tokens:
492 if keywords.IsKeyword(t.name):
493 modifiers.append(t.name)
496 name =
''.join(names)
498 result.append(
Type(name_tokens[0].start, name_tokens[-1].end,
499 name, templated_types, modifiers,
500 reference, pointer, array))
507 if token.name ==
'<':
509 AddType(self.
ToType(new_tokens))
513 reference = pointer = array =
False
514 elif token.name ==
',':
516 reference = pointer = array =
False
517 elif token.name ==
'*':
519 elif token.name ==
'&':
521 elif token.name ==
'[':
523 elif token.name ==
']':
526 name_tokens.append(token)
537 if needs_name_removed:
539 for i, t
in enumerate(parts):
541 default = parts[i+1:]
542 name = parts[i-1].name
543 if name ==
']' and parts[i-2].name ==
'[':
544 name = parts[i-3].name
549 if parts[-1].token_type == tokenize.NAME:
550 name = parts.pop().name
564 if keywords.IsKeyword(p.name):
565 modifiers.append(p.name)
568 templated_types = self.
ToType(templated_tokens)
572 if next_index < end
and parts[next_index].name ==
'::':
574 elif p.name
in (
'[',
']',
'='):
576 other_tokens.append(p)
577 elif p.name
not in (
'*',
'&',
'>'):
579 if (type_name
and type_name[-1].token_type == tokenize.NAME
and
580 p.token_type == tokenize.NAME):
584 other_tokens.append(p)
586 type_name =
''.join([t.name
for t
in type_name])
587 return name, type_name, templated_types, modifiers, default, other_tokens
594 name = type_name =
''
596 pointer = reference = array =
False
600 def AddParameter(end):
604 (name, type_name, templated_types, modifiers,
605 unused_default, unused_other_tokens) = parts
606 parameter_type =
Type(first_token.start, first_token.end,
607 type_name, templated_types, modifiers,
608 reference, pointer, array)
609 p =
Parameter(first_token.start, end, name,
610 parameter_type, default)
626 type_modifiers.append(s)
633 if template_count > 0:
634 type_modifiers.append(s)
638 AddParameter(s.start)
639 name = type_name =
''
641 pointer = reference = array =
False
658 type_modifiers.append(s)
659 AddParameter(tokens[-1].end)
663 if not return_type_seq:
665 start = return_type_seq[0].start
666 end = return_type_seq[-1].end
667 _, name, templated_types, modifiers, default, other_tokens = \
669 names = [n.name
for n
in other_tokens]
670 reference =
'&' in names
671 pointer =
'*' in names
673 return Type(start, end, name, templated_types, modifiers,
674 reference, pointer, array)
678 start = names.index(
'<')
681 if names[end] ==
'>':
687 def __init__(self, token_stream, filename, in_class='', visibility=None,
711 sys.stderr.write(
'Got %s in %s @ %s %s\n' %
712 (msg, self.
filename, token, printable_queue))
724 if token.token_type == _INTERNAL_TOKEN:
725 if token.name == _NAMESPACE_POP:
731 if result
is not None:
737 def _CreateVariable(self, pos_token, name, type_name, type_modifiers,
738 ref_pointer_name_seq, templated_types, value=None):
739 reference =
'&' in ref_pointer_name_seq
740 pointer =
'*' in ref_pointer_name_seq
741 array =
'[' in ref_pointer_name_seq
742 var_type =
Type(pos_token.start, pos_token.end, type_name,
743 templated_types, type_modifiers,
744 reference, pointer, array)
748 def _GenerateOne(self, token):
749 if token.token_type == tokenize.NAME:
750 if (keywords.IsKeyword(token.name)
and
751 not keywords.IsBuiltinType(token.name)):
752 if token.name ==
'enum':
757 if next.name !=
'class':
760 method = getattr(self,
'handle_' + token.name)
768 if next.token_type == tokenize.SYNTAX
and next.name ==
'(':
769 return self.
_GetMethod([token], FUNCTION_CTOR,
None,
True)
773 syntax = tokenize.SYNTAX
774 temp_tokens, last_token = \
777 temp_tokens.insert(0, token)
778 if last_token.name ==
'(':
781 expr = bool([e
for e
in temp_tokens
if e.name ==
'='])
784 temp_tokens.append(last_token)
785 temp_tokens.extend(new_temp)
788 if last_token.name ==
'[':
792 temp_tokens.append(last_token)
793 if temp_tokens[-2].name ==
'operator':
796 temp_tokens2, last_token = \
798 temp_tokens.extend(temp_tokens2)
800 if last_token.name ==
';':
802 parts = self.
converter.DeclarationToParts(temp_tokens,
True)
803 (name, type_name, templated_types, modifiers, default,
804 unused_other_tokens) = parts
807 names = [t.name
for t
in temp_tokens]
809 start, end = self.
converter.GetTemplateIndices(names)
810 names = names[:start] + names[end:]
811 default =
''.join([t.name
for t
in default])
813 names, templated_types, default)
814 if last_token.name ==
'{':
817 method_name = temp_tokens[0].name
818 method = getattr(self,
'handle_' + method_name,
None)
824 return self.
_GetMethod(temp_tokens, 0,
None,
False)
825 elif token.token_type == tokenize.SYNTAX:
826 if token.name ==
'~' and self.
in_class:
831 if (token.token_type == tokenize.NAME
and
833 return self.
_GetMethod([token], FUNCTION_DTOR,
None,
True)
835 elif token.token_type == tokenize.PREPROCESSOR:
838 name = token.name[1:].lstrip()
839 if name.startswith(
'include'):
841 name = name[7:].strip()
844 if name.startswith(
'\\'):
845 name = name[1:].strip()
846 assert name[0]
in '<"', token
847 assert name[-1]
in '>"', token
848 system = name[0] ==
'<'
849 filename = name[1:-1]
850 return Include(token.start, token.end, filename, system)
851 if name.startswith(
'define'):
853 name = name[6:].strip()
856 for i, c
in enumerate(name):
858 value = name[i:].lstrip()
861 return Define(token.start, token.end, name, value)
862 if name.startswith(
'if')
and name[2:3].isspace():
863 condition = name[3:].strip()
864 if condition.startswith(
'0')
or condition.startswith(
'(0)'):
868 def _GetTokensUpTo(self, expected_token_type, expected_token):
871 def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens):
874 while (last_token.token_type != expected_token_type
or
875 last_token.name
not in expected_tokens):
876 tokens.append(last_token)
878 return tokens, last_token
882 def _GetVarTokensUpToIgnoringTemplates(self, expected_token_type,
887 while (nesting > 0
or
888 last_token.token_type != expected_token_type
or
889 last_token.name
not in expected_tokens):
890 tokens.append(last_token)
892 if last_token.name ==
'<':
894 elif last_token.name ==
'>':
896 return tokens, last_token
899 def _IgnoreUpTo(self, token_type, token):
902 def _SkipIf0Blocks(self):
906 if token.token_type != tokenize.PREPROCESSOR:
909 name = token.name[1:].lstrip()
910 if name.startswith(
'endif'):
914 elif name.startswith(
'if'):
917 def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None):
918 if GetNextToken
is None:
923 token = GetNextToken()
925 if token.token_type == tokenize.SYNTAX:
926 if token.name == open_paren:
928 elif token.name == close_paren:
933 token = GetNextToken()
936 def _GetParameters(self):
942 def _GetNextToken(self):
947 except StopIteration:
950 def _AddBackToken(self, token):
951 if token.whence == tokenize.WHENCE_STREAM:
952 token.whence = tokenize.WHENCE_QUEUE
955 assert token.whence == tokenize.WHENCE_QUEUE, token
958 def _AddBackTokens(self, tokens):
960 if tokens[-1].whence == tokenize.WHENCE_STREAM:
962 token.whence = tokenize.WHENCE_QUEUE
965 assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
969 """Returns ([tokens], next_token_info)."""
973 GetNextToken =
lambda:
next(it)
974 next_token = GetNextToken()
976 last_token_was_name =
False
977 while (next_token.token_type == tokenize.NAME
or
978 (next_token.token_type == tokenize.SYNTAX
and
979 next_token.name
in (
'::',
'<'))):
982 if last_token_was_name
and next_token.token_type == tokenize.NAME:
984 last_token_was_name = next_token.token_type == tokenize.NAME
985 tokens.append(next_token)
987 if next_token.name ==
'<':
989 last_token_was_name =
True
990 next_token = GetNextToken()
991 return tokens, next_token
995 assert len(return_type_and_name) >= 1
996 return self.
_GetMethod(return_type_and_name, modifiers, templated_types,
999 def _GetMethod(self, return_type_and_name, modifiers, templated_types,
1001 template_portion =
None
1004 assert token.token_type == tokenize.SYNTAX, token
1005 if token.name ==
'<':
1007 template_portion = [token]
1010 assert token.token_type == tokenize.SYNTAX, token
1011 assert token.name ==
'(', token
1013 name = return_type_and_name.pop()
1015 if name.name ==
'>':
1017 while return_type_and_name[index].name !=
'<':
1019 template_portion = return_type_and_name[index:] + [name]
1020 del return_type_and_name[index:]
1021 name = return_type_and_name.pop()
1022 elif name.name ==
']':
1023 rt = return_type_and_name
1024 assert rt[-1].name ==
'[', return_type_and_name
1025 assert rt[-2].name ==
'operator', return_type_and_name
1026 name_seq = return_type_and_name[-2:]
1027 del return_type_and_name[-2:]
1029 name_seq[0].start, name.end)
1034 return_type = return_type_and_name
1037 indices = return_type[0]
1040 if name.name == self.
in_class and not modifiers:
1041 modifiers |= FUNCTION_CTOR
1046 if name.name ==
'operator' and not parameters:
1048 assert token.name ==
'(', token
1053 while token.token_type == tokenize.NAME:
1054 modifier_token = token
1056 if modifier_token.name ==
'const':
1057 modifiers |= FUNCTION_CONST
1058 elif modifier_token.name ==
'__attribute__':
1060 modifiers |= FUNCTION_ATTRIBUTE
1061 assert token.name ==
'(', token
1065 elif modifier_token.name ==
'throw':
1066 modifiers |= FUNCTION_THROW
1067 assert token.name ==
'(', token
1071 elif modifier_token.name ==
'override':
1072 modifiers |= FUNCTION_OVERRIDE
1073 elif modifier_token.name == modifier_token.name.upper():
1076 modifiers |= FUNCTION_UNKNOWN_ANNOTATION
1078 self.
HandleError(
'unexpected token', modifier_token)
1080 assert token.token_type == tokenize.SYNTAX, token
1082 if token.name ==
':':
1084 while token.name !=
';' and token.name !=
'{':
1089 if token.name ==
'(':
1090 if parameters[0].name ==
'*':
1092 name = parameters.pop()
1094 modifiers = [p.name
for p
in parameters]
1097 del function_parameters[-1]
1100 assert token.token_type == tokenize.SYNTAX, token
1101 assert token.name ==
';', token
1103 modifiers,
'',
None)
1112 real_name = parameters[-1]
1116 modifiers,
'',
None)
1118 if token.name ==
'{':
1123 if token.name ==
'=':
1126 if token.name ==
'default' or token.name ==
'delete':
1132 assert token.token_type == tokenize.CONSTANT, token
1133 assert token.name ==
'0', token
1134 modifiers |= FUNCTION_PURE_VIRTUAL
1137 if token.name ==
'[':
1143 assert token.name ==
';', (token, return_type_and_name, parameters)
1146 if len(return_type) > 2
and return_type[-1].name ==
'::':
1147 return_type, in_class = \
1149 return Method(indices.start, indices.end, name.name, in_class,
1150 return_type, parameters, modifiers, templated_types,
1152 return Function(indices.start, indices.end, name.name, return_type,
1153 parameters, modifiers, templated_types, body,
1156 def _GetReturnTypeAndClassName(self, token_seq):
1167 if token_seq[0].name ==
'::':
1170 end = len(token_seq) - 1
1171 if token_seq[end-1].name ==
'::':
1177 seq_copy = token_seq[i:end]
1182 new_name, next = self.
GetName(seq_copy[i:])
1183 assert new_name,
'Got empty new_name, next=%s' % next
1185 if next
and next.token_type == tokenize.SYNTAX:
1186 new_name.append(next)
1187 names.append(new_name)
1195 return_type = [e
for seq
in names[:-1]
for e
in seq]
1197 class_name = names[-1]
1198 return return_type, class_name
1233 def _GetNestedType(self, ctor):
1235 name_tokens, token = self.GetName()
1237 name =
''.join([t.name
for t
in name_tokens])
1240 if token.token_type == tokenize.SYNTAX
and token.name ==
';':
1241 return ctor(token.start, token.end, name,
None,
1242 self.namespace_stack)
1244 if token.token_type == tokenize.NAME
and self._handling_typedef:
1245 self._AddBackToken(token)
1246 return ctor(token.start, token.end, name,
None,
1247 self.namespace_stack)
1250 fields = list(self._GetMatchingChar(
'{',
'}'))
1252 if token.token_type == tokenize.SYNTAX
and token.name ==
'{':
1253 next = self._GetNextToken()
1254 new_type = ctor(token.start, token.end, name, fields,
1255 self.namespace_stack)
1258 if next.token_type != tokenize.NAME:
1264 assert token.token_type == tokenize.NAME, token
1265 return self._CreateVariable(token, token.name, name, [],
'',
None)
1270 name_tokens, var_token = self.
GetName()
1273 is_syntax = (var_token.token_type == tokenize.SYNTAX
and
1274 var_token.name[0]
in '*&')
1275 is_variable = (var_token.token_type == tokenize.NAME
and
1276 next_token.name ==
';')
1277 variable = var_token
1278 if is_syntax
and not is_variable:
1279 variable = next_token
1281 if temp.token_type == tokenize.SYNTAX
and temp.name ==
'(':
1285 t0.start-7, t0.start-2)
1286 type_and_name = [struct]
1287 type_and_name.extend(name_tokens)
1288 type_and_name.extend((var_token, next_token))
1289 return self.
_GetMethod(type_and_name, 0,
None,
False)
1290 assert temp.name ==
';', (temp, name_tokens, var_token)
1292 modifiers = [
'struct']
1293 type_name =
''.join([t.name
for t
in name_tokens])
1294 position = name_tokens[0]
1296 modifiers, var_token.name,
None)
1297 name_tokens.extend((var_token, next_token))
1301 return self.
_GetClass(Struct, VISIBILITY_PUBLIC,
None)
1331 token = token2 = self._GetNextToken()
1332 if token.name ==
'inline':
1334 token2 = self._GetNextToken()
1335 if token2.token_type == tokenize.SYNTAX
and token2.name ==
'~':
1336 return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR,
None)
1337 assert token.token_type == tokenize.NAME
or token.name ==
'::', token
1338 return_type_and_name, _ = self._GetVarTokensUpToIgnoringTemplates(
1339 tokenize.SYNTAX,
'(')
1340 return_type_and_name.insert(0, token)
1341 if token2
is not token:
1342 return_type_and_name.insert(1, token2)
1343 return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL,
1353 assert self.in_class
1354 self.visibility = VISIBILITY_PUBLIC
1386 tokens = self._GetTokensUpTo(tokenize.SYNTAX,
';')
1388 return Delete(tokens[0].start, tokens[0].end, tokens)
1392 if (token.token_type == tokenize.NAME
and
1393 keywords.IsKeyword(token.name)):
1395 method = getattr(self,
'handle_' + token.name)
1413 if name.name ==
')':
1415 if (len(tokens) >= 4
and
1416 tokens[1].name ==
'(' and tokens[2].name ==
'*'):
1419 elif name.name ==
']':
1421 if len(tokens) >= 2:
1426 new_type = self.
converter.ToType(tokens)[0]
1427 return Typedef(indices.start, indices.end, name.name,
1436 def _GetTemplatedTypes(self):
1437 result = collections.OrderedDict()
1439 len_tokens = len(tokens) - 1
1441 while i < len_tokens:
1442 key = tokens[i].name
1444 if keywords.IsKeyword(key)
or key ==
',':
1446 type_name = default =
None
1449 if tokens[i-1].name ==
'=':
1450 assert i < len_tokens,
'%s %s' % (i, tokens)
1451 default, unused_next_token = self.
GetName(tokens[i:])
1454 if tokens[i-1].name !=
',':
1457 key = tokens[i-1].name
1458 type_name = tokens[i-2]
1460 result[key] = (type_name, default)
1465 assert token.token_type == tokenize.SYNTAX, token
1466 assert token.name ==
'<', token
1470 if token.token_type == tokenize.NAME:
1471 if token.name ==
'class':
1472 return self.
_GetClass(Class, VISIBILITY_PRIVATE, templated_types)
1473 elif token.name ==
'struct':
1474 return self.
_GetClass(Struct, VISIBILITY_PUBLIC, templated_types)
1475 elif token.name ==
'friend':
1481 if last.name ==
'(':
1482 return self.
GetMethod(FUNCTION_NONE, templated_types)
1496 return self.
_GetClass(Class, VISIBILITY_PRIVATE,
None)
1498 def _GetBases(self):
1503 assert token.token_type == tokenize.NAME, token
1505 if token.name
not in (
'public',
'protected',
'private'):
1513 if token.name !=
'virtual':
1518 base, next_token = self.
GetName()
1520 assert len(bases_ast) == 1, bases_ast
1521 bases.append(bases_ast[0])
1522 assert next_token.token_type == tokenize.SYNTAX, next_token
1523 if next_token.name ==
'{':
1527 assert next_token.name ==
',', next_token
1530 def _GetClass(self, class_type, visibility, templated_types):
1533 if class_token.token_type != tokenize.NAME:
1534 assert class_token.token_type == tokenize.SYNTAX, class_token
1540 if next_token.token_type == tokenize.NAME:
1544 name_tokens, token = self.
GetName()
1545 class_name =
''.join([t.name
for t
in name_tokens])
1547 if token.token_type == tokenize.SYNTAX:
1548 if token.name ==
';':
1550 return class_type(class_token.start, class_token.end,
1551 class_name,
None, templated_types,
None,
1553 if token.name
in '*&':
1557 if next_token.name ==
';':
1559 modifiers = [
'class']
1562 modifiers, token.name,
None)
1565 tokens = (class_token, token, name_token, next_token)
1567 return self.
GetMethod(FUNCTION_NONE,
None)
1568 if token.name ==
':':
1572 if token.token_type == tokenize.SYNTAX
and token.name ==
'{':
1573 assert token.token_type == tokenize.SYNTAX, token
1574 assert token.name ==
'{', token
1578 body = list(ast.Generate())
1582 if token.token_type != tokenize.NAME:
1583 assert token.token_type == tokenize.SYNTAX, token
1584 assert token.name ==
';', token
1586 new_class = class_type(class_token.start, class_token.end,
1587 class_name, bases,
None,
1592 token.name, new_class,
1593 modifiers, token.name,
None)
1599 return class_type(class_token.start, class_token.end, class_name,
1605 name_tokens, token = self.
GetName()
1607 name =
''.join([t.name
for t
in name_tokens])
1609 assert token.token_type == tokenize.SYNTAX, token
1613 internal_token.whence = token.whence
1614 if token.name ==
'=':
1616 name, next_token = self.
GetName()
1617 assert next_token.name ==
';', next_token
1620 assert token.name ==
'{', token
1623 tokens[-1] = internal_token
1631 return Using(tokens[0].start, tokens[0].end, tokens)
1638 return self.
GetMethod(FUNCTION_CTOR,
None)
1657 token = self._GetNextToken()
1658 assert token.token_type == tokenize.SYNTAX
1659 assert token.name ==
':'
1668 tokens = self._GetTokensUpTo(tokenize.SYNTAX,
';')
1670 return Return(self.current_token.start, self.current_token.end,
None)
1671 return Return(tokens[0].start, tokens[0].end, tokens)
1675 assert len(tokens) == 1, str(tokens)
1676 return Goto(tokens[0].start, tokens[0].end, tokens[0].name)
1697 self._IgnoreUpTo(tokenize.SYNTAX,
';')
1704 """Utility method that returns an AstBuilder from source code.
1707 source: 'C++ source code'
1713 return AstBuilder(tokenize.GetTokens(source), filename)
1717 """Prints all identifiers for a C++ source file.
1721 should_print: predicate
with signature: bool
Function(token)
1723 source = utils.ReadFile(filename, False)
1725 sys.stderr.write(
'Unable to find: %s\n' % filename)
1731 for node
in builder.Generate():
1732 if should_print(node):
1734 except KeyboardInterrupt:
1741 """Prints all identifiers for each C++ source file in filenames.
1744 filenames: ['file1',
'file2', ...]
1745 should_print: predicate
with signature: bool
Function(token)
1747 for path
in filenames:
1752 for filename
in argv[1:]:
1753 source = utils.ReadFile(filename)
1757 print(
'Processing %s' % filename)
1760 entire_ast = filter(
None, builder.Generate())
1761 except KeyboardInterrupt:
1765 traceback.print_exc()
1768 for ast
in entire_ast:
1772if __name__ ==
'__main__':
def BuilderFromSource(source, filename)
def PrintIndentifiers(filename, should_print)
def PrintAllIndentifiers(filenames, should_print)
def __init__(self, start, end)
def _StringHelper(self, name, suffix)
def __init__(self, start, end, name, definition)
def __init__(self, start, end, filename, system)
def __init__(self, start, end, label)
def __init__(self, start, end, expr)
def __init__(self, start, end, expr, namespace)
def __init__(self, start, end, names)
def __init__(self, start, end, name, parameter_type, default)
def _TypeStringHelper(self, suffix)
def __init__(self, start, end, name, namespace)
def __init__(self, start, end, name, var_type, initial_value, namespace)
def __init__(self, start, end, name, alias, namespace)
def __init__(self, start, end, name, fields, namespace)
def __init__(self, start, end, name, bases, templated_types, body, namespace)
def __init__(self, start, end, name, return_type, parameters, modifiers, templated_types, body, namespace)
def __init__(self, start, end, name, in_class, return_type, parameters, modifiers, templated_types, body, namespace)
def __init__(self, start, end, name, templated_types, modifiers, reference, pointer, array)
def GetTemplateIndices(self, names)
def DeclarationToParts(self, parts, needs_name_removed)
def CreateReturnType(self, return_type_seq)
def _GetTemplateEnd(self, tokens, start)
def __init__(self, namespace_stack)
def ToParameters(self, tokens)
def handle_reinterpret_cast(self)
def _GetTemplatedTypes(self)
def _GetVarTokensUpToIgnoringTemplates(self, expected_token_type, *expected_tokens)
def handle_template(self)
def _AddBackToken(self, token)
def handle_volatile(self)
def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None)
def GetName(self, seq=None)
def HandleError(self, msg, token)
def handle_typename(self)
def handle_const_cast(self)
def handle_continue(self)
def handle_explicit(self)
def _GetNestedType(self, ctor)
def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens)
def handle_dynamic_cast(self)
def handle_unsigned(self)
def handle_operator(self)
def handle_register(self)
def _GetTokensUpTo(self, expected_token_type, expected_token)
def _AddBackTokens(self, tokens)
def handle_protected(self)
def __init__(self, token_stream, filename, in_class='', visibility=None, namespace_stack=[])
def handle_static_cast(self)
def _GetReturnTypeAndClassName(self, token_seq)
def _GetMethod(self, return_type_and_name, modifiers, templated_types, get_paren)
def _GenerateOne(self, token)
def handle_namespace(self)
def _CreateVariable(self, pos_token, name, type_name, type_modifiers, ref_pointer_name_seq, templated_types, value=None)
def GetMethod(self, modifiers, templated_types)
def _GetClass(self, class_type, visibility, templated_types)
def _IgnoreUpTo(self, token_type, token)