Skip to content

Windows gcc #119

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 6 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions cxxheaderparser/lexer.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ class PlyLexer:
"alignas",
"alignof",
"asm",
"__asm",
"auto",
"bool",
"break",
Expand All @@ -99,6 +100,7 @@ class PlyLexer:
"enum",
"explicit",
"export",
"__extension__",
"extern",
"false",
"final",
Expand All @@ -124,6 +126,7 @@ class PlyLexer:
"public",
"register",
"reinterpret_cast",
"__restrict",
"requires",
"return",
"short",
Expand Down
30 changes: 28 additions & 2 deletions cxxheaderparser/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,7 @@ def parse(self) -> None:
] = {
"__attribute__": self._consume_gcc_attribute,
"__declspec": self._consume_declspec,
"__extension__": lambda _1, _2: None,
"alignas": self._consume_attribute_specifier_seq,
"extern": self._parse_extern,
"friend": self._parse_friend_decl,
Expand Down Expand Up @@ -879,6 +880,12 @@ def _parse_requires_segment(
# Let the caller decide
return tok

def _consume_asm(
    self, tok: LexToken, doxygen: typing.Optional[str] = None
) -> None:
    """Discard an asm/__asm specification.

    The keyword token has already been consumed by the caller; this skips
    the required parenthesized group that follows it.  Any attached
    doxygen comment is ignored.
    """
    lparen = self._next_token_must_be("(")
    self._consume_balanced_tokens(lparen)

#
# Attributes
#
Expand Down Expand Up @@ -1394,8 +1401,15 @@ def _discard_ctor_initializer(self) -> None:

def evaluate_integral_expr(expr: str) -> int:
    """Safely evaluate an integral constant expression.

    Supports int/float literals combined with the usual arithmetic and
    bitwise operators (``+ - * / // % << >> | & ^ ~`` and unary ``+``/``-``),
    which covers typical C++ bitfield widths such as ``3`` or ``1 << 4``.

    Uses the :mod:`ast` module rather than ``eval`` so that text coming
    from an untrusted header can never execute arbitrary Python code.

    :param expr: the expression text, e.g. ``"CHAR_BIT_CONSTANT"`` is
        rejected -- only literals and operators are allowed
    :returns: the expression value truncated to ``int``
    :raises ValueError: if the expression is empty, syntactically invalid,
        or contains anything other than numeric literals and operators
    """
    import ast
    import operator

    bin_ops = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        # C++ integer division truncates; match the old int(eval(...))
        # behavior by doing true division and truncating at the end
        ast.Div: operator.truediv,
        ast.FloorDiv: operator.floordiv,
        ast.Mod: operator.mod,
        ast.LShift: operator.lshift,
        ast.RShift: operator.rshift,
        ast.BitOr: operator.or_,
        ast.BitAnd: operator.and_,
        ast.BitXor: operator.xor,
    }
    unary_ops = {
        ast.UAdd: operator.pos,
        ast.USub: operator.neg,
        ast.Invert: operator.invert,
    }

    def _ev(node: "ast.expr"):
        if isinstance(node, ast.Constant) and isinstance(
            node.value, (int, float)
        ):
            return node.value
        if isinstance(node, ast.UnaryOp) and type(node.op) in unary_ops:
            return unary_ops[type(node.op)](_ev(node.operand))
        if isinstance(node, ast.BinOp) and type(node.op) in bin_ops:
            return bin_ops[type(node.op)](_ev(node.left), _ev(node.right))
        # names, calls, subscripts, etc are not constant expressions
        raise ValueError(f"unsupported constant expression: {expr!r}")

    try:
        tree = ast.parse(expr.strip(), mode="eval")
    except (SyntaxError, ValueError) as e:
        raise ValueError(f"invalid constant expression: {expr!r}") from e
    return int(_ev(tree.body))


def _parse_bitfield(self) -> int:
    """Parse and return a bitfield width.

    The width is an integral constant expression; gather the raw tokens
    until the declarator ends (a default-value ``=`` or the terminating
    ``;``, neither of which is consumed here) and evaluate them.

    :raises ValueError: if the gathered tokens do not form a valid
        integral constant expression
    """
    const_expr = ""
    while True:
        tok = self.lex.token_if_not("=", ";")
        if tok is None:
            break
        const_expr += tok.value

    # SECURITY: never eval() header text -- headers are untrusted input
    return evaluate_integral_expr(const_expr)

def _parse_field(
self,
Expand Down Expand Up @@ -1791,6 +1805,9 @@ def _parse_parameter(

dtype = self._parse_cv_ptr(parsed_type)

# optional __restrict
tok = self.lex.token_if("__restrict")

# optional parameter pack
if self.lex.token_if("ELLIPSIS"):
param_pack = True
Expand Down Expand Up @@ -2679,6 +2696,15 @@ def _parse_declarations(
# Unset the doxygen, location
doxygen = None

tok = self.lex.token_if("__asm")
if tok:
self._consume_asm(tok)

tok = self.lex.token_if_in_set(self._attribute_start_tokens)
while tok:
self._consume_attribute(tok)
tok = self.lex.token_if_in_set(self._attribute_start_tokens)

# Check for multiple declarations
tok = self._next_token_must_be(",", ";")
location = tok.location
Expand Down
Loading