Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.8', '3.9', '3.x']
python-version: ['3.10', '3.12', '3.x']
steps:
- uses: actions/checkout@v6
-
Expand Down
156 changes: 88 additions & 68 deletions pycparserext/ext_c_lexer.py
Original file line number Diff line number Diff line change
@@ -1,92 +1,112 @@
from pycparser.c_lexer import CLexer as CLexerBase


try:
from pycparser.ply.lex import TOKEN
except ImportError:
from ply.lex import TOKEN
# GNU-style keywords recognized by every dialect lexer in this module.
# Each keyword maps to its token type, which is simply the keyword
# uppercased (e.g. "__asm__" -> "__ASM__").
_COMMON_EXTRA_KEYWORDS = {
    kw: kw.upper()
    for kw in ("__attribute__", "__attribute", "__asm__", "__asm", "asm")
}

# Keywords specific to GNU C.  As with the common table, the token type
# for each keyword is the keyword uppercased.
_GNU_EXTRA_KEYWORDS = {
    kw: kw.upper()
    for kw in (
        "__typeof__", "typeof", "__typeof",
        "__real__", "__imag__",
        "__builtin_types_compatible_p",
        "__const",
        "__restrict__", "__restrict",
        "__inline__", "__inline",
        "__extension__",
        "__volatile", "__volatile__",
        "__alignof__",
    )
}

# OpenCL address-space / access qualifiers.  Token type is the keyword
# uppercased (e.g. "kernel" -> "KERNEL").
_OCL_BASE_KEYWORDS = {
    kw: kw.upper()
    for kw in (
        "kernel", "constant", "global", "local", "private",
        "read_only", "write_only", "read_write",
    )
}

# Every base keyword is also recognized with a "__" prefix, mapping to a
# token type wrapped in double underscores ("__global" -> "__GLOBAL__").
# "__kernel" is the one exception: it maps to __KERNEL (no trailing "__").
_prefixed = {"__" + kw: "__" + tok + "__"
             for kw, tok in _OCL_BASE_KEYWORDS.items()}
_OCL_EXTRA_KEYWORDS = {
    **_prefixed,
    **_OCL_BASE_KEYWORDS,
    "__kernel": "__KERNEL",
}
del _prefixed


class GnuCLexer(CLexerBase):
    """GNU C lexer that recognizes GNU-specific keywords.

    GNU keywords (``__attribute__``, ``typeof``, ...) are not added to the
    base lexer's keyword table; instead :meth:`token` rewrites matching
    ``ID`` tokens after the base lexer has produced them.
    """

    # support '3i' for imaginary literal
    floating_constant = (
        "(((("
        + CLexerBase.fractional_constant + ")"
        + CLexerBase.exponent_part + "?)|([0-9]+"
        + CLexerBase.exponent_part + "))i?[FfLl]?)")

    # GNU keyword -> token type (e.g. "__typeof__" -> "__TYPEOF__")
    _extra_keywords = {**_COMMON_EXTRA_KEYWORDS, **_GNU_EXTRA_KEYWORDS}  # noqa: RUF012

    @TOKEN(floating_constant)
    def t_FLOAT_CONST(self, t):
        return t

    # characters silently ignored while lexing a '#pragma' line
    t_pppragma_ignore = " \t<>.-{}();+-*/$%@&^~!?:,0123456789="

    def token(self):
        """Return the next token, remapping IDs that are GNU keywords."""
        tok = super().token()
        if tok is not None and tok.type == "ID":
            new_type = self._extra_keywords.get(tok.value)
            if new_type is not None:
                tok.type = new_type
        return tok


class GNUCLexer(GnuCLexer):
    """Deprecated alias for :class:`GnuCLexer`.

    Kept for backward compatibility; instantiating it emits a
    :class:`DeprecationWarning` and otherwise behaves exactly like
    :class:`GnuCLexer`.
    """

    def __init__(self, *args, **kwargs):
        from warnings import warn
        warn("GNUCLexer is now called GnuCLexer",
             DeprecationWarning, stacklevel=2)
        super().__init__(*args, **kwargs)


class OpenCLCLexer(CLexerBase):
    """OpenCL C lexer that recognizes OpenCL-specific keywords and ``//``
    line comments.

    Like :class:`GnuCLexer`, OpenCL keywords are handled by rewriting
    ``ID`` tokens in :meth:`token` rather than by extending the base
    lexer's keyword table.
    """

    tokens = (*CLexerBase.tokens, "LINECOMMENT")
    states = (
        # ('comment', 'exclusive'),
        # ('preproc', 'exclusive'),
        ("ppline", "exclusive"),  # unused
        ("pppragma", "exclusive"),  # unused
    )

    # OpenCL keyword -> token type (e.g. "__global" -> "__GLOBAL__")
    _extra_keywords = {**_COMMON_EXTRA_KEYWORDS, **_OCL_EXTRA_KEYWORDS}  # noqa: RUF012

    def t_LINECOMMENT(self, t):
        r"\/\/([^\n]+)\n"
        # discard the comment, but keep line numbers accurate
        t.lexer.lineno += t.value.count("\n")

    # overrides pycparser, must have same name
    def t_PPHASH(self, t):
        r"[ \t]*\#([^\n]|\\\n)+[^\n\\]\n"
        t.lexer.lineno += t.value.count("\n")
        return t

    def token(self):
        """Return the next token, remapping IDs that are OpenCL keywords."""
        tok = super().token()
        if tok is not None and tok.type == "ID":
            new_type = self._extra_keywords.get(tok.value)
            if new_type is not None:
                tok.type = new_type
        return tok

    def _match_token(self):
        """Override to silently consume // line comments.

        NOTE(review): relies on the base lexer exposing ``_lexdata`` and
        ``_pos`` internals -- confirm against the pycparser version in use.
        """
        text = self._lexdata
        pos = self._pos
        if text[pos:pos + 2] == "//":
            end = text.find("\n", pos)
            # stop before the newline (if any) so the base lexer still
            # sees it and line counting stays correct
            self._pos = len(text) if end != -1 else len(text)
            if end != -1:
                self._pos = end
            return None
        return super()._match_token()


# Legacy helper - not needed for pycparser 3.0 but kept for API compatibility
def add_lexer_keywords(cls, keywords):
    """Add keywords to a lexer class's _extra_keywords dict.

    This modifies the class in place.

    :param cls: lexer class whose ``_extra_keywords`` mapping is extended
    :param keywords: iterable of keyword strings; each keyword is mapped
        to its uppercased form as the token type
    """
    # Copy before updating so that a class does not mutate a mapping it
    # shares with (or inherits from) another lexer class.
    if not hasattr(cls, "_extra_keywords"):
        cls._extra_keywords = {}
    else:
        cls._extra_keywords = dict(cls._extra_keywords)
    cls._extra_keywords.update({kw: kw.upper() for kw in keywords})

# vim: fdm=marker
Loading
Loading