mirror of https://github.com/python/cpython
GH-121970: Combine custom Pygments lexers into a package (#121976)
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
parent 12c1afa9d1
commit 7431c3799e

Doc/conf.py

@@ -18,11 +18,10 @@ from pyspecific import SOURCE_URI
 # ---------------------
 
 extensions = [
-    'asdl_highlight',
     'c_annotations',
     'escape4chm',
     'glossary_search',
-    'peg_highlight',
+    'lexers',
     'pyspecific',
     'sphinx.ext.coverage',
     'sphinx.ext.doctest',

Doc/tools/extensions/lexers/__init__.py (new file)

@@ -0,0 +1,15 @@
+from .asdl_lexer import ASDLLexer
+from .peg_lexer import PEGLexer
+
+
+def setup(app):
+    # Used for highlighting Parser/Python.asdl in library/ast.rst
+    app.add_lexer("asdl", ASDLLexer)
+    # Used for highlighting Grammar/python.gram in reference/grammar.rst
+    app.add_lexer("peg", PEGLexer)
+
+    return {
+        "version": "1.0",
+        "parallel_read_safe": True,
+        "parallel_write_safe": True,
+    }
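
Aside (not part of the diff): listing 'lexers' in conf.py's extensions makes Sphinx import this package and call its setup(app), which registers both lexers under the names "asdl" and "peg"; conf.py already extends sys.path with Doc/tools/extensions earlier in the file, which is how the bare package name resolves. As a rough sketch only, the same lexer classes can also be driven directly with plain Pygments; the sample ASDL fragment and the import-path assumption below are mine, not from the commit:

    # Sketch only: run the ASDL lexer outside Sphinx with plain Pygments.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    from lexers import ASDLLexer  # assumes Doc/tools/extensions is importable

    sample = "unaryop = Invert | Not | UAdd | USub"  # illustrative ASDL-style input
    print(highlight(sample, ASDLLexer(), TerminalFormatter()))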

Doc/tools/extensions/asdl_highlight.py → Doc/tools/extensions/lexers/asdl_lexer.py (renamed)

@@ -1,15 +1,6 @@
-import sys
-from pathlib import Path
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text
 
-CPYTHON_ROOT = Path(__file__).resolve().parent.parent.parent.parent
-sys.path.append(str(CPYTHON_ROOT / "Parser"))
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import (Comment, Keyword, Name, Operator,
-                            Punctuation, Text)
-
-from asdl import builtin_types
-from sphinx.highlighting import lexers
 
 class ASDLLexer(RegexLexer):
     name = "ASDL"
@@ -34,7 +25,10 @@ class ASDLLexer(RegexLexer):
                 r"(\w+)(\*\s|\?\s|\s)(\w+)",
                 bygroups(Name.Builtin.Pseudo, Operator, Name),
             ),
-            (words(builtin_types), Name.Builtin),
+            # Keep in line with ``builtin_types`` from Parser/asdl.py.
+            # ASDL's 4 builtin types are
+            # constant, identifier, int, string
+            ('constant|identifier|int|string', Name.Builtin),
             (r"attributes", Name.Builtin),
             (
                 _name + _text_ws + "(=)",

@@ -46,8 +40,3 @@
             (r".", Text),
         ],
     }
-
-
-def setup(app):
-    lexers["asdl"] = ASDLLexer()
-    return {'version': '1.0', 'parallel_read_safe': True}
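
Aside (not part of the diff): the removed rule built its pattern from ``builtin_types`` in Parser/asdl.py, which is why the module previously needed the sys.path manipulation that also disappears in this change; the new rule hard-codes the four names and relies on the comments above it to keep them in sync. A small sketch of the equivalence, with the ``builtin_types`` literal assumed to mirror Parser/asdl.py:

    # Sketch only: what the old words(...) rule expanded to versus the new literal.
    from pygments.lexer import words

    builtin_types = ("constant", "identifier", "int", "string")  # assumed to mirror Parser/asdl.py
    print(words(builtin_types).get())        # words() resolves to a single alternation over these names
    print("constant|identifier|int|string")  # the hand-written pattern now used in the rule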

Doc/tools/extensions/peg_highlight.py → Doc/tools/extensions/lexers/peg_lexer.py (renamed)

@@ -1,8 +1,6 @@
 from pygments.lexer import RegexLexer, bygroups, include
 from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text
 
-from sphinx.highlighting import lexers
-
 
 class PEGLexer(RegexLexer):
     """Pygments Lexer for PEG grammar (.gram) files

@@ -79,8 +77,3 @@
             (r".", Text),
         ],
     }
-
-
-def setup(app):
-    lexers["peg"] = PEGLexer()
-    return {"version": "1.0", "parallel_read_safe": True}
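
Aside (not part of the diff): with the per-module setup() functions gone, registration happens once, in the package's __init__, through the public app.add_lexer() API instead of by writing into sphinx.highlighting.lexers; documents then select the lexers by their registered names ("asdl", "peg"), per the comments in __init__.py. As a rough sketch only, the moved PEG lexer can also be used directly with Pygments; the grammar fragment below is illustrative, not taken from Grammar/python.gram:

    # Sketch only: render a tiny PEG-style fragment to HTML with the moved lexer.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter

    from lexers import PEGLexer  # assumes Doc/tools/extensions is importable

    fragment = "start: expr NEWLINE* ENDMARKER"  # illustrative rule, not from the real grammar
    print(highlight(fragment, PEGLexer(), HtmlFormatter()))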