mirror of https://github.com/python/cpython
[3.13] GH-121970: Combine custom Pygments lexers into a package (GH-121976) (#122021)
GH-121970: Combine custom Pygments lexers into a package (GH-121976)
(cherry picked from commit 7431c3799e)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
This commit is contained in:
parent 58a84e58f4
commit 0d138df3fd
Doc/conf.py:

@@ -18,11 +18,10 @@ from pyspecific import SOURCE_URI
 # ---------------------
 
 extensions = [
-    'asdl_highlight',
     'c_annotations',
     'escape4chm',
     'glossary_search',
-    'peg_highlight',
+    'lexers',
     'pyspecific',
     'sphinx.ext.coverage',
     'sphinx.ext.doctest',
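Sphinx imports each name in ``extensions`` as an ordinary Python module or package, so the new ``'lexers'`` entry resolves to the package added below. A minimal sketch of the mechanism, assuming conf.py puts the local extensions directory on sys.path (the path line here is illustrative, not quoted from conf.py):

import os
import sys

# Make the local extensions directory importable before Sphinx resolves
# the ``extensions`` list; each local name is then a plain import.
sys.path.append(os.path.abspath('tools/extensions'))

extensions = [
    'lexers',              # local package: tools/extensions/lexers/
    'pyspecific',          # local module: tools/extensions/pyspecific.py
    'sphinx.ext.doctest',  # installed Sphinx extension, imported normally
]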
Doc/tools/extensions/lexers/__init__.py (new file):

@@ -0,0 +1,15 @@
+from .asdl_lexer import ASDLLexer
+from .peg_lexer import PEGLexer
+
+
+def setup(app):
+    # Used for highlighting Parser/Python.asdl in library/ast.rst
+    app.add_lexer("asdl", ASDLLexer)
+    # Used for highlighting Grammar/python.gram in reference/grammar.rst
+    app.add_lexer("peg", PEGLexer)
+
+    return {
+        "version": "1.0",
+        "parallel_read_safe": True,
+        "parallel_write_safe": True,
+    }
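Because the package re-exports both lexer classes, they can also be exercised outside Sphinx. A quick sanity check, as a sketch: it assumes it runs with Doc/tools/extensions on sys.path, and the ASDL fragment is illustrative, not taken from Parser/Python.asdl.

from pygments import highlight
from pygments.formatters import TerminalFormatter

from lexers import ASDLLexer

# Feed a small ASDL fragment through the lexer and print the result.
asdl_src = "module Example { stmt = Pass | Break }"
print(highlight(asdl_src, ASDLLexer(), TerminalFormatter()))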
Doc/tools/extensions/asdl_highlight.py → Doc/tools/extensions/lexers/asdl_lexer.py:

@@ -1,15 +1,6 @@
-import sys
-from pathlib import Path
-
-CPYTHON_ROOT = Path(__file__).resolve().parent.parent.parent.parent
-sys.path.append(str(CPYTHON_ROOT / "Parser"))
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import (Comment, Keyword, Name, Operator,
-                            Punctuation, Text)
-
-from asdl import builtin_types
-from sphinx.highlighting import lexers
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text
+
 
 class ASDLLexer(RegexLexer):
     name = "ASDL"
@@ -34,7 +25,10 @@ class ASDLLexer(RegexLexer):
                 r"(\w+)(\*\s|\?\s|\s)(\w+)",
                 bygroups(Name.Builtin.Pseudo, Operator, Name),
             ),
-            (words(builtin_types), Name.Builtin),
+            # Keep in line with ``builtin_types`` from Parser/asdl.py.
+            # ASDL's 4 builtin types are
+            # constant, identifier, int, string
+            ('constant|identifier|int|string', Name.Builtin),
             (r"attributes", Name.Builtin),
             (
                 _name + _text_ws + "(=)",
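The removed rule pulled ``builtin_types`` from Parser/asdl.py (hence the sys.path manipulation deleted in the previous hunk) and passed it to Pygments' words() helper, which folds a word list into a single optimized pattern via regex_opt(). The hard-coded alternation gives up that micro-optimization in exchange for a lexer with no dependency on the CPython source tree. A sketch of what words() built under the hood:

from pygments.regexopt import regex_opt

# regex_opt() factors common prefixes; for these four words it yields
# something like '(constant|i(?:dentifier|nt)|string)' (the exact
# grouping is an implementation detail of Pygments and may vary).
print(regex_opt(["constant", "identifier", "int", "string"]))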
@@ -46,8 +40,3 @@ class ASDLLexer(RegexLexer):
             (r".", Text),
         ],
     }
-
-
-def setup(app):
-    lexers["asdl"] = ASDLLexer()
-    return {'version': '1.0', 'parallel_read_safe': True}
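The deleted per-module setup() registered the lexer by mutating Sphinx's global sphinx.highlighting.lexers table. The package's setup() shown earlier goes through the documented app.add_lexer() API instead, which accepts the lexer class itself (Sphinx 2.1+) and lets the extension declare itself safe for parallel writes as well as reads. The two styles side by side, as a sketch:

from sphinx.highlighting import lexers

from lexers import ASDLLexer  # assumes Doc/tools/extensions on sys.path

# Old style: poke an instance into Sphinx's global lexer table.
lexers["asdl"] = ASDLLexer()

# New style: register through the extension API from setup();
# Sphinx instantiates the class on demand.
def setup(app):
    app.add_lexer("asdl", ASDLLexer)
    return {
        "version": "1.0",
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }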
Doc/tools/extensions/peg_highlight.py → Doc/tools/extensions/lexers/peg_lexer.py:

@@ -1,8 +1,6 @@
 from pygments.lexer import RegexLexer, bygroups, include
 from pygments.token import Comment, Keyword, Name, Operator, Punctuation, Text
 
-from sphinx.highlighting import lexers
-
 
 class PEGLexer(RegexLexer):
     """Pygments Lexer for PEG grammar (.gram) files
@@ -81,8 +79,3 @@
             (r".", Text),
         ],
     }
-
-
-def setup(app):
-    lexers["peg"] = PEGLexer()
-    return {"version": "1.0", "parallel_read_safe": True}
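A similar smoke test for the relocated PEG lexer, again as a sketch: the grammar line is illustrative, not quoted from Grammar/python.gram, and the import assumes Doc/tools/extensions is on sys.path.

from lexers.peg_lexer import PEGLexer

# Print the (token_type, value) stream for one PEG rule.
fragment = "file: statements? ENDMARKER { make_module(p, statements) }"
for token_type, value in PEGLexer().get_tokens(fragment):
    print(token_type, repr(value))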