Linux vps-61133.fhnet.fr 4.9.0-19-amd64 #1 SMP Debian 4.9.320-2 (2022-06-30) x86_64
Apache/2.4.25 (Debian)
Server IP : 93.113.207.21 & Your IP : 216.73.216.112
Domains :
Can't Read [ /etc/named.conf ]
User : www-data
Terminal
Auto Root
Create File
Create Folder
Localroot Suggester
Backdoor Destroyer
Readme
/
usr /
src /
Python-3.10.14 /
Doc /
tools /
extensions /
Delete
Unzip
Name
Size
Permission
Date
Action
asdl_highlight.py
1.45
KB
-rw-r--r--
2024-03-19 22:46
c_annotations.py
7.52
KB
-rw-r--r--
2024-03-19 22:46
escape4chm.py
1.78
KB
-rw-r--r--
2024-03-19 22:46
glossary_search.py
1.61
KB
-rw-r--r--
2024-03-19 22:46
patchlevel.py
1.91
KB
-rw-r--r--
2024-03-19 22:46
peg_highlight.py
2.74
KB
-rw-r--r--
2024-03-19 22:46
pyspecific.py
22.49
KB
-rw-r--r--
2024-03-19 22:46
suspicious.py
8.36
KB
-rw-r--r--
2024-03-19 22:46
Save
Rename
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Comment, Generic, Keyword, Name, Operator, Punctuation, Text

from sphinx.highlighting import lexers


class PEGLexer(RegexLexer):
    """Pygments Lexer for PEG grammar (.gram) files

    This lexer strips the following elements from the grammar:

        - Meta-tags
        - Variable assignments
        - Actions
        - Lookaheads
        - Rule types
        - Rule options
        - Rules named `invalid_*` or `incorrect_*`
    """

    name = "PEG"
    aliases = ["peg"]
    filenames = ["*.gram"]
    # Reusable regex fragments: an identifier (non-digit word start) and
    # optional whitespace. Both are capturing groups so they line up with
    # the bygroups(...) token lists below.
    _name = r"([^\W\d]\w*)"
    _text_ws = r"(\s*)"

    # NOTE: rules paired with bygroups(None) capture the text but emit no
    # token for it — that is how this lexer "strips" grammar elements from
    # the highlighted output.
    tokens = {
        # Whitespace and line comments.
        "ws": [(r"\n", Text), (r"\s+", Text), (r"#.*$", Comment.Singleline),],
        "lookaheads": [
            # Forced tokens
            (r"(&&)(?=\w+\s?)", bygroups(None)),
            (r"(&&)(?='.+'\s?)", bygroups(None)),
            (r'(&&)(?=".+"\s?)', bygroups(None)),
            (r"(&&)(?=\(.+\)\s?)", bygroups(None)),
            # Positive lookaheads (&...) directly after an alternative bar.
            (r"(?<=\|\s)(&\w+\s?)", bygroups(None)),
            (r"(?<=\|\s)(&'.+'\s?)", bygroups(None)),
            (r'(?<=\|\s)(&".+"\s?)', bygroups(None)),
            (r"(?<=\|\s)(&\(.+\)\s?)", bygroups(None)),
        ],
        "metas": [
            # Triple-quoted meta blocks (@name ''' ... ''') and single-line
            # @-directives; both are dropped from the output.
            (r"(@\w+ '''(.|\n)+?''')", bygroups(None)),
            (r"^(@.*)$", bygroups(None)),
        ],
        "actions": [
            # Grammar actions: everything between braces, non-greedy,
            # spanning newlines.
            (r"{(.|\n)+?}", bygroups(None)),
        ],
        "strings": [
            # Word-only quoted literals are keywords; punctuation-only
            # literals render as plain text.
            (r"'\w+?'", Keyword),
            (r'"\w+?"', Keyword),
            (r"'\W+?'", Text),
            (r'"\W+?"', Text),
        ],
        "variables": [
            # Plain assignment: name = ...
            (_name + _text_ws + "(=)", bygroups(None, None, None),),
            # Typed assignment: name[type] = ...
            (
                _name + _text_ws + r"(\[[\w\d_\*]+?\])" + _text_ws + "(=)",
                bygroups(None, None, None, None, None),
            ),
        ],
        "invalids": [
            # Whole alternatives referencing invalid_*/incorrect_* rules,
            # and a trailing "invalid syntax" comment plus everything after
            # it, are suppressed.
            (r"^(\s+\|\s+.*invalid_\w+.*\n)", bygroups(None)),
            (r"^(\s+\|\s+.*incorrect_\w+.*\n)", bygroups(None)),
            (r"^(#.*invalid syntax.*(?:.|\n)*)", bygroups(None),),
        ],
        "root": [
            # Order matters: invalid-rule stripping must run before the
            # generic rules below can consume the same text.
            include("invalids"),
            include("ws"),
            include("lookaheads"),
            include("metas"),
            include("actions"),
            include("strings"),
            include("variables"),
            # ALL-CAPS identifiers (except NULL/EXTRA) not followed by a
            # call parenthesis render as plain text (rule-type names).
            (r"\b(?!(NULL|EXTRA))([A-Z_]+)\b\s*(?!\()", Text,),
            # Rule header: name, optional [type], optional (options), then
            # the colon. Only the name and colon produce tokens.
            (
                r"^\s*" + _name + r"\s*" + r"(\[.*\])?" + r"\s*" + r"(\(.+\))?"
                + r"\s*(:)",
                bygroups(Name.Function, None, None, Punctuation),
            ),
            # Any other identifier is a rule reference.
            (_name, Name.Function),
            # PEG operators and grouping punctuation.
            (r"[\||\.|\+|\*|\?]", Operator),
            (r"{|}|\(|\)|\[|\]", Punctuation),
            (r".", Text),
        ],
    }


def setup(app):
    """Sphinx extension entry point: register the PEG lexer.

    Makes the "peg" language available to Sphinx code blocks and declares
    the extension safe for parallel source reading.
    """
    lexers["peg"] = PEGLexer()
    return {"version": "1.0", "parallel_read_safe": True}