FX Shell Backdoor
Home | Tools | Mass Delete | Mass Deface | Symlink | About
Website : vivehg.com
Ip Address : 172.31.2.149
Port : 443
Kernel : Linux 52-72-122-155.cprapid.com 5.15.0-1084-aws #91~20.04.1-Ubuntu SMP Fri May 2 06:59:36 UTC 2025 x86_64
Protocol : HTTP/1.1
Save Data :
Connection :
Server : Apache
Root : /home/vivehg/public_html
G-Interface : CGI/1.1
R-Method : GET
Browser : Other
Shell Version : 1.0 (Release Candidate)
Author : FierzaXploit/Mr.MF33
PATH : /usr/lib/python2.7/lib2to3/pgen2/
Upload | Create File | Create Folder | Create Ransomweb
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

"""

__author__ = "Guido van Rossum <guido@python.org>"

__all__ = ["Driver", "load_grammar"]

# Python imports
import codecs
import os
import logging
import pkgutil
import StringIO
import sys

# Pgen imports
from . import grammar, parse, token, tokenize, pgen


class Driver(object):

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = u""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, encoding=None, debug=False):
        """Parse a file and return the syntax tree."""
        stream = codecs.open(filename, "r", encoding)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
        return self.parse_tokens(tokens, debug)


def _generate_pickle_name(gt):
    head, tail = os.path.splitext(gt)
    if tail == ".txt":
        tail = ""
    return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"


def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle)."""
    if logger is None:
        logger = logging.getLogger()
    gp = _generate_pickle_name(gt) if gp is None else gp
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        # the pickle files mismatch, when built on different architectures.
        # don't save these for now. An alternative solution might be to
        # include the multiarch triplet into the file name
        if False:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except IOError as e:
                logger.info("Writing failed: %s", e)
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g


def _newer(a, b):
    """Inquire whether file a was written since file b."""
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)


def load_packaged_grammar(package, grammar_source):
    """Normally, loads a pickled grammar by doing
        pkgutil.get_data(package, pickled_grammar)
    where *pickled_grammar* is computed from *grammar_source* by adding the
    Python version and using a ``.pickle`` extension.

    However, if *grammar_source* is an extant file, load_grammar(grammar_source)
    is called instead. This facilitates using a packaged grammar file when
    needed but preserves load_grammar's automatic regeneration behavior when
    possible.

    """
    if os.path.isfile(grammar_source):
        return load_grammar(grammar_source)
    pickled_name = _generate_pickle_name(os.path.basename(grammar_source))
    data = pkgutil.get_data(package, pickled_name)
    g = grammar.Grammar()
    g.loads(data)
    return g


def main(*args):
    """Main program, when run as a script: produce grammar pickle files.

    Calls load_grammar for each argument, a path to a grammar text file.
    """
    if not args:
        args = sys.argv[1:]
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format='%(message)s')
    for gt in args:
        load_grammar(gt, save=True, force=True)
    return True

if __name__ == "__main__":
    sys.exit(int(not main()))
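The code above appears to be the stock lib2to3 pgen2 parser driver (driver.py) from the directory being browsed. For reference, here is a minimal usage sketch of its public API, assuming a standard Python 2 installation where lib2to3's pygram and pytree helpers are importable; those two names come from lib2to3 itself, not from this page.

# Minimal usage sketch for the Driver API above (Python 2).
# Assumes a stock lib2to3 install; pygram.python_grammar and pytree.convert
# are lib2to3's bundled grammar tables and parse-tree node converter.
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("print 'hello'\n")  # source must end with a newline
print repr(tree)

Note that parse_string just feeds tokenize.generate_tokens into parse_tokens, so files, streams, and strings all go through the same token-driven path.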
Name           Options
__init__.py    Edit | Rename | Delete
__init__.pyc   Edit | Rename | Delete
conv.py        Edit | Rename | Delete
conv.pyc       Edit | Rename | Delete
driver.py      Edit | Rename | Delete
driver.pyc     Edit | Rename | Delete
grammar.py     Edit | Rename | Delete
grammar.pyc    Edit | Rename | Delete
literals.py    Edit | Rename | Delete
literals.pyc   Edit | Rename | Delete
parse.py       Edit | Rename | Delete
parse.pyc      Edit | Rename | Delete
pgen.py        Edit | Rename | Delete
pgen.pyc       Edit | Rename | Delete
token.py       Edit | Rename | Delete
token.pyc      Edit | Rename | Delete
tokenize.py    Edit | Rename | Delete
tokenize.pyc   Edit | Rename | Delete