diff --git a/lib/nasm.py b/lib/nasm.py
index 13888f4..1074730 100644
--- a/lib/nasm.py
+++ b/lib/nasm.py
@@ -82,7 +82,7 @@ def nasm2shellcode(asmcode):
         return ""
 
     shellcode = []
-    pattern = re.compile("([0-9A-F]{8})\s*([^\s]*)\s*(.*)")
+    pattern = re.compile(r"([0-9A-F]{8})\s*([^\s]*)\s*(.*)")
     matches = pattern.findall(asmcode)
 
    for line in asmcode.splitlines():
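The only change to lib/nasm.py is the raw-string prefix on the regex, and the same conversion recurs throughout lib/utils.py and peda.py below: sequences such as \s inside a plain string literal are not valid Python escapes, so CPython warns about them at parse time (DeprecationWarning since 3.6, SyntaxWarning since 3.12), while a raw string keeps the pattern byte-for-byte identical. A minimal illustration, with a made-up listing line:

import re

# "\s" is not a valid Python string escape, so the interpreter warns when it
# parses the file containing this literal; the pattern still works because
# unknown escapes are passed through to the regex engine unchanged.
legacy = re.compile("([0-9A-F]{8})\s*([^\s]*)\s*(.*)")

# Raw string: identical pattern, no warning, and the intent is explicit.
fixed = re.compile(r"([0-9A-F]{8})\s*([^\s]*)\s*(.*)")

line = "08049000  31C0  xor eax,eax"  # hypothetical assembler listing line
assert legacy.findall(line) == fixed.findall(line)
print(fixed.findall(line))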
diff --git a/lib/six.py b/lib/six.py
index ffa3fe1..4e15675 100644
--- a/lib/six.py
+++ b/lib/six.py
@@ -1,6 +1,4 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
+# Copyright (c) 2010-2020 Benjamin Peterson
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -20,6 +18,8 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+"""Utilities for writing code that runs on Python 2 and 3"""
+
 from __future__ import absolute_import
 
 import functools
@@ -29,12 +29,13 @@ import types
 
 __author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.9.0"
+__version__ = "1.16.0"
 
 
 # Useful for very coarse version differentiation.
 PY2 = sys.version_info[0] == 2
 PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
 
 if PY3:
     string_types = str,
@@ -57,6 +58,7 @@
     else:
         # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
         class X(object):
+
             def __len__(self):
                 return 1 << 31
         try:
@@ -69,6 +71,11 @@ def __len__(self):
     MAXSIZE = int((1 << 63) - 1)
     del X
 
+if PY34:
+    from importlib.util import spec_from_loader
+else:
+    spec_from_loader = None
+
 
 def _add_doc(func, doc):
     """Add documentation to a function."""
@@ -88,7 +95,7 @@ def __init__(self, name):
 
     def __get__(self, obj, tp):
         result = self._resolve()
-        setattr(obj, self.name, result) # Invokes __set__.
+        setattr(obj, self.name, result)  # Invokes __set__.
         try:
             # This is a bit ugly, but it avoids running this again by
             # removing this descriptor.
@@ -160,12 +167,14 @@ def _resolve(self):
 
 
 class _SixMetaPathImporter(object):
+
     """
     A meta path importer to import six.moves and its submodules.
 
     This class implements a PEP302 finder and loader. It should be compatible
     with Python 2.5 and all existing versions of Python3
     """
+
     def __init__(self, six_module_name):
         self.name = six_module_name
         self.known_modules = {}
@@ -182,6 +191,11 @@ def find_module(self, fullname, path=None):
             return self
         return None
 
+    def find_spec(self, fullname, path, target=None):
+        if fullname in self.known_modules:
+            return spec_from_loader(fullname, self)
+        return None
+
     def __get_module(self, fullname):
         try:
             return self.known_modules[fullname]
@@ -219,10 +233,17 @@ def get_code(self, fullname):
         return None
     get_source = get_code  # same as get_code
 
+    def create_module(self, spec):
+        return self.load_module(spec.name)
+
+    def exec_module(self, module):
+        pass
+
 
 _importer = _SixMetaPathImporter(__name__)
 
 
 class _MovedItems(_LazyModule):
+
     """Lazy loading of moved objects"""
     __path__ = []  # mark as package
@@ -234,8 +255,11 @@ class _MovedItems(_LazyModule):
     MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
     MovedAttribute("intern", "__builtin__", "sys"),
     MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+    MovedAttribute("getoutput", "commands", "subprocess"),
     MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
     MovedAttribute("reduce", "__builtin__", "functools"),
     MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
     MovedAttribute("StringIO", "StringIO", "io"),
@@ -245,21 +269,23 @@ class _MovedItems(_LazyModule):
     MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
     MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
-
     MovedModule("builtins", "__builtin__"),
     MovedModule("configparser", "ConfigParser"),
+    MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
     MovedModule("copyreg", "copy_reg"),
     MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
-    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+    MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
     MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
     MovedModule("http_cookies", "Cookie", "http.cookies"),
     MovedModule("html_entities", "htmlentitydefs", "html.entities"),
     MovedModule("html_parser", "HTMLParser", "html.parser"),
     MovedModule("http_client", "httplib", "http.client"),
+    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+    MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
     MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
     MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
     MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
-    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
     MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
     MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
     MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
@@ -292,8 +318,13 @@ class _MovedItems(_LazyModule):
     MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
     MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
     MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-    MovedModule("winreg", "_winreg"),
 ]
+# Add windows specific modules.
+if sys.platform == "win32":
+    _moved_attributes += [
+        MovedModule("winreg", "_winreg"),
+    ]
+
 for attr in _moved_attributes:
     setattr(_MovedItems, attr.name, attr)
     if isinstance(attr, MovedModule):
@@ -307,6 +338,7 @@ class _MovedItems(_LazyModule):
 
 
 class Module_six_moves_urllib_parse(_LazyModule):
+
     """Lazy loading of moved objects in six.moves.urllib_parse"""
 
 
@@ -325,10 +357,12 @@ class Module_six_moves_urllib_parse(_LazyModule):
     MovedAttribute("quote_plus", "urllib", "urllib.parse"),
     MovedAttribute("unquote", "urllib", "urllib.parse"),
     MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+    MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
     MovedAttribute("urlencode", "urllib", "urllib.parse"),
     MovedAttribute("splitquery", "urllib", "urllib.parse"),
     MovedAttribute("splittag", "urllib", "urllib.parse"),
     MovedAttribute("splituser", "urllib", "urllib.parse"),
+    MovedAttribute("splitvalue", "urllib", "urllib.parse"),
     MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
     MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
     MovedAttribute("uses_params", "urlparse", "urllib.parse"),
@@ -346,6 +380,7 @@ class Module_six_moves_urllib_parse(_LazyModule):
 
 
 class Module_six_moves_urllib_error(_LazyModule):
+
     """Lazy loading of moved objects in six.moves.urllib_error"""
 
 
@@ -365,6 +400,7 @@ class Module_six_moves_urllib_error(_LazyModule):
 
 
 class Module_six_moves_urllib_request(_LazyModule):
+
     """Lazy loading of moved objects in six.moves.urllib_request"""
 
 
@@ -402,6 +438,8 @@ class Module_six_moves_urllib_request(_LazyModule):
     MovedAttribute("URLopener", "urllib", "urllib.request"),
     MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
     MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+    MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+    MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
 ]
 for attr in _urllib_request_moved_attributes:
     setattr(Module_six_moves_urllib_request, attr.name, attr)
@@ -414,6 +452,7 @@ class Module_six_moves_urllib_request(_LazyModule):
 
 
 class Module_six_moves_urllib_response(_LazyModule):
+
     """Lazy loading of moved objects in six.moves.urllib_response"""
 
 
@@ -434,6 +473,7 @@ class Module_six_moves_urllib_response(_LazyModule):
 
 
 class Module_six_moves_urllib_robotparser(_LazyModule):
+
     """Lazy loading of moved objects in six.moves.urllib_robotparser"""
 
 
@@ -451,6 +491,7 @@ class Module_six_moves_urllib_robotparser(_LazyModule):
 
 
 class Module_six_moves_urllib(types.ModuleType):
+
     """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
     __path__ = []  # mark as package
     parse = _importer._get_module("moves.urllib_parse")
@@ -521,6 +562,9 @@ def get_unbound_function(unbound):
 
     create_bound_method = types.MethodType
 
+    def create_unbound_method(func, cls):
+        return func
+
     Iterator = object
 else:
     def get_unbound_function(unbound):
@@ -529,6 +573,9 @@ def get_unbound_function(unbound):
     def create_bound_method(func, obj):
         return types.MethodType(func, obj, obj.__class__)
 
+    def create_unbound_method(func, cls):
+        return types.MethodType(func, None, cls)
+
     class Iterator(object):
 
         def next(self):
@@ -567,16 +614,16 @@ def iterlists(d, **kw):
     viewitems = operator.methodcaller("items")
 
 else:
     def iterkeys(d, **kw):
-        return iter(d.iterkeys(**kw))
+        return d.iterkeys(**kw)
 
     def itervalues(d, **kw):
-        return iter(d.itervalues(**kw))
+        return d.itervalues(**kw)
 
     def iteritems(d, **kw):
-        return iter(d.iteritems(**kw))
+        return d.iteritems(**kw)
 
     def iterlists(d, **kw):
-        return iter(d.iterlists(**kw))
+        return d.iterlists(**kw)
 
     viewkeys = operator.methodcaller("viewkeys")
@@ -595,34 +642,42 @@ def iterlists(d, **kw):
 if PY3:
     def b(s):
         return s.encode("latin-1")
+
     def u(s):
         return s
     unichr = chr
-    if sys.version_info[1] <= 1:
-        def int2byte(i):
-            return bytes((i,))
-    else:
-        # This is about 2x faster than the implementation above on 3.2+
-        int2byte = operator.methodcaller("to_bytes", 1, "big")
+    import struct
+    int2byte = struct.Struct(">B").pack
+    del struct
     byte2int = operator.itemgetter(0)
     indexbytes = operator.getitem
     iterbytes = iter
     import io
     StringIO = io.StringIO
     BytesIO = io.BytesIO
+    del io
     _assertCountEqual = "assertCountEqual"
-    _assertRaisesRegex = "assertRaisesRegex"
-    _assertRegex = "assertRegex"
+    if sys.version_info[1] <= 1:
+        _assertRaisesRegex = "assertRaisesRegexp"
+        _assertRegex = "assertRegexpMatches"
+        _assertNotRegex = "assertNotRegexpMatches"
+    else:
+        _assertRaisesRegex = "assertRaisesRegex"
+        _assertRegex = "assertRegex"
+        _assertNotRegex = "assertNotRegex"
 else:
     def b(s):
         return s
     # Workaround for standalone backslash
+
     def u(s):
         return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
     unichr = unichr
     int2byte = chr
+
     def byte2int(bs):
         return ord(bs[0])
+
     def indexbytes(buf, i):
         return ord(buf[i])
     iterbytes = functools.partial(itertools.imap, ord)
@@ -631,6 +686,7 @@ def indexbytes(buf, i):
     _assertCountEqual = "assertItemsEqual"
     _assertRaisesRegex = "assertRaisesRegexp"
     _assertRegex = "assertRegexpMatches"
+    _assertNotRegex = "assertNotRegexpMatches"
 
 _add_doc(b, """Byte literal""")
 _add_doc(u, """Text literal""")
@@ -647,16 +703,23 @@ def assertRegex(self, *args, **kwargs):
     return getattr(self, _assertRegex)(*args, **kwargs)
 
 
+def assertNotRegex(self, *args, **kwargs):
+    return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
 if PY3:
     exec_ = getattr(moves.builtins, "exec")
 
     def reraise(tp, value, tb=None):
-        if value is None:
-            value = tp()
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
+        try:
+            if value is None:
+                value = tp()
+            if value.__traceback__ is not tb:
+                raise value.with_traceback(tb)
+            raise value
+        finally:
+            value = None
+            tb = None
 
 else:
     def exec_(_code_, _globs_=None, _locs_=None):
@@ -671,21 +734,20 @@ def exec_(_code_, _globs_=None, _locs_=None):
             _locs_ = _globs_
         exec("""exec _code_ in _globs_, _locs_""")
 
     exec_("""def reraise(tp, value, tb=None):
-    raise tp, value, tb
+    try:
+        raise tp, value, tb
+    finally:
+        tb = None
 """)
 
 
-if sys.version_info[:2] == (3, 2):
+if sys.version_info[:2] > (3,):
     exec_("""def raise_from(value, from_value):
-    if from_value is None:
-        raise value
-    raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
-    exec_("""def raise_from(value, from_value):
-    raise value from from_value
+    try:
+        raise value from from_value
+    finally:
+        value = None
 """)
 else:
     def raise_from(value, from_value):
@@ -699,13 +761,14 @@ def print_(*args, **kwargs):
         fp = kwargs.pop("file", sys.stdout)
         if fp is None:
             return
+
         def write(data):
             if not isinstance(data, basestring):
                 data = str(data)
             # If the file has an encoding, encode unicode with it.
             if (isinstance(fp, file) and
-                isinstance(data, unicode) and
-                fp.encoding is not None):
+                    isinstance(data, unicode) and
+                    fp.encoding is not None):
                 errors = getattr(fp, "errors", None)
                 if errors is None:
                     errors = "strict"
@@ -748,6 +811,7 @@ def write(data):
         write(end)
 
 if sys.version_info[:2] < (3, 3):
     _print = print_
+
     def print_(*args, **kwargs):
         fp = kwargs.get("file", sys.stdout)
         flush = kwargs.pop("flush", False)
@@ -758,24 +822,58 @@ def print_(*args, **kwargs):
 _add_doc(reraise, """Reraise an exception.""")
 
 if sys.version_info[0:2] < (3, 4):
+    # This does exactly the same what the :func:`py3:functools.update_wrapper`
+    # function does on Python versions after 3.2. It sets the ``__wrapped__``
+    # attribute on ``wrapper`` object and it doesn't raise an error if any of
+    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+    # ``wrapped`` object.
+    def _update_wrapper(wrapper, wrapped,
+                        assigned=functools.WRAPPER_ASSIGNMENTS,
+                        updated=functools.WRAPPER_UPDATES):
+        for attr in assigned:
+            try:
+                value = getattr(wrapped, attr)
+            except AttributeError:
+                continue
+            else:
+                setattr(wrapper, attr, value)
+        for attr in updated:
+            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+        wrapper.__wrapped__ = wrapped
+        return wrapper
+    _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
     def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
               updated=functools.WRAPPER_UPDATES):
-        def wrapper(f):
-            f = functools.wraps(wrapped, assigned, updated)(f)
-            f.__wrapped__ = wrapped
-            return f
-        return wrapper
+        return functools.partial(_update_wrapper, wrapped=wrapped,
+                                 assigned=assigned, updated=updated)
+    wraps.__doc__ = functools.wraps.__doc__
+
 else:
     wraps = functools.wraps
 
+
 def with_metaclass(meta, *bases):
     """Create a base class with a metaclass."""
     # This requires a bit of explanation: the basic idea is to make a dummy
     # metaclass for one level of class instantiation that replaces itself with
     # the actual metaclass.
-    class metaclass(meta):
+    class metaclass(type):
+
         def __new__(cls, name, this_bases, d):
-            return meta(name, bases, d)
+            if sys.version_info[:2] >= (3, 7):
+                # This version introduced PEP 560 that requires a bit
+                # of extra care (we mimic what is done by __build_class__).
+                resolved_bases = types.resolve_bases(bases)
+                if resolved_bases is not bases:
+                    d['__orig_bases__'] = bases
+            else:
+                resolved_bases = bases
+            return meta(name, resolved_bases, d)
+
+        @classmethod
+        def __prepare__(cls, name, this_bases):
+            return meta.__prepare__(name, bases)
     return type.__new__(metaclass, 'temporary_class', (), {})
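The with_metaclass() rework above only changes the internals (on 3.7+ the bases are routed through types.resolve_bases() so PEP 560 __mro_entries__ objects keep working); the public API is unchanged. A small usage sketch, assuming the six package is importable and with made-up class names:

import six


class Meta(type):
    """Toy metaclass that tags every class it creates."""
    def __new__(mcls, name, bases, namespace):
        cls = super(Meta, mcls).__new__(mcls, name, bases, namespace)
        cls.tagged = True
        return cls


class Base(object):
    pass


# Works unchanged on Python 2 and 3.
class MyClass(six.with_metaclass(Meta, Base)):
    pass


print(type(MyClass) is Meta, MyClass.tagged)  # -> True True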
@@ -791,13 +889,75 @@ def wrapper(cls):
                 orig_vars.pop(slots_var)
         orig_vars.pop('__dict__', None)
         orig_vars.pop('__weakref__', None)
+        if hasattr(cls, '__qualname__'):
+            orig_vars['__qualname__'] = cls.__qualname__
         return metaclass(cls.__name__, cls.__bases__, orig_vars)
     return wrapper
 
 
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+    """Coerce **s** to six.binary_type.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> encoded to `bytes`
+      - `bytes` -> `bytes`
+    """
+    if isinstance(s, binary_type):
+        return s
+    if isinstance(s, text_type):
+        return s.encode(encoding, errors)
+    raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+    """Coerce *s* to `str`.
+
+    For Python 2:
+      - `unicode` -> encoded to `str`
+      - `str` -> `str`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    # Optimization: Fast return for the common case.
+    if type(s) is str:
+        return s
+    if PY2 and isinstance(s, text_type):
+        return s.encode(encoding, errors)
+    elif PY3 and isinstance(s, binary_type):
+        return s.decode(encoding, errors)
+    elif not isinstance(s, (text_type, binary_type)):
+        raise TypeError("not expecting type '%s'" % type(s))
+    return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+    """Coerce *s* to six.text_type.
+
+    For Python 2:
+      - `unicode` -> `unicode`
+      - `str` -> `unicode`
+
+    For Python 3:
+      - `str` -> `str`
+      - `bytes` -> decoded to `str`
+    """
+    if isinstance(s, binary_type):
+        return s.decode(encoding, errors)
+    elif isinstance(s, text_type):
+        return s
+    else:
+        raise TypeError("not expecting type '%s'" % type(s))
+
+
 def python_2_unicode_compatible(klass):
     """
-    A decorator that defines __unicode__ and __str__ methods under Python 2.
+    A class decorator that defines __unicode__ and __str__ methods under Python 2.
     Under Python 3 it does nothing.
 
     To support Python 2 and 3 with a single code base, define a __str__ method
@@ -830,7 +990,7 @@ def python_2_unicode_compatible(klass):
         # the six meta path importer, since the other six instance will have
         # inserted an importer with different class.
         if (type(importer).__name__ == "_SixMetaPathImporter" and
-            importer.name == __name__):
+                importer.name == __name__):
             del sys.meta_path[i]
             break
     del i, importer
diff --git a/lib/utils.py b/lib/utils.py
index 8f2b038..42ab695 100644
--- a/lib/utils.py
+++ b/lib/utils.py
@@ -523,7 +523,7 @@ def format_disasm_code(code, nearby=None):
             result += line + "\n"
         else:
             color = style = None
-            m = re.search(".*(0x[^ ]*).*:\s*([^ ]*)", line)
+            m = re.search(r".*(0x[^ ]*).*:\s*([^ ]*)", line)
             if not m: # failed to parse
                 result += line + "\n"
                 continue
@@ -540,7 +540,7 @@ def format_disasm_code(code, nearby=None):
                     break
 
         prefix = line.split(":\t")[0]
-        addr = re.search("(0x[^\s]*)", prefix)
+        addr = re.search(r"(0x[^\s]*)", prefix)
         if addr:
             addr = to_int(addr.group(1))
         else:
@@ -589,7 +589,7 @@ def cyclic_pattern_charset(charset_type=None):
         charset[2] = "sn()" + charset[2]
 
     if charset_type == 2: # maximum type
-        charset += ['!"#$%&\()*+,-./:;<=>?@[]^_{|}~'] # string.punctuation
+        charset += [r'!"#$%&\()*+,-./:;<=>?@[]^_{|}~'] # string.punctuation
 
     mixed_charset = mixed = ''
     k = 0
diff --git a/peda.py b/peda.py
index 14a7f5e..c4889a3 100644
--- a/peda.py
+++ b/peda.py
@@ -108,22 +108,22 @@ def execute_redirect(self, gdb_command, silent=False):
         else:
             logfd = tmpfile()
             logname = logfd.name
-        gdb.execute('set logging off') # prevent nested call
+        gdb.execute('set logging enabled off') # prevent nested call
         gdb.execute('set height 0') # disable paging
         gdb.execute('set logging file %s' % logname)
         gdb.execute('set logging overwrite on')
         gdb.execute('set logging redirect on')
-        gdb.execute('set logging on')
+        gdb.execute('set logging enabled on')
        try:
             gdb.execute(gdb_command)
             gdb.flush()
-            gdb.execute('set logging off')
+            gdb.execute('set logging enabled off')
             if not silent:
                 logfd.flush()
                 result = logfd.read()
                 logfd.close()
         except Exception as e:
-            gdb.execute('set logging off') #to be sure
+            gdb.execute('set logging enabled off') #to be sure
             if config.Option.get("debug") == "on":
                 msg('Exception (%s): %s' % (gdb_command, e), "red")
                 traceback.print_exc()
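Every gdb.execute('set logging ...') toggle above switches to the 'set logging enabled on|off' spelling introduced by GDB 12, which deprecates the old 'set logging on|off' form. The patch hard-codes the new syntax, so it assumes GDB >= 12; a sketch of a fallback helper that would also keep older GDBs working (this helper is illustrative and not part of the patch):

import gdb  # only available inside a GDB Python session

def set_logging(enabled):
    """Toggle GDB logging, preferring the GDB >= 12 syntax.

    Falls back to the pre-12 'set logging on/off' spelling if the
    newer command is rejected.
    """
    state = "on" if enabled else "off"
    try:
        gdb.execute("set logging enabled %s" % state)
    except gdb.error:
        gdb.execute("set logging %s" % state)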
and "r"+r not in exp: exp = exp.replace(r, "$%s" % r) - p = re.compile("(.*)\[(.*)\]") # DWORD PTR [esi+eax*1] + p = re.compile(r"(.*)\[(.*)\]") # DWORD PTR [esi+eax*1] matches = p.search(exp) if not matches: p = re.compile("(.*).s:(0x.*)") # DWORD PTR ds:0xdeadbeef @@ -370,7 +370,7 @@ def getfile(self): result = None out = self.execute_redirect('info files') if out and '"' in out: - p = re.compile(".*exec file:\s*`(.*)'") + p = re.compile(r".*exec file:\s*`(.*)'") m = p.search(out) if m: result = m.group(1) @@ -547,11 +547,11 @@ def get_breakpoint(self, num): lines = out.splitlines()[1:] # breakpoint regex - p = re.compile("^(\d*)\s*(.*breakpoint)\s*(keep|del)\s*(y|n)\s*(0x[^ ]*)\s*(.*)") + p = re.compile(r"^(\d*)\s*(.*breakpoint)\s*(keep|del)\s*(y|n)\s*(0x[^ ]*)\s*(.*)") m = p.match(lines[0]) if not m: # catchpoint/watchpoint regex - p = re.compile("^(\d*)\s*(.*point)\s*(keep|del)\s*(y|n)\s*(.*)") + p = re.compile(r"^(\d*)\s*(.*point)\s*(keep|del)\s*(y|n)\s*(.*)") m = p.match(lines[0]) if not m: return None @@ -564,7 +564,7 @@ def get_breakpoint(self, num): disp = True if disp == "keep" else False enb = True if enb == "y" else False addr = to_int(addr) - m = re.match("in.*at(.*:\d*)", what) + m = re.match(r"in.*at(.*:\d*)", what) if m: what = m.group(1) else: @@ -593,7 +593,7 @@ def get_breakpoints(self): bplist = [] for line in out.splitlines(): - m = re.match("^(\d*).*", line) + m = re.match(r"^(\d*).*", line) if m and to_int(m.group(1)): bplist += [to_int(m.group(1))] @@ -913,9 +913,9 @@ def xrefs(self, search="", filename=None): out = self.execute_redirect("x/i 0x%x" % addr) if out: line = out - p = re.compile("\s*(0x[^ ]*).*?:\s*([^ ]*)\s*(.*)") + p = re.compile(r"\s*(0x[^ ]*).*?:\s*([^ ]*)\s*(.*)") else: - p = re.compile("(.*?)\s*<.*?>\s*([^ ]*)\s*(.*)") + p = re.compile(r"(.*?)\s*<.*?>\s*([^ ]*)\s*(.*)") m = p.search(line) if m: @@ -934,7 +934,7 @@ def _get_function_args_32(self, code, argc=None): """ if not argc: argc = 0 - p = re.compile(".*mov.*\[esp(.*)\],") + p = re.compile(r".*mov.*\[esp(.*)\],") matches = p.findall(code) if matches: l = len(matches) @@ -966,7 +966,7 @@ def _get_function_args_64(self, code, argc=None): # just retrieve max 6 args arg_order = ["rdi", "rsi", "rdx", "rcx", "r8", "r9"] - p = re.compile(":\s*([^ ]*)\s*(.*),") + p = re.compile(r":\s*([^ ]*)\s*(.*),") matches = p.findall(code) regs = [r for (_, r) in matches] p = re.compile(("di|si|dx|cx|r8|r9")) @@ -1113,7 +1113,7 @@ def stepuntil(self, inst, mapname=None, depth=None): break #p = re.compile(".*?:\s*([^ ]*)") - p = re.compile(".*?:\s*(.*)") + p = re.compile(r".*?:\s*(.*)") code = p.match(current_instruction).group(1) found = 0 for i in inst.replace(",", " ").split(): @@ -1220,10 +1220,10 @@ def eval_target(self, inst): inst = inst.strip() opcode = inst.split(":\t")[-1].split()[0] # this regex includes x86_64 RIP relateive address reference - p = re.compile(".*?:\s*[^ ]*\s*(.* PTR ).*(0x[^ ]*)") + p = re.compile(r".*?:\s*[^ ]*\s*(.* PTR ).*(0x[^ ]*)") m = p.search(inst) if not m: - p = re.compile(".*?:\s.*\s(0x[^ ]*|\w+)") + p = re.compile(r".*?:\s.*\s(0x[^ ]*|\w+)") m = p.search(inst) if m: target = m.group(1) @@ -1232,7 +1232,7 @@ def eval_target(self, inst): target = None else: if "]" in m.group(2): # e.g DWORD PTR [ebx+0xc] - p = re.compile(".*?:\s*[^ ]*\s*(.* PTR ).*\[(.*)\]") + p = re.compile(r".*?:\s*[^ ]*\s*(.* PTR ).*\[(.*)\]") m = p.search(inst) target = self.parse_and_eval("%s[%s]" % (m.group(1), m.group(2).strip())) @@ -1427,7 +1427,7 @@ def _get_offline_maps(): def _get_allmaps_osx(pid, 
@@ -1427,7 +1427,7 @@ def _get_offline_maps():
         def _get_allmaps_osx(pid, remote=False):
             maps = []
             #_DATA 00007fff77975000-00007fff77976000 [ 4K] rw-/rw- SM=COW /usr/lib/system/libremovefile.dylib
-            pattern = re.compile("([^\n]*)\s* ([0-9a-f][^-\s]*)-([^\s]*) \[.*\]\s([^/]*).* (.*)")
+            pattern = re.compile(r"([^\n]*)\s* ([0-9a-f][^-\s]*)-([^\s]*) \[.*\]\s([^/]*).* (.*)")
 
             if remote: # remote target, not yet supported
                 return maps
@@ -2093,7 +2093,7 @@ def examine_data(value, bits=32):
                 if value >= start and value < end:
                     if type == "code":
                         out = self.get_disasm(value)
-                        p = re.compile(".*?0x[^ ]*?\s(.*)")
+                        p = re.compile(r".*?0x[^ ]*?\s(.*)")
                         m = p.search(out)
                         result = (to_hex(value), "code", m.group(1))
                     else: # rodata address
@@ -2111,7 +2111,7 @@
                     out = examine_data(value, bits)
                     result = (to_hex(value), "rodata", out.split(":", 1)[1].strip())
                 else:
-                    p = re.compile(".*?0x[^ ]*?\s(.*)")
+                    p = re.compile(r".*?0x[^ ]*?\s(.*)")
                     m = p.search(out)
                     result = (to_hex(value), "code", m.group(1))
 
@@ -2211,7 +2211,7 @@ def elfentry(self):
             - entry address (Int)
         """
         out = self.execute_redirect("info files")
-        p = re.compile("Entry point: ([^\s]*)")
+        p = re.compile(r"Entry point: ([^\s]*)")
         if out:
             m = p.search(out)
             if m:
@@ -2239,7 +2239,7 @@ def elfheader(self, name=None):
         if not out:
             return {}
 
-        p = re.compile("\s*(0x[^-]*)->(0x[^ ]*) at (0x[^:]*):\s*([^ ]*)\s*(.*)")
+        p = re.compile(r"\s*(0x[^-]*)->(0x[^ ]*) at (0x[^:]*):\s*([^ ]*)\s*(.*)")
         matches = p.findall(out)
 
         for (start, end, offset, hname, attr) in matches:
@@ -2313,7 +2313,7 @@ def elfsymbols(self, pattern=None):
             symname += "@plt"
             out = self.execute_redirect("info functions %s" % symname)
             if not out: continue
-            m = re.findall(".*(0x[^ ]*)\s*%s" % re.escape(symname), out)
+            m = re.findall(r".*(0x[^ ]*)\s*%s" % re.escape(symname), out)
             for addr in m:
                 addr = to_int(addr)
                 if self.is_address(addr, binmap):
@@ -2413,7 +2413,7 @@ def readelf_header(self, filename, name=None):
         out = execute_external_command("%s -W -S %s" % (config.READELF, filename))
         if not out:
             return {}
-        p = re.compile(".*\[.*\] (\.[^ ]*) [^0-9]* ([^ ]*) [^ ]* ([^ ]*)(.*)")
+        p = re.compile(r".*\[.*\] (\.[^ ]*) [^0-9]* ([^ ]*) [^ ]* ([^ ]*)(.*)")
         matches = p.findall(out)
         if not matches:
             return result
@@ -2471,7 +2471,7 @@ def _elfheader_solib_all():
             if not out:
                 return None
 
-            p = re.compile("[^\n]*\s*(0x[^ ]*) - (0x[^ ]*) is (\.[^ ]*) in (.*)")
+            p = re.compile(r"[^\n]*\s*(0x[^ ]*) - (0x[^ ]*) is (\.[^ ]*) in (.*)")
             soheaders = p.findall(out)
 
             result = []
@@ -2678,7 +2678,7 @@ def decode_hex_escape(str_):
             blen = gadget[-1][0] - gadget[0][0] + 1
             bytes = v[:2*blen]
             asmcode_rs = "; ".join([c for _, c in gadget])
-            if re.search(re.escape(asmcode).replace("\ ",".*").replace("\?",".*"), asmcode_rs)\
+            if re.search(re.escape(asmcode).replace(r"\ ",".*").replace(r"\?",".*"), asmcode_rs)\
             and a not in result:
                 result[a] = (bytes, asmcode_rs)
         result = list(result.items())
@@ -2829,7 +2829,7 @@ def search_jmpcall(self, start, end, regname=None):
         if regname is None:
             regname = ""
         regname = regname.lower()
-        pattern = re.compile(b'|'.join(JMPCALL).replace(b' ', b'\ '))
+        pattern = re.compile(b'|'.join(JMPCALL).replace(b' ', br'\ '))
         mem = self.dumpmem(start, end)
         found = pattern.finditer(mem)
         (arch, bits) = self.getarch()
@@ -3411,7 +3411,7 @@ def xprint(self, *arg):
         """
         text = ""
         exp = " ".join(list(arg))
-        m = re.search(".*\[(.*)\]|.*?s:(0x[^ ]*)", exp)
+        m = re.search(r".*\[(.*)\]|.*?s:(0x[^ ]*)", exp)
         if m:
             addr = peda.parse_and_eval(m.group(1))
             if to_int(addr):
@@ -3516,7 +3516,7 @@ def procinfo(self, *arg):
             fdlist = os.listdir("/proc/%d/fd" % pid)
             for fd in fdlist:
                 rpath = os.readlink("/proc/%d/fd/%s" % (pid, fd))
-                sock = re.search("socket:\[(.*)\]", rpath)
+                sock = re.search(r"socket:\[(.*)\]", rpath)
                 if sock:
                     spath = execute_external_command("netstat -aen | grep %s" % sock.group(1))
                     if spath:
@@ -3526,11 +3526,11 @@
         # uid/gid, pid, ppid
         info["pid"] = pid
         status = open("/proc/%d/status" % pid).read()
-        ppid = re.search("PPid:\s*([^\s]*)", status).group(1)
+        ppid = re.search(r"PPid:\s*([^\s]*)", status).group(1)
         info["ppid"] = to_int(ppid) if ppid else -1
-        uid = re.search("Uid:\s*([^\n]*)", status).group(1)
+        uid = re.search(r"Uid:\s*([^\n]*)", status).group(1)
         info["uid"] = [to_int(id) for id in uid.split()]
-        gid = re.search("Gid:\s*([^\n]*)", status).group(1)
+        gid = re.search(r"Gid:\s*([^\n]*)", status).group(1)
         info["gid"] = [to_int(id) for id in gid.split()]
 
         for opt in options:
@@ -4122,7 +4122,7 @@ def traceinst(self, *arg):
         prev_depth = peda.backtrace_depth(peda.getreg("sp"))
 
         logfd = open(logname, "w")
-        p = re.compile(".*?:\s*[^ ]*\s*([^,]*),(.*)")
+        p = re.compile(r".*?:\s*[^ ]*\s*([^,]*),(.*)")
         while count:
             result = peda.stepuntil(",".join(instlist), mapname, prev_depth)
             if result is None:
@@ -6112,7 +6112,7 @@ def complete(self, text, word):
 
 # handle SIGINT / Ctrl-C
 def sigint_handler(signal, frame):
     warning_msg("Got Ctrl+C / SIGINT!")
-    gdb.execute("set logging off")
+    gdb.execute("set logging enabled off")
     peda.restore_user_command("all")
     raise KeyboardInterrupt
 signal.signal(signal.SIGINT, sigint_handler)
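The PPid/Uid/Gid regexes in procinfo above are applied to the text of /proc/<pid>/status. A short illustration against a trimmed, made-up status snippet (real files contain many more fields):

import re

# Hypothetical /proc/<pid>/status content, reduced to a few lines.
status = (
    "Name:\tbash\n"
    "Pid:\t4242\n"
    "PPid:\t4100\n"
    "Uid:\t1000\t1000\t1000\t1000\n"
    "Gid:\t1000\t1000\t1000\t1000\n"
)

ppid = re.search(r"PPid:\s*([^\s]*)", status).group(1)
uid = re.search(r"Uid:\s*([^\n]*)", status).group(1)

print(int(ppid))                       # -> 4100
print([int(x) for x in uid.split()])   # -> [1000, 1000, 1000, 1000]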