Skip to content

Commit

Permalink
fixes issue #173: crawler will ignore SSL certificates; fixes #174: cac…
Browse files Browse the repository at this point in the history
…hes the found firewall into memory, in case we run across it again we don't waste our time trying to discover it; fixes #175 and #176: if there are unicode chars in the value it will not be saved
  • Loading branch information
ekultek committed Nov 28, 2017
1 parent 4c496b2 commit 97187c0
Show file tree
Hide file tree
Showing 8 changed files with 90 additions and 16 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ There are some requirements for this to be run successfully.

### Installation

You can download the latest [tar.gz](https://github.com/ekultek/zeus-scanner/tarball/master), the latest [zip](https://github.com/ekultek/zeus-scanner/zipball/master), or you can find the current stable release [here](https://github.com/Ekultek/Zeus-Scanner/releases/tag/v1.2). Alternatively you can install the latest development version by following the instructions that best match your operating system:
You can download the latest [tar.gz](https://github.com/ekultek/zeus-scanner/tarball/master), the latest [zip](https://github.com/ekultek/zeus-scanner/zipball/master), or you can find the current stable release [here](https://github.com/Ekultek/Zeus-Scanner/releases/tag/v1.3). Alternatively you can install the latest development version by following the instructions that best match your operating system:

**_NOTE: (optional but highly advised)_** add sqlmap and nmap to your environment PATH by moving them to `/usr/bin` or by adding them to the PATH via terminal

Expand Down
10 changes: 6 additions & 4 deletions etc/checksum/md5sum.md5
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
a48339bbd1bbcd2e27650fd930ebfa4c ./zeus.py
e4ea2d20dd1e0ec58e68159689e2cb74 ./zeus.py
4b32db388e8acda35570c734d27c950c ./etc/scripts/launch_sqlmap.sh
6ad5f22ec4a6f8324bfb1b01ab6d51ec ./etc/scripts/cleanup.sh
74d7bee13890a9dd279bb857591647ce ./etc/scripts/reinstall.sh
Expand Down Expand Up @@ -55,6 +55,7 @@ bf5285dc059c761e1719bc734ae8504f ./lib/firewall/varnish.py
cb45428e92485b759ff5cb46a0be9c73 ./lib/firewall/yunsuo.py
bbd8b4c6100070d420d48dc7dfc297eb ./lib/firewall/webknight.py
95b908a21c0ff456ae59df4c6c189c54 ./lib/firewall/wallarm.py
8fc8d62377bebbfa7ca4d70a79eab115 ./lib/firewall/bigip.py
6ea65a0160c21e144e92334acc2e3667 ./lib/firewall/anquanbao.py
7f4e3ba2f459926fc77bcddc17b933aa ./lib/firewall/generic.py
cf236a16c7869282f55dd4c5ad6347a5 ./lib/attacks/gist_lookup/__init__.py
Expand All @@ -68,15 +69,16 @@ d2846e039fefee741db24dd64f7bd50e ./lib/attacks/admin_panel_finder/__init__.py
b5cd5e913cc62112776153bdf0f60fa4 ./lib/attacks/xss_scan/__init__.py
63c45495ec1ed2e98946bef514d8805e ./lib/attacks/nmap_scan/__init__.py
216999fa0e84866d5c1d96d5676034e4 ./lib/attacks/nmap_scan/nmap_opts.py
7267f30f11ed3d096e222da949da5bea ./lib/header_check/__init__.py
b6f5f8e43c1e480329b66e193bd91751 ./lib/header_check/__init__.py
9a2bb0d52f64e12d5a63ce83874ea74a ./lib/core/common.py
1faa2b5dfad6eb538bbfe42942d2a9da ./lib/core/errors.py
d41d8cd98f00b204e9800998ecf8427e ./lib/core/__init__.py
bf158550d8f51f4841fd1b003cb71c55 ./lib/core/settings.py
4a87e14ed7a070ae15b1ed7ac7ceaecc ./lib/core/settings.py
4b507b34677b414b8338475fea2c012a ./lib/core/cache.py
9a02e5b913d210350545ac26510a63c9 ./var/search/__init__.py
8402f23a2586b6f684fb1c3c04c4386f ./var/search/selenium_search.py
63ba132381a0cc2d7629852bd5e4aa17 ./var/search/pgp_search.py
d41d8cd98f00b204e9800998ecf8427e ./var/__init__.py
d41d8cd98f00b204e9800998ecf8427e ./var/auto_issue/__init__.py
0c11c16126baf789388a661bbbefb149 ./var/auto_issue/github.py
df0c4467256fb6fb7ab6c40209e4ab6d ./var/blackwidow/__init__.py
7608a7a8195d6d66ebae7664f25639fd ./var/blackwidow/__init__.py
27 changes: 27 additions & 0 deletions lib/core/cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from functools import wraps

import lib.core.settings


def cache(func):
    """
    Memoizing decorator: if we come across the same URL more than once, the
    detection result is cached in memory so that we don't have to test it
    again.

    The cache key includes both the positional arguments AND the keyword
    arguments, so the same URL probed with different options (verbose, proxy,
    agent, ...) is re-tested instead of being served a stale result.
    """
    __cache = {}

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # build a hashable key from positional args plus sorted kwargs so
        # calls that differ only in keyword options don't collide on the
        # same cache entry
        key = (args, tuple(sorted(kwargs.items())))
        if key in __cache:
            lib.core.settings.logger.warning(lib.core.settings.set_color(
                "cached detection has shown that the target URL WAF/IPS/IDS is '{}'...".format(
                    __cache[key]
                ), level=35
            ))
            return __cache[key]
        # cache miss: run the real detection once and remember the outcome
        result = func(*args, **kwargs)
        __cache[key] = result
        return result

    return func_wrapper
2 changes: 1 addition & 1 deletion lib/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
ISSUE_LINK = "https://github.com/ekultek/zeus-scanner/issues"

# current version <major.minor.commit.patch ID>
VERSION = "1.2.41.{}".format(PATCH_ID)
VERSION = "1.3".format(PATCH_ID)

# colors to output depending on the version
VERSION_TYPE_COLORS = {"dev": 33, "stable": 92, "other": 30}
Expand Down
19 changes: 19 additions & 0 deletions lib/firewall/bigip.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import re

from lib.core.common import HTTP_HEADER


__item__ = "BIG-IP Application Security Manager (F5 Networks)"


def detect(content, **kwargs):
    """
    Detect the BIG-IP Application Security Manager firewall from a
    response's HTTP headers.

    Searches the Server and Set-Cookie headers for F5's telltale signs:
    a TS session cookie ("TS" followed by 4+ word chars and "="), a
    BIGip*/BipServer cookie name, or a bare "F5" Server banner.
    `content` (the response body) is accepted for interface parity with
    the other firewall scripts but is not inspected here.

    Returns True on a signature match, None otherwise (callers treat the
    result as a boolean).
    """
    # fall back to an empty mapping so a missing or explicitly-None
    # `headers` kwarg doesn't raise AttributeError on .get() below
    headers = kwargs.get("headers", None) or {}
    detection_schema = (
        re.compile(r"\ATS\w{4,}=", re.I), re.compile(r"BIGip|BipServer", re.I),
        re.compile(r"\AF5\Z", re.I)
    )
    for detection in detection_schema:
        if detection.search(headers.get(HTTP_HEADER.SERVER, "")) is not None:
            return True
        if detection.search(headers.get(HTTP_HEADER.SET_COOKIE, "")) is not None:
            return True
41 changes: 33 additions & 8 deletions lib/header_check/__init__.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import os
import re
import importlib
import unicodedata

import requests
from xml.dom import minidom
from requests.exceptions import ConnectionError

from var.auto_issue.github import request_issue_creation
from lib.core.cache import cache
from lib.core.common import (
write_to_log_file,
shutdown,
Expand All @@ -32,6 +34,7 @@
)


@cache
def detect_protection(url, **kwargs):
verbose = kwargs.get("verbose", False)
agent = kwargs.get("agent", None)
Expand Down Expand Up @@ -68,12 +71,13 @@ def detect_protection(url, **kwargs):

html, status, headers = protection_check_req.content, protection_check_req.status_code, protection_check_req.headers

for dbms in DBMS_ERRORS: # make sure there are no DBMS errors in the HTML
# make sure there are no DBMS errors in the HTML
for dbms in DBMS_ERRORS:
for regex in DBMS_ERRORS[dbms]:
if re.compile(regex).search(html) is not None:
logger.info(set_color(
logger.warning(set_color(
"it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
"to SQL injection attacks. it appears the backend DBMS is '{}'...".format(dbms), level=25
"to SQL injection attacks. it appears the backend DBMS is '{}'...".format(dbms), level=30
))
return None

Expand All @@ -94,7 +98,7 @@ def detect_protection(url, **kwargs):
if len(retval) >= 2:
try:
del retval[retval.index("Generic (Unknown)")]
except:
except (Exception, IndexError):
logger.warning(set_color(
"multiple firewalls identified ({}), displaying most likely...".format(
", ".join(retval)
Expand All @@ -105,9 +109,11 @@ def detect_protection(url, **kwargs):
logger.warning(set_color(
"discovered firewall is unknown to Zeus, saving fingerprint to file. "
"if you know the details or the context of the firewall please create "
"an issue with the fingerprint, or a pull request with the script...", level=30
"an issue ({}) with the fingerprint, or a pull request with the script...".format(
ISSUE_LINK
), level=30
))
fingerprint = "<!---\nStatus: {}\nHeaders: {}\n--->\n{}".format(
fingerprint = "<!---\nHTTP 1.1\nStatus Code: {}\nHTTP Headers: {}\n--->\n{}".format(
status, headers, html
)
write_to_log_file(fingerprint, UNKNOWN_FIREWALL_FINGERPRINT_PATH, UNKNOWN_FIREWALL_FILENAME)
Expand Down Expand Up @@ -143,12 +149,14 @@ def load_xml_data(path, start_node="header", search_node="name"):

def load_headers(url, **kwargs):
"""
load the URL headers
load the HTTP headers
"""
agent = kwargs.get("agent", None)
proxy = kwargs.get("proxy", None)
xforward = kwargs.get("xforward", False)

literal_match = re.compile(r"\\(\X(\d+)?\w+)?", re.I)

if proxy is not None:
proxy = proxy_string_to_dict(proxy)
if not xforward:
Expand Down Expand Up @@ -182,7 +190,23 @@ def load_headers(url, **kwargs):
[c for c in req.cookies.itervalues()], COOKIE_LOG_PATH,
COOKIE_FILENAME.format(replace_http(url))
)
return req.headers
retval = {}
do_not_use = []
http_headers = req.headers
for header in http_headers:
try:
# test to see if there are any unicode errors in the string
retval[header] = unicodedata.normalize("NFKD", u"{}".format(http_headers[header])).encode("ascii", errors="ignore")
# just to be safe, we're going to put all the possible Unicode errors into a tuple
except (UnicodeEncodeError, UnicodeDecodeError, UnicodeError, UnicodeTranslateError, UnicodeWarning):
# if there are, we're going to append them to a `do_not_use` list
do_not_use.append(header)
retval.clear()
for head in http_headers:
# if the header is in the list, we skip it
if head not in do_not_use:
retval[head] = http_headers[head]
return retval


def compare_headers(found_headers, comparable_headers):
Expand Down Expand Up @@ -222,6 +246,7 @@ def main_header_check(url, **kwargs):
"checking if target URL is protected by some kind of WAF/IPS/IDS..."
))
identified = detect_protection(url, proxy=proxy, agent=agent, verbose=verbose, xforward=xforward)

if identified is None:
logger.info(set_color(
"no WAF/IDS/IPS has been identified on target URL...", level=25
Expand Down
3 changes: 2 additions & 1 deletion var/blackwidow/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@ def test_connection(self):
make sure the connection is good before you continue
"""
try:
attempt = requests.get(self.url, params=self.headers, proxies=self.proxy)
# verify=False will take care of SSLErrors
attempt = requests.get(self.url, params=self.headers, proxies=self.proxy, verify=False)
if attempt.status_code == 200:
return ("ok", None)
return ("fail", attempt.status_code)
Expand Down
2 changes: 1 addition & 1 deletion zeus.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ def __run_attacks_main(**kwargs):
), level=25
))
logger.info(set_color(
"checking URL headers..."
"checking for HTTP headers..."
))
main_header_check(
url, verbose=opt.runInVerbose, agent=agent_to_use,
Expand Down

0 comments on commit 97187c0

Please sign in to comment.