Initial commit 02672f9: 16 changed files with 654 additions and 0 deletions.
**CXX crawler module** (Python)

```python
import requests
from bs4 import BeautifulSoup


class CXX:
    def __init__(self, TARGET_URL):
        # media/document extensions treated as static assets (deduplicated)
        self.ASSET_EXT = ['pdf', 'png', 'jpg', 'jpeg', 'gif', 'bmp', 'svg', 'ico', 'webp',
                          'mp3', 'mp4', 'ogg', 'wav', 'flac', 'aac', 'wma',
                          'mov', 'wmv', 'avi', 'mpg', 'mpeg', 'mkv', '3gp', '3g2',
                          'm4a', 'm4v', 'm4p', 'm4b', 'm4r', 'm4s']
        self.SOURCE_EXT = ['js', 'css']
        self.TARGET_URL = TARGET_URL
        self.BAD_URL = TARGET_URL           # alias used by schema_check to spot on-target URLs
        self.URL_SECURITY_STRUCT = dict()   # per-URL header analysis results
        self.DOMAIN = list()                # hostnames seen while parsing
        self.URL_TEXT = ""                  # page URLs, newline-separated
        self.URL_ASSET_TEXT = ""            # static asset URLs
        self.URL_SOURCE_TEXT = ""           # js/css URLs
        self.URL_INNER_SCRIPT = ""          # harvested inline <script> bodies

    # [API:INNER] TARGET_URL schema check: keep absolute URLs on the target
    # host as-is, prefix relative paths with TARGET_URL
    def schema_check(self, url):
        try:
            if self.BAD_URL in url and url.startswith("http"):
                return url
            if not url.startswith(("http://", "https://")):
                url = self.TARGET_URL + url
            return url
        except Exception:
            return url

    # [API:INNER] method check: probe every collected URL with an OPTIONS
    # request and hand the response headers to security_check
    def check_method(self):
        print("[*] Method check")
        try:
            for url_list in (self.URL_TEXT, self.URL_ASSET_TEXT, self.URL_SOURCE_TEXT):
                for i in url_list.split('\n'):
                    if not i:
                        continue
                    try:
                        res_opt = requests.options(i)
                    except Exception:
                        # report the URL that failed, not the unbound response
                        print(f"EXCEPT: {i}")
                        continue
                    # one bucket of results per URL
                    self.URL_SECURITY_STRUCT[i] = {"SECURITY": {}, "COOKIE": {}, "ENV": {}, "EXTEND": {}}
                    self.security_check(g_url_sec_check_dict=self.URL_SECURITY_STRUCT, urlName=i, headers=res_opt.headers)
        except Exception:
            pass

    # security check: classify response headers into SECURITY / ENV / COOKIE
    SECURITY_HEADERS = [
        "X-Frame-Options", "Content-Security-Policy", "X-XSS-Protection",
        "Access-Control-Allow-Origin", "Access-Control-Allow-Credentials",
        "Access-Control-Expose-Headers", "Access-Control-Max-Age",
        "Access-Control-Allow-Methods", "Access-Control-Allow-Headers",
        "Cross-Origin-Opener-Policy",
    ]

    def security_check(self, g_url_sec_check_dict, urlName, headers=None):
        try:
            for url_key, url_value in (headers or {}).items():
                # known security headers go straight into the SECURITY bucket
                for sec_header in self.SECURITY_HEADERS:
                    if url_key.find(sec_header) != -1:
                        g_url_sec_check_dict[urlName]["SECURITY"][sec_header] = url_value
                        break
                else:
                    if url_key.find("Server") != -1:
                        g_url_sec_check_dict[urlName]["ENV"]["Server"] = url_value
                    elif url_key.find("Allow") != -1:
                        g_url_sec_check_dict[urlName]["ENV"]["Allow-Method"] = url_value
                    elif url_key.lower().find("set-cookie") != -1:
                        # requests normalizes this to "Set-Cookie"; compare case-insensitively
                        g_url_sec_check_dict[urlName]["COOKIE"]["set-cookie"] = url_value
        except Exception:
            pass

    # csp check: print each collected Content-Security-Policy split into directives
    def csp_check(self):
        print("[*] CSP check")
        for url_key, url_value in self.URL_SECURITY_STRUCT.items():
            csp = url_value["SECURITY"].get("Content-Security-Policy")
            if csp is not None:
                print(csp.split(';'))

    # inner script gadget find: pull inline <script> bodies from every page
    # that lacks X-XSS-Protection or has it disabled
    def inner_script_gadget(self):
        try:
            for url_key, url_value in self.URL_SECURITY_STRUCT.items():
                xss = url_value["SECURITY"].get("X-XSS-Protection")
                if xss is None or xss == "0":
                    req = requests.get(url_key)
                    soup = BeautifulSoup(req.text, 'html.parser')
                    for script in soup.find_all('script'):
                        self.URL_INNER_SCRIPT += f"[**]//[host]:{url_key}[/host]\n<script>{script.text}</script>[**]\n"
        except Exception:
            pass

    # subdomain parser: collect the hostname of every parsed URL
    def subdomain_parser(self):
        print("[*] Subdomain parser")

        # hostname filter: for "http://host/path", split("/")[2] is the host
        for url_list in (self.URL_ASSET_TEXT, self.URL_SOURCE_TEXT, self.URL_TEXT):
            for i in url_list.split('\n'):
                try:
                    self.DOMAIN.append(i.split("/")[2])
                except IndexError:
                    continue
        print(list(set(self.DOMAIN)))

    # cookie parser: print every Set-Cookie value collected in URL_SECURITY_STRUCT
    def cookie_parser(self):
        print("[*] Cookie parser")
        for url_key, url_value in self.URL_SECURITY_STRUCT.items():
            if url_value["COOKIE"].get("set-cookie") is not None:
                print(url_value["COOKIE"]["set-cookie"])

    # [API] collect every URL referenced by TARGET_URL's markup
    def get_all_url_parse(self):
        print("=========================================== START ===========================================")
        try:
            req = requests.get(self.TARGET_URL)

            # response status code check: stop on 400/500 errors
            if req.status_code in (400, 500):
                return
            saveHtml = req.text
            soup = BeautifulSoup(saveHtml, 'html.parser')

            current_url = []
            src_list = ["audio", "embed", "iframe", "img", "input", "script", "source", "track", "video"]

            # only look up tags that actually occur in the page
            src_allow_list = [i for i in src_list if saveHtml.find(i) != -1]

            # src parser
            for i in src_allow_list:
                for j in soup.find_all(i):
                    _j = j.get('src')
                    if _j is not None:
                        current_url.append(self.schema_check(_j))

            # href parser
            for link in soup.find_all('a'):
                _f = link.get('href')
                if _f is not None:
                    current_url.append(self.schema_check(_f))

            # remove duplicate urls
            current_url = list(set(current_url))

            # route each URL by extension into asset / source / page buckets;
            # build a fresh list instead of removing items mid-iteration
            page_urls = []
            for i in current_url:
                block = i.split('.')
                if any(j in self.ASSET_EXT for j in block):
                    self.URL_ASSET_TEXT += f"{i}\n"
                elif any(j in self.SOURCE_EXT for j in block):
                    self.URL_SOURCE_TEXT += f"{i}\n"
                else:
                    page_urls.append(i)

            for i in page_urls:
                print(i)
                self.URL_TEXT += i + "\n"
            print("====================== finish ======================")
        except Exception:
            pass
```
**PTP fuzzer module** (Python)

```python
import random
import string

import requests


class PTP:
    def __init__(self, target_method, target_url_host, innerScript, security_check):
        self.innerScript = innerScript        # inline scripts collected by CXX
        self.security_check = security_check  # CXX.URL_SECURITY_STRUCT
        self.OriginHost = target_url_host
        self.OriginMethod = target_method
        self.seed = list()        # parameter names mined from window.* exports
        self.testcase = list()    # mutated URLs to replay
        self.crashCount = 0
        self.crash = list()       # "[status] => url" crash records

    # seed pool: mine the names of window.* exports out of the collected
    # inline scripts and deduplicate them into the seed list
    def seed_pool(self):
        try:
            is_tmp = ""
            for iS in self.innerScript:
                if iS.find('window') != -1:
                    # slice between "window." and "=" to get the exported name
                    es_tmp = iS[iS.find('window') + 7:iS.find("=")] + "\n"
                    if es_tmp.find("]") != -1:
                        # bracket access like window["name"]: keep up to "]"
                        is_tmp += es_tmp[:es_tmp.find("]") + 1] + "\n"
                    else:
                        is_tmp += es_tmp

            self.seed = list(set(is_tmp.split("\n")))
        except Exception:
            pass

    # random alphanumeric string, 1-16 characters long
    def random_ascii(self):
        size = random.randint(1, 16)
        return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size))

    # mutation: build testcases for every collected URL on the origin host,
    # either swapping an existing parameter name for a seed or appending
    # random seed=value pairs
    def mutation(self):
        try:
            for s_k in self.security_check:
                if s_k.find(self.OriginHost) == -1:
                    continue
                if s_k.find("?") != -1 or s_k.find("&") != -1:
                    # replace the first query parameter name with a seed
                    self.testcase.append(s_k.replace(s_k[s_k.find("?") + 1:s_k.find("=")], random.choice(self.seed)))
                else:
                    # no query string yet: append 1-9 random seed=value pairs
                    s_tmp = s_k + "/?" + random.choice(self.seed) + "=" + self.random_ascii()
                    for _ in range(1, random.randint(2, 10)):
                        s_tmp += "&" + random.choice(self.seed) + "=" + self.random_ascii()
                    self.testcase.append(s_tmp)
        except Exception:
            pass

    # fuzz: replay each testcase with the original method and record any
    # non-200 response as a crash candidate
    def ptpfuzz(self):
        print("[*] Start PTP Fuzzing")
        for tc in self.testcase:
            try:
                if self.OriginMethod == "GET":
                    r = requests.get(tc)
                elif self.OriginMethod == "POST":
                    r = requests.post(tc)
                else:
                    r = requests.options(tc)
                if r.status_code != 200:
                    print(f"[*] COUNT:{self.crashCount}")
                    print(f"[!!{r.status_code}] {tc} is vulnerable")
                    self.crashCount += 1
                    self.crash.append(f"[{r.status_code}] => {tc}")
            except Exception:
                pass
        print("[*] Finish PTP Fuzzing")
```
# Web Application Logical Bug Fuzzer (BrainBreak)

### Intro

In today's web application environment, where many third-party components and frameworks are combined, small issues and developer mistakes keep increasing. Most client-side vulnerabilities can be addressed with established security techniques, but logical bugs cannot be defended against so easily; they behave more like backdoors. BrainBreak is a fuzzer that detects logical bugs missed by existing web fuzzers and plugins, applying the skills needed to understand an application's web logic that come from solving CTF challenges (API analysis, business logic, script execution vectors, ...). It targets a pattern developers often miss: important functions exported to the `window` object and reused as query parameters. The fuzzer collects query names and data while requesting normal URLs, sends mutated combinations of that data, and detects crashes from the response values.
### Features

- Automated analysis of web application logic bugs
- Fuzzing driven by objects the deployed application exposes externally
- Extraction of internal script gadgets prone to logic bugs
- Parsing and analysis of every URL used to render the target client
- Reuse of the captured traffic for the tasks above
### Test Environment

```text
OS: Windows
python 3.9.9
pip 21.3.1
```
### Requirements

```text
beautifulsoup4==4.10.0
Flask==2.0.2
Flask-Bootstrap==3.3.7.1
Flask-Login==0.5.0
Flask-SocketIO==4.3.2
Jinja2==3.0.3
requests==2.27.1
Werkzeug==2.0.2
```
### Struct

- The CXX class uses the `get_all_url_parse` API to parse the URLs used to render the target client and store them as state. The `check_method` API then sends a request with the OPTIONS method for each collected URL and analyzes the response headers to determine the current security settings and internal state. A usage sketch follows below.
- The `inner_script_gadget` API requests each URL in the collected list, gathers the inline script content, and identifies patterns that export data through the `window` object.

```text
CXX class
```
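
A minimal sketch of how these CXX APIs chain together, assuming the class is importable from a module (the module name and target URL below are placeholders, not part of this commit):

```python
from cxx import CXX  # hypothetical module name for the CXX class above

scanner = CXX("https://example.com")  # placeholder target
scanner.get_all_url_parse()    # crawl src/href URLs from the target's markup
scanner.check_method()         # OPTIONS probe + response-header classification
scanner.inner_script_gadget()  # harvest inline <script> bodies
scanner.csp_check()            # dump collected CSP directives
scanner.subdomain_parser()     # print the set of hostnames seen
scanner.cookie_parser()        # print collected Set-Cookie values
```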

- The PTP class does the fuzzing work: it manipulates data and checks the status of normal URLs based on the information collected above. The `seed_pool` API scans the collected data for `window` object exports and builds a deduplicated seed pool from the extracted names.
- The `mutation` API creates test cases from the seeds: it checks the query data for `?` and `&`, distinguishes GET from POST, generates random values, and splices in the previously collected names to perform the mutation. A wiring sketch follows below.
- `ptpfuzz` carries out the final fuzzing pass and judges crashes from the response status code.

```text
PTP class
```
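
A matching sketch of the PTP stage, reusing the CXX results from above (the actual wiring lives in app.py, which this commit view does not show; the method and host values are placeholders):

```python
from ptp import PTP  # hypothetical module name for the PTP class above

fuzzer = PTP(
    target_method="GET",            # placeholder origin method
    target_url_host="example.com",  # placeholder origin host
    innerScript=scanner.URL_INNER_SCRIPT.split("\n"),
    security_check=scanner.URL_SECURITY_STRUCT,
)
fuzzer.seed_pool()   # mine window.* export names from the inline scripts
fuzzer.mutation()    # build mutated query-string testcases
fuzzer.ptpfuzz()     # replay them and record non-200 responses
print(fuzzer.crash)  # "[status] => url" crash records
```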

### Run

- Execute the `app.py` script after installing the required packages:

```bash
python app.py
```

- Connect to the Flask server (to change the port, edit line 108 of `app.py` and run it again)
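
Since app.py itself is not shown in this commit view, the following is only a guess at the shape of that entry point, based on the Flask-SocketIO dependency in the requirements (all names and the port are assumptions):

```python
from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

if __name__ == "__main__":
    socketio.run(app, port=5000)  # hypothetical line to edit when changing the port
```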

![Untitled](data/Untitled.png)

- Wait until loading is complete (analysis in progress)

![Untitled](data/Untitled%201.png)

- When the "Finish PTP Fuzzing" message appears in the CLI window, the run is complete; check the results on the web server

![Untitled](data/Untitled%202.png)

## DashBoard Layout

- Target INPUT (METHOD, SCHEMA, HOST, DATA)

![Untitled](data/Untitled%203.png)

- Logical BugFuzzer Crash

![Untitled](data/Untitled%204.png)

- Security Check

![Untitled](data/Untitled%205.png)

- Logical Bug InnerScript Gadgets

![Untitled](data/Untitled%206.png)

- Logical Origin URL, SOURCE, EXTENSION