#!/usr/bin/env python3
"""
main.py
SYNOPSIS
========
::
GET & POST requests.
Just playing around with this one.
Makes a request to a user supplied URL, uses the try-except block to handle errors,
and the conditional if-elif-else block to check the status of the request.
The response is printed to the screen.
"""
import argparse
import sys

import requests
import urllib3
from bs4 import BeautifulSoup
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
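# Route every request through a local intercepting proxy (assumed here to be
# Burp Suite listening on 127.0.0.1:8080) so the traffic can be inspected.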
proxies = {'http': 'http://127.0.0.1:8080', 'https': 'http://127.0.0.1:8080'}
class Request:
    """
    This class uses the GET and POST methods to make HTTP requests to a user-supplied URL.
    """

    def __init__(self, url, method=''):
        self.url = url
        self.method = method
    def get_request(self):
        """Make a GET request to the user-supplied URL."""
        try:
            response = requests.get(self.url, verify=False, proxies=proxies)
            soup = BeautifulSoup(response.text, 'html.parser')
            print(soup.prettify())
        except requests.exceptions.RequestException as e:
            print(e)
    def post_request(self, payload=None):
        """Make a POST request to the user-supplied URL, sending the payload as the request body."""
        try:
            response = requests.post(self.url, verify=False, proxies=proxies, data=payload)
            soup = BeautifulSoup(response.text, 'html.parser')
            print(soup.prettify())
        except requests.exceptions.RequestException as e:
            print(e)
    # The payload is sent as the body of the POST request.
    @staticmethod
    def exploit(url, payload):
        """Exploit function that accepts a URL and a payload as arguments."""
        try:
            response = requests.post(url, verify=False, proxies=proxies, data=payload)
            soup = BeautifulSoup(response.text, 'html.parser')
            print(soup.prettify())
        except requests.exceptions.RequestException as e:
            print(e)
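
# A minimal sketch of using the Request class directly from Python (the URL and
# form fields below are placeholder values, not taken from any specific lab):
#
#     req = Request('https://target.example/login', 'POST')
#     req.get_request()
#     req.post_request({'username': 'admin', 'password': 'admin'})
#     Request.exploit('https://target.example/login', {'username': 'admin', 'password': 'admin'})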
# main function
def main(name):
    """Main function. Parses the command-line arguments and executes the appropriate request."""
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument('-u', '--url', required=True, help='URL to be exploited')
        parser.add_argument('-p', '--payload', required=True, help='Payload to send as the POST body')
        parser.add_argument('-m', '--method', required=True, help='HTTP method to use for the request (GET or POST)')
        args = parser.parse_args()
        url = args.url
        payload = args.payload
        method = args.method
        # Select the request type based on the user-supplied method.
        if method.upper() == 'GET':
            Request(url, method).get_request()
        elif method.upper() == 'POST':
            Request(url, method).post_request(payload)
        else:
            Request(url, method).exploit(url, payload)
    except IndexError:
        print(f'Usage: {name} -u <url> -p <payload> -m <method>')
        sys.exit(1)
# Run main only when the script is executed directly.
if __name__ == '__main__':
    main(sys.argv[0])