diff --git a/.gitignore b/.gitignore index de365d7..e8e016b 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ /tests/* *.logs *.egg-info/* +__pycache__/* diff --git a/AUTHORS.md b/AUTHORS.md index eb34c19..d88e6a1 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -4,12 +4,12 @@ ---------------- * plague - * _AKA_ Ian ## Contributors ------------ - * imsi | Anonops IRC #python +### Special thanks to * Audreyr | https://github.com/audreyr/cookiecutter +* rootVIII | https://github.com/rootVIII/proxy_requests diff --git a/HISTORY.md b/HISTORY.md index b117e8a..5ff6ee9 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,4 +1,9 @@ # History +* **1.1.0** + * You can now specify which domain to start from _(when doing a full TLD scan)_ by using the -dc flag `-dc ` + * You can now save the output of a scan into a file by using the `-o ` flag +* **1.0.6** + * Fixed bug with files * **1.0.5** * Added __main__ file to root directory to prevent error when starting via python3 * **1.0.4** diff --git a/README.md b/README.md index e20e149..e082f29 100644 --- a/README.md +++ b/README.md @@ -29,22 +29,22 @@ You can now use the `tldsearcher` command from anywhere on your system ### PyPI -You can install **TLDSearcher** via pip with 2 methods. +You can install **TLDSearcher** via pip **Method 1** Issue the following command from the terminal to download the [latest version](https://pypi.org/project/tldsearcher/) from [pypi.org](https://pypi.org) > `pip3 install tldsearcher` -**Method 2** -Issue the following command from the terminal to download the [latest version](https://github.com/plague-anon/TLDSearcher/releases) from [Github](https://www.github.com) -> `pip3 install -e git+https://github.com/plague-anon/TLDSearcher#egg=pkg` +## Usage Examples +To verbosely search for **all** TLDs for _example_: +`tldsearcher -t example -v` -## Usage Example -``` -tldsearcher -t example -d com,co.uk,.net,.org -v -``` -_The `-d` flag must be a string of TLDs, separated with a comma. 
The preceding `.` does not have to be supplied_ +To verbosely search for specific TLDs for _example_ and output into _scan.txt_: +`tldsearcher -t example -d com,net,info,org -o scan.txt -v ` + +To see a list of TLD categories to scan for _example_: +`tldSearcher -t example -dC -o scan.txt -v` _For more examples and usage, please refer to the [Wiki][wiki]._ diff --git a/tldsearcher/__init__.py b/tldsearcher/__init__.py index 5af5f59..1034c71 100644 --- a/tldsearcher/__init__.py +++ b/tldsearcher/__init__.py @@ -2,4 +2,4 @@ __author__ = """plague""" __email__ = 'plague_anon@protonmail.com' -__version__ = '1.0.6' +__version__ = '1.1.0' diff --git a/tldsearcher/proxy_requests.py b/tldsearcher/proxy_requests.py new file mode 100644 index 0000000..c043b33 --- /dev/null +++ b/tldsearcher/proxy_requests.py @@ -0,0 +1,338 @@ +import requests +from random import randint +from re import findall +# rootVIII +# pycodestyle validated +# 2018-2020 + + +class ProxyRequests: + def __init__(self, url): + self.url = url + self.sockets = [] + self.rdata = { + 'headers': {}, + 'json': {}, + 'status_code': 0, + 'timeout': 3.0, + 'errs': [ + 'ConnectTimeout', + 'ProxyError', + 'SSLError', + 'ReadTimeout', + 'ConnectionError', + 'ConnectTimeoutError' + ] + } + self.empty_warn = 'Proxy Pool has been emptied' + self._acquire_sockets() + + def _acquire_sockets(self): + r = requests.get('https://www.sslproxies.org/') + matches = findall(r"\d+\.\d+\.\d+\.\d+\d+", r.text) + revised = [m.replace('', '') for m in matches] + self.sockets = [s[:-5].replace('', ':') for s in revised] + + def _set_request_data(self, req, socket): + self.rdata['request'] = req.text + self.rdata['headers'] = req.headers + self.rdata['status_code'] = req.status_code + self.rdata['url'] = req.url + self.rdata['raw'] = req.content + self.rdata['proxy'] = socket + try: + self.rdata['json'] = req.json() + except Exception as err: + self.rdata['json'] = {type(err).__name__: str(err)} + + def _rand_sock(self): + return 
randint(0, len(self.sockets) - 1) + + def _is_err(self, err): + if type(err).__name__ not in self.rdata['errs']: + raise err + + def _limit_succeeded(self): + raise Exception(self.empty_warn) + + def get(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.get( + self.url, + timeout=self.rdata['timeout'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.get() + else: + self._limit_succeeded() + + def get_with_headers(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.get( + self.url, + timeout=self.rdata['timeout'], + proxies=proxies, + headers=self.rdata['headers']) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.get_with_headers() + else: + self._limit_succeeded() + + def post(self, data): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + json=data, + timeout=self.rdata['timeout'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post(data) + else: + self._limit_succeeded() + + def post_with_headers(self, data): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + json=data, + timeout=self.rdata['timeout'], + headers=self.rdata['headers'], + proxies=proxies) + self._set_request_data(request, current_socket) + except 
Exception as e: + self._is_err(e) + self.post_with_headers(data) + else: + self._limit_succeeded() + + def post_file(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + proxies=proxies, + timeout=self.rdata['timeout'], + files={'upload_file': open(self.rdata['file'], 'rb')}) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post_file() + else: + self._limit_succeeded() + + def post_file_with_headers(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + files={'upload_file': open(self.rdata['file'], 'rb')}, + timeout=self.rdata['timeout'], + headers=self.rdata['headers'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post_file_with_headers() + else: + self._limit_succeeded() + + def get_headers(self): + return self.rdata['headers'] + + def set_headers(self, outgoing_headers): + self.rdata['headers'] = outgoing_headers + + def set_file(self, outgoing_file): + self.rdata['file'] = outgoing_file + + def get_status_code(self): + return self.rdata['status_code'] + + def get_proxy_used(self): + return self.rdata['proxy'] + + def get_raw(self): + return self.rdata['raw'] + + def get_json(self): + return self.rdata['json'] + + def get_url(self): + return self.rdata['url'] + + def __str__(self): + return str(self.rdata['request']) + + +class ProxyRequestsBasicAuth(ProxyRequests): + def __init__(self, url, username, password): + super().__init__(url) + self.username = username + self.password = password + + def get(self): + if len(self.sockets) > 0: + current_socket = 
self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.get( + self.url, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.get() + else: + self._limit_succeeded() + + def get_with_headers(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.get( + self.url, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + proxies=proxies, + headers=self.rdata['headers']) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.get_with_headers() + else: + self._limit_succeeded() + + def post(self, data): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + json=data, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post(data) + else: + self._limit_succeeded() + + def post_with_headers(self, data): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + json=data, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + headers=self.rdata['headers'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post_with_headers(data) + else: + self._limit_succeeded() + + 
def post_file(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + files={'upload_file': open(self.rdata['file'], 'rb')}, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post_file() + else: + self._limit_succeeded() + + def post_file_with_headers(self): + if len(self.sockets) > 0: + current_socket = self.sockets.pop(self._rand_sock()) + proxies = { + 'http': 'http://' + current_socket, + 'https': 'https://' + current_socket + } + try: + request = requests.post( + self.url, + files={'upload_file': open(self.rdata['file'], 'rb')}, + auth=(self.username, self.password), + timeout=self.rdata['timeout'], + headers=self.rdata['headers'], + proxies=proxies) + self._set_request_data(request, current_socket) + except Exception as e: + self._is_err(e) + self.post_file_with_headers() + else: + self._limit_succeeded() diff --git a/tldsearcher/tldsearcher.py b/tldsearcher/tldsearcher.py index 9eb8b67..a8e0f61 100755 --- a/tldsearcher/tldsearcher.py +++ b/tldsearcher/tldsearcher.py @@ -1,36 +1,69 @@ #!/usr/bin/python3 +# KNOWN ISSUES AND BUGS | HELP WELCOME +# When scanning, and the x.y domain being tried is not responding, and the user uses CTRL-C, the system throws an error like "handled exception during gaierror exception" +# Proxy scanning is slow and always returns 0 when using .get_status_code(), or None when using .get() + +# TODO +# ==== +# Implement proxy use for requests +# Fix 'During handling of the above exception, another exception occurred:' error when using CTRL-C during a scan. 
+# Prevent more than one domain flag being used + import socket import argparse import time import sys import tlds +# from proxy_requests import ProxyRequests from __init__ import __version__ version = __version__ -list = 'TLDLIST' +list = 'listOfDomains' targets=[]# Targets to scan -tldList=[]# TLD list to check +listOfDomains=[]# TLD list to check result=[]# (Positive) results args='' verbose = False +proxy = False attempts=0# Number of TLDs attempts to find -pos=[] +pos=[]# list of positive results lastTry=''# Last tried TLD -domainCategory=0 +domainCategory=0# for -domain-category menu +continueDomain = ''# --domain-continue tld +continueDomainIndice=0#--domain-continue tld indice in tlds.py +outputFile='' def main(): + if len(sys.argv) <= 1: + print('Use -h for help') + else: + setTarget() + + scan() + + printer() + + if args.output: + output(f'''==================== +[/] TLD SEARCHER [\] by plague +==================== +Atempted:----------: {attempts} +Positive results:--: {len(pos)} +Last Attempt: -----: {lastTry} +======================== +[+] Positive Results [+] +======================== +{"".join(pos)}''') + + +def output(line): + original_stdout = sys.stdout + with open(args.output, 'a') as o: + sys.stdout = o + print(line) + sys.stdout = original_stdout - print(version) - # - # if len(sys.argv) <= 1: - # print('Use -h for help') - # else: - # setTarget() - # - # scan() - # - # printer() def printer(): print(f''' @@ -51,38 +84,51 @@ def scan(): Starting TLDScanner at {time.strftime('%H:%M:%S')} [+] ----------------------------------- ''') + if args.output: + output(f'''+++++++++++++++++++++++++++++++++++ +=================================== +Starting TLDScanner at {time.strftime('%H:%M:%S')} [+] +-----------------------------------''') for target in targets: - for tld in tldList: + for tld in listOfDomains: + url = f'{target}{tld}' + if verbose: + print(f'Trying: {url}') + global lastTry + lastTry=tld + global attempts - global posResults attempts += 1 
- lastTry=tldList[attempts-1] - if verbose: - print(f'Trying: {target}{tld}') - url = f'{target}{tld}' - try: - response = socket.gethostbyname_ex(url) - if response[2]: - if verbose: - print(f' Found that {target} has TLD of {tld} || hostname: {response[0]} | Alias: {response[1]} | IP: {response[2]}') - else: - print(f' Found that {target} has TLD of {tld}') - pos.append(f'{target}{tld}\n') - posResults+=1 - except KeyboardInterrupt: - sys.exit(printer()) - except socket.gaierror: # No response from server - if verbose: - print(f' No match found for {target}{tld}') - except Exception as e: - if verbose: - log = [] - log.append(f'Error: {e}') + # if args.proxy: + # proxyScan(url) + # else: + # normalScan(url, target, tld) + normalScan(url, target, tld) + +# TODO: Too slow and retuns 0 +# def proxyScan(url): +# try: +# r = ProxyRequests(f'https://{url}') +# print(r.get_status_code()) +# except Exception as e: +# print(e) +def normalScan(url, target, tld): + try: + response = socket.gethostbyname_ex(url) + if response[2]: + if verbose: + print(f'[+] Found that {target} has TLD of {tld} || hostname: {response[0]} | IP: {response[2]}') + else: + print(f'[+] Found that {target} has TLD of {tld}') + pos.append(f'{target}{tld}\n') + except socket.gaierror: # No response from server + if verbose: + print(f' No match found for {target}{tld}') def sortTLD(tld): # Takes in list of domains from either user -d TLDs or -dF FILE @@ -92,9 +138,9 @@ def sortTLD(tld): tldsplitlist = str(tld).split(',',-1) # split user input at , (type is list) for tld in tldsplitlist: if tld[0]!='.': - tldList.append(f'.{tld}') + listOfDomains.append(f'.{tld}') else: - tldList.append(tld) + listOfDomains.append(tld) else: for t in tld: print(t) @@ -102,10 +148,16 @@ def sortTLD(tld): # TODO: sanitise input from user specified file def setVars(): + global listOfDomains + global outputFile + + if args.proxy: + global proxy + proxy = True + if args.verbose: global verbose verbose=True - # TODO: Prevent 
using more than one domain-related flag.
 
     if args.domain:
         sortTLD(args.domain)
@@ -113,6 +165,15 @@
         domainInputFile = open((domainFile), 'r').readlines()
         for d in domainInputFile:
             sortTLD(domainInputFile)
+    elif args.domainContinue:
+        global continueDomain
+        adc = args.domainContinue
+        if adc[0] == '.':
+            continueDomain = adc
+        else:
+            continueDomain = f'.{adc}'
+        continueDomainIndice = tlds.tldList['all'].index(continueDomain)
+        # Resume the full scan from the requested TLD onward.
+        listOfDomains = tlds.tldList['all'][continueDomainIndice:]
     elif args.domainCategory:
         print('''
 [1] Countries --------- (ccTLD | .ua, .nz, .de, .es, .ru, etc)
@@ -126,31 +187,38 @@
 ''')
         print('Select which category you want to search for.')
         domainChoice = input('Type a number and click ENTER: ')
-        global tldList
-        tldList = tlds.getTlds(int(domainChoice))
+        listOfDomains = tlds.getTlds(int(domainChoice))
     else:
-        domainInputFile = open((list), 'r').readlines()
-        for x in domainInputFile:
-            tldList.append(x.strip('\n'))
+        listOfDomains = tlds.tldList['all']
+
+    if args.output:
+        outputFile = args.output
+
 
 def setArgs():
-    parser = argparse.ArgumentParser(description='Search for active Top Level Domains(TLD\'s) for domain names.',usage='%(prog)s {[-t ] [-tF ]} {[-d com,co.uk,.net] [-dF ] [-dC]} [-v] ')
-    parser.add_argument('-t', '--targets', help='targets domain name to scan for tldList', action='store')
+    parser = argparse.ArgumentParser(description='Search for active Top Level Domains(TLD\'s) for domain names.',usage='%(prog)s {[-t ] [-tF ]} {[-d com,co.uk,.net] [-dF ] [-dC] [-dc ]} [-o ] [-v] ')
+    parser.add_argument('-t', '--target', help='targets domain name to scan for listOfDomains', action='store')
     parser.add_argument('-tF', '--targetFile', help='Supply a targets file', action='store')
-    parser.add_argument('-d', '--domain', help='tldList to scan. (com,ua,nz,de)', action='store')
-    parser.add_argument('-dC', '--domainCategory', help='Scan TLD categories', action='store_true')
-    parser.add_argument('-dF', '--domainFile', help='List of tldList to scan. (Default = all)', action='store')
+    parser.add_argument('-d', '--domain', help='list of domains to scan. (com,ua,nz,de)', action='store')
+    parser.add_argument('-dC', '--domainCategory', help='Scan specific TLD categories', action='store_true')
+    parser.add_argument('-dc', '--domainContinue', help='Continue scanning all domains, starting from last attempt "..."', action='store')
+    parser.add_argument('-dF', '--domainFile', help='List of listOfDomains to scan. (Default = all)', action='store')
+    parser.add_argument('-o', '--output', help='File to output results into', action='store')
+    #parser.add_argument('-p', '--proxy', help='Use proxies for requests (VERY SLOW!)', action='store_true')
     parser.add_argument('-v', '--verbose', help='Verbose output mode', action='store_true')
     parser.add_argument('--version', help='Display version information', action='version', version='%(prog)s ' + version)
     global args
     args = parser.parse_args()
 
 def setTarget():
-    if args.targets:
-        targets.append(args.targets)
+    # Build the list of target base names, either from -t or from a file (-tF).
+    global targets
+    if args.target:
+        targets.append(args.target)
     elif args.targetFile:
-        tFile = open(('args.targetFile'), 'r')
-        targets.append(tFile.readlines())
+        # Open the path held in args.targetFile (not the literal string),
+        # close it deterministically, and append one stripped name per line.
+        with open(args.targetFile, 'r') as tFile:
+            for x in tFile.readlines():
+                targets.append(x.strip())
 
 if __name__ == '__main__':
     setArgs()