From bbac75f72a95014f241e16645ca68201eecb5ec9 Mon Sep 17 00:00:00 2001
From: L1ghtn1ng
Date: Sun, 16 Jun 2019 21:34:15 +0100
Subject: [PATCH] Pep8 fixes, remove cymon, update version

---
 theHarvester/__main__.py              | 42 +++++++--------------------
 theHarvester/discovery/__init__.py    |  1 -
 theHarvester/discovery/crtsh.py       |  2 +-
 theHarvester/discovery/cymon.py       | 38 ------------------------
 theHarvester/discovery/threatcrowd.py | 14 ++++-----
 theHarvester/lib/core.py              |  6 ++--
 6 files changed, 20 insertions(+), 83 deletions(-)
 delete mode 100644 theHarvester/discovery/cymon.py

diff --git a/theHarvester/__main__.py b/theHarvester/__main__.py
index 70973a16..a111b6c8 100644
--- a/theHarvester/__main__.py
+++ b/theHarvester/__main__.py
@@ -53,7 +53,7 @@ def start():
     parser.add_argument('-n', '--dns-lookup', help='enable DNS server lookup, default False', default=False, action='store_true')
     parser.add_argument('-c', '--dns-brute', help='perform a DNS brute force on the domain', default=False, action='store_true')
     parser.add_argument('-f', '--filename', help='save the results to an HTML and/or XML file', default='', type=str)
-    parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, censys, crtsh, cymon, dnsdumpster,
+    parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, censys, crtsh, dnsdumpster,
                             dogpile, duckduckgo, google, google-certificates,
                             hunter, intelx, linkedin, netcraft, securityTrails, threatcrowd,
@@ -146,23 +146,13 @@ def start():
 
         elif engineitem == 'crtsh':
             print('\033[94m[*] Searching CRT.sh. \033[0m')
-            search = crtsh.search_crtsh(word)
+            search = crtsh.SearchCrtsh(word)
             search.process()
             hosts = filter(search.get_hostnames())
             all_hosts.extend(hosts)
             db = stash.stash_manager()
             db.store_all(word, all_hosts, 'host', 'CRTsh')
 
-        elif engineitem == 'cymon':
-            print('\033[94m[*] Searching Cymon. \033[0m')
-            from theHarvester.discovery import cymon
-            # Import locally or won't work.
-            search = cymon.search_cymon(word)
-            search.process()
-            all_ip = search.get_ipaddresses()
-            db = stash.stash_manager()
-            db.store_all(word, all_ip, 'ip', 'cymon')
-
         elif engineitem == 'dnsdumpster':
             try:
                 print('\033[94m[*] Searching DNSdumpster. \033[0m')
@@ -314,7 +304,7 @@ def start():
         elif engineitem == 'threatcrowd':
             print('\033[94m[*] Searching Threatcrowd. \033[0m')
             try:
-                search = threatcrowd.search_threatcrowd(word)
+                search = threatcrowd.SearchThreatcrowd(word)
                 search.process()
                 hosts = filter(search.get_hostnames())
                 all_hosts.extend(hosts)
@@ -425,22 +415,13 @@ def start():
         db.store_all(word, uniqueips, 'ip', 'censys')
 
         print('\033[94m[*] Searching CRT.sh. \033[0m')
-        search = crtsh.search_crtsh(word)
+        search = crtsh.SearchCrtsh(word)
         search.process()
         hosts = filter(search.get_hostnames())
         all_hosts.extend(hosts)
         db = stash.stash_manager()
         db.store_all(word, all_hosts, 'host', 'CRTsh')
 
-        print('\033[94m[*] Searching Cymon. \033[0m')
-        from theHarvester.discovery import cymon
-        # Import locally or won't work.
-        search = cymon.search_cymon(word)
-        search.process()
-        all_ip = search.get_ipaddresses()
-        db = stash.stash_manager()
-        db.store_all(word, all_ip, 'ip', 'cymon')
-
         try:
             print('\033[94m[*] Searching DNSdumpster. \033[0m')
             from theHarvester.discovery import dnsdumpster
@@ -582,7 +563,7 @@ def start():
 
         print('\033[94m[*] Searching Threatcrowd. \033[0m')
         try:
-            search = threatcrowd.search_threatcrowd(word)
+            search = threatcrowd.SearchThreatcrowd(word)
             search.process()
             hosts = filter(search.get_hostnames())
             all_hosts.extend(hosts)
@@ -750,10 +731,9 @@ def start():
                 if len(openports) > 1:
                     print(('\t[*] Detected open ports: ' + ','.join(str(e) for e in openports)))
                     takeover_check = 'True'
-                    if takeover_check == 'True':
-                        if len(openports) > 0:
-                            search_take = takeover.take_over(domain)
-                            search_take.process()
+                    if takeover_check == 'True' and len(openports) > 0:
+                        search_take = takeover.take_over(domain)
+                        search_take.process()
 
             except Exception as e:
                 print(e)
@@ -851,9 +831,9 @@ def start():
     # We have to take out the TLDs to do this.
     recursion = None
     if recursion:
-        start = 0
+        counter = 0
         for word in vhost:
-            search = googlesearch.search_google(word, limit, start)
+            search = googlesearch.search_google(word, limit, counter)
             search.process(google_dorking)
             emails = search.get_emails()
             hosts = search.get_hostnames()
@@ -903,7 +883,7 @@ def start():
                 word,
                 shodanres,
                 dnstldres)
-            save = html.writehtml()
+            html.writehtml()
         except Exception as e:
             print(e)
             print('\n\033[93m[!] An error occurred while creating the output file.\n\n \033[0m')
diff --git a/theHarvester/discovery/__init__.py b/theHarvester/discovery/__init__.py
index 09132e82..ffe63379 100644
--- a/theHarvester/discovery/__init__.py
+++ b/theHarvester/discovery/__init__.py
@@ -2,7 +2,6 @@
            'bingsearch',
            'censys',
            'crtsh',
-           'cymon',
            'dnssearch',
            'dogpilesearch',
            'duckduckgosearch',
diff --git a/theHarvester/discovery/crtsh.py b/theHarvester/discovery/crtsh.py
index bebbcec5..5f33e2ff 100644
--- a/theHarvester/discovery/crtsh.py
+++ b/theHarvester/discovery/crtsh.py
@@ -5,7 +5,7 @@
 import time
 
 
-class search_crtsh:
+class SearchCrtsh:
 
     def __init__(self, word):
         self.word = word.replace(' ', '%20')
diff --git a/theHarvester/discovery/cymon.py b/theHarvester/discovery/cymon.py
deleted file mode 100644
index 4452c91a..00000000
--- a/theHarvester/discovery/cymon.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from theHarvester.discovery.constants import *
-from theHarvester.lib.core import *
-from theHarvester.parsers import cymonparser
-import requests
-import time
-
-
-class search_cymon:
-
-    def __init__(self, word):
-        self.word = word
-        self.url = ""
-        self.results = ""
-        self.server = 'cymon.io'
-
-    def do_search(self):
-        try:
-            headers = {'user-agent': Core.get_user_agent(), 'Accept': '*/*', 'Referer': self.url}
-            response = requests.get(self.url, headers=headers)
-            time.sleep(getDelay())
-            self.results = response.content
-        except Exception as e:
-            print(e)
-
-    def process(self):
-        try:
-            self.url = 'https://' + self.server + '/domain/' + str(self.word)
-            print('\tSearching results.')
-            self.do_search()
-        except Exception as e:
-            print(f'Error occurred: {e}')
-
-    def get_ipaddresses(self):
-        try:
-            ips = cymonparser.Parser(self)
-            return ips.search_ipaddresses()
-        except Exception as e:
-            print(f'Error occurred: {e}')
diff --git a/theHarvester/discovery/threatcrowd.py b/theHarvester/discovery/threatcrowd.py
index 71313525..e1bfb2dc 100644
--- a/theHarvester/discovery/threatcrowd.py
+++ b/theHarvester/discovery/threatcrowd.py
@@ -3,7 +3,7 @@
 import requests
 
 
-class search_threatcrowd:
+class SearchThreatcrowd:
 
     def __init__(self, word):
         self.word = word.replace(' ', '%20')
@@ -15,21 +15,17 @@ def __init__(self, word):
         self.counter = 0
 
     def do_search(self):
-        try:
-            urly = 'https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=' + self.word
-        except Exception as e:
-            print(e)
+        url = f'https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={self.word}'
         headers = {'User-Agent': Core.get_user_agent()}
         try:
-            r = requests.get(urly, headers=headers)
+            request = requests.get(url, headers=headers)
+            self.results = request.text
         except Exception as e:
             print(e)
-        self.results = r.text
         self.totalresults += self.results
 
     def get_hostnames(self):
-        rawres = myparser.Parser(self.results, self.word)
-        return rawres.hostnames()
+        return myparser.Parser(self.results, self.word).hostnames()
 
     def process(self):
         self.do_search()
diff --git a/theHarvester/lib/core.py b/theHarvester/lib/core.py
index 908544d8..10a09348 100644
--- a/theHarvester/lib/core.py
+++ b/theHarvester/lib/core.py
@@ -7,7 +7,7 @@ class Core:
 
     @staticmethod
     def version():
-        return '3.1.0.dev0'
+        return '3.1.0.dev1'
 
     @staticmethod
     def bing_key():
@@ -370,7 +370,7 @@ def get_user_agent():
 #         db.store_all(word, uniqueips, 'ip', 'censys')
 #
 #         print('[*] Searching CRT.sh.')
-#         search = crtsh.search_crtsh(word)
+#         search = crtsh.SearchCrtsh(word)
 #         search.process()
 #         hosts = filter(search.get_hostnames())
 #         all_hosts.extend(hosts)
@@ -492,7 +492,7 @@ def get_user_agent():
 #
 #         print('[*] Searching Threatcrowd.')
 #         try:
-#             search = threatcrowd.search_threatcrowd(word)
+#             search = threatcrowd.SearchThreatcrowd(word)
 #             search.process()
 #             hosts = filter(search.get_hostnames())
 #             all_hosts.extend(hosts)
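
Reviewer's note: a minimal smoke test of the renames above, as a sketch rather than part of the patch. SearchCrtsh, SearchThreatcrowd, process(), and get_hostnames() are taken from the call sites this patch touches in __main__.py; running the modules standalone against 'example.com' is an assumption for illustration, since the tool normally drives them through start().

    # Hypothetical smoke test for the PascalCase renames; not part of the patch.
    from theHarvester.discovery import crtsh, threatcrowd

    for engine in (crtsh.SearchCrtsh, threatcrowd.SearchThreatcrowd):
        search = engine('example.com')  # placeholder target domain
        search.process()                # fetches raw results over HTTP
        print(engine.__name__, search.get_hostnames())  # parsed hostnames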