mirror of
https://github.com/laramies/theHarvester.git
synced 2024-09-20 15:26:31 +08:00
Updated takeover checks to be more thorough and made their results available in the output file; also fixed a bug with the output file when using custom DNS resolvers.
This commit is contained in:
parent
6e191263dc
commit
eeeaeffe5a
|
@ -694,16 +694,16 @@ async def handler(lst):
|
|||
for ip in set(all_ip):
|
||||
try:
|
||||
ip = ip.strip()
|
||||
if '/' in ip:
|
||||
ip_list.append(str(netaddr.IPNetwork(ip)))
|
||||
else:
|
||||
ip_list.append(str(netaddr.IPAddress(ip)))
|
||||
if len(ip) > 0:
|
||||
if '/' in ip:
|
||||
ip_list.append(str(netaddr.IPNetwork(ip)))
|
||||
else:
|
||||
ip_list.append(str(netaddr.IPAddress(ip)))
|
||||
except Exception as e:
|
||||
print(f'An exception has occurred while adding: {ip} to ip_list: {e}')
|
||||
continue
|
||||
ip_list = list(sorted(ip_list))
|
||||
print('\n'.join(map(str, ip_list)))
|
||||
ip_list = list(ip_list)
|
||||
|
||||
if len(all_emails) == 0:
|
||||
print('\n[*] No emails found.')
|
||||
|
@ -716,13 +716,8 @@ async def handler(lst):
|
|||
if len(all_hosts) == 0:
|
||||
print('\n[*] No hosts found.\n\n')
|
||||
else:
|
||||
# TODO make this logic a lot less confusing
|
||||
# Full should only be used if dns resolving is actually true
|
||||
# In that case use full else use all_hosts
|
||||
db = stash.StashManager()
|
||||
if dnsresolve is None or len(final_dns_resolver_list) > 0:
|
||||
print('\n[*] Hosts found: ' + str(len(full)))
|
||||
print('---------------------')
|
||||
temp = set()
|
||||
for host in full:
|
||||
if ':' in host:
|
||||
|
@ -733,19 +728,23 @@ async def handler(lst):
|
|||
continue
|
||||
if host.endswith(word):
|
||||
if host[:4] == 'www.':
|
||||
if host[4:] in all_hosts:
|
||||
if host[4:] in all_hosts or host[4:] in full:
|
||||
temp.add(host[4:])
|
||||
continue
|
||||
temp.add(host)
|
||||
full = list(sorted(temp))
|
||||
full.sort(key=lambda el: el.split(':')[0])
|
||||
print('\n[*] Hosts found: ' + str(len(full)))
|
||||
print('---------------------')
|
||||
for host in full:
|
||||
print(host)
|
||||
# host_ip = [netaddr_ip.format() for netaddr_ip in sorted([netaddr.IPAddress(ip) for ip in ips])]
|
||||
for host in full:
|
||||
if ':' in host:
|
||||
host_ip = host.split(':')[1].split(',')
|
||||
await db.store_all(word, host_ip, 'ip', 'DNS-resolver')
|
||||
try:
|
||||
if ':' in host:
|
||||
_, addr = host.split(':')
|
||||
await db.store(word, addr, 'ip', 'DNS-resolver')
|
||||
except Exception as e:
|
||||
print(f'An exception has occurred while attempting to insert: {host} IP into DB: {e}')
|
||||
continue
|
||||
else:
|
||||
all_hosts = [host.replace('www.', '') for host in all_hosts if host.replace('www.', '') in all_hosts]
|
||||
all_hosts = list(sorted(set(all_hosts)))
|
||||
|
@ -753,33 +752,54 @@ async def handler(lst):
|
|||
print('---------------------')
|
||||
for host in all_hosts:
|
||||
print(host)
|
||||
|
||||
# DNS brute force
|
||||
if dnsbrute and dnsbrute[0] is True:
|
||||
print('\n[*] Starting DNS brute force.')
|
||||
dns_force = dnssearch.DnsForce(word, final_dns_resolver_list, verbose=True)
|
||||
hosts, ips = await dns_force.run()
|
||||
hosts = list({host for host in hosts if ':' in host})
|
||||
hosts.sort(key=lambda el: el.split(':')[0])
|
||||
resolved_pair, hosts, ips = await dns_force.run()
|
||||
# hosts = list({host for host in hosts if ':' in host})
|
||||
# hosts.sort(key=lambda el: el.split(':')[0])
|
||||
# Check if Rest API is being used if so return found hosts
|
||||
if dnsbrute[1]:
|
||||
return hosts
|
||||
print('\n[*] Hosts found after DNS brute force:')
|
||||
return resolved_pair
|
||||
db = stash.StashManager()
|
||||
for host in hosts:
|
||||
print(host)
|
||||
if host not in full:
|
||||
full.append(host)
|
||||
if host not in all_hosts:
|
||||
all_hosts.append(host)
|
||||
await db.store_all(word, hosts, 'host', 'dns_bruteforce')
|
||||
temp = set()
|
||||
for host in resolved_pair:
|
||||
if ':' in host:
|
||||
# TODO parse addresses and sort them as they are IPs
|
||||
subdomain, addr = host.split(':')
|
||||
if subdomain.endswith(word):
|
||||
# Append to full so it's within JSON/XML at the end if output file is requested
|
||||
if host not in full:
|
||||
full.append(host)
|
||||
temp.add(subdomain + ':' + addr)
|
||||
if host not in all_hosts:
|
||||
all_hosts.append(host)
|
||||
continue
|
||||
if host.endswith(word):
|
||||
if host[:4] == 'www.':
|
||||
if host[4:] in all_hosts or host[4:] in full:
|
||||
continue
|
||||
if host not in full:
|
||||
full.append(host)
|
||||
temp.add(host)
|
||||
if host not in all_hosts:
|
||||
all_hosts.append(host)
|
||||
print('\n[*] Hosts found after DNS brute force:')
|
||||
for sub in temp:
|
||||
print(sub)
|
||||
await db.store_all(word, list(sorted(temp)), 'host', 'dns_bruteforce')
|
||||
|
||||
takeover_results = dict()
|
||||
# TakeOver Checking
|
||||
if takeover_status:
|
||||
print('\n[*] Performing subdomain takeover check')
|
||||
print('\n[*] Subdomain Takeover checking IS ACTIVE RECON')
|
||||
search_take = takeover.TakeOver(all_hosts)
|
||||
await search_take.populate_fingerprints()
|
||||
await search_take.process(proxy=use_proxy)
|
||||
|
||||
takeover_results = await search_take.get_takeover_results()
|
||||
# DNS reverse lookup
|
||||
dnsrev: List = []
|
||||
# print(f'DNSlookup: {dnslookup}')
|
||||
|
@ -887,6 +907,7 @@ async def handler(lst):
|
|||
print('\033[94m[*] Searching Shodan. ')
|
||||
try:
|
||||
for ip in host_ip:
|
||||
# TODO fix shodan
|
||||
print(('\tSearching for ' + ip))
|
||||
shodan = shodansearch.SearchShodan()
|
||||
shodandict = await shodan.search_ip(ip)
|
||||
|
@ -897,7 +918,6 @@ async def handler(lst):
|
|||
break
|
||||
if isinstance(value, int):
|
||||
value = str(value)
|
||||
|
||||
if isinstance(value, list):
|
||||
value = ', '.join(map(str, value))
|
||||
rowdata.append(value)
|
||||
|
@ -949,20 +969,20 @@ async def handler(lst):
|
|||
# it should but just a validation check
|
||||
if 'ip_list' in locals():
|
||||
if all_ip and len(all_ip) >= 1 and ip_list and len(ip_list) > 0:
|
||||
json_dict["ips"] = [str(ip) for ip in ip_list]
|
||||
json_dict["ips"] = ip_list
|
||||
|
||||
if len(all_emails) > 0:
|
||||
json_dict["emails"] = [email for email in all_emails]
|
||||
json_dict["emails"] = all_emails
|
||||
|
||||
if dnsresolve is None or len(final_dns_resolver_list) > 0:
|
||||
if len(full) > 0:
|
||||
json_dict["hosts"] = [host for host in full]
|
||||
else:
|
||||
if len(all_hosts) > 0:
|
||||
json_dict["hosts"] = [host for host in all_hosts]
|
||||
if dnsresolve is None or len(final_dns_resolver_list) > 0 and len(full) > 0:
|
||||
json_dict["hosts"] = full
|
||||
elif len(all_hosts) > 0:
|
||||
json_dict["hosts"] = all_hosts
|
||||
else:
|
||||
json_dict["hosts"] = []
|
||||
|
||||
if vhost and len(vhost) > 0:
|
||||
json_dict["vhosts"] = [host for host in vhost]
|
||||
json_dict["vhosts"] = vhost
|
||||
|
||||
if len(interesting_urls) > 0:
|
||||
json_dict["interesting_urls"] = interesting_urls
|
||||
|
@ -982,10 +1002,11 @@ async def handler(lst):
|
|||
if len(linkedin_links_tracker) > 0:
|
||||
json_dict["linkedin_links"] = linkedin_links_tracker
|
||||
|
||||
if takeover_status and len(takeover_results) > 0:
|
||||
json_dict['takeover_results'] = takeover_results
|
||||
|
||||
json_dict["shodan"] = shodanres
|
||||
with open(filename, 'w+') as fp:
|
||||
# If you do not wish to install ujson you can do
|
||||
# fp.write(json.dumps(json_dict, sort_keys=True)
|
||||
try:
|
||||
import ujson as json_dumper
|
||||
except ImportError:
|
||||
|
|
|
@ -48,8 +48,8 @@ def __init__(self, domain, dnsserver, verbose: bool = False) -> None:
|
|||
async def run(self):
|
||||
print(f'Starting DNS brute forcing with {len(self.list)} words')
|
||||
checker = hostchecker.Checker(self.list, nameserver=self.dnsserver)
|
||||
_, hosts, ips = await checker.check()
|
||||
return hosts, ips
|
||||
resolved_pair, hosts, ips = await checker.check()
|
||||
return resolved_pair, hosts, ips
|
||||
|
||||
|
||||
#####################################################################
|
||||
|
|
|
@ -1,16 +1,19 @@
|
|||
from theHarvester.lib.core import *
|
||||
import re
|
||||
import ujson
|
||||
from collections import defaultdict
|
||||
from random import shuffle
|
||||
|
||||
|
||||
class TakeOver:
|
||||
|
||||
def __init__(self, hosts) -> None:
|
||||
# NOTE THIS MODULE IS ACTIVE RECON
|
||||
self.hosts = hosts
|
||||
self.results = ""
|
||||
self.totalresults = ""
|
||||
self.proxy = False
|
||||
self.fingerprints = dict()
|
||||
# https://stackoverflow.com/questions/33080869/python-how-to-create-a-dict-of-dict-of-list-with-defaultdict
|
||||
self.results = defaultdict(list)
|
||||
|
||||
async def populate_fingerprints(self):
|
||||
# Thank you to https://github.com/EdOverflow/can-i-take-over-xyz for these fingerprints
|
||||
|
@ -19,10 +22,13 @@ async def populate_fingerprints(self):
|
|||
response = await AsyncFetcher.fetch_all([populate_url], headers=headers)
|
||||
try:
|
||||
resp = response[0]
|
||||
print(f'Dumping resp: {resp}')
|
||||
unparsed_json = ujson.loads(resp)
|
||||
for unparsed_fingerprint in unparsed_json:
|
||||
if unparsed_fingerprint['status'] == 'Vulnerable':
|
||||
if unparsed_fingerprint['service'] in ["Smugsmug"]:
|
||||
# Subdomain must be in format domain.smugsmug.com
|
||||
# This will never happen as subdomains are parsed and filtered to be in format of *.word.com
|
||||
continue
|
||||
if unparsed_fingerprint['status'] == 'Vulnerable' or unparsed_fingerprint['status'] == 'Edge case':
|
||||
self.fingerprints[unparsed_fingerprint['fingerprint']] = unparsed_fingerprint['service']
|
||||
except Exception as e:
|
||||
print(f'An exception has occurred populating takeover fingerprints: {e}, defaulting to static list')
|
||||
|
@ -48,8 +54,6 @@ async def populate_fingerprints(self):
|
|||
'is not a registered InCloud YouTrack': 'JetBrains',
|
||||
'page not found': 'Uptimerobot',
|
||||
'project not found': 'Surge.sh'}
|
||||
print(f'my fingerprints')
|
||||
print(self.fingerprints)
|
||||
|
||||
async def check(self, url, resp) -> None:
|
||||
# Simple function that takes response and checks if any fingerprints exist
|
||||
|
@ -57,19 +61,26 @@ async def check(self, url, resp) -> None:
|
|||
regex = re.compile("(?=(" + "|".join(map(re.escape, list(self.fingerprints.keys()))) + "))")
|
||||
# Sanitize fingerprints
|
||||
matches = re.findall(regex, resp)
|
||||
matches = list(set(matches))
|
||||
for match in matches:
|
||||
print(f'\t\033[91m Takeover detected: {url}\033[1;32;40m')
|
||||
if match in self.fingerprints.keys():
|
||||
# Validation check as to not error out
|
||||
print(f'\t\033[91m Type of takeover is: {self.fingerprints[match]}\033[1;32;40m')
|
||||
service = self.fingerprints[match]
|
||||
print(f'\t\033[91m Type of takeover is: {service} with match: {match}\033[1;32;40m')
|
||||
self.results[url].append({match: service})
|
||||
|
||||
async def do_take(self) -> None:
|
||||
try:
|
||||
if len(self.hosts) > 0:
|
||||
tup_resps: tuple = await AsyncFetcher.fetch_all(self.hosts, takeover=True, proxy=self.proxy)
|
||||
# Returns a list of tuples in this format: (url, response)
|
||||
tup_resps = tuple(tup for tup in tup_resps if tup[1] != '')
|
||||
# Filter out responses whose responses are empty strings (indicates errored)
|
||||
https_hosts = [f'https://{host}' for host in self.hosts]
|
||||
http_hosts = [f'http://{host}' for host in self.hosts]
|
||||
all_hosts = https_hosts + http_hosts
|
||||
shuffle(all_hosts)
|
||||
tup_resps = await AsyncFetcher.fetch_all(all_hosts, takeover=True, proxy=self.proxy)
|
||||
tup_resps = [tup for tup in tup_resps if len(tup[1]) >= 1]
|
||||
for url, resp in tup_resps:
|
||||
await self.check(url, resp)
|
||||
else:
|
||||
|
@ -81,3 +92,5 @@ async def process(self, proxy: bool = False) -> None:
|
|||
self.proxy = proxy
|
||||
await self.do_take()
|
||||
|
||||
async def get_takeover_results(self):
|
||||
return self.results
|
||||
|
|
|
@ -118,10 +118,11 @@ def proxy_list() -> List:
|
|||
with open('/usr/local/etc/theHarvester/proxies.yaml', 'r') as proxy_file:
|
||||
keys = yaml.safe_load(proxy_file)
|
||||
except FileNotFoundError:
|
||||
with open('proxies.yaml', 'r') as proxy_file:
|
||||
keys = yaml.safe_load(proxy_file)
|
||||
except Exception:
|
||||
return []
|
||||
try:
|
||||
with open('proxies.yaml', 'r') as proxy_file:
|
||||
keys = yaml.safe_load(proxy_file)
|
||||
except Exception:
|
||||
return []
|
||||
http_list = [f'http://{proxy}' for proxy in keys['http']] if keys['http'] is not None else []
|
||||
return http_list
|
||||
|
||||
|
@ -329,17 +330,30 @@ async def takeover_fetch(session, url: str, proxy: str = "") -> Union[Tuple[Any,
|
|||
# Wrap in try except due to 0x89 png/jpg files
|
||||
# This fetch method solely focuses on get requests
|
||||
# TODO determine if method for post requests is necessary
|
||||
url = f'http://{url}' if str(url).startswith(('http:', 'https:')) is False else url
|
||||
# url = f'http://{url}' if str(url).startswith(('http:', 'https:')) is False else url
|
||||
# Clean up urls with proper schemas
|
||||
if proxy != "":
|
||||
async with session.get(url, proxy=proxy) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
if 'https://' in url:
|
||||
sslcontext = ssl.create_default_context(cafile=certifi.where())
|
||||
async with session.get(url, proxy=proxy, ssl=sslcontext) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
else:
|
||||
async with session.get(url, proxy=proxy, ssl=False) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
else:
|
||||
async with session.get(url) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
except Exception:
|
||||
if 'https://' in url:
|
||||
sslcontext = ssl.create_default_context(cafile=certifi.where())
|
||||
async with session.get(url, ssl=sslcontext) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
else:
|
||||
async with session.get(url, ssl=False) as response:
|
||||
await asyncio.sleep(5)
|
||||
return url, await response.text()
|
||||
except Exception as e:
|
||||
print(f'Takeover check error on: {url} : {e}')
|
||||
return url, ''
|
||||
|
||||
@classmethod
|
||||
|
|
Loading…
Reference in a new issue