theHarvester/theHarvester.py

812 lines
34 KiB
Python
Raw Normal View History

2018-12-18 00:05:11 +08:00
#!/usr/bin/env python
2011-05-04 23:07:06 +08:00
import sys
import os
2011-05-04 23:07:06 +08:00
import re
import getopt
2018-03-23 06:32:50 +08:00
import stash
2018-12-20 03:39:33 +08:00
import time
try:
import requests
except:
2018-12-23 04:29:11 +08:00
print("Requests library not found, please install it before proceeding.\n\n")
sys.exit()
2018-12-16 11:07:37 +08:00
2018-11-30 05:28:37 +08:00
try:
import bs4
except:
2018-12-23 04:29:11 +08:00
print("\nBeautifulSoup library not found, please install it before proceeding.\n\n")
2018-11-30 05:28:37 +08:00
sys.exit()
2011-05-04 23:07:06 +08:00
from discovery import *
from lib import htmlExport
from lib import hostchecker
2011-05-04 23:07:06 +08:00
2018-11-30 05:28:37 +08:00
# ASCII-art startup banner (green, then switch terminal colour to blue).
_BANNER_LINES = (
    "\n\033[92m*******************************************************************",
    "* *",
    "* | |_| |__ ___ /\ /\__ _ _ ____ _____ ___| |_ ___ _ __ *",
    "* | __| '_ \ / _ \ / /_/ / _` | '__\ \ / / _ \/ __| __/ _ \ '__| *",
    "* | |_| | | | __/ / __ / (_| | | \ V / __/\__ \ || __/ | *",
    "* \__|_| |_|\___| \/ /_/ \__,_|_| \_/ \___||___/\__\___|_| *",
    "* *",
    "* theHarvester Ver. 3.0.6 *",
    "* Coded by Christian Martorella *",
    "* Edge-Security Research *",
    "* cmartorella@edge-security.com *",
    "*******************************************************************\033[94m\n\n",
)
for _banner_line in _BANNER_LINES:
    print(_banner_line)
2011-05-04 23:07:06 +08:00
2018-12-20 03:39:33 +08:00
2011-05-04 23:07:06 +08:00
def usage():
    """Print the command-line help text and a few invocation examples."""
    comm = os.path.basename(sys.argv[0])
    if os.path.dirname(sys.argv[0]) == os.getcwd():
        # Launched from its own directory: show an explicit ./ prefix.
        comm = "./" + comm

    help_lines = [
        "Usage: theHarvester.py <options> \n",
        " -d: company name or domain to search",
        """ -b: source: baidu, bing, bingapi, censys, crtsh, cymon, dogpile, google,
googleCSE, googleplus, google-certificates, google-profiles,
hunter, linkedin, netcraft, pgp, threatcrowd, trello, twitter,
vhost, virustotal, yahoo, all""",
        " -g: use Google Dorking instead of normal Google search",
        " -s: start with result number X (default: 0)",
        " -v: verify host name via DNS resolution and search for virtual hosts",
        " -f: save the results into an HTML and/or XML file",
        " -n: perform a DNS reverse query on all ranges discovered",
        " -c: perform a DNS brute force for the domain name",
        " -t: perform a DNS TLD expansion discovery",
        " -e: use this DNS server",
        " -p: port scan the detected hosts and check for Takeovers (80,443,22,21,8080)",
        " -l: limit the number of results to work with (Bing goes from 50 to 50 results,",
        " Google 100 to 100, and PGP doesn't use this option)",
        " -h: use Shodan to query discovered hosts",
        "\nExamples:",
        " " + comm + " -d acme.com -l 500 -b google -f myresults.html",
        " " + comm + " -d acme.com -b pgp, virustotal",
        " " + comm + " -d acme -l 200 -b linkedin",
        " " + comm + " -d acme.com -l 200 -g -b google",
        " " + comm + " -d acme.com -b googleCSE -l 500 -s 300",
        " " + comm + " -d acme.edu -l 100 -b bing -h \n",
    ]
    for help_line in help_lines:
        print(help_line)
2011-05-04 23:07:06 +08:00
2018-12-20 03:39:33 +08:00
2011-05-04 23:07:06 +08:00
def start(argv):
    """Parse command-line options and drive a full harvesting run.

    Collects hosts, e-mail addresses and IPs from the selected data
    sources, persists every result in the stash DB, then optionally
    resolves hostnames, brute-forces DNS, port-scans, queries Shodan
    and writes HTML/XML reports.

    :param argv: raw argument list (normally ``sys.argv[1:]``).
    """
    if len(sys.argv) < 4:
        usage()
        sys.exit()
    try:
        opts, args = getopt.getopt(argv, "l:d:b:s:u:vf:nhcgpte:")
    except getopt.GetoptError:
        usage()
        sys.exit()
    try:
        db = stash.stash_manager()
        db.do_init()
    except Exception:
        # DB initialisation is best-effort: harvesting still works without it.
        pass

    # Option defaults and result accumulators.
    start = 0                  # -s: result offset (shadows the function name locally, as before)
    host_ip = []               # unique lowercase IPs collected while resolving hosts
    all_hosts = []
    all_emails = []
    filename = ""
    bingapi = "yes"
    dnslookup = False
    dnsbrute = False
    dnstld = False
    shodan = False
    vhost = []
    virtual = False
    ports_scanning = False
    takeover_check = False
    google_dorking = False
    limit = 500
    all_ip = []
    full = []                  # "host:ip" strings for every resolved/discovered host
    dnsserver = ""

    # NOTE: as in the original, the whole harvesting run happens while
    # processing the -b option; -b should therefore be passed last.
    for opt, arg in opts:
        if opt == '-l':
            limit = int(arg)
        elif opt == '-d':
            word = arg         # target domain; required, no default
        elif opt == '-g':
            google_dorking = True
        elif opt == '-s':
            start = int(arg)
        elif opt == '-v':
            virtual = "basic"
        elif opt == '-f':
            filename = arg
        elif opt == '-n':
            dnslookup = True
        elif opt == '-c':
            dnsbrute = True
        elif opt == '-h':
            shodan = True
        elif opt == '-e':
            dnsserver = arg
        elif opt == '-p':
            ports_scanning = True
        elif opt == '-t':
            dnstld = True
        elif opt == '-b':
            engines = set(arg.split(','))
            supportedengines = set(["baidu", "bing", "bingapi", "censys", "crtsh", "cymon",
                                    "dogpile", "google", "googleCSE", "googleplus",
                                    "google-certificates", "google-profiles", "hunter",
                                    "linkedin", "netcraft", "pgp", "threatcrowd", "trello",
                                    "twitter", "vhost", "virustotal", "yahoo", "all"])
            if engines.issubset(supportedengines):
                print("found supported engines")
                print("[-] Starting harvesting process for domain: " + word + "\n")
                for engineitem in engines:
                    if engineitem == "baidu":
                        print("[-] Searching in Baidu..")
                        search = baidusearch.search_baidu(word, limit)
                        search.process()
                        # Fixed: extend instead of overwrite so multi-engine runs accumulate.
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'baidu')
                        db.store_all(word, all_emails, 'email', 'baidu')

                    elif engineitem == "bing" or engineitem == "bingapi":
                        print("[-] Searching in Bing:")
                        search = bingsearch.search_bing(word, limit, start)
                        bingapi = "yes" if engineitem == "bingapi" else "no"
                        search.process(bingapi)
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        # Fixed: emails were stored from all_hosts under the 'email' key.
                        db.store_all(word, all_emails, 'email', 'bing')
                        db.store_all(word, all_hosts, 'host', 'bing')

                    elif engineitem == "censys":
                        print("[-] Searching in Censys:")
                        from discovery import censys  # import locally or won't work
                        search = censys.search_censys(word)
                        search.process()
                        all_ip.extend(search.get_ipaddresses())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'censys')
                        db.store_all(word, all_ip, 'ip', 'censys')

                    elif engineitem == "crtsh":
                        print("[-] Searching in CRT.sh:")
                        search = crtsh.search_crtsh(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'CRTsh')

                    elif engineitem == "cymon":
                        print("[-] Searching in Cymon:")
                        from discovery import cymon  # import locally or won't work
                        search = cymon.search_cymon(word)
                        search.process()
                        all_ip.extend(search.get_ipaddresses())
                        db = stash.stash_manager()
                        db.store_all(word, all_ip, 'ip', 'cymon')

                    elif engineitem == "dogpile":
                        print("[-] Searching in Dogpilesearch..")
                        search = dogpilesearch.search_dogpile(word, limit)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        # Fixed: use a fresh stash manager (db could be undefined if init failed)
                        # and store emails, not hosts, under the 'email' key.
                        db = stash.stash_manager()
                        db.store_all(word, all_emails, 'email', 'dogpile')
                        db.store_all(word, all_hosts, 'host', 'dogpile')

                    elif engineitem == "google":
                        print("[-] Searching in Google:")
                        search = googlesearch.search_google(word, limit, start)
                        search.process(google_dorking)
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'google')
                        db.store_all(word, all_emails, 'email', 'google')

                    elif engineitem == "googleCSE":
                        print("[-] Searching in Google Custom Search:")
                        search = googleCSE.search_googleCSE(word, limit, start)
                        search.process()
                        search.store_results()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        # Fixed: emails were stored from all_hosts under the 'email' key.
                        db.store_all(word, all_emails, 'email', 'googleCSE')
                        db.store_all(word, all_hosts, 'host', 'googleCSE')

                    elif engineitem == "googleplus":
                        print("[-] Searching in Google+ ..")
                        search = googleplussearch.search_googleplus(word, limit)
                        search.process()
                        people = search.get_people()
                        print("Users from Google+:")
                        print("====================")
                        db = stash.stash_manager()
                        db.store_all(word, people, 'name', 'googleplus')
                        for user in people:
                            print(user)
                        sys.exit()

                    elif engineitem == "google-certificates":
                        print("[-] Searching in Google Certificate transparency report..")
                        search = googlecertificates.search_googlecertificates(word, limit, start)
                        search.process()
                        all_hosts.extend(search.get_domains())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'google-certificates')

                    elif engineitem == "google-profiles":
                        print("[-] Searching in Google profiles..")
                        search = googlesearch.search_google(word, limit, start)
                        search.process_profiles()
                        people = search.get_profiles()
                        db = stash.stash_manager()
                        db.store_all(word, people, 'name', 'google-profile')
                        print("Users from Google profiles:")
                        print("---------------------------")
                        for users in people:
                            print(users)
                        sys.exit()

                    elif engineitem == "hunter":
                        print("[-] Searching in Hunter:")
                        from discovery import huntersearch  # import locally or won't work
                        search = huntersearch.search_hunter(word, limit, start)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'hunter')
                        db.store_all(word, all_emails, 'email', 'hunter')

                    elif engineitem == "linkedin":
                        print("[-] Searching in Linkedin..")
                        search = linkedinsearch.search_linkedin(word, limit)
                        search.process()
                        people = search.get_people()
                        db = stash.stash_manager()
                        db.store_all(word, people, 'name', 'linkedin')
                        print("Users from Linkedin:")
                        print("-------------------")
                        for user in people:
                            print(user)
                        sys.exit()

                    elif engineitem == "netcraft":
                        print("[-] Searching in Netcraft:")
                        search = netcraft.search_netcraft(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'netcraft')

                    elif engineitem == "pgp":
                        print("[-] Searching in PGP key server..")
                        search = pgpsearch.search_pgp(word)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'pgp')
                        db.store_all(word, all_emails, 'email', 'pgp')

                    elif engineitem == "threatcrowd":
                        print("[-] Searching in Threatcrowd:")
                        search = threatcrowd.search_threatcrowd(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'threatcrowd')

                    elif engineitem == "trello":
                        print("[-] Searching in Trello:")
                        from discovery import trello  # import locally or won't work
                        search = trello.search_trello(word, limit)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_urls())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'trello')
                        db.store_all(word, all_emails, 'email', 'trello')
                        for x in all_hosts:
                            print(x)
                        sys.exit()

                    elif engineitem == "twitter":
                        print("[-] Searching in Twitter ..")
                        search = twittersearch.search_twitter(word, limit)
                        search.process()
                        people = search.get_people()
                        db = stash.stash_manager()
                        db.store_all(word, people, 'name', 'twitter')
                        print("Users from Twitter:")
                        print("-------------------")
                        for user in people:
                            print(user)
                        sys.exit()

                    elif engineitem == "virustotal":
                        print("[-] Searching in Virustotal:")
                        search = virustotal.search_virustotal(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'virustotal')

                    elif engineitem == "yahoo":
                        print("[-] Searching in Yahoo..")
                        search = yahoosearch.search_yahoo(word, limit)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'yahoo')
                        db.store_all(word, all_emails, 'email', 'yahoo')

                    elif engineitem == "all":
                        print("Full harvest on " + word)
                        all_emails = []
                        all_hosts = []

                        print("[-] Searching in Bing..")
                        bingapi = "no"
                        search = bingsearch.search_bing(word, limit, start)
                        search.process(bingapi)
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'bing')
                        all_emails.extend(search.get_emails())
                        all_emails = sorted(set(all_emails))
                        db.store_all(word, all_emails, 'email', 'bing')

                        print("[-] Searching in Censys:")
                        from discovery import censys  # import locally or won't work
                        search = censys.search_censys(word)
                        search.process()
                        all_ip.extend(search.get_ipaddresses())
                        # Fixed: was overwriting every host harvested so far.
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_ip, 'ip', 'censys')
                        db.store_all(word, all_hosts, 'host', 'censys')

                        print("[-] Searching in CRTSH server..")
                        search = crtsh.search_crtsh(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'CRTsh')

                        print("[-] Searching in Google..")
                        search = googlesearch.search_google(word, limit, start)
                        search.process(google_dorking)
                        all_emails.extend(search.get_emails())
                        db = stash.stash_manager()
                        db.store_all(word, all_emails, 'email', 'google')
                        all_hosts.extend(search.get_hostnames())
                        db.store_all(word, all_hosts, 'host', 'google')

                        print("[-] Searching in Google Certificate transparency report..")
                        search = googlecertificates.search_googlecertificates(word, limit, start)
                        search.process()
                        all_hosts.extend(search.get_domains())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'google-certificates')

                        print("[-] Searching in Hunter:")
                        from discovery import huntersearch  # import locally
                        search = huntersearch.search_hunter(word, limit, start)
                        search.process()
                        hosts = search.get_hostnames()
                        all_hosts.extend(hosts)
                        db = stash.stash_manager()
                        db.store_all(word, hosts, 'host', 'hunter')
                        all_emails.extend(search.get_emails())
                        all_emails = sorted(set(all_emails))
                        db.store_all(word, all_emails, 'email', 'hunter')

                        print("[-] Searching in Netcraft server..")
                        search = netcraft.search_netcraft(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'netcraft')

                        print("[-] Searching in PGP Key server..")
                        search = pgpsearch.search_pgp(word)
                        search.process()
                        all_emails.extend(search.get_emails())
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'PGP')
                        db.store_all(word, all_emails, 'email', 'PGP')

                        print("[-] Searching in ThreatCrowd server..")
                        try:
                            search = threatcrowd.search_threatcrowd(word)
                            search.process()
                            all_hosts.extend(search.get_hostnames())
                            db = stash.stash_manager()
                            db.store_all(word, all_hosts, 'host', 'threatcrowd')
                        except Exception:
                            # ThreatCrowd is flaky; a failure must not kill the full run.
                            pass

                        print("[-] Searching in Virustotal server..")
                        search = virustotal.search_virustotal(word)
                        search.process()
                        all_hosts.extend(search.get_hostnames())
                        db = stash.stash_manager()
                        db.store_all(word, all_hosts, 'host', 'virustotal')
            else:
                usage()
                print("Invalid search engine, try with: baidu, bing, bingapi, censys, crtsh, cymon, dogpile, google, googleCSE, googleplus, google-certificates, google-profiles, hunter, linkedin, netcraft, pgp, threatcrowd, trello, twitter, vhost, virustotal, yahoo, all")
                sys.exit()

    # Results #############################################################
    print("\n\033[1;32;40mHarvesting results")
    if len(all_ip) == 0:
        print("No IP addresses found.")
    else:
        print("\033[1;33;40m \n[+] IP addresses found in search engines:")
        print("------------------------------------")
        for i in all_ip:
            print(i)

    print("\n\n[+] Emails found:")
    print("------------------")
    if not all_emails:
        print("No emails found.")
    else:
        print("\n".join(all_emails))

    print("\033[1;33;40m \n[+] Hosts found in search engines:")
    print("------------------------------------")
    # Fixed: condition previously tested `all_emails is None` by mistake.
    if not all_hosts:
        print("No hosts found.")
    else:
        total = len(all_hosts)
        print("\nTotal hosts: " + str(total) + "\n")
        all_hosts = sorted(set(all_hosts))
        print("\033[94m[-] Resolving hostnames IPs...\033[1;33;40m \n ")
        full_host = hostchecker.Checker(all_hosts)
        full = full_host.check()           # "hostname:ip" strings ("ip" may be "empty")
        for host in full:
            ip = host.split(':')[1]
            print(host)
            if ip != "empty" and ip.lower() not in host_ip:
                host_ip.append(ip.lower())
        db = stash.stash_manager()
        db.store_all(word, host_ip, 'ip', 'DNS-resolver')

    # DNS brute force #####################################################
    dnsres = []
    if dnsbrute:
        print("\n\033[94m[-] Starting DNS brute force: \033[1;33;40m")
        a = dnssearch.dns_force(word, dnsserver, verbose=True)
        res = a.process()
        print("\n\033[94m[-] Hosts found after DNS brute force:")
        print("---------------------------------------")
        for y in res:
            print(y)
            dnsres.append(y.split(':')[0])
            if y not in full:
                full.append(y)
        db = stash.stash_manager()
        db.store_all(word, dnsres, 'host', 'dns_bruteforce')

    # Port scanning #######################################################
    if ports_scanning:
        print("\n\n\033[1;32;40m[-] Scanning ports (active):\n")
        for x in full:
            host = x.split(':')[1]
            domain = x.split(':')[0]
            if host != "empty":
                print("- Scanning : " + host)
                ports = [80, 443, 22, 8080, 21]
                try:
                    scan = port_scanner.port_scan(host, ports)
                    openports = scan.process()
                    if len(openports) > 1:
                        print("\t\033[91m Detected open ports: " + ','.join(str(e) for e in openports) + "\033[1;32;40m")
                        takeover_check = 'True'
                    if takeover_check == 'True':
                        if len(openports) > 0:
                            search_take = takeover.take_over(domain)
                            search_take.process()
                except Exception as e:
                    print(e)

    # DNS reverse lookup ##################################################
    dnsrev = []
    if dnslookup:
        print("\n[+] Starting active queries:")
        analyzed_ranges = []
        for x in host_ip:
            print(x)
            ip = x.split(":")[0]
            # Build the /24 network for this IP (renamed from `range` to
            # avoid shadowing the builtin).
            octets = ip.split(".")
            octets[3] = "0/24"
            net_range = '.'.join(octets)
            if net_range not in analyzed_ranges:
                print("\033[94m[-]Performing reverse lookup in : " + net_range + "\033[1;33;40m")
                a = dnssearch.dns_reverse(net_range, True)
                a.list()
                res = a.process()
                analyzed_ranges.append(net_range)
            else:
                continue
            for x in res:
                if x.count(word):
                    dnsrev.append(x)
                    if x not in full:
                        full.append(x)
        print("Hosts found after reverse lookup (in target domain):")
        print("---------------------------------")
        for xh in dnsrev:
            print(xh)

    # DNS TLD expansion ###################################################
    dnstldres = []
    if dnstld:
        print("[-] Starting DNS TLD expansion:")
        a = dnssearch.dns_tld(word, dnsserver, verbose=True)
        res = a.process()
        print("\n[+] Hosts found after DNS TLD expansion:")
        print("------------------------------------------")
        for y in res:
            print(y)
            dnstldres.append(y)
            if y not in full:
                full.append(y)

    # Virtual hosts search ################################################
    if virtual == "basic":
        print("\n[+] Virtual hosts:")
        print("------------------")
        for l in host_ip:
            search = bingsearch.search_bing(l, limit, start)
            search.process_vhost()
            res = search.get_allhostnames()
            for x in res:
                # Strip HTML markup fragments that Bing leaves in hostnames.
                x = re.sub(r'[[\<\/?]*[\w]*>]*', '', x)
                x = re.sub('<', '', x)
                x = re.sub('>', '', x)
                print(l + "\t" + x)
                vhost.append(l + ":" + x)
                full.append(l + ":" + x)
        vhost = sorted(set(vhost))

    # Shodan search #######################################################
    shodanres = []
    shodanvisited = []
    if shodan:
        print("\n\n\033[1;32;40m[-] Shodan DB search (passive):\n")
        if full == []:
            print('No host to search, exiting.')
            sys.exit()
        for x in full:
            try:
                ip = x.split(":")[1]
                if ip not in shodanvisited:
                    print("\tSearching for: " + ip)
                    a = shodansearch.search_shodan(ip)
                    shodanvisited.append(ip)
                    results = a.run()
                    for res in results['data']:
                        # Fixed: the format string had five placeholders for
                        # four values, raising TypeError on every result.
                        shodanres.append("%s:%s - %s - %s," % (res['ip_str'], res['port'], res['os'], res['isp']))
            except Exception:
                # Best-effort: unresolved hosts / API errors are skipped.
                pass
        print("\n [+] Shodan results:")
        print("------------------")
        for x in shodanres:
            print(x)

    # Here we need to add explosion mode.
    # Tengo que sacar los TLD para hacer esto.
    recursion = None
    if recursion:
        start = 0
        for word in vhost:
            search = googlesearch.search_google(word, limit, start)
            search.process(google_dorking)
            emails = search.get_emails()
            hosts = search.get_hostnames()
            print(emails)
            print(hosts)

    # Reporting ###########################################################
    if filename != "":
        try:
            print("NEW REPORTING BEGINS:")
            db = stash.stash_manager()
            scanboarddata = db.getscanboarddata()
            latestscanresults = db.getlatestscanresults(word)
            previousscanresults = db.getlatestscanresults(word, previousday=True)
            latestscanchartdata = db.latestscanchartdata(word)
            scanhistorydomain = db.getscanhistorydomain(word)
            pluginscanstatistics = db.getpluginscanstatistics()
            from lib import statichtmlgenerator
            generator = statichtmlgenerator.htmlgenerator(word)
            HTMLcode = generator.beginhtml()
            HTMLcode += generator.generatelatestscanresults(latestscanresults)
            HTMLcode += generator.generatepreviousscanresults(previousscanresults)
            from lib import reportgraph
            import datetime
            graph = reportgraph.graphgenerator(word)
            HTMLcode += graph.drawlatestscangraph(word, latestscanchartdata)
            HTMLcode += graph.drawscattergraphscanhistory(word, scanhistorydomain)
            # Fixed: previously passed the undefined name `scanstatistics`.
            HTMLcode += generator.generatescanstatistics(pluginscanstatistics)
            HTMLcode += '<p><span style="color: #000000;">Report generated on ' + str(datetime.datetime.now()) + '</span></p>'
            HTMLcode += '''
            </body>
            </html>
            '''
            Html_file = open("report.html", "w")
            Html_file.write(HTMLcode)
            Html_file.close()
            print("NEW REPORTING FINISHED!")
            print("[+] Saving files...")
            html = htmlExport.htmlExport(
                all_emails,
                full,
                vhost,
                dnsres,
                dnsrev,
                filename,
                word,
                shodanres,
                dnstldres)
            save = html.writehtml()
        except Exception as e:
            print(e)
            print("Error creating the file.")
        try:
            filename = filename.split(".")[0] + ".xml"
            file = open(filename, 'w')
            file.write('<?xml version="1.0" encoding="UTF-8"?><theHarvester>')
            for x in all_emails:
                file.write('<email>' + x + '</email>')
            for x in full:
                # Fixed: keep the original string; concatenating the split
                # list raised TypeError for entries without exactly one ':'.
                parts = x.split(":")
                if len(parts) == 2:
                    file.write('<host>' + '<ip>' + parts[1] + '</ip><hostname>' + parts[0] + '</hostname>' + '</host>')
                else:
                    file.write('<host>' + x + '</host>')
            for x in vhost:
                parts = x.split(":")
                if len(parts) == 2:
                    file.write('<vhost>' + '<ip>' + parts[1] + '</ip><hostname>' + parts[0] + '</hostname>' + '</vhost>')
                else:
                    file.write('<vhost>' + x + '</vhost>')
            if shodanres != []:
                shodanalysis = []
                for x in shodanres:
                    res = x.split("SAPO")
                    file.write('<shodan>')
                    file.write('<host>' + res[0] + '</host>')
                    # Fixed: guard the legacy "SAPO"-separated format so an
                    # IndexError cannot abort the whole XML export.
                    if len(res) >= 3:
                        file.write('<port>' + res[2] + '</port>')
                        file.write('<banner><!--' + res[1] + '--></banner>')
                        reg_server = re.compile('Server:.*')
                        temp = reg_server.findall(res[1])
                        if temp != []:
                            shodanalysis.append(res[0] + ":" + temp[0])
                    file.write('</shodan>')
                if shodanalysis != []:
                    shodanalysis = sorted(set(shodanalysis))
                    file.write('<servers>')
                    for x in shodanalysis:
                        file.write('<server>' + x + '</server>')
                    file.write('</servers>')
            file.write('</theHarvester>')
            file.flush()
            file.close()
            print("Files saved!")
        except Exception as er:
            print("Error saving XML file: " + str(er))
        sys.exit()
2018-12-16 11:07:37 +08:00
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print("Search interrupted by user..")
    except Exception:
        # Fixed: print_exc() already writes the traceback and returns None,
        # so wrapping it in print() appended a spurious "None" line.
        import traceback
        traceback.print_exc()
    sys.exit()