Starting the change from Python 2 to Python 3.7

This commit is contained in:
NotoriousRebel 2018-11-14 22:13:52 -05:00
parent 617eb58699
commit 2a20d3ff47
28 changed files with 1227 additions and 1129 deletions

8
Dockerfile Normal file
View file

@ -0,0 +1,8 @@
FROM python:2-alpine
RUN mkdir /app
RUN pip install requests
WORKDIR /app
COPY . /app
RUN chmod +x *.py
ENTRYPOINT ["/app/theHarvester.py"]
CMD ["--help"]

View file

@ -63,7 +63,7 @@ The sources are:
* shodan: Shodan Computer search engine, will search for ports and banner of the * shodan: Shodan Computer search engine, will search for ports and banner of the
discovered hosts (http://www.shodanhq.com/) discovered hosts (http://www.shodanhq.com/)
* hunter: Hunter search engine * hunter: Hunter search engine (you need to add your Key in the discovery/huntersearch.py file)
Active: Active:
------- -------
@ -104,6 +104,8 @@ https://github.com/laramies/theHarvester
Thanks: Thanks:
------- -------
* Matthew Brown @NotoriousRebel
* Janos Zold @Jzod
* John Matherly - SHODAN project * John Matherly - SHODAN project
* Lee Baird for suggestions and bugs reporting * Lee Baird for suggestions and bugs reporting
* Ahmed Aboul Ela - subdomain names dictionary (big and small) * Ahmed Aboul Ela - subdomain names dictionary (big and small)

39
censysparser.py Normal file
View file

@ -0,0 +1,39 @@
from bs4 import BeautifulSoup
import re
class parser:
def __init__(self, results):
self.results = results
self.ipaddresses = []
self.soup = BeautifulSoup(results.results,features="html.parser")
self.hostnames = []
self.numberofpages = 0
def search_hostnames(self):
try:
hostnamelist = self.soup.findAll('tt')
for hostnameitem in hostnamelist:
self.hostnames.append(hostnameitem.text)
return self.hostnames
except Exception as e:
print("Error occurred: " + str(e))
def search_ipaddresses(self):
try:
ipaddresslist = self.soup.findAll('a','SearchResult__title-text')
for ipaddressitem in ipaddresslist:
self.ipaddresses.append(ipaddressitem.text.strip())
return self.ipaddresses
except Exception as e:
print("Error occurred: " + str(e))
def search_numberofpages(self):
try:
items = self.soup.findAll(href=re.compile("page"))
for item in items:
if (item.text !='next'): #to filter out pagination
self.numberofpages+=1
return self.numberofpages
except Exception as e:
print("Error occurred: " + str(e))

View file

@ -13,12 +13,11 @@
import string import string
import types import types
import time import time
import Type import discovery.DNS.Type as Type
import Class import discovery.DNS.Class as Class
import Opcode import discovery.DNS.Opcode as Opcode
import asyncore import asyncore
class DNSError(Exception): class DNSError(Exception):
pass pass
@ -33,7 +32,7 @@ def ParseResolvConf(resolv_path):
try: try:
lines = open(resolv_path).readlines() lines = open(resolv_path).readlines()
except: except:
print "error in path" + resolv_path print("error in path" + resolv_path)
for line in lines: for line in lines:
line = string.strip(line) line = string.strip(line)
if not line or line[0] == ';' or line[0] == '#': if not line or line[0] == ';' or line[0] == '#':
@ -68,7 +67,9 @@ class DnsRequest:
def __init__(self, *name, **args): def __init__(self, *name, **args):
self.donefunc = None self.donefunc = None
self.async = None #fix maybe?
self.asyn= False
#self.async = None #TODO FIX async is a keyword
self.defaults = {} self.defaults = {}
self.argparse(name, args) self.argparse(name, args)
self.defaults = self.args self.defaults = self.args
@ -172,11 +173,11 @@ def req(self, *name, **args):
else: else:
qtype = self.args['qtype'] qtype = self.args['qtype']
if 'name' not in self.args: if 'name' not in self.args:
print self.args print(self.args)
raise DNSError('nothing to lookup') raise DNSError('nothing to lookup')
qname = self.args['name'] qname = self.args['name']
if qtype == Type.AXFR: if qtype == Type.AXFR:
print 'Query type AXFR, protocol forced to TCP' print('Query type AXFR, protocol forced to TCP')
protocol = 'tcp' protocol = 'tcp'
# print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype)) # print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype))
m = Lib.Mpacker() m = Lib.Mpacker()
@ -193,7 +194,7 @@ def req(self, *name, **args):
self.sendTCPRequest(server) self.sendTCPRequest(server)
except socket.error as reason: except socket.error as reason:
raise DNSError(reason) raise DNSError(reason)
if self.async: if self.asyn:
return None return None
else: else:
return self.response return self.response
@ -208,7 +209,7 @@ def sendUDPRequest(self, server):
#self.s.connect((self.ns, self.port)) #self.s.connect((self.ns, self.port))
self.conn() self.conn()
self.time_start = time.time() self.time_start = time.time()
if not self.async: if not self.asyn:
self.s.send(self.request) self.s.send(self.request)
self.response = self.processUDPReply() self.response = self.processUDPReply()
# except socket.error: # except socket.error:
@ -216,7 +217,7 @@ def sendUDPRequest(self, server):
continue continue
break break
if not self.response: if not self.response:
if not self.async: if not self.asyn:
raise DNSError('no working nameservers found') raise DNSError('no working nameservers found')
def sendTCPRequest(self, server): def sendTCPRequest(self, server):
@ -253,7 +254,7 @@ def __init__(self, *name, **args):
else: else:
self.donefunc = self.showResult self.donefunc = self.showResult
# self.realinit(name,args) # XXX todo # self.realinit(name,args) # XXX todo
self.async = 1 self.asyn = 1
def conn(self): def conn(self):
import time import time
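The async-to-asyn rename above is forced by the language itself: as of Python 3.7, async (and await) are reserved keywords, so even an attribute assignment such as self.async = None fails to compile. A tiny illustration (class name is illustrative):

class Demo:
    def __init__(self):
        # self.async = None   # SyntaxError under Python 3.7+: "async" is a reserved keyword
        self.asyn = False      # the renamed flag used by DnsRequest above

print(Demo().asyn)             # False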

View file

@ -26,12 +26,12 @@
import string import string
import types import types
import Type import discovery.DNS.Type as Type
import Class import discovery.DNS.Class as Class
import Opcode import discovery.DNS.Opcode as Opcode
import Status import discovery.DNS.Status as Status
from Base import DNSError from discovery.DNS.Base import DNSError
class UnpackError(DNSError): class UnpackError(DNSError):
@ -116,14 +116,15 @@ def addname(self, name):
# The case of the first occurrence of a name is preserved. # The case of the first occurrence of a name is preserved.
# Redundant dots are ignored. # Redundant dots are ignored.
list = [] list = []
for label in string.splitfields(name, '.'): for label in name.split('.'):
if label: if label:
if len(label) > 63: if len(label) > 63:
raise PackError('label too long') raise PackError('label too long')
list.append(label) list.append(label)
keys = [] keys = []
s = ''
for i in range(len(list)): for i in range(len(list)):
key = string.upper(string.joinfields(list[i:], '.')) key = str.upper((s.join(list[i:])))
keys.append(key) keys.append(key)
if key in self.index: if key in self.index:
pointer = self.index[key] pointer = self.index[key]
@ -142,8 +143,8 @@ def addname(self, name):
if offset + len(buf) < 0x3FFF: if offset + len(buf) < 0x3FFF:
index.append((keys[j], offset + len(buf))) index.append((keys[j], offset + len(buf)))
else: else:
print 'DNS.Lib.Packer.addname:', print('DNS.Lib.Packer.addname:',)
print 'warning: pointer too big' print('warning: pointer too big')
buf = buf + (chr(n) + label) buf = buf + (chr(n) + label)
if pointer: if pointer:
buf = buf + pack16bit(pointer | 0xC000) buf = buf + pack16bit(pointer | 0xC000)
@ -155,26 +156,26 @@ def addname(self, name):
def dump(self): def dump(self):
keys = sorted(self.index.keys()) keys = sorted(self.index.keys())
print '-' * 40 print('-' * 40)
for key in keys: for key in keys:
print '%20s %3d' % (key, self.index[key]) print('%20s %3d' % (key, self.index[key]))
print '-' * 40 print('-' * 40)
space = 1 space = 1
for i in range(0, len(self.buf) + 1, 2): for i in range(0, len(self.buf) + 1, 2):
if self.buf[i:i + 2] == '**': if self.buf[i:i + 2] == '**':
if not space: if not space:
print print()
space = 1 space = 1
continue continue
space = 0 space = 0
print '%4d' % i, print('%4d' % i,)
for c in self.buf[i:i + 2]: for c in self.buf[i:i + 2]:
if ' ' < c < '\177': if ' ' < c < '\177':
print ' %c' % c, print(' %c' % c,)
else: else:
print '%2d' % ord(c), print('%2d' % ord(c),)
print print()
print '-' * 40 print('-' * 40)
# Unpacking class # Unpacking class
@ -257,8 +258,8 @@ def testpacker():
p.addbytes('*' * 26) p.addbytes('*' * 26)
p.addname('') p.addname('')
timing.finish() timing.finish()
print timing.milli(), "ms total for packing" print(timing.milli(), "ms total for packing")
print round(timing.milli() / i, 4), 'ms per packing' print(round(timing.milli() / i, 4), 'ms per packing')
# p.dump() # p.dump()
u = Unpacker(p.buf) u = Unpacker(p.buf)
u.getaddr() u.getaddr()
@ -284,8 +285,8 @@ def testpacker():
u.getbytes(26), u.getbytes(26),
u.getname()) u.getname())
timing.finish() timing.finish()
print timing.milli(), "ms total for unpacking" print(timing.milli(), "ms total for unpacking")
print round(timing.milli() / i, 4), 'ms per unpacking' print(round(timing.milli() / i, 4), 'ms per unpacking')
# for item in res: print item # for item in res: print item
@ -379,7 +380,7 @@ def addSOA(self, name, klass, ttl,
def addTXT(self, name, klass, ttl, list): def addTXT(self, name, klass, ttl, list):
self.addRRheader(name, Type.TXT, klass, ttl) self.addRRheader(name, Type.TXT, klass, ttl)
if isinstance(list, types.StringType): if isinstance(list, str):
list = [list] list = [list]
for txtdata in list: for txtdata in list:
self.addstring(txtdata) self.addstring(txtdata)
@ -555,29 +556,29 @@ class Munpacker(RRunpacker, Qunpacker, Hunpacker):
# These affect the unpacker's current position! # These affect the unpacker's current position!
def dumpM(u): def dumpM(u):
print 'HEADER:', print('HEADER:',)
(id, qr, opcode, aa, tc, rd, ra, z, rcode, (id, qr, opcode, aa, tc, rd, ra, z, rcode,
qdcount, ancount, nscount, arcount) = u.getHeader() qdcount, ancount, nscount, arcount) = u.getHeader()
print 'id=%d,' % id, print('id=%d,' % id,)
print 'qr=%d, opcode=%d, aa=%d, tc=%d, rd=%d, ra=%d, z=%d, rcode=%d,' \ print('qr=%d, opcode=%d, aa=%d, tc=%d, rd=%d, ra=%d, z=%d, rcode=%d,' \
% (qr, opcode, aa, tc, rd, ra, z, rcode) % (qr, opcode, aa, tc, rd, ra, z, rcode))
if tc: if tc:
print '*** response truncated! ***' print('*** response truncated! ***')
if rcode: if rcode:
print '*** nonzero error code! (%d) ***' % rcode print('*** nonzero error code! (%d) ***' % rcode)
print ' qdcount=%d, ancount=%d, nscount=%d, arcount=%d' \ print(' qdcount=%d, ancount=%d, nscount=%d, arcount=%d' \
% (qdcount, ancount, nscount, arcount) % (qdcount, ancount, nscount, arcount))
for i in range(qdcount): for i in range(qdcount):
print 'QUESTION %d:' % i, print('QUESTION %d:' % i,)
dumpQ(u) dumpQ(u)
for i in range(ancount): for i in range(ancount):
print 'ANSWER %d:' % i, print('ANSWER %d:' % i,)
dumpRR(u) dumpRR(u)
for i in range(nscount): for i in range(nscount):
print 'AUTHORITY RECORD %d:' % i, print('AUTHORITY RECORD %d:' % i,)
dumpRR(u) dumpRR(u)
for i in range(arcount): for i in range(arcount):
print 'ADDITIONAL RECORD %d:' % i, print('ADDITIONAL RECORD %d:' % i,)
dumpRR(u) dumpRR(u)
@ -594,44 +595,44 @@ def __init__(self, u, args):
def show(self): def show(self):
import time import time
print '; <<>> PDG.py 1.0 <<>> %s %s' % (self.args['name'], print('; <<>> PDG.py 1.0 <<>> %s %s' % (self.args['name'],
self.args['qtype']) self.args['qtype']))
opt = "" opt = ""
if self.args['rd']: if self.args['rd']:
opt = opt + 'recurs ' opt = opt + 'recurs '
h = self.header h = self.header
print ';; options: ' + opt print(';; options: ' + opt)
print ';; got answer:' print(';; got answer:')
print ';; ->>HEADER<<- opcode %s, status %s, id %d' % ( print(';; ->>HEADER<<- opcode %s, status %s, id %d' % (
h['opcode'], h['status'], h['id']) h['opcode'], h['status'], h['id']))
flags = filter(lambda x, h=h: h[x], ('qr', 'aa', 'rd', 'ra', 'tc')) flags = filter(lambda x, h=h: h[x], ('qr', 'aa', 'rd', 'ra', 'tc'))
print ';; flags: %s; Ques: %d, Ans: %d, Auth: %d, Addit: %d' % ( print(';; flags: %s; Ques: %d, Ans: %d, Auth: %d, Addit: %d' % (
string.join(flags), h['qdcount'], h['ancount'], h['nscount'], ''.join(map(str,flags)), h['qdcount'], h['ancount'], h['nscount'],
h['arcount']) h['arcount']))
print ';; QUESTIONS:' print(';; QUESTIONS:')
for q in self.questions: for q in self.questions:
print ';; %s, type = %s, class = %s' % (q['qname'], q['qtypestr'], print(';; %s, type = %s, class = %s' % (q['qname'], q['qtypestr'],
q['qclassstr']) q['qclassstr']))
print print()
print ';; ANSWERS:' print(';; ANSWERS:')
for a in self.answers: for a in self.answers:
print '%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'], print('%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'],
a['data']) a['data']))
print print()
print ';; AUTHORITY RECORDS:' print(';; AUTHORITY RECORDS:')
for a in self.authority: for a in self.authority:
print '%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'], print('%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'],
a['data']) a['data']))
print print()
print ';; ADDITIONAL RECORDS:' print(';; ADDITIONAL RECORDS:')
for a in self.additional: for a in self.additional:
print '%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'], print('%-20s %-6s %-6s %s' % (a['name'], repr(a['ttl']), a['typename'],
a['data']) a['data']))
print print()
if 'elapsed' in self.args: if 'elapsed' in self.args:
print ';; Total query time: %d msec' % self.args['elapsed'] print(';; Total query time: %d msec' % self.args['elapsed'])
print ';; To SERVER: %s' % (self.args['server']) print(';; To SERVER: %s' % (self.args['server']))
print ';; WHEN: %s' % time.ctime(time.time()) print(';; WHEN: %s' % time.ctime(time.time()))
def storeM(self, u): def storeM(self, u):
(self.header['id'], self.header['qr'], self.header['opcode'], (self.header['id'], self.header['qr'], self.header['opcode'],
@ -682,25 +683,25 @@ def storeRR(self, u):
def dumpQ(u): def dumpQ(u):
qname, qtype, qclass = u.getQuestion() qname, qtype, qclass = u.getQuestion()
print 'qname=%s, qtype=%d(%s), qclass=%d(%s)' \ print('qname=%s, qtype=%d(%s), qclass=%d(%s)' \
% (qname, % (qname,
qtype, Type.typestr(qtype), qtype, Type.typestr(qtype),
qclass, Class.classstr(qclass)) qclass, Class.classstr(qclass)))
def dumpRR(u): def dumpRR(u):
name, type, klass, ttl, rdlength = u.getRRheader() name, type, klass, ttl, rdlength = u.getRRheader()
typename = Type.typestr(type) typename = Type.typestr(type)
print 'name=%s, type=%d(%s), class=%d(%s), ttl=%d' \ print('name=%s, type=%d(%s), class=%d(%s), ttl=%d' \
% (name, % (name,
type, typename, type, typename,
klass, Class.classstr(klass), klass, Class.classstr(klass),
ttl) ttl))
mname = 'get%sdata' % typename mname = 'get%sdata' % typename
if hasattr(u, mname): if hasattr(u, mname):
print ' formatted rdata:', getattr(u, mname)() print(' formatted rdata:', getattr(u, mname)())
else: else:
print ' binary rdata:', u.getbytes(rdlength) print(' binary rdata:', u.getbytes(rdlength))
if __name__ == "__main__": if __name__ == "__main__":
testpacker() testpacker()
@ -753,10 +754,10 @@ def dumpRR(u):
# added identifying header to top of each file # added identifying header to top of each file
# #
# Revision 1.7 2001/07/19 07:50:44 anthony # Revision 1.7 2001/07/19 07:50:44 anthony
# Added SRV (RFC 2782) support. Code from Michael Ströder. # Added SRV (RFC 2782) support. Code from Michael Ströder.
# #
# Revision 1.6 2001/07/19 07:39:18 anthony # Revision 1.6 2001/07/19 07:39:18 anthony
# 'type' -> 'rrtype' in getRRheader(). Fix from Michael Ströder. # 'type' -> 'rrtype' in getRRheader(). Fix from Michael Ströder.
# #
# Revision 1.5 2001/07/19 07:34:19 anthony # Revision 1.5 2001/07/19 07:34:19 anthony
# oops. glitch in storeRR (fixed now). # oops. glitch in storeRR (fixed now).
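Most of the Lib.py changes are mechanical print-function conversions plus removal of the string-module helpers that Python 3 dropped. For reference, the str-method equivalents are sketched below (values are illustrative; note the original joinfields call joined on '.', so '.'.join(...) is the literal equivalent, whereas the patched line above joins on an empty string):

name = "mail.Example.COM"
labels = name.split('.')                 # was: string.splitfields(name, '.')
key = '.'.join(labels[1:]).upper()       # was: string.upper(string.joinfields(list[1:], '.'))
print(labels)                            # ['mail', 'Example', 'COM']
print(key)                               # EXAMPLE.COM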

View file

@ -11,16 +11,16 @@
__version__ = '2.3.1' __version__ = '2.3.1'
import Type from discovery.DNS import Type
import Opcode from discovery.DNS import Opcode
import Status from discovery.DNS import Status
import Class from discovery.DNS import Class
from Base import DnsRequest, DNSError from discovery.DNS.Base import DnsRequest, DNSError
from Lib import DnsResult from discovery.DNS.Lib import DnsResult
from Base import * from discovery.DNS.Base import *
from Lib import * from discovery.DNS.Lib import *
Error = DNSError Error = DNSError
from lazy import * from discovery.DNS.lazy import *
Request = DnsRequest Request = DnsRequest
Result = DnsResult Result = DnsResult
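Python 3 removed implicit relative imports (PEP 328), so a bare import Type no longer resolves to the sibling module inside discovery/DNS; the commit switches to absolute discovery.DNS.* paths. An equivalent alternative would be explicit relative imports, sketched below (valid only inside the discovery.DNS package; not what the commit uses):

from . import Type, Opcode, Status, Class
from .Base import DnsRequest, DNSError
from .Lib import DnsResult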

View file

@ -7,7 +7,7 @@
# #
# routines for lazy people. # routines for lazy people.
import Base import discovery.DNS.Base as Base
import string import string

View file

@ -19,23 +19,21 @@
License analog to the current Python license License analog to the current Python license
""" """
import string import winreg
import re
import _winreg
def binipdisplay(s): def binipdisplay(s):
"convert a binary array of ip adresses to a python list" "convert a binary array of ip adresses to a python list"
if len(s) % 4 != 0: if len(s) % 4 != 0:
raise EnvironmentError # well ... raise EnvironmentError # well ...
ol = [] ol = []
for i in range(len(s) / 4): s = '.'
for i in range(int(len(s) / 4)):
s1 = s[:4] s1 = s[:4]
s = s[4:] s = s[4:]
ip = [] ip = []
for j in s1: for j in s1:
ip.append(str(ord(j))) ip.append(str(ord(j)))
ol.append(string.join(ip, '.')) ol.append(s.join(ip))
return ol return ol
@ -49,75 +47,75 @@ def stringdisplay(s):
def RegistryResolve(): def RegistryResolve():
nameservers = [] nameservers = []
x = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE) x = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try: try:
y = _winreg.OpenKey(x, y = winreg.OpenKey(x,
r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters") r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters")
except EnvironmentError: # so it isn't NT/2000/XP except EnvironmentError: # so it isn't NT/2000/XP
# windows ME, perhaps? # windows ME, perhaps?
try: # for Windows ME try: # for Windows ME
y = _winreg.OpenKey(x, y = winreg.OpenKey(x,
r"SYSTEM\CurrentControlSet\Services\VxD\MSTCP") r"SYSTEM\CurrentControlSet\Services\VxD\MSTCP")
nameserver, dummytype = _winreg.QueryValueEx(y, 'NameServer') nameserver, dummytype = winreg.QueryValueEx(y, 'NameServer')
if nameserver and not (nameserver in nameservers): if nameserver and not (nameserver in nameservers):
nameservers.extend(stringdisplay(nameserver)) nameservers.extend(stringdisplay(nameserver))
except EnvironmentError: except EnvironmentError:
pass pass
return nameservers # no idea return nameservers # no idea
try: try:
nameserver = _winreg.QueryValueEx(y, "DhcpNameServer")[0].split() nameserver = winreg.QueryValueEx(y, "DhcpNameServer")[0].split()
except: except:
nameserver = _winreg.QueryValueEx(y, "NameServer")[0].split() nameserver = winreg.QueryValueEx(y, "NameServer")[0].split()
if nameserver: if nameserver:
nameservers = nameserver nameservers = nameserver
nameserver = _winreg.QueryValueEx(y, "NameServer")[0] nameserver = winreg.QueryValueEx(y, "NameServer")[0]
_winreg.CloseKey(y) winreg.CloseKey(y)
try: # for win2000 try: # for win2000
y = _winreg.OpenKey(x, y = winreg.OpenKey(x,
r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\DNSRegisteredAdapters") r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\DNSRegisteredAdapters")
for i in range(1000): for i in range(1000):
try: try:
n = _winreg.EnumKey(y, i) n = winreg.EnumKey(y, i)
z = _winreg.OpenKey(y, n) z = winreg.OpenKey(y, n)
dnscount, dnscounttype = _winreg.QueryValueEx(z, dnscount, dnscounttype = winreg.QueryValueEx(z,
'DNSServerAddressCount') 'DNSServerAddressCount')
dnsvalues, dnsvaluestype = _winreg.QueryValueEx(z, dnsvalues, dnsvaluestype = winreg.QueryValueEx(z,
'DNSServerAddresses') 'DNSServerAddresses')
nameservers.extend(binipdisplay(dnsvalues)) nameservers.extend(binipdisplay(dnsvalues))
_winreg.CloseKey(z) winreg.CloseKey(z)
except EnvironmentError: except EnvironmentError:
break break
_winreg.CloseKey(y) winreg.CloseKey(y)
except EnvironmentError: except EnvironmentError:
pass pass
# #
try: # for whistler try: # for whistler
y = _winreg.OpenKey(x, y = winreg.OpenKey(x,
r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces") r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces")
for i in range(1000): for i in range(1000):
try: try:
n = _winreg.EnumKey(y, i) n = winreg.EnumKey(y, i)
z = _winreg.OpenKey(y, n) z = winreg.OpenKey(y, n)
try: try:
nameserver, dummytype = _winreg.QueryValueEx( nameserver, dummytype = winreg.QueryValueEx(
z, 'NameServer') z, 'NameServer')
if nameserver and not (nameserver in nameservers): if nameserver and not (nameserver in nameservers):
nameservers.extend(stringdisplay(nameserver)) nameservers.extend(stringdisplay(nameserver))
except EnvironmentError: except EnvironmentError:
pass pass
_winreg.CloseKey(z) winreg.CloseKey(z)
except EnvironmentError: except EnvironmentError:
break break
_winreg.CloseKey(y) winreg.CloseKey(y)
except EnvironmentError: except EnvironmentError:
# print "Key Interfaces not found, just do nothing" # print "Key Interfaces not found, just do nothing"
pass pass
# #
_winreg.CloseKey(x) winreg.CloseKey(x)
return nameservers return nameservers
if __name__ == "__main__": if __name__ == "__main__":
print "Name servers:", RegistryResolve() print("Name servers:", RegistryResolve())
# #
# $Log: win32dns.py,v $ # $Log: win32dns.py,v $
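The only substantive change in this file is the module rename: _winreg became winreg in Python 3. Projects that still need to support both interpreter lines often use a shim like the one below (not needed by this commit, which targets Python 3 only; winreg is a Windows-only module):

try:
    import winreg              # Python 3 name (Windows-only module)
except ImportError:
    import _winreg as winreg   # Python 2 fallback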

View file

@ -1,23 +1,3 @@
__all__ = ["bingsearch", from api import WebAPI
"googlesearch", from client import Shodan
"googleplussearch", from exception import APIError
"pgpsearch",
"linkedinsearch",
"exaleadsearch",
"yandexsearch",
"googlesets",
"dnssearch",
"shodansearch",
"jigsaw",
"twittersearch",
"dogpilesearch",
"baidusearch",
"yahoosearch",
"netcraft",
"crtsh",
"virustotal",
"threatcrowd",
"wfuzz_search",
"port_scanner",
"takeover",
"googleCSE"]

View file

@ -1,5 +1,5 @@
import string import string
import httplib import http.client
import sys import sys
import myparser import myparser
import re import re
@ -22,7 +22,7 @@ def __init__(self, word, limit, start):
self.counter = start self.counter = start
def do_search(self): def do_search(self):
h = httplib.HTTP(self.server) h = http.client.HTTP(self.server)
h.putrequest('GET', "/search?q=%40" + self.word + h.putrequest('GET', "/search?q=%40" + self.word +
"&count=50&first=" + str(self.counter)) "&count=50&first=" + str(self.counter))
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
@ -35,7 +35,7 @@ def do_search(self):
self.totalresults += self.results self.totalresults += self.results
def do_search_api(self): def do_search_api(self):
h = httplib.HTTP(self.apiserver) h = http.client.HTTP(self.apiserver)
h.putrequest('GET', "/xml.aspx?Appid=" + self.bingApi + "&query=%40" + h.putrequest('GET', "/xml.aspx?Appid=" + self.bingApi + "&query=%40" +
self.word + "&sources=web&web.count=40&web.offset=" + str(self.counter)) self.word + "&sources=web&web.count=40&web.offset=" + str(self.counter))
h.putheader('Host', "api.search.live.net") h.putheader('Host', "api.search.live.net")
@ -74,7 +74,7 @@ def get_allhostnames(self):
def process(self, api): def process(self, api):
if api == "yes": if api == "yes":
if self.bingApi == "": if self.bingApi == "":
print "Please insert your API key in the discovery/bingsearch.py" print("Please insert your API key in the discovery/bingsearch.py")
sys.exit() sys.exit()
while (self.counter < self.limit): while (self.counter < self.limit):
if api == "yes": if api == "yes":
@ -84,7 +84,7 @@ def process(self, api):
self.do_search() self.do_search()
time.sleep(1) time.sleep(1)
self.counter += 50 self.counter += 50
print "\tSearching " + str(self.counter) + " results..." print("\tSearching " + str(self.counter) + " results...")
def process_vhost(self): def process_vhost(self):
# Maybe it is good to use other limit for this. # Maybe it is good to use other limit for this.
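One caveat with the httplib-to-http.client swap above: Python 3's http.client keeps HTTPConnection but drops the legacy HTTP class along with getreply() and getfile(), so those calls still need a follow-up pass. A hedged sketch of the HTTPConnection equivalent of the GET built above (hostname and query string are illustrative):

import http.client

conn = http.client.HTTPConnection("www.bing.com")
conn.request("GET", "/search?q=%40example.com&count=50&first=0",
             headers={"User-Agent": "Mozilla/5.0"})
resp = conn.getresponse()                              # replaces getreply()
body = resp.read().decode("utf-8", errors="ignore")    # replaces getfile().read()
conn.close()
print(resp.status, len(body))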

71
discovery/censys.py Normal file
View file

@ -0,0 +1,71 @@
import sys
import random
import requests
import censysparser
class search_censys:
def __init__(self, word):
self.word = word
self.url = ""
self.page = ""
self.results = ""
self.total_results = ""
self.server = "censys.io"
self.userAgent = ["(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
,("Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Mobile Safari/537.36"),
("Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254"),
"Mozilla/5.0 (SMART-TV; X11; Linux armv7l) AppleWebKit/537.42 (KHTML, like Gecko) Chromium/25.0.1349.2 Chrome/25.0.1349.2 Safari/537.42"
,"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991"
,"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 OPR/48.0.2685.52"
,"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"]
def do_search(self):
try:
headers = {'user-agent': random.choice(self.userAgent),'Accept':'*/*','Referer':self.url}
response = requests.get(self.url, headers=headers)
self.results = response.content
self.total_results += self.results
except Exception as e:
print(e)
def process(self,morepage=None):
try:
if (morepage is not None):
self.page =str(morepage)
baseurl = self.url
self.url = baseurl + "&page=" + self.page
else:
self.url="https://" + self.server + "/ipv4/_search?q=" + self.word
self.do_search()
print "\tSearching Censys results.."
except Exception as e:
print("Error occurred: " + str(e))
def get_hostnames(self):
try:
hostnames = censysparser.parser(self)
return hostnames.search_hostnames()
except Exception as e:
print("Error occurred: " + str(e))
def get_ipaddresses(self):
try:
ips = censysparser.parser(self)
return ips.search_ipaddresses()
except Exception as e:
print("Error occurred: " + str(e))
def get_totalnumberofpages(self):
try:
pages = censysparser.parser(self)
return pages.search_numberofpages()
except Exception as e:
print("Error occurred: " + str(e))

View file

@ -1,5 +1,5 @@
import IPy from discovery import IPy
import DNS import discovery.DNS as DNS
import string import string
import socket import socket
import sys import sys
@ -18,7 +18,7 @@ def __init__(self, range, verbose=True):
DNS.ParseResolvConf("/etc/resolv.conf") DNS.ParseResolvConf("/etc/resolv.conf")
nameserver = DNS.defaults['server'][0] nameserver = DNS.defaults['server'][0]
except: except:
print "Error in DNS resolvers" print("Error in DNS resolvers")
sys.exit() sys.exit()
def run(self, host): def run(self, host):
@ -42,7 +42,7 @@ def get_ip_list(self, ips):
try: try:
list = IPy.IP(ips) list = IPy.IP(ips)
except: except:
print "Error in IP format, check the input and try again. (Eg. 192.168.1.0/24)" print("Error in IP format, check the input and try again. (Eg. 192.168.1.0/24)")
sys.exit() sys.exit()
name = [] name = []
for x in list: for x in list:
@ -75,12 +75,12 @@ def __init__(self, domain, dnsserver, verbose=False):
res_path = os.path.join(fileDir,'lib/resolvers.txt') res_path = os.path.join(fileDir,'lib/resolvers.txt')
with open(res_path) as f: with open(res_path) as f:
self.resolvers = f.read().splitlines() self.resolvers = f.read().splitlines()
except Exception, e: except Exception as e:
print "Resolvers file can't be open" print("Resolvers file can't be open")
try: try:
f = open(self.file, "r") f = open(self.file, "r")
except: except:
print "Error opening dns dictionary file" print("Error opening dns dictionary file")
sys.exit() sys.exit()
self.list = f.readlines() self.list = f.readlines()
@ -105,10 +105,10 @@ def getdns(self, domain):
server=primary, server=primary,
aa=1).req() aa=1).req()
except Exception as e: except Exception as e:
print e print(e)
if test.header['status'] != "NOERROR": if test.header['status'] != "NOERROR":
print "Error" print("Error")
sys.exit() sys.exit()
self.nameserver = test.answers[0]['data'] self.nameserver = test.answers[0]['data']
elif self.nameserver == "local": elif self.nameserver == "local":
@ -118,7 +118,7 @@ def getdns(self, domain):
def run(self, host): def run(self, host):
if self.nameserver == "": if self.nameserver == "":
self.nameserver = self.getdns(self.domain) self.nameserver = self.getdns(self.domain)
print "\n\033[94m[-] Using DNS server: " + self.nameserver + "\033[1;33;40m\n" print("\n\033[94m[-] Using DNS server: " + self.nameserver + "\033[1;33;40m\n")
#secure_random = random.SystemRandom() #secure_random = random.SystemRandom()
#self.nameserver = secure_random.choice(self.resolvers) #self.nameserver = secure_random.choice(self.resolvers)
@ -144,7 +144,7 @@ def process(self):
for x in self.list: for x in self.list:
host = self.run(x) host = self.run(x)
if host is not None: if host is not None:
print " : " + host.split(":")[1] print(" : " + host.split(":")[1])
results.append(host) results.append(host)
return results return results
@ -199,7 +199,7 @@ def getdns(self, domain):
0]['data'] 0]['data']
test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req() test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req()
if test.header['status'] != "NOERROR": if test.header['status'] != "NOERROR":
print "Error" print("Error")
sys.exit() sys.exit()
self.nameserver = test.answers[0]['data'] self.nameserver = test.answers[0]['data']
elif self.nameserver == "local": elif self.nameserver == "local":
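Apart from print(), the key syntax change in this file is the exception clause: "except Exception, e" is a SyntaxError in Python 3, and "except Exception as e" is the only accepted spelling (it also works on Python 2.6+). For example:

try:
    lines = open("/etc/resolv.conf").readlines()
except Exception as e:          # "except Exception, e:" no longer parses in Python 3
    print("Error in DNS resolvers:", e)
    lines = []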

View file

@ -1,5 +1,5 @@
import string import string
import httplib import http.client
import sys import sys
import myparser import myparser
import re import re
@ -7,7 +7,6 @@
class search_exalead: class search_exalead:
def __init__(self, word, limit, start): def __init__(self, word, limit, start):
self.word = word self.word = word
self.files = "pdf" self.files = "pdf"
@ -20,7 +19,7 @@ def __init__(self, word, limit, start):
self.counter = start self.counter = start
def do_search(self): def do_search(self):
h = httplib.HTTP(self.server) h = http.client.HTTPConnection(self.server)
h.putrequest('GET', "/search/web/results/?q=%40" + self.word + h.putrequest('GET', "/search/web/results/?q=%40" + self.word +
"&elements_per_page=50&start_index=" + str(self.counter)) "&elements_per_page=50&start_index=" + str(self.counter))
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
@ -37,7 +36,7 @@ def do_search(self):
self.totalresults += self.results self.totalresults += self.results
def do_search_files(self, files): def do_search_files(self, files):
h = httplib.HTTP(self.server) h = http.client.HTTPConnection(self.server)
h.putrequest( h.putrequest(
'GET', 'GET',
"search/web/results/?q=" + "search/web/results/?q=" +
@ -58,7 +57,7 @@ def check_next(self):
nextres = renext.findall(self.results) nextres = renext.findall(self.results)
if nextres != []: if nextres != []:
nexty = "1" nexty = "1"
print str(self.counter) print(str(self.counter))
else: else:
nexty = "0" nexty = "0"
return nexty return nexty
@ -79,7 +78,7 @@ def process(self):
while self.counter <= self.limit: while self.counter <= self.limit:
self.do_search() self.do_search()
self.counter += 50 self.counter += 50
print "\tSearching " + str(self.counter) + " results..." print("\ tSearching " + str(self.counter) + " results...")
def process_files(self, files): def process_files(self, files):
while self.counter < self.limit: while self.counter < self.limit:

View file

@ -21,13 +21,13 @@ def __init__(self, word, limit):
def do_search(self): def do_search(self):
try: try:
urly="https://" + self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Aplus.google.com%20intext%3A%22Works%20at%22%20" + self.word+ "%20-inurl%3Aphotos%20-inurl%3Aabout%20-inurl%3Aposts%20-inurl%3Aplusones" urly="https://" + self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Aplus.google.com%20intext%3A%22Works%20at%22%20" + self.word+ "%20-inurl%3Aphotos%20-inurl%3Aabout%20-inurl%3Aposts%20-inurl%3Aplusones"
except Exception, e: except Exception as e:
print e print(e)
try: try:
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
r=requests.get(urly,headers=headers) r=requests.get(urly,headers=headers)
except Exception,e: except Exception as e:
print e print(e)
self.results = r.content self.results = r.content
self.totalresults += self.results self.totalresults += self.results
@ -39,4 +39,4 @@ def process(self):
while (self.counter < self.limit): while (self.counter < self.limit):
self.do_search() self.do_search()
self.counter += 100 self.counter += 100
print "\tSearching " + str(self.counter) + " results.." print("\tSearching " + str(self.counter) + " results..")

View file

@ -32,25 +32,25 @@ def __init__(self, word, limit, start):
def do_search(self): def do_search(self):
try: #do normal scraping try: #do normal scraping
urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=%40\"" + self.word + "\"" urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=%40\"" + self.word + "\""
except Exception, e: except Exception as e:
print e print(e)
try: try:
params = {'User-Agent': random.choice(self.userAgent)} #select random user agent params = {'User-Agent': random.choice(self.userAgent)} #select random user agent
r=requests.get(urly,params= params) r=requests.get(urly,params= params)
except Exception,e: except Exception as e:
print e print(e)
self.results = r.content self.results = r.content
self.totalresults += self.results self.totalresults += self.results
def do_search_profiles(self): def do_search_profiles(self):
try: try:
urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=site:www.google.com%20intitle:\"Google%20Profile\"%20\"Companies%20I%27ve%20worked%20for\"%20\"at%20" + self.word + "\"" urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=site:www.google.com%20intitle:\"Google%20Profile\"%20\"Companies%20I%27ve%20worked%20for\"%20\"at%20" + self.word + "\""
except Exception, e: except Exception as e:
print e print(e)
try: try:
r=requests.get(urly) r=requests.get(urly)
except Exception,e: except Exception as e:
print e print (e)
self.results = r.content self.results = r.content
#'&hl=en&meta=&q=site:www.google.com%20intitle:"Google%20Profile"%20"Companies%20I%27ve%20worked%20for"%20"at%20' + self.word + '"') #'&hl=en&meta=&q=site:www.google.com%20intitle:"Google%20Profile"%20"Companies%20I%27ve%20worked%20for"%20"at%20' + self.word + '"')
self.totalresults += self.results self.totalresults += self.results
@ -77,17 +77,17 @@ def process(self,google_dorking):
self.do_search() self.do_search()
#more = self.check_next() #more = self.check_next()
time.sleep(1) time.sleep(1)
print "\tSearching " + str(self.counter) + " results..." print("\tSearching " + str(self.counter) + " results...")
self.counter += 100 self.counter += 100
else: #google dorking is true else: #google dorking is true
self.counter = 0 #reset counter self.counter = 0 #reset counter
print '\n' print('\n')
print "[-] Searching with Google Dorks: " print("[-] Searching with Google Dorks: ")
while self.counter <= self.limit and self.counter <= 200: # only 200 dorks in list while self.counter <= self.limit and self.counter <= 200: # only 200 dorks in list
self.googledork() #call google dorking method if user wanted it! self.googledork() #call google dorking method if user wanted it!
# more = self.check_next() # more = self.check_next()
time.sleep(.1) time.sleep(.5)
print "\tSearching " + str(self.counter) + " results..." print("\tSearching " + str(self.counter) + " results...")
self.counter += 100 self.counter += 100
def process_profiles(self): def process_profiles(self):
@ -95,7 +95,7 @@ def process_profiles(self):
self.do_search_profiles() self.do_search_profiles()
time.sleep(0.15) time.sleep(0.15)
self.counter += 100 self.counter += 100
print "\tSearching " + str(self.counter) + " results..." print("\tSearching " + str(self.counter) + " results...")
def append_dorks(self): def append_dorks(self):
try: # wrap in try-except incase filepaths are messed up try: # wrap in try-except incase filepaths are messed up

View file

@ -1,5 +1,5 @@
import string import string
import httplib import http.client
import sys import sys
import myparser import myparser
import re import re
@ -24,7 +24,7 @@ def __init__(self, list):
self.set = self.set + "&q" + str(id) + "=" + str(x) self.set = self.set + "&q" + str(id) + "=" + str(x)
def do_search(self): def do_search(self):
h = httplib.HTTP(self.server) h = http.client.HTTPConnection(self.server)
h.putrequest('GET', "/sets?hl=en&" + self.set) h.putrequest('GET', "/sets?hl=en&" + self.set)
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
h.putheader('User-agent', self.userAgent) h.putheader('User-agent', self.userAgent)

View file

@ -7,28 +7,28 @@ class search_hunter:
def __init__(self, word, limit, start): def __init__(self, word, limit, start):
self.word = word self.word = word
self.limit = limit self.limit = 100
self.start = start self.start = start
self.key = "" self.key = ""
if self.key == "": if self.key == "":
print "You need an API key in order to use the Hunter search engine. You can get one here: https://hunter.io" print("You need an API key in order to use the Hunter search engine. You can get one here: https://hunter.io")
sys.exit() sys.exit()
self.results = "" self.results = ""
self.totalresults = "" self.totalresults = ""
self.counter = start self.counter = start
self.database = "https://api.hunter.io/v2/domain-search?domain=" + word + "&api_key=" + self.key self.database = "https://api.hunter.io/v2/domain-search?domain=" + word + "&api_key=" + self.key +"&limit=" + str(self.limit)
def do_search(self): def do_search(self):
try: try:
r = requests.get(self.database) r = requests.get(self.database)
except Exception,e: except Exception as e:
print e print(e)
self.results = r.content self.results = r.content
self.totalresults += self.results self.totalresults += self.results
def process(self): def process(self):
self.do_search() #only need to do it once self.do_search() #only need to do it once
print '\tDone Searching Results' print('\tDone Searching Results')
def get_emails(self): def get_emails(self):
rawres = myparser.parser(self.totalresults, self.word) rawres = myparser.parser(self.totalresults, self.word)
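The new query string also passes an explicit limit. A hedged sketch of calling the same Hunter endpoint directly (the API key placeholder and domain are hypothetical; a real key from https://hunter.io is required):

import requests

url = ("https://api.hunter.io/v2/domain-search"
       "?domain=example.com&api_key=YOUR_HUNTER_KEY&limit=100")
r = requests.get(url)
print(r.status_code)
print(r.content[:200])   # raw JSON that the module above hands to myparser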

View file

@ -20,12 +20,12 @@ def __init__(self, word, limit):
def do_search(self): def do_search(self):
try: try:
urly="http://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Alinkedin.com/in%20" + self.word urly="http://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Alinkedin.com/in%20" + self.word
except Exception, e: except Exception as e:
print e print(e)
try: try:
r=requests.get(urly) r=requests.get(urly)
except Exception,e: except Exception as e:
print e print(e)
self.results = r.content self.results = r.content
self.totalresults += self.results self.totalresults += self.results
@ -37,4 +37,4 @@ def process(self):
while (self.counter < self.limit): while (self.counter < self.limit):
self.do_search() self.do_search()
self.counter += 100 self.counter += 100
print "\tSearching " + str(self.counter) + " results.." print("\tSearching " + str(self.counter) + " results..")

View file

@ -1,5 +1,5 @@
import string import string
import httplib import http.client
import sys import sys
import myparser import myparser
@ -15,18 +15,18 @@ def __init__(self, word):
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
def process(self): def process(self):
print "\tSearching PGP results..." print("\tSearching PGP results...")
try: try:
h = httplib.HTTP(self.server) h = http.client.HTTP(self.server)
h.putrequest('GET', "/pks/lookup?search=" + self.word + "&op=index") h.putrequest('GET', "/pks/lookup?search=" + self.word + "&op=index")
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
h.putheader('User-agent', self.userAgent) h.putheader('User-agent', self.userAgent)
h.endheaders() h.endheaders()
returncode, returnmsg, headers = h.getreply() returncode, returnmsg, headers = h.getreply()
self.results = h.getfile().read() self.results = h.getfile().read()
except Exception, e: except Exception as e:
print "Unable to connect to PGP server: ",str(e) print("Unable to connect to PGP server: ",str(e))
pass pass
def get_emails(self): def get_emails(self):

View file

@ -30,7 +30,7 @@
# Define a basestring type if necessary for Python3 compatibility # Define a basestring type if necessary for Python3 compatibility
try: try:
basestring str
except NameError: except NameError:
basestring = str basestring = str

View file

@ -1,4 +1,4 @@
from shodan import Shodan from discovery.shodan import Shodan
import sys import sys
import json import json
@ -9,7 +9,7 @@ def __init__(self, host):
self.host = host self.host = host
self.key = "oCiMsgM6rQWqiTvPxFHYcExlZgg7wvTt" self.key = "oCiMsgM6rQWqiTvPxFHYcExlZgg7wvTt"
if self.key == "": if self.key == "":
print "You need an API key in order to use SHODAN database. You can get one here: http://www.shodanhq.com/" print("You need an API key in order to use SHODAN database. You can get one here: http://www.shodanhq.com/")
sys.exit() sys.exit()
self.api = Shodan(self.key) self.api = Shodan(self.key)
@ -17,7 +17,7 @@ def run(self):
try: try:
host = self.api.host(self.host) host = self.api.host(self.host)
return host['data'] return host['data']
except Exception, e: except Exception as e:
print "SHODAN empty reply or error in the call" print("SHODAN empty reply or error in the call")
print e print(e)
return "error" return "error"

View file

@ -1,5 +1,5 @@
import string import string
import httplib import http.client
import sys import sys
import myparser import myparser
import re import re
@ -19,7 +19,7 @@ def __init__(self, word, limit, start):
self.counter = start self.counter = start
def do_search(self): def do_search(self):
h = httplib.HTTP(self.server) h = http.client.HTTPConnection(self.server)
h.putrequest('GET', "/search?text=%40" + self.word + h.putrequest('GET', "/search?text=%40" + self.word +
"&numdoc=50&lr=" + str(self.counter)) "&numdoc=50&lr=" + str(self.counter))
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
@ -28,10 +28,10 @@ def do_search(self):
returncode, returnmsg, headers = h.getreply() returncode, returnmsg, headers = h.getreply()
self.results = h.getfile().read() self.results = h.getfile().read()
self.totalresults += self.results self.totalresults += self.results
print self.results print(self.results)
def do_search_files(self, files): # TODO def do_search_files(self, files): # TODO
h = httplib.HTTP(self.server) h = http.client.HTTPConnection(self.server)
h.putrequest('GET', "/search?text=%40" + self.word + h.putrequest('GET', "/search?text=%40" + self.word +
"&numdoc=50&lr=" + str(self.counter)) "&numdoc=50&lr=" + str(self.counter))
h.putheader('Host', self.hostname) h.putheader('Host', self.hostname)
@ -46,7 +46,7 @@ def check_next(self):
nextres = renext.findall(self.results) nextres = renext.findall(self.results)
if nextres != []: if nextres != []:
nexty = "1" nexty = "1"
print str(self.counter) print(str(self.counter))
else: else:
nexty = "0" nexty = "0"
return nexty return nexty
@ -67,7 +67,7 @@ def process(self):
while self.counter <= self.limit: while self.counter <= self.limit:
self.do_search() self.do_search()
self.counter += 50 self.counter += 50
print "Searching " + str(self.counter) + " results..." print("Searching " + str(self.counter) + " results...")
def process_files(self, files): def process_files(self, files):
while self.counter < self.limit: while self.counter < self.limit:

View file

@ -776,4 +776,4 @@ def _number_format(val, dec):
return dec and ('%.' + str(dec) + 'f') % val or int(round(val)) return dec and ('%.' + str(dec) + 'f') % val or int(round(val))
if __name__ == '__main__': if __name__ == '__main__':
print __doc__ print(__doc__)

View file

@ -1 +1,2 @@
requests==2.18.4 requests==2.18.4
bs4==0.0.1

View file

@ -29,8 +29,8 @@ def store(self,domain, resource,res_type,source):
c.execute ('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)',(self.domain,self.resource,self.type,self.date,self.source)) c.execute ('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)',(self.domain,self.resource,self.type,self.date,self.source))
conn.commit() conn.commit()
conn.close() conn.close()
except Exception, e: except Exception as e:
print e print(e)
return return
def store_all(self,domain,all,res_type,source): def store_all(self,domain,all,res_type,source):
@ -46,6 +46,6 @@ def store_all(self,domain,all,res_type,source):
c.execute ('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)',(self.domain,x,self.type,self.date,self.source)) c.execute ('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)',(self.domain,x,self.type,self.date,self.source))
conn.commit() conn.commit()
conn.close() conn.close()
except Exception, e: except Exception as e:
print e print(e)
return return
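Both methods rely on sqlite3 parameterized INSERTs. The same pattern in isolation (the in-memory database and untyped column list are illustrative of the results table used above):

import sqlite3
import datetime

conn = sqlite3.connect(":memory:")
c = conn.cursor()
c.execute("CREATE TABLE results (domain, resource, type, find_date, source)")
c.execute("INSERT INTO results (domain, resource, type, find_date, source) VALUES (?,?,?,?,?)",
          ("example.com", "mail.example.com", "host", datetime.date.today().isoformat(), "google"))
conn.commit()
print(c.execute("SELECT * FROM results").fetchall())
conn.close()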

View file

@ -12,7 +12,7 @@ def test_emails(self):
results = '@domain.com***a@domain***banotherdomain.com***c@domain.com***d@sub.domain.com***' results = '@domain.com***a@domain***banotherdomain.com***c@domain.com***d@sub.domain.com***'
p = myparser.parser(results, word) p = myparser.parser(results, word)
emails = sorted(p.emails()) emails = sorted(p.emails())
self.assertEquals(emails, [ 'c@domain.com', 'd@sub.domain.com' ]) self.assertEqual(emails, [ 'c@domain.com', 'd@sub.domain.com' ])
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,10 +1,8 @@
#!/usr/bin/env python #!/usr/bin/env python
import string import string
import httplib
import sys import sys
import os import os
from socket import *
import re import re
import getopt import getopt
import stash import stash
@ -13,25 +11,25 @@
try: try:
import requests import requests
except: except:
print "Request library not found, please install it before proceeding\n" print("Request library not found, please install it before proceeding\n")
sys.exit() sys.exit()
from discovery import * from discovery import *
from lib import htmlExport from lib import htmlExport
from lib import hostchecker from lib import hostchecker
print "\n \033[92m *******************************************************************" print("\n \033[92m *******************************************************************")
print "* *" print("* *")
print "* | |_| |__ ___ /\ /\__ _ _ ____ _____ ___| |_ ___ _ __ *" print("* | |_| |__ ___ /\ /\__ _ _ ____ _____ ___| |_ ___ _ __ *")
print "* | __| '_ \ / _ \ / /_/ / _` | '__\ \ / / _ \/ __| __/ _ \ '__| *" print("* | __| '_ \ / _ \ / /_/ / _` | '__\ \ / / _ \/ __| __/ _ \ '__| *")
print "* | |_| | | | __/ / __ / (_| | | \ V / __/\__ \ || __/ | *" print("* | |_| | | | __/ / __ / (_| | | \ V / __/\__ \ || __/ | *")
print "* \__|_| |_|\___| \/ /_/ \__,_|_| \_/ \___||___/\__\___|_| *" print("* \__|_| |_|\___| \/ /_/ \__,_|_| \_/ \___||___/\__\___|_| *")
print "* *" print("* *")
print "* TheHarvester Ver. 3.0.0 *" print("* TheHarvester Ver. 3.0.0 *")
print "* Coded by Christian Martorella *" print("* Coded by Christian Martorella *")
print "* Edge-Security Research *" print("* Edge-Security Research *")
print "* cmartorella@edge-security.com *" print("* cmartorella@edge-security.com *")
print "*******************************************************************\033[94m\n\n" print("*******************************************************************\033[94m\n\n")
def usage(): def usage():
@ -41,29 +39,29 @@ def usage():
if os.path.dirname(sys.argv[0]) == os.getcwd(): if os.path.dirname(sys.argv[0]) == os.getcwd():
comm = "./" + comm comm = "./" + comm
print "Usage: theharvester options \n" print("Usage: theharvester options \n")
print " -d: Domain to search or company name" print(" -d: Domain to search or company name")
print """ -b: data source: baidu, bing, bingapi, dogpile, google, googleCSE, print(""" -b: data source: baidu, bing, bingapi, dogpile, google, googleCSE,
googleplus, google-profiles, linkedin, pgp, twitter, vhost, googleplus, google-profiles, linkedin, pgp, twitter, vhost,
virustotal, threatcrowd, crtsh, netcraft, yahoo, hunter, all\n""" virustotal, threatcrowd, crtsh, netcraft, yahoo, hunter, all\n""")
print " -g: use google dorking instead of normal google search" print(" -g: use google dorking instead of normal google search")
print " -s: start in result number X (default: 0)" print(" -s: start in result number X (default: 0)")
print " -v: verify host name via dns resolution and search for virtual hosts" print(" -v: verify host name via dns resolution and search for virtual hosts")
print " -f: save the results into an HTML and XML file (both)" print(" -f: save the results into an HTML and XML file (both)")
print " -n: perform a DNS reverse query on all ranges discovered" print(" -n: perform a DNS reverse query on all ranges discovered")
print " -c: perform a DNS brute force for the domain name" print(" -c: perform a DNS brute force for the domain name")
print " -t: perform a DNS TLD expansion discovery" print(" -t: perform a DNS TLD expansion discovery")
print " -e: use this DNS server" print(" -e: use this DNS server")
print " -p: port scan the detected hosts and check for Takeovers (80,443,22,21,8080)" print(" -p: port scan the detected hosts and check for Takeovers (80,443,22,21,8080)")
print " -l: limit the number of results to work with(bing goes from 50 to 50 results," print(" -l: limit the number of results to work with(bing goes from 50 to 50 results,")
print " google 100 to 100, and pgp doesn't use this option)" print(" google 100 to 100, and pgp doesn't use this option)")
print " -h: use SHODAN database to query discovered hosts" print(" -h: use SHODAN database to query discovered hosts")
print "\nExamples:" print("\nExamples:")
print " " + comm + " -d microsoft.com -l 500 -b google -h myresults.html" print((" " + comm + " -d microsoft.com -l 500 -b google -h myresults.html"))
print " " + comm + " -d microsoft.com -b pgp" print((" " + comm + " -d microsoft.com -b pgp"))
print " " + comm + " -d microsoft -l 200 -b linkedin" print((" " + comm + " -d microsoft -l 200 -b linkedin"))
print " " + comm + " -d microsoft.com -l 200 -g -b google" print((" " + comm + " -d microsoft.com -l 200 -g -b google"))
print " " + comm + " -d apple.com -b googleCSE -l 500 -s 300\n" print((" " + comm + " -d apple.com -b googleCSE -l 500 -s 300\n"))
def start(argv): def start(argv):
@ -78,7 +76,7 @@ def start(argv):
try: try:
db=stash.stash_manager() db=stash.stash_manager()
db.do_init() db.do_init()
except Exception, e: except Exception as e:
pass pass
start = 0 start = 0
host_ip = [] host_ip = []
@ -124,11 +122,11 @@ def start(argv):
engines = set(arg.split(',')) engines = set(arg.split(','))
supportedengines = set(["baidu","bing","crtsh","bingapi","dogpile","google","googleCSE","virustotal","threatcrowd","googleplus","google-profiles","linkedin","pgp","twitter","vhost","yahoo","netcraft","hunter","all"]) supportedengines = set(["baidu","bing","crtsh","bingapi","dogpile","google","googleCSE","virustotal","threatcrowd","googleplus","google-profiles","linkedin","pgp","twitter","vhost","yahoo","netcraft","hunter","all"])
if set(engines).issubset(supportedengines): if set(engines).issubset(supportedengines):
print "found supported engines" print("found supported engines")
print "[-] Starting harvesting process for domain: " + word + "\n" print(("[-] Starting harvesting process for domain: " + word + "\n"))
for engineitem in engines: for engineitem in engines:
if engineitem == "google": if engineitem == "google":
print "[-] Searching in Google:" print("[-] Searching in Google:")
search = googlesearch.search_google(word, limit, start) search = googlesearch.search_google(word, limit, start)
search.process(google_dorking) search.process(google_dorking)
all_emails = search.get_emails() all_emails = search.get_emails()
@ -137,11 +135,11 @@ def start(argv):
try: try:
db=stash.stash_manager() db=stash.stash_manager()
db.store(word,x,'host','google') db.store(word,x,'host','google')
except Exception, e: except Exception as e:
print e print(e)
if engineitem == "netcraft": if engineitem == "netcraft":
print "[-] Searching in Netcraft:" print("[-] Searching in Netcraft:")
search = netcraft.search_netcraft(word) search = netcraft.search_netcraft(word)
search.process() search.process()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
@ -151,7 +149,7 @@ def start(argv):
if engineitem == "threatcrowd": if engineitem == "threatcrowd":
print "[-] Searching in Threatcrowd:" print("[-] Searching in Threatcrowd:")
search = threatcrowd.search_threatcrowd(word) search = threatcrowd.search_threatcrowd(word)
search.process() search.process()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
@ -160,7 +158,7 @@ def start(argv):
db.store_all(word,all_hosts,'host','threatcrowd') db.store_all(word,all_hosts,'host','threatcrowd')
if engineitem == "virustotal": if engineitem == "virustotal":
print "[-] Searching in Virustotal:" print("[-] Searching in Virustotal:")
search = virustotal.search_virustotal(word) search = virustotal.search_virustotal(word)
search.process() search.process()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
@ -170,7 +168,7 @@ def start(argv):
if engineitem == "crtsh": if engineitem == "crtsh":
print "[-] Searching in CRT.sh:" print("[-] Searching in CRT.sh:")
search = crtsh.search_crtsh(word) search = crtsh.search_crtsh(word)
search.process() search.process()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
@ -179,7 +177,7 @@ def start(argv):
db.store_all(word,all_hosts,'host','CRTsh') db.store_all(word,all_hosts,'host','CRTsh')
if engineitem == "googleCSE": if engineitem == "googleCSE":
print "[-] Searching in Google Custom Search:" print("[-] Searching in Google Custom Search:")
search = googleCSE.search_googleCSE(word, limit, start) search = googleCSE.search_googleCSE(word, limit, start)
search.process() search.process()
search.store_results() search.store_results()
@ -191,7 +189,7 @@ def start(argv):
db.store_all(word,all_hosts,'host','googleCSE') db.store_all(word,all_hosts,'host','googleCSE')
elif engineitem == "bing" or engineitem == "bingapi": elif engineitem == "bing" or engineitem == "bingapi":
print "[-] Searching in Bing:" print("[-] Searching in Bing:")
search = bingsearch.search_bing(word, limit, start) search = bingsearch.search_bing(word, limit, start)
if engineitem == "bingapi": if engineitem == "bingapi":
bingapi = "yes" bingapi = "yes"
@ -202,14 +200,14 @@ def start(argv):
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
elif engineitem == "dogpile": elif engineitem == "dogpile":
print "[-] Searching in Dogpilesearch.." print("[-] Searching in Dogpilesearch..")
search = dogpilesearch.search_dogpile(word, limit) search = dogpilesearch.search_dogpile(word, limit)
search.process() search.process()
all_emails = search.get_emails() all_emails = search.get_emails()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
elif engineitem == "pgp": elif engineitem == "pgp":
print "[-] Searching in PGP key server.." print("[-] Searching in PGP key server..")
search = pgpsearch.search_pgp(word) search = pgpsearch.search_pgp(word)
search.process() search.process()
all_emails = search.get_emails() all_emails = search.get_emails()
@ -220,65 +218,65 @@ def start(argv):
db.store_all(word,all_emails,'emails','pgp') db.store_all(word,all_emails,'emails','pgp')
elif engineitem == "yahoo": elif engineitem == "yahoo":
print "[-] Searching in Yahoo.." print("[-] Searching in Yahoo..")
search = yahoosearch.search_yahoo(word, limit) search = yahoosearch.search_yahoo(word, limit)
search.process() search.process()
all_emails = search.get_emails() all_emails = search.get_emails()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
# elif engineitem == "baidu": elif engineitem == "baidu":
print "[-] Searching in Baidu.." print("[-] Searching in Baidu..")
search = baidusearch.search_baidu(word, limit) search = baidusearch.search_baidu(word, limit)
search.process() search.process()
all_emails = search.get_emails() all_emails = search.get_emails()
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
elif engineitem == "googleplus": elif engineitem == "googleplus":
print "[-] Searching in Google+ .." print("[-] Searching in Google+ ..")
search = googleplussearch.search_googleplus(word, limit) search = googleplussearch.search_googleplus(word, limit)
search.process() search.process()
people = search.get_people() people = search.get_people()
print "Users from Google+:" print("Users from Google+:")
print "====================" print("====================")
for user in people: for user in people:
print user print(user)
sys.exit() sys.exit()
elif engineitem == "twitter": elif engineitem == "twitter":
print "[-] Searching in Twitter .." print("[-] Searching in Twitter ..")
search = twittersearch.search_twitter(word, limit) search = twittersearch.search_twitter(word, limit)
search.process() search.process()
people = search.get_people() people = search.get_people()
print "Users from Twitter:" print("Users from Twitter:")
print "-------------------" print("-------------------")
for user in people: for user in people:
print user print(user)
sys.exit() sys.exit()
elif engineitem == "linkedin": elif engineitem == "linkedin":
print "[-] Searching in Linkedin.." print("[-] Searching in Linkedin..")
search = linkedinsearch.search_linkedin(word, limit) search = linkedinsearch.search_linkedin(word, limit)
search.process() search.process()
people = search.get_people() people = search.get_people()
print "Users from Linkedin:" print("Users from Linkedin:")
print "-------------------" print("-------------------")
for user in people: for user in people:
print user print(user)
sys.exit() sys.exit()
elif engineitem == "google-profiles": elif engineitem == "google-profiles":
print "[-] Searching in Google profiles.." print("[-] Searching in Google profiles..")
search = googlesearch.search_google(word, limit, start) search = googlesearch.search_google(word, limit, start)
search.process_profiles() search.process_profiles()
people = search.get_profiles() people = search.get_profiles()
print "Users from Google profiles:" print("Users from Google profiles:")
print "---------------------------" print("---------------------------")
for users in people: for users in people:
print users print(users)
sys.exit() sys.exit()
elif engineitem == "hunter": elif engineitem == "hunter":
print "[-] Searching in Hunter:" print("[-] Searching in Hunter:")
from discovery import huntersearch from discovery import huntersearch
#import locally or won't work #import locally or won't work
search = huntersearch.search_hunter(word, limit, start) search = huntersearch.search_hunter(word, limit, start)
@ -287,11 +285,11 @@ def start(argv):
all_hosts = search.get_hostnames() all_hosts = search.get_hostnames()
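Every engine branch in this chain follows the same informal interface: build the search object with the target word, call process(), then read the results back through get_emails() and get_hostnames() (or get_people() for the social engines). A sketch of that shape with a hypothetical stub class, not one of theHarvester's real modules:

class search_stub:
    # Hypothetical engine that mimics the informal interface used by the modules above.
    def __init__(self, word, limit=100, start=0):
        self.word = word
        self.limit = limit
        self.start = start
        self.results = ""

    def process(self):
        # A real engine would query a search service here; the stub fakes one hit.
        self.results = "admin@" + self.word + " www." + self.word

    def get_emails(self):
        return [token for token in self.results.split() if "@" in token]

    def get_hostnames(self):
        return [token for token in self.results.split() if "@" not in token]

engine = search_stub("example.com")
engine.process()
print(engine.get_emails())
print(engine.get_hostnames())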
elif engineitem == "all": elif engineitem == "all":
print "Full harvest on " + word print(("Full harvest on " + word))
all_emails = [] all_emails = []
all_hosts = [] all_hosts = []
print "[-] Searching in Google.." print("[-] Searching in Google..")
search = googlesearch.search_google(word, limit, start) search = googlesearch.search_google(word, limit, start)
search.process(google_dorking) search.process(google_dorking)
emails = search.get_emails() emails = search.get_emails()
@ -303,7 +301,7 @@ def start(argv):
db=stash.stash_manager() db=stash.stash_manager()
db.store_all(word,all_hosts,'host','google') db.store_all(word,all_hosts,'host','google')
print "[-] Searching in PGP Key server.." print("[-] Searching in PGP Key server..")
search = pgpsearch.search_pgp(word) search = pgpsearch.search_pgp(word)
search.process() search.process()
emails = search.get_emails() emails = search.get_emails()
@ -315,7 +313,7 @@ def start(argv):
db=stash.stash_manager() db=stash.stash_manager()
db.store_all(word,all_hosts,'email','PGP') db.store_all(word,all_hosts,'email','PGP')
print "[-] Searching in Netcraft server.." print("[-] Searching in Netcraft server..")
search = netcraft.search_netcraft(word) search = netcraft.search_netcraft(word)
search.process() search.process()
hosts = search.get_hostnames() hosts = search.get_hostnames()
@ -323,7 +321,7 @@ def start(argv):
db=stash.stash_manager() db=stash.stash_manager()
db.store_all(word,all_hosts,'host','netcraft') db.store_all(word,all_hosts,'host','netcraft')
print "[-] Searching in ThreatCrowd server.." print("[-] Searching in ThreatCrowd server..")
try: try:
search = threatcrowd.search_threatcrowd(word) search = threatcrowd.search_threatcrowd(word)
search.process() search.process()
@ -334,7 +332,7 @@ def start(argv):
db.store_all(word,all_hosts,'host','threatcrowd') db.store_all(word,all_hosts,'host','threatcrowd')
except Exception: pass except Exception: pass
print "[-] Searching in CRTSH server.." print("[-] Searching in CRTSH server..")
search = crtsh.search_crtsh(word) search = crtsh.search_crtsh(word)
search.process() search.process()
hosts = search.get_hostnames() hosts = search.get_hostnames()
@ -342,7 +340,7 @@ def start(argv):
db=stash.stash_manager() db=stash.stash_manager()
db.store_all(word,all_hosts,'host','CRTsh') db.store_all(word,all_hosts,'host','CRTsh')
print "[-] Searching in Virustotal server.." print("[-] Searching in Virustotal server..")
search = virustotal.search_virustotal(word) search = virustotal.search_virustotal(word)
search.process() search.process()
hosts = search.get_hostnames() hosts = search.get_hostnames()
@ -350,7 +348,7 @@ def start(argv):
db=stash.stash_manager() db=stash.stash_manager()
db.store_all(word,all_hosts,'host','virustotal') db.store_all(word,all_hosts,'host','virustotal')
print "[-] Searching in Bing.." print("[-] Searching in Bing..")
bingapi = "no" bingapi = "no"
search = bingsearch.search_bing(word, limit, start) search = bingsearch.search_bing(word, limit, start)
search.process(bingapi) search.process(bingapi)
@ -363,7 +361,7 @@ def start(argv):
#Clean up email list, sort and uniq #Clean up email list, sort and uniq
all_emails=sorted(set(all_emails)) all_emails=sorted(set(all_emails))
print "[-] Searching in Hunter:" print("[-] Searching in Hunter:")
from discovery import huntersearch from discovery import huntersearch
#import locally #import locally
search = huntersearch.search_hunter(word, limit, start) search = huntersearch.search_hunter(word, limit, start)
@ -379,34 +377,34 @@ def start(argv):
else: else:
#if engine not in ("baidu", "bing", "crtsh","bingapi","dogpile","google", "googleCSE","virustotal","threatcrowd", "googleplus", "google-profiles","linkedin", "pgp", "twitter", "vhost", "yahoo","netcraft","all"): #if engine not in ("baidu", "bing", "crtsh","bingapi","dogpile","google", "googleCSE","virustotal","threatcrowd", "googleplus", "google-profiles","linkedin", "pgp", "twitter", "vhost", "yahoo","netcraft","all"):
usage() usage()
print "Invalid search engine, try with: baidu, bing, bingapi, crtsh, dogpile, google, googleCSE, virustotal, netcraft, googleplus, google-profiles, linkedin, pgp, twitter, vhost, yahoo, hunter, all" print("Invalid search engine, try with: baidu, bing, bingapi, crtsh, dogpile, google, googleCSE, virustotal, netcraft, googleplus, google-profiles, linkedin, pgp, twitter, vhost, yahoo, hunter, all")
sys.exit() sys.exit()
#else: #else:
# pass # pass
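The commented-out membership test above points at a simpler way to validate the engine argument than the long if/elif chain: keep the supported names in a set and check membership once. The names below are taken from the commented-out check and the error message above; the helper itself is hypothetical.

SUPPORTED_ENGINES = {
    "baidu", "bing", "bingapi", "crtsh", "dogpile", "google", "googleCSE",
    "virustotal", "threatcrowd", "netcraft", "googleplus", "google-profiles",
    "linkedin", "pgp", "twitter", "vhost", "yahoo", "hunter", "all",
}

def is_supported(engine_name):
    # True when the engine is one theHarvester knows how to run.
    return engine_name in SUPPORTED_ENGINES

print(is_supported("crtsh"))      # True
print(is_supported("altavista"))  # False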
#Results############################################################ #Results############################################################
print("\n\033[1;32;40m Harvesting results") print("\n\033[1;32;40m Harvesting results")
print "\n\n[+] Emails found:" print("\n\n[+] Emails found:")
print "------------------" print("------------------")
if all_emails == []: if all_emails == []:
print "No emails found" print("No emails found")
else: else:
print "\n".join(all_emails) print(("\n".join(all_emails)))
print("\033[1;33;40m \n[+] Hosts found in search engines:") print("\033[1;33;40m \n[+] Hosts found in search engines:")
print "------------------------------------" print("------------------------------------")
if all_hosts == []: if all_hosts == []:
print "No hosts found" print("No hosts found")
else: else:
total = len(all_hosts) total = len(all_hosts)
print "\nTotal hosts: " + str(total) + "\n" print(("\nTotal hosts: " + str(total) + "\n"))
all_hosts=sorted(set(all_hosts)) all_hosts=sorted(set(all_hosts))
print "\033[94m[-] Resolving hostnames IPs...\033[1;33;40m \n " print("\033[94m[-] Resolving hostnames IPs...\033[1;33;40m \n ")
full_host = hostchecker.Checker(all_hosts) full_host = hostchecker.Checker(all_hosts)
full = full_host.check() full = full_host.check()
for host in full: for host in full:
ip = host.split(':')[1] ip = host.split(':')[1]
print host print(host)
if ip != "empty": if ip != "empty":
if host_ip.count(ip.lower()): if host_ip.count(ip.lower()):
pass pass
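hostchecker returns plain "hostname:ip" strings, which is why the loop above splits each entry on ':' and keeps a side list of unique IPs. A sketch of that bookkeeping with invented sample data:

full = ["www.example.com:93.184.216.34", "mail.example.com:empty",
        "ftp.example.com:93.184.216.34"]
host_ip = []
for entry in full:
    ip = entry.split(":")[1]
    print(entry)
    if ip != "empty" and ip.lower() not in host_ip:
        # "empty" marks hostnames that did not resolve; duplicate IPs are skipped.
        host_ip.append(ip.lower())
print(host_ip)   # ['93.184.216.34']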
@ -416,13 +414,13 @@ def start(argv):
#DNS Brute force#################################################### #DNS Brute force####################################################
dnsres = [] dnsres = []
if dnsbrute == True: if dnsbrute == True:
print "\n\033[94m[-] Starting DNS brute force: \033[1;33;40m" print("\n\033[94m[-] Starting DNS brute force: \033[1;33;40m")
a = dnssearch.dns_force(word, dnsserver, verbose=True) a = dnssearch.dns_force(word, dnsserver, verbose=True)
res = a.process() res = a.process()
print "\n\033[94m[-] Hosts found after DNS brute force:" print("\n\033[94m[-] Hosts found after DNS brute force:")
print "---------------------------------------" print("---------------------------------------")
for y in res: for y in res:
print y print(y)
dnsres.append(y.split(':')[0]) dnsres.append(y.split(':')[0])
if y not in full: if y not in full:
full.append(y) full.append(y)
@ -436,35 +434,35 @@ def start(argv):
host = x.split(':')[1] host = x.split(':')[1]
domain = x.split(':')[0] domain = x.split(':')[0]
if host != "empty" : if host != "empty" :
print "- Scanning : " + host print(("- Scanning : " + host))
ports = [80,443,22,8080,21] ports = [80,443,22,8080,21]
try: try:
scan = port_scanner.port_scan(host,ports) scan = port_scanner.port_scan(host,ports)
openports = scan.process() openports = scan.process()
if len(openports) > 1: if len(openports) > 1:
print "\t\033[91m Detected open ports: " + ','.join(str(e) for e in openports) + "\033[1;32;40m" print(("\t\033[91m Detected open ports: " + ','.join(str(e) for e in openports) + "\033[1;32;40m"))
takeover_check = 'True' takeover_check = 'True'
if takeover_check == 'True': if takeover_check == 'True':
if len(openports) > 0: if len(openports) > 0:
search_take = takeover.take_over(domain) search_take = takeover.take_over(domain)
search_take.process() search_take.process()
except Exception, e: except Exception as e:
print e print(e)
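The port-scan loop is also where one genuine Python 3 syntax change lands: "except Exception, e:" becomes "except Exception as e:", the only form Python 3 parses. (Note, too, that the "> 1" test above means a host with exactly one open port is never reported.) Below is a self-contained stand-in for port_scanner.port_scan(host, ports).process() built on the standard socket module; it is an illustration, not the module's real implementation:

import socket

def open_ports(host, ports, timeout=1.0):
    # Invented stand-in: try a TCP connect to each port and record the ones that answer.
    found = []
    for port in ports:
        try:
            with socket.create_connection((host, port), timeout=timeout):
                found.append(port)
        except Exception as e:   # Python 3 spelling of the except clause; e is available for logging
            continue
    return found

print(open_ports("127.0.0.1", [80, 443, 22, 8080, 21]))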
#DNS reverse lookup################################################# #DNS reverse lookup#################################################
dnsrev = [] dnsrev = []
if dnslookup == True: if dnslookup == True:
print "\n[+] Starting active queries:" print("\n[+] Starting active queries:")
analyzed_ranges = [] analyzed_ranges = []
for x in host_ip: for x in host_ip:
print x print(x)
ip = x.split(":")[0] ip = x.split(":")[0]
range = ip.split(".") range = ip.split(".")
range[3] = "0/24" range[3] = "0/24"
range = string.join(range, '.') range = '.'.join(range)
if not analyzed_ranges.count(range): if not analyzed_ranges.count(range):
print "\033[94m[-]Performing reverse lookup in : " + range + "\033[1;33;40m" print(("\033[94m[-]Performing reverse lookup in : " + range + "\033[1;33;40m"))
a = dnssearch.dns_reverse(range, True) a = dnssearch.dns_reverse(range, True)
a.list() a.list()
res = a.process() res = a.process()
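Building the /24 range above is one line the 2-to-3 conversion has to touch by hand: the string module lost its join() function in Python 3, so the call moves onto the separator string itself. A sketch of the Python 3 form with an invented IP:

ip = "93.184.216.34"
octets = ip.split(".")
octets[3] = "0/24"
cidr = ".".join(octets)   # str.join replaces Python 2's string.join(seq, sep)
print(cidr)               # 93.184.216.0/24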
@ -476,29 +474,29 @@ def start(argv):
dnsrev.append(x) dnsrev.append(x)
if x not in full: if x not in full:
full.append(x) full.append(x)
print "Hosts found after reverse lookup (in target domain):" print("Hosts found after reverse lookup (in target domain):")
print "---------------------------------" print("---------------------------------")
for xh in dnsrev: for xh in dnsrev:
print xh print(xh)
#DNS TLD expansion################################################### #DNS TLD expansion###################################################
dnstldres = [] dnstldres = []
if dnstld == True: if dnstld == True:
print "[-] Starting DNS TLD expansion:" print("[-] Starting DNS TLD expansion:")
a = dnssearch.dns_tld(word, dnsserver, verbose=True) a = dnssearch.dns_tld(word, dnsserver, verbose=True)
res = a.process() res = a.process()
print "\n[+] Hosts found after DNS TLD expansion:" print("\n[+] Hosts found after DNS TLD expansion:")
print "------------------------------------------" print("------------------------------------------")
for y in res: for y in res:
print y print(y)
dnstldres.append(y) dnstldres.append(y)
if y not in full: if y not in full:
full.append(y) full.append(y)
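The brute-force, reverse-lookup, and TLD results are all merged into full with an "if y not in full" membership test, which is a linear scan per item. A set kept alongside the list preserves order while making the check constant-time; a sketch with invented data:

full = ["a.example.com:1.2.3.4"]
seen = set(full)
new_results = ["b.example.com:1.2.3.5", "a.example.com:1.2.3.4"]
for y in new_results:
    if y not in seen:
        seen.add(y)
        full.append(y)
print(full)   # ['a.example.com:1.2.3.4', 'b.example.com:1.2.3.5']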
#Virtual hosts search############################################### #Virtual hosts search###############################################
if virtual == "basic": if virtual == "basic":
print "\n[+] Virtual hosts:" print("\n[+] Virtual hosts:")
print "------------------" print("------------------")
for l in host_ip: for l in host_ip:
search = bingsearch.search_bing(l, limit, start) search = bingsearch.search_bing(l, limit, start)
search.process_vhost() search.process_vhost()
@ -507,7 +505,7 @@ def start(argv):
x = re.sub(r'[[\<\/?]*[\w]*>]*','',x) x = re.sub(r'[[\<\/?]*[\w]*>]*','',x)
x = re.sub('<','',x) x = re.sub('<','',x)
x = re.sub('>','',x) x = re.sub('>','',x)
print l + "\t" + x print((l + "\t" + x))
vhost.append(l + ":" + x) vhost.append(l + ":" + x)
full.append(l + ":" + x) full.append(l + ":" + x)
vhost=sorted(set(vhost)) vhost=sorted(set(vhost))
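The three chained re.sub() calls above only exist to strip leftover markup from the Bing virtual-host results. A compact sketch of the same cleanup with a single, simpler pattern (sample string invented; this is an alternative expression, not the one used in the diff):

import re

raw = "<strong>vhost.example.com</strong>"
clean = re.sub(r"</?\w+>", "", raw)   # drop simple opening and closing tags
print(clean)                          # vhost.example.com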
@ -522,7 +520,7 @@ def start(argv):
try: try:
ip = x.split(":")[1] ip = x.split(":")[1]
if not shodanvisited.count(ip): if not shodanvisited.count(ip):
print "\tSearching for: " + ip print(("\tSearching for: " + ip))
a = shodansearch.search_shodan(ip) a = shodansearch.search_shodan(ip)
shodanvisited.append(ip) shodanvisited.append(ip)
results = a.run() results = a.run()
@ -532,12 +530,12 @@ def start(argv):
res['info'] = '' res['info'] = ''
shodanres.append( shodanres.append(
x + "SAPO" + str(res['info']) + "SAPO" + str(res['data'])) x + "SAPO" + str(res['info']) + "SAPO" + str(res['data']))
except: except Exception:
pass pass
print "\n [+] Shodan results:" print("\n [+] Shodan results:")
print "------------------" print("------------------")
for x in shodanres: for x in shodanres:
print x.split("SAPO")[0] + ":" + x.split("SAPO")[1] print((x.split("SAPO")[0] + ":" + x.split("SAPO")[1]))
else: else:
pass pass
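Shodan hits are carried around as a single string with the literal separator "SAPO" between the host, the info field, and the banner data, and split() pulls them apart again for display. A sketch of that round trip with invented values:

record = "www.example.com:93.184.216.34" + "SAPO" + "some info" + "SAPO" + "banner data"
host_part, info, data = record.split("SAPO")
print(host_part + ":" + info)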
@ -552,15 +550,15 @@ def start(argv):
search.process(google_dorking) search.process(google_dorking)
emails = search.get_emails() emails = search.get_emails()
hosts = search.get_hostnames() hosts = search.get_hostnames()
print emails print(emails)
print hosts print(hosts)
else: else:
pass pass
#Reporting####################################################### #Reporting#######################################################
if filename != "": if filename != "":
try: try:
print "[+] Saving files..." print("[+] Saving files...")
html = htmlExport.htmlExport( html = htmlExport.htmlExport(
all_emails, all_emails,
full, full,
@ -573,8 +571,8 @@ def start(argv):
dnstldres) dnstldres)
save = html.writehtml() save = html.writehtml()
except Exception as e: except Exception as e:
print e print(e)
print "Error creating the file" print("Error creating the file")
try: try:
filename = filename.split(".")[0] + ".xml" filename = filename.split(".")[0] + ".xml"
file = open(filename, 'w') file = open(filename, 'w')
@ -629,15 +627,15 @@ def start(argv):
file.write('</theHarvester>') file.write('</theHarvester>')
file.flush() file.flush()
file.close() file.close()
print "Files saved!" print("Files saved!")
except Exception as er: except Exception as er:
print "Error saving XML file: " + er print(("Error saving XML file: " + er))
sys.exit() sys.exit()
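Two small Python 3 details show up in the reporting code: an exception can no longer be concatenated to a string without str(), and the explicit open/flush/close sequence is usually written as a with-block, which closes the file even if a write fails. A minimal sketch with an invented filename and content:

filename = "report.xml"
try:
    with open(filename, "w") as report:
        report.write("<theHarvester>\n")
        report.write("</theHarvester>\n")
    print("Files saved!")
except Exception as er:
    print("Error saving XML file: " + str(er))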
if __name__ == "__main__": if __name__ == "__main__":
try: try:
start(sys.argv[1:]) start(sys.argv[1:])
except KeyboardInterrupt: except KeyboardInterrupt:
print "Search interrupted by user.." print("Search interrupted by user..")
except: except:
sys.exit() sys.exit()