Merge pull request #223 from NotoriousRebel/master

Added dnsdumpster as new plugin
Matt 2019-03-15 02:52:23 +00:00 committed by GitHub
commit 8790f42b19
5 changed files with 79 additions and 8 deletions

@@ -30,11 +30,13 @@ Passive:
* bingapi: Microsoft search engine, through the API (Requires API key, see below.)
-* censys: Censys.io search engine
+* censys: Censys.io search engine - www.censys.io
* crtsh: Comodo Certificate search - www.crt.sh
-* cymon: Cymon.io search engine
+* cymon: Cymon.io search engine - www.cymon.io
+* dnsdumpster: DNSdumpster search engine - dnsdumpster.com
* dogpile: Dogpile search engine - www.dogpile.com
@@ -42,7 +44,7 @@ Passive:
* google: Google search engine (Optional Google dorking.) - www.google.com
* google-certificates: Google Certificate Transparency report
* hunter: Hunter search engine (Requires API key, see below.) - www.hunter.io
@@ -50,7 +52,7 @@ Passive:
* linkedin: Google search engine, specific search for Linkedin users
-* netcraft: Netcraft Data Mining
+* netcraft: Netcraft Data Mining - www.netcraft.com
* securityTrails: Security Trails search engine, the world's largest repository<br>
of historical DNS data (Requires API key, see below.) - www.securitytrails.com

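A hedged usage note for the new dnsdumpster entry above: once this change lands, the source is selected through the -b/--source option shown in the argparse hunk further down; the -d domain flag is not part of this diff and is assumed from theHarvester's usual command line.

python3 theHarvester.py -d example.com -b dnsdumpster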
discovery/dnsdumpster.py (new file, 43 lines added)

@@ -0,0 +1,43 @@
from lib.core import *
from parsers import myparser
import requests


class search_dnsdumpster:

    def __init__(self, word):
        self.word = word.replace(' ', '%20')
        self.results = ""
        self.totalresults = ""
        self.server = 'dnsdumpster.com'

    def do_search(self):
        try:
            agent = Core.get_user_agent()
            headers = {'User-Agent': agent}
            # A session keeps the csrftoken cookie from the GET so it is sent back with the POST.
            session = requests.session()
            url = f'https://{self.server}'
            request = session.get(url, headers=headers)
            cookies = str(request.cookies)
            # Extract the csrftoken value from the cookie jar's string form.
            csrftoken = ''
            for ch in cookies.split("=")[1]:
                if ch == ' ':
                    break
                csrftoken += ch
            # Echo the token back as the csrfmiddlewaretoken form field the site expects.
            data = {
                'Cookie': f'csrftoken={csrftoken}', 'csrfmiddlewaretoken': csrftoken, 'targetip': self.word}
            headers['Referer'] = url
            post_req = session.post(url, headers=headers, data=data)
            self.results = post_req.text
        except Exception as e:
            print(f'An exception occurred: {e}')
        self.totalresults += self.results

    def get_hostnames(self):
        rawres = myparser.Parser(self.totalresults, self.word)
        return rawres.hostnames()

    def process(self):
        self.do_search()  # Only need to do it once.
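A minimal standalone sketch of how the new module is driven, mirroring the call sequence theHarvester.py uses later in this diff; the target domain is an illustrative assumption:

# Sketch only: assumes it runs from the project root so the discovery and lib
# packages resolve, and that dnsdumpster.com is reachable.
from discovery import dnsdumpster

search = dnsdumpster.search_dnsdumpster('example.com')
search.process()                     # one GET for the csrftoken, then the POST
for host in search.get_hostnames():  # hostnames parsed from the HTML response
    print(host)

If the manual cookie-string parsing ever needs tightening, requests also exposes the value directly via session.cookies.get('csrftoken'); the split-based loop above relies on csrftoken being the first cookie in the jar's string representation.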

@@ -115,13 +115,13 @@ def construct_dorks(self):
        right_peren = '%29'
        pipe = '%7C'
        # Format is google.com/search?q=dork+space+self.word
-        self.links = [self.database +
+        self.links = tuple(self.database +
                      str(dork).replace(':', colon).replace('+', plus).replace('.', period).replace('"', double_quote)
                      .replace('*', asterick).replace('[', left_bracket).replace(']', right_bracket)
                      .replace('?', question_mark).replace(' ', space).replace('/', slash).replace("'", single_quote)
                      .replace('&', ampersand).replace('(', left_peren).replace(')', right_peren).replace('|', pipe)
                      + space + self.word
-                      for dork in self.dorks]
+                      for dork in self.dorks)

    def googledork(self):
        self.append_dorks()  # Call functions to create list.
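The only change in this hunk is that the generated dork links are collected into a tuple rather than a list. A short illustration of the percent-encoding the comprehension performs; the base URL and target word below are assumptions standing in for self.database and self.word, which are set elsewhere in the class:

# Illustrative sketch only, not the project's code.
dork = 'intitle:"index of" site:*'
encoded = (dork.replace(':', '%3A').replace('"', '%22')
               .replace('*', '%2A').replace(' ', '%20'))
link = 'https://www.google.com/search?q=' + encoded + '%20' + 'example.com'
# link == 'https://www.google.com/search?q=intitle%3A%22index%20of%22%20site%3A%2A%20example.com'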

@@ -44,7 +44,7 @@ def banner():
    print(r"* | |_| | | | __/ / __ / (_| | | \ V / __/\__ \ || __/ | *")
    print(r"* \__|_| |_|\___| \/ /_/ \__,_|_| \_/ \___||___/\__\___|_| *")
    print('* *')
-    print('* theHarvester 3.0.6 v364 *')
+    print('* theHarvester 3.0.6 v372 *')
    print('* Coded by Christian Martorella *')
    print('* Edge-Security Research *')
    print('* cmartorella@edge-security.com *')
@@ -59,6 +59,7 @@ def get_supportedengines():
        'censys',
        'crtsh',
        'cymon',
+        'dnsdumpster',
        'dogpile',
        'duckduckgo',
        'google',

@@ -51,7 +51,7 @@ def start():
    parser.add_argument('-n', '--dns-lookup', help='enable DNS server lookup, default=False, params=True', default=False)
    parser.add_argument('-c', '--dns-brute', help='perform a DNS brute force on the domain', default=False, action='store_true')
    parser.add_argument('-f', '--filename', help='save the results to an HTML and/or XML file', default='', type=str)
-    parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, censys, crtsh, cymon,
+    parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, censys, crtsh, cymon, dnsdumpster,
                        dogpile, duckduckgo, google,
                        google-certificates, hunter, intelx,
                        linkedin, netcraft, securityTrails, threatcrowd,
@@ -161,6 +161,19 @@ def start():
                db = stash.stash_manager()
                db.store_all(word, all_ip, 'ip', 'cymon')
+        elif engineitem == 'dnsdumpster':
+            try:
+                print('\033[94m[*] Searching DNSdumpster. \033[0m')
+                from discovery import dnsdumpster
+                search = dnsdumpster.search_dnsdumpster(word)
+                search.process()
+                hosts = filter(search.get_hostnames())
+                all_hosts.extend(hosts)
+                db = stash.stash_manager()
+                db.store_all(word, all_hosts, 'host', 'dnsdumpster')
+            except Exception as e:
+                print(f'\033[93m[!] An error occurred with dnsdumpster: {e} \033[0m')
        elif engineitem == 'dogpile':
            try:
                print('\033[94m[*] Searching Dogpile. \033[0m')
@@ -426,6 +439,18 @@ def start():
        db = stash.stash_manager()
        db.store_all(word, all_ip, 'ip', 'cymon')
+        try:
+            print('\033[94m[*] Searching DNSdumpster. \033[0m')
+            from discovery import dnsdumpster
+            search = dnsdumpster.search_dnsdumpster(word)
+            search.process()
+            hosts = filter(search.get_hostnames())
+            all_hosts.extend(hosts)
+            db = stash.stash_manager()
+            db.store_all(word, all_hosts, 'host', 'dnsdumpster')
+        except Exception as e:
+            print(f'\033[93m[!] An error occurred with dnsdumpster: {e} \033[0m')
        print('\033[94m[*] Searching Dogpile. \033[0m')
        try:
            search = dogpilesearch.SearchDogpile(word, limit)