Merge pull request #101 from NotoriousRebel/master

Added Hackertarget as a new source.
Authored by J.Townsend on 2020-04-06 20:23:56 +01:00, committed by GitHub
commit 01ade9cdeb
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 48 additions and 12 deletions
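
With this change, HackerTarget can be selected like any other engine via the -b/--source flag shown in the first hunk below. A typical invocation might look like the following (the -d domain flag is assumed from theHarvester's usual CLI and does not appear in this diff):

    python3 theHarvester.py -d example.com -b hackertarget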

@@ -36,8 +36,8 @@ async def start():
parser.add_argument('-f', '--filename', help='save the results to an HTML and/or XML file', default='', type=str)
parser.add_argument('-b', '--source', help='''baidu, bing, bingapi, bufferoverun, certspotter, crtsh, dnsdumpster,
dogpile, duckduckgo, exalead, github-code, google,
- hunter, intelx,
- linkedin, linkedin_links, netcraft, otx, pentesttools, securityTrails, spyse, threatcrowd,
+ hackertarget, hunter, intelx,
+ linkedin, linkedin_links, netcraft, otx, securityTrails, spyse, threatcrowd,
trello, twitter, vhost, virustotal, yahoo, all''')
args = parser.parse_args()
@@ -99,10 +99,13 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
print(f'\033[94m[*] Searching {source[0].upper() + source[1:]}. \033[0m')
if store_host:
host_names = filter(await search_engine.get_hostnames())
- full_hosts_checker = hostchecker.Checker(host_names)
- temp_hosts, temp_ips = await full_hosts_checker.check()
- ips.extend(temp_ips)
- full.extend(temp_hosts)
+ if source != 'hackertarget':
+     full_hosts_checker = hostchecker.Checker(host_names)
+     temp_hosts, temp_ips = await full_hosts_checker.check()
+     ips.extend(temp_ips)
+     full.extend(temp_hosts)
+ else:
+     full.extend(host_names)
all_hosts.extend(host_names)
await db_stash.store_all(word, all_hosts, 'host', source)
if store_emails:
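
Note on the source != 'hackertarget' branch above (an inference from the new module added further down, not something stated in the diff itself): HackerTarget's hostsearch results are rewritten by SearchHackerTarget from "host,ip" to "host:ip", so the entries already carry resolved IPs and re-checking them with hostchecker would be redundant; they go straight into full. A tiny, purely illustrative sketch of that data shape:

    # Hypothetical raw API line; the replace(',', ':') mirrors SearchHackerTarget.do_search().
    raw_line = 'www.example.com,93.184.216.34'
    entry = raw_line.replace(',', ':')
    print(entry)  # www.example.com:93.184.216.34
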
@@ -227,6 +230,11 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
duckduckgo_search = duckduckgosearch.SearchDuckDuckGo(word, limit)
stor_lst.append(store(duckduckgo_search, engineitem, store_host=True, store_emails=True))
+ elif engineitem == 'exalead':
+     from theHarvester.discovery import exaleadsearch
+     exalead_search = exaleadsearch.SearchExalead(word, limit, start)
+     stor_lst.append(store(exalead_search, engineitem, store_host=True, store_emails=True))
elif engineitem == 'github-code':
try:
from theHarvester.discovery import githubcode
@@ -237,17 +245,17 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
else:
pass
- elif engineitem == 'exalead':
-     from theHarvester.discovery import exaleadsearch
-     exalead_search = exaleadsearch.SearchExalead(word, limit, start)
-     stor_lst.append(store(exalead_search, engineitem, store_host=True, store_emails=True))
elif engineitem == 'google':
from theHarvester.discovery import googlesearch
google_search = googlesearch.SearchGoogle(word, limit, start)
stor_lst.append(store(google_search, engineitem, process_param=google_dorking, store_host=True,
store_emails=True))
+ elif engineitem == 'hackertarget':
+     from theHarvester.discovery import hackertarget
+     hackertarget_search = hackertarget.SearchHackerTarget(word)
+     stor_lst.append(store(hackertarget_search, engineitem, store_host=True, store_ip=True))
elif engineitem == 'hunter':
from theHarvester.discovery import huntersearch
# Import locally or won't work.
@@ -438,7 +446,7 @@ async def handler(lst):
all_hosts = sorted(list(set(all_hosts)))
db = stash.StashManager()
full = [host if ':' in host and word in host else word in host.split(':')[0] and host for host in full]
- full = [host for host in full if host]
+ full = list({host for host in full if host})
full.sort(key=lambda el: el.split(':')[0])
for host in full:
print(host)
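
The handler hunk above also swaps the plain truthiness filter for a set comprehension, so duplicate host:ip entries are collapsed before sorting. A small standalone illustration (sample values are invented):

    # Minimal sketch of the dedupe-then-sort step with made-up entries.
    full = ['b.example.com:5.6.7.8', 'a.example.com:1.2.3.4', 'a.example.com:1.2.3.4', '']
    full = list({host for host in full if host})  # drop empty entries and duplicates
    full.sort(key=lambda el: el.split(':')[0])    # order by hostname, ignoring the ip part
    print(full)  # ['a.example.com:1.2.3.4', 'b.example.com:5.6.7.8']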

@@ -0,0 +1,27 @@
+ from theHarvester.lib.core import *
+
+ class SearchHackerTarget:
+     """
+     Class uses the HackerTarget api to gather subdomains and ips
+     """
+
+     def __init__(self, word):
+         self.word = word
+         self.total_results = ""
+         self.hostname = 'https://api.hackertarget.com'
+         self.proxy = False
+         self.results = None
+
+     async def do_search(self):
+         headers = {'User-agent': Core.get_user_agent()}
+         urls = [f'{self.hostname}/hostsearch/?q={self.word}', f'{self.hostname}/reversedns/?q={self.word}']
+         responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
+         for response in responses:
+             self.total_results += response.replace(",", ":")
+
+     async def process(self, proxy=False):
+         self.proxy = proxy
+         await self.do_search()
+
+     async def get_hostnames(self):
+         return self.total_results.splitlines()
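
For reference, a minimal standalone driver for the new class; the asyncio wrapper and the example.com domain are illustrative only, since inside theHarvester the class is driven by store() as shown in the first file:

    import asyncio
    from theHarvester.discovery import hackertarget

    async def demo():
        search = hackertarget.SearchHackerTarget('example.com')
        await search.process()  # queries hostsearch and reversedns in one pass
        for entry in await search.get_hostnames():
            print(entry)  # lines look like 'www.example.com:93.184.216.34'

    asyncio.run(demo())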

@@ -143,6 +143,7 @@ def get_supportedengines() -> Set[Union[str, Any]]:
'exalead',
'github-code',
'google',
+ 'hackertarget',
'hunter',
'intelx',
'linkedin',