Merge pull request #59 from NotoriousRebel/master

Ported yahoo & virustotal to use aiohttp
J.Townsend 2019-12-30 00:14:11 +00:00 committed by GitHub
commit 5cbcf41990
2 changed files with 15 additions and 18 deletions

Changed file 1 of 2 (the VirusTotal module):

@@ -1,6 +1,5 @@
-from theHarvester.lib.core import Core
+from theHarvester.lib.core import *
 from theHarvester.parsers import myparser
-import requests


 class SearchVirustotal:
@@ -12,18 +11,17 @@ def __init__(self, word):
         self.quantity = '100'
         self.counter = 0

-    def do_search(self):
+    async def do_search(self):
         base_url = f'https://www.virustotal.com/ui/domains/{self.word}/subdomains?relationships=resolutions&cursor=STMwCi4%3D&limit=40'
         headers = {'User-Agent': Core.get_user_agent()}
-        res = requests.get(base_url, headers=headers)
-        self.results = res.content.decode('UTF-8')
+        responses = await async_fetcher.fetch_all([base_url], headers=headers)
+        self.results = responses[0]
         self.totalresults += self.results

-    def get_hostnames(self):
+    async def get_hostnames(self):
         rawres = myparser.Parser(self.results, self.word)
         return rawres.hostnames()

-    def process(self):
+    async def process(self):
         print('\tSearching results.')
-        self.do_search()
-        self.get_hostnames()
+        await self.do_search()
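Both files swap blocking requests/grequests calls for an async_fetcher.fetch_all helper, presumably brought into scope by the new wildcard import from theHarvester.lib.core; the helper itself is not part of this diff. A minimal sketch of what such a helper could look like on top of aiohttp follows (the class name, method signatures, and error handling here are assumptions for illustration, not the project's actual implementation):

# Hypothetical aiohttp-based fetcher, sketched only to illustrate the call sites above;
# theHarvester's real async_fetcher may differ.
import asyncio
import aiohttp


class async_fetcher:

    @staticmethod
    async def fetch(session, url):
        # Return the response body as text, or an empty string if the request fails.
        try:
            async with session.get(url) as response:
                return await response.text()
        except Exception:
            return ''

    @staticmethod
    async def fetch_all(urls, headers=None):
        # Run every request concurrently in one session and return the bodies in order.
        async with aiohttp.ClientSession(headers=headers or {}) as session:
            return await asyncio.gather(*(async_fetcher.fetch(session, url) for url in urls))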

Changed file 2 of 2 (the Yahoo module):

@@ -11,22 +11,21 @@ def __init__(self, word, limit):
         self.server = 'search.yahoo.com'
         self.limit = limit

-    def do_search(self):
+    async def do_search(self):
         base_url = f'https://{self.server}/search?p=%40{self.word}&b=xx&pz=10'
         headers = {
             'Host': self.server,
             'User-agent': Core.get_user_agent()
         }
         urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit]
-        request = (grequests.get(url, headers=headers) for url in urls)
-        response = grequests.imap(request, size=5)
-        for entry in response:
-            self.total_results += entry.content.decode('UTF-8')
+        responses = await async_fetcher.fetch_all(urls, headers=headers)
+        for response in responses:
+            self.total_results += response

-    def process(self):
-        self.do_search()
+    async def process(self):
+        await self.do_search()

-    def get_emails(self):
+    async def get_emails(self):
         rawres = myparser.Parser(self.total_results, self.word)
         toparse_emails = rawres.emails()
         emails = set()
@@ -39,6 +38,6 @@ def get_emails(self):
                 emails.add(email)
         return list(emails)

-    def get_hostnames(self):
+    async def get_hostnames(self):
         rawres = myparser.Parser(self.total_results, self.word)
         return rawres.hostnames()
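Since do_search, process, get_emails and get_hostnames are now coroutines, callers need an event loop to drive them. A rough usage sketch, assuming the Yahoo module exposes a SearchYahoo class whose constructor takes the word and limit implied by __init__(self, word, limit); the class name and import path are not shown in this commit:

# Hypothetical driver; SearchYahoo and its import path are assumptions based on
# the methods shown in the diff, not code from this commit.
import asyncio

from theHarvester.discovery.yahoosearch import SearchYahoo  # assumed module path


async def main():
    search = SearchYahoo('example.com', 100)
    await search.process()               # fetch and accumulate the raw result pages
    print(await search.get_emails())     # parse e-mail addresses out of the results
    print(await search.get_hostnames())  # parse hostnames out of the results


asyncio.run(main())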