mirror of
https://github.com/laramies/theHarvester.git
synced 2025-02-24 22:42:56 +08:00
Merge pull request #27 from NotoriousRebel/dev
Ported security trails to use aiohttp.
This commit is contained in:
commit
450b3b5ec2
2 changed files with 24 additions and 29 deletions
|
@@ -1,8 +1,7 @@
|
|||
from theHarvester.discovery.constants import *
|
||||
from theHarvester.lib.core import *
|
||||
from theHarvester.parsers import securitytrailsparser
|
||||
import requests
|
||||
import time
|
||||
import asyncio
|
||||
|
||||
|
||||
class SearchSecuritytrail:
|
||||
|
@@ -17,44 +16,40 @@ def __init__(self, word):
|
|||
self.api = 'https://api.securitytrails.com/v1/'
|
||||
self.info = ()
|
||||
|
||||
def authenticate(self):
|
||||
async def authenticate(self) -> None:
|
||||
# Method to authenticate API key before sending requests.
|
||||
headers = {'APIKEY': self.key}
|
||||
url = self.api + 'ping'
|
||||
r = requests.get(url, headers=headers).text
|
||||
if 'False' in r or 'Invalid authentication' in r:
|
||||
url = f'{self.api}ping'
|
||||
auth_responses = await AsyncFetcher.fetch_all([url], headers=headers)
|
||||
auth_responses = auth_responses[0]
|
||||
if 'False' in auth_responses or 'Invalid authentication' in auth_responses:
|
||||
print('\tKey could not be authenticated exiting program.')
|
||||
time.sleep(2)
|
||||
await asyncio.sleep(2)
|
||||
|
||||
def do_search(self):
|
||||
url = ''
|
||||
headers = {}
|
||||
try:
|
||||
async def do_search(self) -> None:
|
||||
# https://api.securitytrails.com/v1/domain/domain.com
|
||||
url = self.api + 'domain/' + self.word
|
||||
url = f'{self.api}domain/{self.word}'
|
||||
headers = {'APIKEY': self.key}
|
||||
r = requests.get(url, headers=headers)
|
||||
time.sleep(2) # Not random delay because 2 seconds is required due to rate limit.
|
||||
except Exception as e:
|
||||
print(e)
|
||||
self.results = r.text
|
||||
response = await AsyncFetcher.fetch_all([url], headers=headers)
|
||||
await asyncio.sleep(2) # Not random delay because 2 seconds is required due to rate limit.
|
||||
self.results = response[0]
|
||||
self.totalresults += self.results
|
||||
url += '/subdomains' # Get subdomains now.
|
||||
r = requests.get(url, headers=headers)
|
||||
time.sleep(2)
|
||||
self.results = r.text
|
||||
subdomain_response = await AsyncFetcher.fetch_all([url], headers=headers)
|
||||
await asyncio.sleep(2)
|
||||
self.results = subdomain_response[0]
|
||||
self.totalresults += self.results
|
||||
|
||||
def process(self):
|
||||
self.authenticate()
|
||||
self.do_search()
|
||||
async def process(self) -> None:
|
||||
await self.authenticate()
|
||||
await self.do_search()
|
||||
parser = securitytrailsparser.Parser(word=self.word, text=self.totalresults)
|
||||
self.info = parser.parse_text()
|
||||
self.info = await parser.parse_text()
|
||||
# Create parser and set self.info to tuple returned from parsing text.
|
||||
print('\tDone Searching Results')
|
||||
|
||||
def get_ips(self):
|
||||
async def get_ips(self) -> set:
|
||||
return self.info[0]
|
||||
|
||||
def get_hostnames(self):
|
||||
async def get_hostnames(self) -> set:
|
||||
return self.info[1]
|
||||
|
|
|
@@ -6,7 +6,7 @@ def __init__(self, word, text):
|
|||
self.hostnames = set()
|
||||
self.ips = set()
|
||||
|
||||
def parse_text(self):
|
||||
async def parse_text(self):
|
||||
sub_domain_flag = 0
|
||||
self.text = str(self.text).splitlines()
|
||||
# Split lines to get a list of lines.
|
||||
|
|
Loading…
Reference in a new issue