Changed default timeout to 60 seconds, added rapiddns to readme, and cleaned up logic.

This commit is contained in:
NotoriousRebel 2020-05-12 14:42:53 -04:00
parent aa59b5f822
commit efc8fbc04c
3 changed files with 18 additions and 12 deletions

View file

@ -50,6 +50,8 @@ Passive:
* pentesttools: Powerful Penetration Testing Tools, Easy to Use (Needs an API key and is not free for API access) - https://pentest-tools.com/home
* RapidDNS: DNS query tool which makes querying subdomains or sites of the same IP easy! - https://rapiddns.io
* securityTrails: Security Trails search engine, the world's largest repository of historical DNS data<br>
(Requires an API key, see below.) - www.securitytrails.com

View file

@ -17,8 +17,12 @@ async def do_search(self):
# f'{self.hostname}/sameip/{self.word}?full=1#result'
urls = [f'https://{self.hostname}/subdomain/{self.word}?full=1#result']
responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
if len(responses[0]) <= 1:
return self.total_results
soup = BeautifulSoup(responses[0], 'html.parser')
rows = soup.find("table").find("tbody").find_all("tr")
if rows:
# Sanity check
for row in rows:
cells = row.find_all("td")
if len(cells) >= 0:

View file

@ -491,8 +491,8 @@ async def takeover_fetch(session, url, proxy="") -> Union[Tuple[Any, Any], str]:
@classmethod
async def fetch_all(cls, urls, headers='', params='', json=False, takeover=False, proxy=False) -> list:
# By default timeout is 5 minutes, 30 seconds should suffice
timeout = aiohttp.ClientTimeout(total=30)
# By default timeout is 5 minutes, 60 seconds should suffice
timeout = aiohttp.ClientTimeout(total=60)
if len(headers) == 0:
headers = {'User-Agent': Core.get_user_agent()}
if takeover: