mirror of
https://github.com/laramies/theHarvester.git
synced 2024-09-20 15:26:31 +08:00
Changed default timeout to 60 seconds, added rapiddns to readme, and cleaned up logic.
This commit is contained in:
parent
aa59b5f822
commit
efc8fbc04c
|
@ -50,6 +50,8 @@ Passive:
|
|||
|
||||
* pentesttools: Powerful Penetration Testing Tools, Easy to Use (Needs an API key and is not free for API access) - https://pentest-tools.com/home
|
||||
|
||||
* RapidDNS: DNS query tool which makes querying subdomains or sites of the same IP easy! https://rapiddns.io
|
||||
|
||||
* securityTrails: Security Trails search engine, the world's largest repository of historical DNS data<br>
|
||||
(Requires an API key, see below.) - www.securitytrails.com
|
||||
|
||||
|
|
|
@ -17,18 +17,22 @@ async def do_search(self):
|
|||
# f'{self.hostname}/sameip/{self.word}?full=1#result'
|
||||
urls = [f'https://{self.hostname}/subdomain/{self.word}?full=1#result']
|
||||
responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
|
||||
if len(responses[0]) <= 1:
|
||||
return self.total_results
|
||||
soup = BeautifulSoup(responses[0], 'html.parser')
|
||||
rows = soup.find("table").find("tbody").find_all("tr")
|
||||
for row in rows:
|
||||
cells = row.find_all("td")
|
||||
if len(cells) >= 0:
|
||||
# sanity check
|
||||
subdomain = str(cells[0].get_text())
|
||||
if cells[-1].get_text() == 'CNAME':
|
||||
self.total_results.append(f'{subdomain}')
|
||||
else:
|
||||
self.total_results.append(f'{subdomain}:{str(cells[1].get_text()).strip()}')
|
||||
self.total_results = list({domain for domain in self.total_results})
|
||||
if rows:
|
||||
# Sanity check
|
||||
for row in rows:
|
||||
cells = row.find_all("td")
|
||||
if len(cells) >= 0:
|
||||
# sanity check
|
||||
subdomain = str(cells[0].get_text())
|
||||
if cells[-1].get_text() == 'CNAME':
|
||||
self.total_results.append(f'{subdomain}')
|
||||
else:
|
||||
self.total_results.append(f'{subdomain}:{str(cells[1].get_text()).strip()}')
|
||||
self.total_results = list({domain for domain in self.total_results})
|
||||
except Exception as e:
|
||||
print('An exception has occurred: ' + str(e))
|
||||
|
||||
|
|
|
@ -491,8 +491,8 @@ async def takeover_fetch(session, url, proxy="") -> Union[Tuple[Any, Any], str]:
|
|||
|
||||
@classmethod
|
||||
async def fetch_all(cls, urls, headers='', params='', json=False, takeover=False, proxy=False) -> list:
|
||||
# By default timeout is 5 minutes, 30 seconds should suffice
|
||||
timeout = aiohttp.ClientTimeout(total=30)
|
||||
# By default timeout is 5 minutes, 60 seconds should suffice
|
||||
timeout = aiohttp.ClientTimeout(total=60)
|
||||
if len(headers) == 0:
|
||||
headers = {'User-Agent': Core.get_user_agent()}
|
||||
if takeover:
|
||||
|
|
Loading…
Reference in a new issue