Mirror of https://github.com/laramies/theHarvester.git, synced 2025-02-22 21:43:02 +08:00
Added findomain for suip :).
parent e5c4c9de36
commit 30140fc66d

1 changed file with 16 additions and 16 deletions
@@ -12,18 +12,19 @@ def __init__(self, word: str):
         self.totalhosts: set = set()
         self.totalips: set = set()
 
-    async def request(self, url, params):
+    async def request(self, url, params, findomain=False):
         headers = {'User-Agent': Core.get_user_agent()}
-        data = {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
+        data = {'url': self.word.replace('www.', ''), 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
+            {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
         return await AsyncFetcher.post_fetch(url, headers=headers, params=params, data=data)
 
     async def handler(self, url):
-        first_param = [url, (('act', 'subfinder'),), ]
-        second_param = [url, (('act', 'amass'),), ]
-        # TODO RESEARCH https://suip.biz/?act=findomain
+        first_param = [url, (('act', 'subfinder'),), False]
+        second_param = [url, (('act', 'amass'),), False]
+        third_param = [url, (('act', 'findomain'),), True]
         async_requests = [
-            self.request(url=url, params=params)
-            for url, params in [first_param, second_param]
+            self.request(url=url, params=params, findomain=findomain)
+            for url, params, findomain in [first_param, second_param, third_param]
         ]
         results = await asyncio.gather(*async_requests)
         return results
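For context on this hunk: each request now carries a per-target findomain flag, and the three (url, params, findomain) triples fan out into concurrent POSTs via asyncio.gather. Below is a minimal runnable sketch of that pattern, not part of the commit; fake_post is a hypothetical stand-in for AsyncFetcher.post_fetch, and the form-field names mirror the ones in the diff.

import asyncio

async def fake_post(url, params, data):
    # Stand-in for AsyncFetcher.post_fetch: pretend latency, echo inputs.
    await asyncio.sleep(0.1)
    return f'{dict(params)} -> {sorted(data)}'

async def request(word, url, params, findomain=False):
    # findomain's form on suip.biz takes an extra 'only_resolved' field.
    data = {'url': word, 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
        {'url': word, 'Submit1': 'Submit'}
    return await fake_post(url, params=params, data=data)

async def handler(word, url):
    triples = [
        (url, (('act', 'subfinder'),), False),
        (url, (('act', 'amass'),), False),
        (url, (('act', 'findomain'),), True),
    ]
    # All three POSTs run concurrently; results come back in list order.
    return await asyncio.gather(*(request(word, u, p, f) for u, p, f in triples))

print(asyncio.run(handler('example.com', 'https://suip.biz/')))

Keeping the flag inside each triple lets the comprehension stay a single gather call instead of special-casing the findomain request.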
@@ -31,11 +32,13 @@ async def handler(self, url):
     async def do_search(self):
         try:
             results = await self.handler(url="https://suip.biz/")
-            for result in results:
-                # results has both responses in a list
-                # iterate through them and parse out the urls
+            for num in range(len(results)):
+                # iterate through results and parse out the urls
+                result = results[num]
                 soup = BeautifulSoup(str(result), 'html.parser')
-                hosts: list = str(soup.find('pre')).splitlines()
+                hosts: list = str(soup.find('pre')).splitlines() if num != 2 else \
+                    [line for line in str(soup.find('pre')).splitlines() if 'A total of' not in line]
+                # The last iteration is special because findomain throws in some more lines that we need to filter out
                 await self.clean_hosts(hosts)
         except Exception as e:
             print(f'An exception has occurred: {e}')
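The num != 2 special case exists because the findomain response embeds a summary line inside its <pre> block. A small sketch of that filtering follows, not part of the commit; the HTML snippet is a made-up example, and the exact wording of suip.biz's findomain output is an assumption beyond the 'A total of' marker the commit itself keys on.

from bs4 import BeautifulSoup

# Hypothetical findomain response body from suip.biz.
html = '<pre>\na.example.com\nb.example.com\nA total of 2 subdomains were found</pre>'
soup = BeautifulSoup(html, 'html.parser')
lines = str(soup.find('pre')).splitlines()
hosts = [line for line in lines if 'A total of' not in line]
print(hosts)  # ['<pre>', 'a.example.com', 'b.example.com']
# The summary line also carries the closing </pre>, so filtering it out
# leaves only the opening-tag residue, which clean_hosts discards below.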
@@ -50,8 +53,5 @@ async def process(self):
     async def clean_hosts(self, soup_hosts):
         for host in soup_hosts:
             host = str(host).strip()
-            if len(host) > 1 and 'pre' not in host:
-                if host[0] == '.':
-                    self.totalhosts.add(host[1:])
-                else:
-                    self.totalhosts.add(host)
+            if len(host) > 1 and self.word.replace('www.', '') in host:
+                self.totalhosts.add(host[1:] if host[0] == '.' else host)
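A standalone rendering of the tightened filter in this hunk: a line is kept only when it contains the target domain, and a leading dot is stripped before adding it. This sketch swaps the method for a free function and uses hypothetical inputs; it is illustrative, not part of the commit.

def clean_hosts(word, soup_hosts):
    totalhosts = set()
    domain = word.replace('www.', '')
    for host in soup_hosts:
        host = str(host).strip()
        # Keep only lines that mention the target domain; drop tag residue.
        if len(host) > 1 and domain in host:
            totalhosts.add(host[1:] if host[0] == '.' else host)
    return totalhosts

print(clean_hosts('www.example.com',
                  ['<pre>', '.a.example.com', 'b.example.com', 'unrelated.org']))
# {'a.example.com', 'b.example.com'}

The domain-membership check replaces the old 'pre' substring test, which would also have rejected any legitimate host whose name happens to contain the letters 'pre'.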