Mirror of https://github.com/laramies/theHarvester.git (synced 2025-02-23 14:02:58 +08:00)
Added findomain for suip :).
This commit is contained in:
parent
e5c4c9de36
commit
30140fc66d
1 changed file with 16 additions and 16 deletions
@@ -12,18 +12,19 @@ def __init__(self, word: str):
         self.totalhosts: set = set()
         self.totalips: set = set()
 
-    async def request(self, url, params):
+    async def request(self, url, params, findomain=False):
         headers = {'User-Agent': Core.get_user_agent()}
-        data = {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
+        data = {'url': self.word.replace('www.', ''), 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
+            {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
         return await AsyncFetcher.post_fetch(url, headers=headers, params=params, data=data)
 
     async def handler(self, url):
-        first_param = [url, (('act', 'subfinder'),), ]
-        second_param = [url, (('act', 'amass'),), ]
-        # TODO RESEARCH https://suip.biz/?act=findomain
+        first_param = [url, (('act', 'subfinder'),), False]
+        second_param = [url, (('act', 'amass'),), False]
+        third_param = [url, (('act', 'findomain'),), True]
         async_requests = [
-            self.request(url=url, params=params)
-            for url, params in [first_param, second_param]
+            self.request(url=url, params=params, findomain=findomain)
+            for url, params, findomain in [first_param, second_param, third_param]
         ]
         results = await asyncio.gather(*async_requests)
         return results
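For context, a minimal self-contained sketch of the new three-way fan-out. The fake_post stub and the example.com word are illustrative stand-ins, not theHarvester code; the real module posts through AsyncFetcher.post_fetch:

import asyncio

# Stand-in for AsyncFetcher.post_fetch so the fan-out is observable offline.
async def fake_post(url, params, data):
    return {'params': params, 'only_resolved': data.get('only_resolved')}

async def request(url, params, findomain=False):
    word = 'example.com'  # illustrative; the module uses self.word
    # findomain submissions add 'only_resolved': '1', as in the diff above
    data = {'url': word, 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
        {'url': word, 'Submit1': 'Submit'}
    return await fake_post(url, params=params, data=data)

async def handler(url):
    # One (url, params, findomain) triple per act, mirroring
    # first_param/second_param/third_param in the commit
    jobs = [
        (url, (('act', 'subfinder'),), False),
        (url, (('act', 'amass'),), False),
        (url, (('act', 'findomain'),), True),
    ]
    return await asyncio.gather(*(request(u, params=p, findomain=f) for u, p, f in jobs))

print(asyncio.run(handler('https://suip.biz/')))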
@@ -31,11 +32,13 @@ async def handler(self, url):
     async def do_search(self):
         try:
             results = await self.handler(url="https://suip.biz/")
-            for result in results:
-                # results has both responses in a list
-                # iterate through them and parse out the urls
+            for num in range(len(results)):
+                # iterate through results and parse out the urls
+                result = results[num]
                 soup = BeautifulSoup(str(result), 'html.parser')
-                hosts: list = str(soup.find('pre')).splitlines()
+                hosts: list = str(soup.find('pre')).splitlines() if num != 2 else \
+                    [line for line in str(soup.find('pre')).splitlines() if 'A total of' not in line]
+                # The last iteration is special because findomain throws in some more lines that we need to filter out
                 await self.clean_hosts(hosts)
         except Exception as e:
             print(f'An exception has occurred: {e}')
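The filtering on the findomain response can be seen in isolation with a small sketch; the HTML sample here is fabricated, and real suip.biz output will differ:

from bs4 import BeautifulSoup

# Fabricated findomain-style response body; real suip.biz output differs
html = """<pre>mail.example.com
dev.example.com
A total of 2 subdomain(s) found.</pre>"""

soup = BeautifulSoup(html, 'html.parser')
lines = str(soup.find('pre')).splitlines()
# findomain appends summary lines, so drop anything containing 'A total of'
hosts = [line for line in lines if 'A total of' not in line]
print(hosts)  # ['<pre>mail.example.com', 'dev.example.com']

Note that str() keeps the <pre> tag glued to the first and last lines, which is why clean_hosts below still has to cope with tag residue around otherwise valid hostnames.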
@@ -50,8 +53,5 @@ async def process(self):
     async def clean_hosts(self, soup_hosts):
         for host in soup_hosts:
             host = str(host).strip()
-            if len(host) > 1 and 'pre' not in host:
-                if host[0] == '.':
-                    self.totalhosts.add(host[1:])
-                else:
-                    self.totalhosts.add(host)
+            if len(host) > 1 and self.word.replace('www.', '') in host:
+                self.totalhosts.add(host[1:] if host[0] == '.' else host)
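A quick standalone check of the tightened clean_hosts condition; word and the host list are made up for illustration:

word = 'example.com'
totalhosts: set = set()

for host in ['.mail.example.com', 'dev.example.com', 'unrelated.org', '']:
    host = str(host).strip()
    # keep lines that mention the target domain; strip a leading dot inline
    if len(host) > 1 and word in host:
        totalhosts.add(host[1:] if host[0] == '.' else host)

print(sorted(totalhosts))  # ['dev.example.com', 'mail.example.com']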