Merge pull request #1900 from thurrsense/master

J.Townsend 2024-12-13 22:37:47 +00:00 committed by GitHub
commit 081ab270d0
2 changed files with 42 additions and 6 deletions

theHarvester/__main__.py

@@ -604,7 +604,7 @@ async def store(
                 elif engineitem == 'netlas':
                     try:
-                        netlas_search = netlas.SearchNetlas(word)
+                        netlas_search = netlas.SearchNetlas(word, limit)
                         stor_lst.append(
                             store(
                                 netlas_search,

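The `limit` now forwarded to `netlas.SearchNetlas` at this call site is the per-source result cap that theHarvester already takes from its command line. A minimal sketch of how such a value could be parsed and handed to the dispatch code above; the argparse setup below is illustrative, not the project's actual argument handling:

import argparse

# Illustrative sketch only: theHarvester's real CLI wiring lives elsewhere in __main__.py.
parser = argparse.ArgumentParser(description='passive subdomain enumeration (sketch)')
parser.add_argument('-d', '--domain', required=True, help='target domain (becomes `word`)')
parser.add_argument('-l', '--limit', type=int, default=500,
                    help='cap on results requested from each source, including netlas')
args = parser.parse_args()

word, limit = args.domain, args.limit  # the names used by the call site in the hunk above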
theHarvester/discovery/netlas.py

@@ -1,27 +1,63 @@
+import json
+
 from theHarvester.discovery.constants import MissingKey
 from theHarvester.lib.core import AsyncFetcher, Core


 class SearchNetlas:
-    def __init__(self, word) -> None:
+    def __init__(self, word, limit: int) -> None:
         self.word = word
         self.totalhosts: list = []
         self.totalips: list = []
         self.key = Core.netlas_key()
+        self.limit = limit
         if self.key is None:
             raise MissingKey('netlas')
         self.proxy = False

-    async def do_search(self) -> None:
-        api = f'https://app.netlas.io/api/domains/?q=*.{self.word}&source_type=include&start=0&fields=*'
+    async def do_count(self) -> None:
+        """Counts the total number of subdomains
+
+        :return: None
+        """
+        api = f"https://app.netlas.io/api/domains_count/?q=*.{self.word}"
         headers = {'X-API-Key': self.key}
         response = await AsyncFetcher.fetch_all([api], json=True, headers=headers, proxy=self.proxy)
-        for domain in response[0]['items']:
-            self.totalhosts.append(domain['data']['domain'])
+        amount_size = response[0]['count']
+        self.limit = amount_size if amount_size < self.limit else self.limit
+
+    async def do_search(self) -> None:
+        """Download domains for query 'q' size of 'limit'
+
+        :return: None
+        """
+        user_agent = Core.get_user_agent()
+        url = "https://app.netlas.io/api/domains/download/"
+        payload = {
+            "q": f"*.{self.word}",
+            "fields": ["domain"],
+            "source_type": "include",
+            "size": self.limit,
+            "type": "json",
+            "indice": [0]
+        }
+        headers = {
+            'X-API-Key': self.key,
+            "User-Agent": user_agent,
+        }
+        response = await AsyncFetcher.post_fetch(url, data=payload, headers=headers, proxy=self.proxy)
+        resp_json = json.loads(response)
+        for el in resp_json:
+            domain = el["data"]["domain"]
+            self.totalhosts.append(domain)

     async def get_hostnames(self) -> list:
         return self.totalhosts

     async def process(self, proxy: bool = False) -> None:
         self.proxy = proxy
+        await self.do_count()
         await self.do_search()
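With these changes, `process()` first calls `do_count()` to clamp `self.limit` to the number of domains Netlas reports for the query, then `do_search()` POSTs that size to the download endpoint and collects the returned domains. A minimal standalone usage sketch, assuming a Netlas API key is configured so that `Core.netlas_key()` returns one (otherwise the constructor raises `MissingKey`); the target domain and limit below are placeholders:

import asyncio

from theHarvester.discovery import netlas


async def main() -> None:
    word, limit = 'example.com', 100           # hypothetical target and result cap
    engine = netlas.SearchNetlas(word, limit)  # new two-argument constructor
    await engine.process(proxy=False)          # runs do_count() and then do_search()
    for host in await engine.get_hostnames():
        print(host)


asyncio.run(main())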