Merge branch 'master' of https://github.com/laramies/theHarvester into freshredux

This commit is contained in:
NotoriousRebel 2023-07-02 19:17:15 -04:00
commit 520b5e60fa
11 changed files with 68 additions and 19 deletions

View file

@ -1,4 +1,4 @@
FROM alpine:3.17.3
FROM alpine:3.18.0
LABEL maintainer="@jay_townsend1 & @NotoriousRebel1 (alpine @viardant)"
RUN mkdir /app
RUN mkdir /etc/theHarvester/

View file

@ -52,7 +52,7 @@ Passive:
* intelx: Intelx search engine (Requires an API key, see below.) - www.intelx.io
* omnisint: Project Crobat, A Centralised Searchable Open Source Project Sonar DNS Database - https://github.com/Cgboal/SonarSearch
* Netlas: A Shodan or Censys competitor - https://app.netlas.io
* otx: AlienVault Open Threat Exchange - https://otx.alienvault.com
@ -92,7 +92,7 @@ Active:
* DNS brute force: dictionary brute force enumeration
* Screenshots: Take screenshots of subdomains that were found
Modules that require an API key:
Modules that require API keys:
--------------------------------
Documentation to setup API keys can be found at - https://github.com/laramies/theHarvester/wiki/Installation#api-keys
@ -107,6 +107,7 @@ Documentation to setup API keys can be found at - https://github.com/laramies/th
* hunter - limited to 10 on the free plan, so you will need to do -l 10 switch
* hunterhow
* intelx
* netlas - $
* pentesttools - $
* projectdiscovery - invite only for now
* rocketreach - $

View file

@ -30,6 +30,9 @@ apikeys:
intelx:
key:
netlas:
key:
pentestTools:
key:

View file

@ -7,16 +7,16 @@ beautifulsoup4==4.12.2
censys==2.2.2
certifi==2023.5.7
dnspython==2.3.0
fastapi==0.95.2
fastapi==0.99.0
lxml==4.9.2
netaddr==0.8.0
ujson==5.7.0
ujson==5.8.0
pyppeteer==1.0.2
PyYAML==6.0
python-dateutil==2.8.2
requests==2.30.0
requests==2.31.0
retrying==1.3.4
setuptools==67.7.2
setuptools==67.8.0
shodan==1.29.1
slowapi==0.1.8
uvicorn==0.22.0

View file

@ -2,15 +2,15 @@
flake8==6.0.0
mypy==1.3.0
mypy-extensions==1.0.0
pydantic==1.10.7
pydantic==2.0
pyre-check==0.9.18
pyflakes==3.0.1
pytest==7.3.1
pytest==7.4.0
pytest-asyncio==0.21.0
types-certifi==2021.10.8.3
types-chardet==5.0.4.6
types-ujson==5.7.0.5
types-PyYAML==6.0.12.9
types-requests==2.30.0.0
types-ujson==5.8.0.0
types-PyYAML==6.0.12.10
types-requests==2.31.0.1
types-python-dateutil==2.8.19.13
wheel==0.40.0

View file

@ -30,14 +30,13 @@ async def start(rest_args: Optional[argparse.Namespace] = None):
parser.add_argument('-e', '--dns-server', help='DNS server to use for lookup.')
parser.add_argument('-t', '--take-over', help='Check for takeovers.', default=False, action='store_true')
# TODO add dns resolver flag
parser.add_argument('-r', '--dns-resolve', help='Perform DNS resolution on subdomains with a resolver list or passed in resolvers, default False.', default="",
type=str, nargs='?')
parser.add_argument('-r', '--dns-resolve', help='Perform DNS resolution on subdomains with a resolver list or passed in resolvers, default False.', default="", type=str, nargs='?')
parser.add_argument('-n', '--dns-lookup', help='Enable DNS server lookup, default False.', default=False, action='store_true')
parser.add_argument('-c', '--dns-brute', help='Perform a DNS brute force on the domain.', default=False, action='store_true')
parser.add_argument('-f', '--filename', help='Save the results to an XML and JSON file.', default='', type=str)
parser.add_argument('-b', '--source', help='''anubis, baidu, bevigil, binaryedge, bing, bingapi, bufferoverun, brave,
censys, certspotter, criminalip, crtsh, dnsdumpster, duckduckgo, fullhunt, github-code,
hackertarget, hunter, hunterhow, intelx, otx, pentesttools, projectdiscovery,
hackertarget, hunter, hunterhow, intelx, netlas, otx, pentesttools, projectdiscovery,
rapiddns, rocketreach, securityTrails, sitedossier, subdomainfinderc99, threatminer, urlscan,
virustotal, yahoo, zoomeye''')
@ -407,6 +406,15 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
else:
print(f'An exception has occurred in Intelx search: {e}')
elif engineitem == 'netlas':
from theHarvester.discovery import netlas
try:
netlas_search = netlas.SearchNetlas(word)
stor_lst.append(store(netlas_search, engineitem, store_host=True, store_ip=True))
except Exception as e:
if isinstance(e, MissingKey):
print(e)
elif engineitem == 'otx':
from theHarvester.discovery import otxsearch
try:

View file

@ -1,3 +1,4 @@
from theHarvester.discovery.constants import MissingKey
from theHarvester.lib.core import *
from typing import Set
@ -9,6 +10,8 @@ def __init__(self, word) -> None:
self.totalhosts: Set = set()
self.interestingurls: Set = set()
self.key = Core.bevigil_key()
if self.key is None:
raise MissingKey('bevigil')
self.proxy = False
async def do_search(self) -> None:

View file

@ -0,0 +1,29 @@
from theHarvester.discovery.constants import MissingKey
from theHarvester.lib.core import *
from typing import Set
class SearchNetlas:
    """Enumerate subdomains of a target domain via the Netlas.io domains API."""

    def __init__(self, word) -> None:
        """Store the target domain and load the Netlas API key.

        :param word: apex domain to enumerate subdomains for
        :raises MissingKey: if no Netlas API key is configured
        """
        self.word = word
        # Use builtin generics: the original annotated these with `List`,
        # which was never imported (only `Set` was), so importing the module
        # raised NameError when the `-> List` annotations were evaluated.
        self.totalhosts: list = []
        self.totalips: list = []  # reserved for IP results; not populated yet
        self.key = Core.netlas_key()
        if self.key is None:
            raise MissingKey('netlas')
        self.proxy = False

    async def do_search(self) -> None:
        """Query the Netlas domains endpoint and collect subdomain names."""
        api = f'https://app.netlas.io/api/domains/?q=*.{self.word}&source_type=include&start=0&fields=*'
        headers = {'X-API-Key': self.key}
        response = await AsyncFetcher.fetch_all([api], json=True, headers=headers, proxy=self.proxy)
        # An error payload may lack 'items'; treat that as zero results
        # instead of raising KeyError.
        for domain in response[0].get('items', []):
            self.totalhosts.append(domain['data']['domain'])

    async def get_hostnames(self) -> list:
        """Return the hostnames gathered by do_search()."""
        return self.totalhosts

    async def process(self, proxy: bool = False) -> None:
        """Entry point: optionally enable a proxy, then run the search."""
        self.proxy = proxy
        await self.do_search()

View file

@ -14,7 +14,7 @@ def __init__(self, word, limit) -> None:
raise MissingKey('RocketReach')
self.hosts: Set = set()
self.proxy = False
self.baseurl = 'https://api.rocketreach.co/v2/api/search'
self.baseurl = 'https://rocketreach.co/api/v2/person/search'
self.links: Set = set()
self.limit = limit
@ -26,7 +26,7 @@ async def do_search(self) -> None:
'User-Agent': Core.get_user_agent()
}
next_page = 1 # track pagniation
next_page = 1 # track pagination
for count in range(1, self.limit):
data = f'{{"query":{{"company_domain": ["{self.word}"]}}, "start": {next_page}, "page_size": 100}}'
result = await AsyncFetcher.post_fetch(self.baseurl, headers=headers, data=data, json=True)
@ -49,7 +49,7 @@ async def do_search(self) -> None:
if next_page > int(result['pagination']['total']):
break
await asyncio.sleep(get_delay() + 2)
await asyncio.sleep(get_delay() + 5)
except Exception as e:
print(f'An exception has occurred: {e}')

View file

@ -72,6 +72,10 @@ def hunterhow_key() -> str:
def intelx_key() -> str:
return Core.api_keys()['intelx']['key']
@staticmethod
def netlas_key() -> str:
    """Return the Netlas API key read from the api-keys config file."""
    return Core.api_keys()['netlas']['key']
@staticmethod
def pentest_tools_key() -> str:
    """Return the PentestTools API key read from the api-keys config file."""
    return Core.api_keys()['pentestTools']['key']
@ -154,6 +158,7 @@ def get_supportedengines() -> list[str | Any]:
'hunter',
'hunterhow',
'intelx',
'netlas',
'otx',
'pentesttools',
'projectdiscovery',

View file

@ -1,4 +1,4 @@
# coding=utf-8
def version() -> str:
return '4.3.0'
return '4.3.1'