mirror of
https://github.com/laramies/theHarvester.git
synced 2025-02-25 23:13:24 +08:00
Run the queries on each range concurrently
This commit is contained in:
parent
59afb7f609
commit
d14c28e2fa
2 changed files with 65 additions and 9 deletions
|
@@ -473,20 +473,28 @@ async def handler(lst):
|
|||
dnsrev = []
|
||||
if dnslookup is True:
|
||||
print('\n[*] Starting active queries.')
|
||||
# load the reverse dns tools
|
||||
from theHarvester.discovery.dnssearch import (
|
||||
reverse_ip_range,
|
||||
reverse_all_ips_in_range,
|
||||
serialize_ip_range)
|
||||
reversed_ip_ranges = set()
|
||||
from theHarvester.lib.itertools import merge_async_generators
|
||||
|
||||
# create a generator of reversed ips for each range
|
||||
reversed_ipranges = {}
|
||||
for entry in host_ip:
|
||||
ip_range = serialize_ip_range(ip=entry, netmask='24')
|
||||
if ip_range and not ip_range in reversed_ip_ranges:
|
||||
if ip_range and not ip_range in set(reversed_ipranges.keys()):
|
||||
print('\n[*] Performing reverse lookup on ' + ip_range)
|
||||
async for cname in reverse_ip_range(iprange=ip_range,verbose=True):
|
||||
if word in cname:
|
||||
dnsrev.append(cname)
|
||||
if cname not in full:
|
||||
full.append(cname)
|
||||
reversed_ip_ranges.add(ip_range)
|
||||
reversed_ipranges[ip_range] = reverse_all_ips_in_range(iprange=ip_range,verbose=True)
|
||||
|
||||
# keep only the host that contain the target domain
|
||||
async for cname in merge_async_generators(*reversed_ipranges.values()):
|
||||
if word in cname:
|
||||
dnsrev.append(cname)
|
||||
if cname not in full:
|
||||
full.append(cname)
|
||||
|
||||
# Display the newly found hosts
|
||||
print('[*] Hosts found after reverse lookup (in target domain):')
|
||||
print('--------------------------------------------------------')
|
||||
for xh in dnsrev:
|
||||
|
|
48
theHarvester/lib/itertools.py
Normal file
48
theHarvester/lib/itertools.py
Normal file
|
@@ -0,0 +1,48 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
=========
|
||||
Itertools
|
||||
=========
|
||||
|
||||
Async iterator / generator tools.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import asyncio
|
||||
|
||||
from typing import Any, AsyncIterable
|
||||
|
||||
# TODO: need big focus on performance and results parsing, now does the basic.
|
||||
|
||||
#####################################################################
|
||||
# DNS FORCE
|
||||
#####################################################################
|
||||
|
||||
def merge_async_generators(
        *aiters: AsyncIterable[Any]) -> AsyncIterable[Any]:
    """
    Merge several async generators into a single one.
    The merged generator provides items in the order of availability.

    Parameters
    ----------
    aiters: AsyncIterable.
        A list of iterators / generators.

    Returns
    -------
    out: AsyncIterable.
        The merged iterator / generator.

    Raises
    ------
    Any exception raised inside one of the source generators is
    re-raised to the consumer once all items have been delivered.
    """
    # Small queue keeps back-pressure on fast producers.
    queue: asyncio.Queue = asyncio.Queue(1)

    # Unique sentinel: each drain task enqueues it exactly once when its
    # source is exhausted (or fails), so the consumer can count producers
    # down instead of polling task.done() -- the polling approach races:
    # a task may finish between the done() check and queue.get(), leaving
    # the consumer blocked forever on an empty queue.
    _done = object()

    async def drain(aiter: AsyncIterable[Any]) -> None:
        # Forward every item, then always signal completion (even on error,
        # so the consumer never hangs waiting for a dead producer).
        try:
            async for item in aiter:
                await queue.put(item)
        finally:
            await queue.put(_done)

    async def merged() -> Any:
        remaining = len(tasks)
        while remaining:
            item = await queue.get()
            if item is _done:
                remaining -= 1
            else:
                yield item
        # Re-raise any exception a source generator produced; without this
        # the drain tasks would swallow errors silently.
        await asyncio.gather(*tasks)

    # Tasks start at call time, matching the original contract that this
    # function is invoked from within a running event loop.
    tasks = [asyncio.create_task(drain(aiter)) for aiter in aiters]
    return merged()
|
Loading…
Reference in a new issue