Merge pull request #710 from laramies/L1ghtn1ng-patch-1

Fix Intelx module
J.Townsend 2021-04-04 03:45:02 +01:00 committed by GitHub
commit 9b7c432558
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 33 additions and 17 deletions

View file

@@ -269,7 +269,7 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
                 from theHarvester.discovery import intelxsearch
                 # Import locally or won't work.
                 try:
-                    intelx_search = intelxsearch.SearchIntelx(word, limit)
+                    intelx_search = intelxsearch.SearchIntelx(word)
                     stor_lst.append(store(intelx_search, engineitem, store_host=True, store_emails=True))
                 except Exception as e:
                     if isinstance(e, MissingKey):
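
A minimal sketch of how a caller drives the module after this change: the constructor now takes only the search term, with the result limit fixed inside the module. The asyncio driver and the example domain are illustrative assumptions; SearchIntelx, process, and get_emails are the names used in this commit.

import asyncio
from theHarvester.discovery import intelxsearch

async def main():
    word = 'example.com'  # assumed target domain for illustration
    try:
        # After this patch the constructor only takes the search term.
        intelx_search = intelxsearch.SearchIntelx(word)
        await intelx_search.process()
        print(await intelx_search.get_emails())
    except Exception as e:
        # SearchIntelx raises MissingKey when no Intelx API key is configured
        print(f'Intelx module could not run: {e}')

if __name__ == '__main__':
    asyncio.run(main())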

View file

@@ -2,47 +2,63 @@
 from theHarvester.lib.core import *
 from theHarvester.parsers import intelxparser
 import asyncio
+import json
+import requests
 
 
 class SearchIntelx:
 
-    def __init__(self, word, limit):
+    def __init__(self, word):
         self.word = word
         # default key is public key
         self.key = Core.intelx_key()
         if self.key is None:
             raise MissingKey('Intelx')
-        self.database = 'https://public.intelx.io/'
+        self.database = 'https://2.intelx.io'
         self.results = None
         self.info = ()
-        self.limit = limit
+        self.limit = 10000
         self.proxy = False
+        self.offset = -1
 
     async def do_search(self):
         try:
-            user_agent = Core.get_user_agent()
-            headers = {'User-Agent': user_agent, 'x-key': self.key}
-            # data is json that corresponds to what we are searching for, sort:2 means sort by most relevant
-            data = f'{{"term": "{self.word}", "maxresults": {self.limit}, "media": 0, "sort": 2 , "terminate": []}}'
-            resp = await AsyncFetcher.post_fetch(url=f'{self.database}phonebook/search', headers=headers, data=data,
-                                                 json=True, proxy=self.proxy)
-            uuid = resp['id']
-            # grab uuid to send get request to fetch data
+            # Based on: https://github.com/IntelligenceX/SDK/blob/master/Python/intelxapi.py
+            # API requests self identification
+            # https://intelx.io/integrations
+            headers = {'x-key': self.key, 'User-Agent': f'{Core.get_user_agent()}-theHarvester'}
+            data = {
+                "term": self.word,
+                "buckets": [],
+                "lookuplevel": 0,
+                "maxresults": self.limit,
+                "timeout": 5,
+                "datefrom": "",
+                "dateto": "",
+                "sort": 2,
+                "media": 0,
+                "terminate": [],
+                "target": 0
+            }
+            total_resp = requests.post(f'{self.database}/phonebook/search', headers=headers, json=data)
+            phonebook_id = json.loads(total_resp.text)['id']
             await asyncio.sleep(2)
-            url = f'{self.database}phonebook/search/result?id={uuid}&offset=0&limit={self.limit}'
-            resp = await AsyncFetcher.fetch_all([url], headers=headers, json=True, proxy=self.proxy)
+            # Fetch results from phonebook based on ID
+            resp = await AsyncFetcher.fetch_all(
+                [f'{self.database}/phonebook/search/result?id={phonebook_id}&limit={self.limit}&offset={self.offset}'],
+                headers=headers, json=True, proxy=self.proxy)
             resp = resp[0]
-            # TODO: Check if more results can be gathered depending on status
             self.results = resp
         except Exception as e:
-            print(f'An exception has occurred: {e}')
+            print(f'An exception has occurred in Intelx: {e}')
 
     async def process(self, proxy=False):
         self.proxy = proxy
         await self.do_search()
         intelx_parser = intelxparser.Parser()
         self.info = await intelx_parser.parse_dictionaries(self.results)
         # Create parser and set self.info to tuple returned from parsing text.
 
     async def get_emails(self):
         return self.info[0]
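
For reference, a minimal standalone sketch of the two-step phonebook flow the patched module now uses: POST the search to get an id, wait briefly, then fetch the results for that id. It uses synchronous requests in place of theHarvester's AsyncFetcher; the placeholder API key, User-Agent string, and example domain are assumptions, while the endpoint, payload fields, and limit/offset values mirror this commit.

import json
import time

import requests

API_KEY = 'YOUR-INTELX-KEY'   # placeholder; the module reads the real key via Core.intelx_key()
BASE = 'https://2.intelx.io'  # endpoint used by the patched module
HEADERS = {'x-key': API_KEY, 'User-Agent': 'example-theHarvester'}

def phonebook_search(term: str, limit: int = 10000, offset: int = -1) -> dict:
    # Step 1: submit the search; the response carries an id for the queued search.
    payload = {
        'term': term, 'buckets': [], 'lookuplevel': 0, 'maxresults': limit,
        'timeout': 5, 'datefrom': '', 'dateto': '', 'sort': 2,
        'media': 0, 'terminate': [], 'target': 0,
    }
    search_id = requests.post(f'{BASE}/phonebook/search', headers=HEADERS, json=payload).json()['id']
    # Step 2: give the search time to populate, as the module does, then fetch results by id.
    time.sleep(2)
    result_url = f'{BASE}/phonebook/search/result?id={search_id}&limit={limit}&offset={offset}'
    return requests.get(result_url, headers=HEADERS).json()

if __name__ == '__main__':
    print(json.dumps(phonebook_search('example.com'), indent=2))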