diff --git a/Pipfile b/Pipfile index b7b551e7..3bf1dc1d 100644 --- a/Pipfile +++ b/Pipfile @@ -6,6 +6,7 @@ name = "pypi" [packages] aiodns = "==2.0.0" aiohttp = "==3.6.2" +aiosqlite = "==0.11.0" beautifulsoup4 = "==4.8.2" dnspython = "==1.16.0" grequests = "==0.4.0" diff --git a/requirements.txt b/requirements.txt index 29ba126d..7c2dea36 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ aiodns==2.0.0 aiohttp==3.6.2 +aiosqlite==0.11.0 beautifulsoup4==4.8.2 dnspython==1.16.0 flake8==3.7.9 diff --git a/theHarvester/__main__.py b/theHarvester/__main__.py index 12b73e7a..ecd1fb59 100644 --- a/theHarvester/__main__.py +++ b/theHarvester/__main__.py @@ -43,7 +43,7 @@ async def start(): args = parser.parse_args() try: db = stash.StashManager() - db.do_init() + await db.do_init() except Exception: pass @@ -94,30 +94,30 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor if store_host: host_names = filter(await search_engine.get_hostnames()) all_hosts.extend(host_names) - db_stash.store_all(word, all_hosts, 'host', source) + await db_stash.store_all(word, all_hosts, 'host', source) if store_emails: email_list = filter(await search_engine.get_emails()) all_emails.extend(email_list) - db_stash.store_all(word, email_list, 'email', source) + await db_stash.store_all(word, email_list, 'email', source) if store_ip: ips_list = await search_engine.get_ips() all_ip.extend(ips_list) - db_stash.store_all(word, all_ip, 'ip', source) + await db_stash.store_all(word, all_ip, 'ip', source) if store_data: data = filter(await search_engine.get_data()) all_hosts.extend(data) - db.store_all(word, all_hosts, 'host', source) + await db.store_all(word, all_hosts, 'host', source) if store_results: email_list, host_names, urls = await search_engine.get_results() all_emails.extend(email_list) host_names = filter(host_names) all_urls.extend(filter(urls)) all_hosts.extend(host_names) - db.store_all(word, all_hosts, 'host', source) - db.store_all(word, all_emails, 'email', source) + await db.store_all(word, all_hosts, 'host', source) + await db.store_all(word, all_emails, 'email', source) if store_people: people_list = await search_engine.get_people() - db_stash.store_all(word, people_list, 'people', source) + await db_stash.store_all(word, people_list, 'people', source) if len(people_list) == 0: print('\n[*] No users found.\n\n') else: @@ -127,7 +127,7 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor print(usr) if store_links: links = await search_engine.get_links() - db.store_all(word, links, 'name', engineitem) + await db.store_all(word, links, 'name', engineitem) if len(links) == 0: print('\n[*] No links found.\n\n') else: @@ -415,7 +415,7 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor host = str(host) print(host) host_ip = [netaddr_ip.format() for netaddr_ip in sorted([netaddr.IPAddress(ip) for ip in ips])] - db.store_all(word, host_ip, 'ip', 'DNS-resolver') + await db.store_all(word, host_ip, 'ip', 'DNS-resolver') length_urls = len(all_urls) if length_urls == 0: if len(engines) >= 1 and 'trello' in engines: @@ -559,9 +559,9 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor counter = 0 for word in vhost: search = googlesearch.SearchGoogle(word, limit, counter) - search.process(google_dorking) - emails = search.get_emails() - hosts = search.get_hostnames() + await search.process(google_dorking) + emails = await search.get_emails() + hosts = await 
search.get_hostnames()
                 print(emails)
                 print(hosts)
         else:
@@ -572,17 +572,18 @@ async def store(search_engine: Any, source: str, process_param: Any = None, stor
     try:
         print('\n[*] Reporting started.')
         db = stash.StashManager()
-        scanboarddata = db.getscanboarddata()
-        latestscanresults = db.getlatestscanresults(word)
-        previousscanresults = db.getlatestscanresults(word, previousday=True)
-        latestscanchartdata = db.latestscanchartdata(word)
-        scanhistorydomain = db.getscanhistorydomain(word)
-        pluginscanstatistics = db.getpluginscanstatistics()
+        scanboarddata = await db.getscanboarddata()
+        latestscanresults = await db.getlatestscanresults(word)
+        previousscanresults = await db.getlatestscanresults(word, previousday=True)
+        latestscanchartdata = await db.latestscanchartdata(word)
+        scanhistorydomain = await db.getscanhistorydomain(word)
+        pluginscanstatistics = await db.getpluginscanstatistics()
         generator = statichtmlgenerator.HtmlGenerator(word)
         HTMLcode = generator.beginhtml()
         HTMLcode += generator.generatelatestscanresults(latestscanresults)
         HTMLcode += generator.generatepreviousscanresults(previousscanresults)
         graph = reportgraph.GraphGenerator(word)
+        await graph.init_db()
         HTMLcode += graph.drawlatestscangraph(word, latestscanchartdata)
         HTMLcode += graph.drawscattergraphscanhistory(word, scanhistorydomain)
         HTMLcode += generator.generatepluginscanstatistics(pluginscanstatistics)
diff --git a/theHarvester/discovery/exaleadsearch.py b/theHarvester/discovery/exaleadsearch.py
index fae014e9..0ca0c205 100644
--- a/theHarvester/discovery/exaleadsearch.py
+++ b/theHarvester/discovery/exaleadsearch.py
@@ -24,7 +24,7 @@ async def do_search(self):
             'User-agent': Core.get_user_agent()
         }
         urls = [base_url.replace("xx", str(num)) for num in range(self.counter, self.limit, 50) if num <= self.limit]
-        responses = await async_fetcher.fetch_all(urls, headers=headers)
+        responses = await AsyncFetcher.fetch_all(urls, headers=headers)
         for response in responses:
             self.total_results += response
@@ -36,7 +36,7 @@ async def do_search_files(self, files):
             'Referer': ('http://' + self.hostname + '/search/web/results/?q=%40' + self.word),
             'User-agent': Core.get_user_agent()
         }
-        responses = await async_fetcher.fetch_all([url], headers=headers)
+        responses = await AsyncFetcher.fetch_all([url], headers=headers)
         self.results = responses[0]
         self.total_results += self.results
diff --git a/theHarvester/discovery/huntersearch.py b/theHarvester/discovery/huntersearch.py
index 8699a4e8..e9ca90cf 100644
--- a/theHarvester/discovery/huntersearch.py
+++ b/theHarvester/discovery/huntersearch.py
@@ -17,7 +17,7 @@ def __init__(self, word, limit, start):
         self.database = f'https://api.hunter.io/v2/domain-search?domain={word}&api_key={self.key}&limit={self.limit}'
 
     async def do_search(self):
-        responses = await async_fetcher.fetch_all([self.database], headers={'User-Agent': Core.get_user_agent()})
+        responses = await AsyncFetcher.fetch_all([self.database], headers={'User-Agent': Core.get_user_agent()})
         self.total_results += responses[0]
 
     async def process(self):
diff --git a/theHarvester/discovery/otxsearch.py b/theHarvester/discovery/otxsearch.py
index 44b5c781..4c2ac8f7 100644
--- a/theHarvester/discovery/otxsearch.py
+++ b/theHarvester/discovery/otxsearch.py
@@ -1,39 +1,41 @@
 from theHarvester.lib.core import *
-import json
-import grequests
+import re
 
 
 class SearchOtx:
-
     def __init__(self, word):
         self.word = word
-        self.results = ''
-        self.totalresults = ''
         self.totalhosts = set()
         self.totalips = set()
 
-    def do_search(self):
-        base_url = f'https://otx.alienvault.com/api/v1/indicators/domain/{self.word}/passive_dns'
+    async def do_search(self):
+        url = f'https://otx.alienvault.com/api/v1/indicators/domain/{self.word}/passive_dns'
         headers = {'User-Agent': Core.get_user_agent()}
-        try:
-            request = grequests.get(base_url, headers=headers)
-            data = grequests.map([request])
-            self.results = data[0].content.decode('UTF-8')
-        except Exception as e:
-            print(e)
-
-        self.totalresults += self.results
-        dct = json.loads(self.totalresults)
+        client = aiohttp.ClientSession(headers=headers, timeout=aiohttp.ClientTimeout(total=20))
+        responses = await AsyncFetcher.fetch(client, url, json=True)
+        await client.close()
+        dct = responses
         self.totalhosts: set = {host['hostname'] for host in dct['passive_dns']}  # filter out ips that are just called NXDOMAIN
-        self.totalips: set = {ip['address'] for ip in dct['passive_dns'] if 'NXDOMAIN' not in ip['address']}
+        self.totalips: set = {ip['address'] for ip in dct['passive_dns']
+                              if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip['address'])}
 
-    def get_hostnames(self) -> set:
+    async def get_hostnames(self) -> set:
         return self.totalhosts
 
-    def get_ips(self) -> set:
+    async def get_ips(self) -> set:
         return self.totalips
 
-    def process(self):
-        self.do_search()
-        print('\tSearching results.')
+    async def process(self):
+        await self.do_search()
+
+
+async def main():
+    x = SearchOtx(word="yale.edu")
+    await x.do_search()
+
+
+if __name__ == '__main__':
+    import asyncio
+
+    asyncio.run(main())
diff --git a/theHarvester/discovery/twittersearch.py b/theHarvester/discovery/twittersearch.py
index d25ee6d1..db5ceb4e 100644
--- a/theHarvester/discovery/twittersearch.py
+++ b/theHarvester/discovery/twittersearch.py
@@ -40,3 +40,21 @@ def get_people(self):
 
     def process(self):
         self.do_search()
+
+if __name__ == '__main__':
+    # https://github.com/taspinar/twitterscraper
+    import requests
+    from bs4 import BeautifulSoup
+    PROXY_URL = 'https://free-proxy-list.net/'
+
+    response = requests.get(PROXY_URL)
+    soup = BeautifulSoup(response.text, 'lxml')
+    table = soup.find('table', id='proxylisttable')
+    list_tr = table.find_all('tr')
+    list_td = [elem.find_all('td') for elem in list_tr]
+    list_td = list(filter(None, list_td))
+    list_ip = [elem[0].text for elem in list_td]
+    list_ports = [elem[1].text for elem in list_td]
+    list_proxies = [':'.join(elem) for elem in list(zip(list_ip, list_ports))]
+    import pprint as p
+    p.pprint(list_proxies, indent=4)
\ No newline at end of file
diff --git a/theHarvester/discovery/virustotal.py b/theHarvester/discovery/virustotal.py
index c4a7c009..852a6947 100644
--- a/theHarvester/discovery/virustotal.py
+++ b/theHarvester/discovery/virustotal.py
@@ -14,7 +14,7 @@ def __init__(self, word):
     async def do_search(self):
         base_url = f'https://www.virustotal.com/ui/domains/{self.word}/subdomains?relationships=resolutions&cursor=STMwCi4%3D&limit=40'
         headers = {'User-Agent': Core.get_user_agent()}
-        responses = await async_fetcher.fetch_all([base_url], headers=headers)
+        responses = await AsyncFetcher.fetch_all([base_url], headers=headers)
self.results = responses[0] self.totalresults += self.results diff --git a/theHarvester/discovery/yahoosearch.py b/theHarvester/discovery/yahoosearch.py index c3387332..b2a61188 100644 --- a/theHarvester/discovery/yahoosearch.py +++ b/theHarvester/discovery/yahoosearch.py @@ -18,7 +18,7 @@ async def do_search(self): 'User-agent': Core.get_user_agent() } urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit] - responses = await async_fetcher.fetch_all(urls, headers=headers) + responses = await AsyncFetcher.fetch_all(urls, headers=headers) for response in responses: self.total_results += response diff --git a/theHarvester/lib/reportgraph.py b/theHarvester/lib/reportgraph.py index 76e37934..2115eac4 100644 --- a/theHarvester/lib/reportgraph.py +++ b/theHarvester/lib/reportgraph.py @@ -3,11 +3,7 @@ import plotly import plotly.graph_objs as go -try: - db = stash.StashManager() - db.do_init() -except Exception as error: - print(f'{error}') + class GraphGenerator: @@ -22,6 +18,13 @@ def __init__(self, domain): self.scattercountshodans = [] self.scattercountvhosts = [] + async def init_db(self): + try: + db = stash.StashManager() + await db.do_init() + except Exception as error: + print(f'{error}') + def drawlatestscangraph(self, domain, latestscandata): try: self.barcolumns = ['email', 'host', 'ip', 'shodan', 'vhost'] diff --git a/theHarvester/lib/stash.py b/theHarvester/lib/stash.py index fd285a9b..6c3db5f1 100644 --- a/theHarvester/lib/stash.py +++ b/theHarvester/lib/stash.py @@ -1,5 +1,5 @@ +import aiosqlite import datetime -import sqlite3 class StashManager: @@ -15,276 +15,258 @@ def __init__(self): self.latestscanresults = [] self.previousscanresults = [] - def do_init(self): - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('CREATE TABLE IF NOT EXISTS results (domain text, resource text, type text, find_date date, source text)') - conn.commit() - conn.close() - return + async def do_init(self): + async with aiosqlite.connect(self.db) as db: + await db.execute( + 'CREATE TABLE IF NOT EXISTS results (domain text, resource text, type text, find_date date, source text)') + await db.commit() - def store(self, domain, resource, res_type, source): + async def store(self, domain, resource, res_type, source): self.domain = domain self.resource = resource self.type = res_type self.source = source self.date = datetime.date.today() try: - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)', - (self.domain, self.resource, self.type, self.date, self.source)) - conn.commit() - conn.close() + async with aiosqlite.connect(self.db) as db: + await db.execute('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)', + (self.domain, self.resource, self.type, self.date, self.source)) + await db.commit() except Exception as e: print(e) - return - def store_all(self, domain, all, res_type, source): + async def store_all(self, domain, all, res_type, source): self.domain = domain self.all = all self.type = res_type self.source = source self.date = datetime.date.today() - for x in self.all: - try: - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)', - (self.domain, x, self.type, self.date, self.source)) - conn.commit() - conn.close() - except Exception as e: - print(e) - return + async with aiosqlite.connect(self.db) as db: + for x in 
self.all: + try: + await db.execute( + 'INSERT INTO results (domain,resource, type, find_date, source) VALUES (?,?,?,?,?)', + (self.domain, x, self.type, self.date, self.source)) + await db.commit() + except Exception as e: + print(e) - def generatedashboardcode(self, domain): + async def generatedashboardcode(self, domain): try: + # TODO refactor into generic method self.latestscandomain["domain"] = domain - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="host"''', (domain,)) - data = c.fetchone() - self.latestscandomain["host"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="email"''', (domain,)) - data = c.fetchone() - self.latestscandomain["email"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="ip"''', (domain,)) - data = c.fetchone() - self.latestscandomain["ip"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="vhost"''', (domain,)) - data = c.fetchone() - self.latestscandomain["vhost"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="shodan"''', (domain,)) - data = c.fetchone() - self.latestscandomain["shodan"] = data[0] - c.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) - data = c.fetchone() - self.latestscandomain["latestdate"] = data[0] - latestdate = data[0] - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="host"''', (domain, latestdate,)) - scandetailshost = c.fetchall() - self.latestscandomain["scandetailshost"] = scandetailshost - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="email"''', - (domain, latestdate,)) - scandetailsemail = c.fetchall() - self.latestscandomain["scandetailsemail"] = scandetailsemail - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="ip"''', (domain, latestdate,)) - scandetailsip = c.fetchall() - self.latestscandomain["scandetailsip"] = scandetailsip - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="vhost"''', - (domain, latestdate,)) - scandetailsvhost = c.fetchall() - self.latestscandomain["scandetailsvhost"] = scandetailsvhost - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="shodan"''', - (domain, latestdate,)) - scandetailsshodan = c.fetchall() - self.latestscandomain["scandetailsshodan"] = scandetailsshodan + async with aiosqlite.connect(self.db) as conn: + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="host"''', + (domain,)) + data = await cursor.fetchone() + self.latestscandomain["host"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="email"''', + (domain,)) + data = await cursor.fetchone() + self.latestscandomain["email"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="ip"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["ip"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="vhost"''', + (domain,)) + data = await cursor.fetchone() + self.latestscandomain["vhost"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? 
AND type="shodan"''', + (domain,)) + data = await cursor.fetchone() + self.latestscandomain["shodan"] = data[0] + cursor = await conn.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["latestdate"] = data[0] + latestdate = data[0] + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="host"''', + (domain, latestdate,)) + scandetailshost = await cursor.fetchall() + self.latestscandomain["scandetailshost"] = scandetailshost + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="email"''', + (domain, latestdate,)) + scandetailsemail = await cursor.fetchall() + self.latestscandomain["scandetailsemail"] = scandetailsemail + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="ip"''', + (domain, latestdate,)) + scandetailsip = await cursor.fetchall() + self.latestscandomain["scandetailsip"] = scandetailsip + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="vhost"''', + (domain, latestdate,)) + scandetailsvhost = await cursor.fetchall() + self.latestscandomain["scandetailsvhost"] = scandetailsvhost + cursor = await conn.execute( + '''SELECT * FROM results WHERE domain=? AND find_date=? AND type="shodan"''', + (domain, latestdate,)) + scandetailsshodan = await cursor.fetchall() + self.latestscandomain["scandetailsshodan"] = scandetailsshodan return self.latestscandomain except Exception as e: print(e) - finally: - conn.close() - def getlatestscanresults(self, domain, previousday=False): + async def getlatestscanresults(self, domain, previousday=False): try: - conn = sqlite3.connect(self.db) - if previousday: - try: - c = conn.cursor() - c.execute(''' - SELECT DISTINCT(find_date) - FROM results - WHERE find_date=date('now', '-1 day') and domain=?''', (domain,)) - previousscandate = c.fetchone() - if not previousscandate: # When theHarvester runs first time/day this query will return. - self.previousscanresults = ["No results", "No results", "No results", "No results", "No results"] - else: - c = conn.cursor() - c.execute(''' + async with aiosqlite.connect(self.db) as conn: + if previousday: + try: + cursor = await conn.execute(''' + SELECT DISTINCT(find_date) + FROM results + WHERE find_date=date('now', '-1 day') and domain=?''', (domain,)) + previousscandate = await cursor.fetchone() + if not previousscandate: # When theHarvester runs first time/day this query will return. + self.previousscanresults = ["No results", "No results", "No results", "No results", + "No results"] + else: + cursor = await conn.execute(''' + SELECT find_date, domain, source, type, resource + FROM results + WHERE find_date=? and domain=? + ORDER BY source,type + ''', (previousscandate[0], domain,)) + results = await cursor.fetchall() + self.previousscanresults = results + return self.previousscanresults + except Exception as e: + print(f'Error in getting the previous scan results from the database: {e}') + else: + try: + cursor = await conn.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) + latestscandate = await cursor.fetchone() + cursor = await conn.execute(''' SELECT find_date, domain, source, type, resource FROM results WHERE find_date=? and domain=? 
ORDER BY source,type - ''', (previousscandate[0], domain,)) - results = c.fetchall() - self.previousscanresults = results - return self.previousscanresults - except Exception as e: - print(f'Error in getting the previous scan results from the database: {e}') - else: - try: - c = conn.cursor() - c.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) - latestscandate = c.fetchone() - c = conn.cursor() - c.execute(''' - SELECT find_date, domain, source, type, resource - FROM results - WHERE find_date=? and domain=? - ORDER BY source,type - ''', (latestscandate[0], domain,)) - results = c.fetchall() - self.latestscanresults = results - return self.latestscanresults - except Exception as e: - print(f'Error in getting the latest scan results from the database: {e}') + ''', (latestscandate[0], domain,)) + results = await cursor.fetchall() + self.latestscanresults = results + return self.latestscanresults + except Exception as e: + print(f'Error in getting the latest scan results from the database: {e}') except Exception as e: print(f'Error connecting to theHarvester database: {e}') - finally: - conn.close() - def getscanboarddata(self): + async def getscanboarddata(self): try: - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE type="host"''') - data = c.fetchone() - self.scanboarddata["host"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE type="email"''') - data = c.fetchone() - self.scanboarddata["email"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE type="ip"''') - data = c.fetchone() - self.scanboarddata["ip"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE type="vhost"''') - data = c.fetchone() - self.scanboarddata["vhost"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE type="shodan"''') - data = c.fetchone() - self.scanboarddata["shodan"] = data[0] - c.execute('''SELECT COUNT(DISTINCT(domain)) FROM results ''') - data = c.fetchone() - self.scanboarddata["domains"] = data[0] + async with aiosqlite.connect(self.db) as conn: + + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE type="host"''') + data = await cursor.fetchone() + self.scanboarddata["host"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE type="email"''') + data = await cursor.fetchone() + self.scanboarddata["email"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE type="ip"''') + data = await cursor.fetchone() + self.scanboarddata["ip"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE type="vhost"''') + data = await cursor.fetchone() + self.scanboarddata["vhost"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE type="shodan"''') + data = await cursor.fetchone() + self.scanboarddata["shodan"] = data[0] + cursor = await conn.execute('''SELECT COUNT(DISTINCT(domain)) FROM results ''') + data = await cursor.fetchone() + self.scanboarddata["domains"] = data[0] return self.scanboarddata except Exception as e: print(e) - finally: - conn.close() - def getscanhistorydomain(self, domain): + async def getscanhistorydomain(self, domain): try: - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('''SELECT DISTINCT(find_date) FROM results WHERE domain=?''', (domain,)) - dates = c.fetchall() - for date in dates: - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? 
AND type="host" AND find_date=?''', - (domain, date[0])) - counthost = c.fetchone() - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="email" AND find_date=?''', - (domain, date[0])) - countemail = c.fetchone() - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="ip" AND find_date=?''', - (domain, date[0])) - countip = c.fetchone() - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="vhost" AND find_date=?''', - (domain, date[0])) - countvhost = c.fetchone() - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="shodan" AND find_date=?''', - (domain, date[0])) - countshodan = c.fetchone() - results = { - "date": str(date[0]), - "hosts": str(counthost[0]), - "email": str(countemail[0]), - "ip": str(countip[0]), - "vhost": str(countvhost[0]), - "shodan": str(countshodan[0]) - } - self.domainscanhistory.append(results) + async with aiosqlite.connect(self.db) as conn: + cursor = await conn.execute('''SELECT DISTINCT(find_date) FROM results WHERE domain=?''', (domain,)) + dates = await cursor.fetchall() + for date in dates: + cursor = await conn.execute( + '''SELECT COUNT(*) from results WHERE domain=? AND type="host" AND find_date=?''', + (domain, date[0])) + counthost = await cursor.fetchone() + cursor = await conn.execute( + '''SELECT COUNT(*) from results WHERE domain=? AND type="email" AND find_date=?''', + (domain, date[0])) + countemail = await cursor.fetchone() + cursor = await conn.execute( + '''SELECT COUNT(*) from results WHERE domain=? AND type="ip" AND find_date=?''', + (domain, date[0])) + countip = await cursor.fetchone() + cursor = await conn.execute( + '''SELECT COUNT(*) from results WHERE domain=? AND type="vhost" AND find_date=?''', + (domain, date[0])) + countvhost = await cursor.fetchone() + cursor = await conn.execute( + '''SELECT COUNT(*) from results WHERE domain=? AND type="shodan" AND find_date=?''', + (domain, date[0])) + countshodan = await cursor.fetchone() + results = { + "date": str(date[0]), + "hosts": str(counthost[0]), + "email": str(countemail[0]), + "ip": str(countip[0]), + "vhost": str(countvhost[0]), + "shodan": str(countshodan[0]) + } + self.domainscanhistory.append(results) return self.domainscanhistory except Exception as e: print(e) - finally: - conn.close() - def getpluginscanstatistics(self): + async def getpluginscanstatistics(self): try: - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute(''' - SELECT domain,find_date, type, source, count(*) - FROM results - GROUP BY domain, find_date, type, source - ''') - results = c.fetchall() - self.scanstats = results + async with aiosqlite.connect(self.db) as conn: + cursor = await conn.execute(''' + SELECT domain,find_date, type, source, count(*) + FROM results + GROUP BY domain, find_date, type, source + ''') + results = await cursor.fetchall() + self.scanstats = results return self.scanstats except Exception as e: print(e) - finally: - conn.close() - def latestscanchartdata(self, domain): + async def latestscanchartdata(self, domain): try: - self.latestscandomain["domain"] = domain - conn = sqlite3.connect(self.db) - c = conn.cursor() - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="host"''', (domain,)) - data = c.fetchone() - self.latestscandomain["host"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? 
AND type="email"''', (domain,)) - data = c.fetchone() - self.latestscandomain["email"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="ip"''', (domain,)) - data = c.fetchone() - self.latestscandomain["ip"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="vhost"''', (domain,)) - data = c.fetchone() - self.latestscandomain["vhost"] = data[0] - c.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="shodan"''', (domain,)) - data = c.fetchone() - self.latestscandomain["shodan"] = data[0] - c.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) - data = c.fetchone() - self.latestscandomain["latestdate"] = data[0] - latestdate = data[0] - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="host"''', (domain, latestdate,)) - scandetailshost = c.fetchall() - self.latestscandomain["scandetailshost"] = scandetailshost - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="email"''', - (domain, latestdate,)) - scandetailsemail = c.fetchall() - self.latestscandomain["scandetailsemail"] = scandetailsemail - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="ip"''', (domain, latestdate,)) - scandetailsip = c.fetchall() - self.latestscandomain["scandetailsip"] = scandetailsip - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="vhost"''', - (domain, latestdate,)) - scandetailsvhost = c.fetchall() - self.latestscandomain["scandetailsvhost"] = scandetailsvhost - c.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="shodan"''', - (domain, latestdate,)) - scandetailsshodan = c.fetchall() - self.latestscandomain["scandetailsshodan"] = scandetailsshodan + async with aiosqlite.connect(self.db) as conn: + self.latestscandomain["domain"] = domain + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="host"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["host"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="email"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["email"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="ip"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["ip"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="vhost"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["vhost"] = data[0] + cursor = await conn.execute('''SELECT COUNT(*) from results WHERE domain=? AND type="shodan"''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["shodan"] = data[0] + cursor = await conn.execute('''SELECT MAX(find_date) FROM results WHERE domain=?''', (domain,)) + data = await cursor.fetchone() + self.latestscandomain["latestdate"] = data[0] + latestdate = data[0] + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="host"''', (domain, latestdate,)) + scandetailshost = await cursor.fetchall() + self.latestscandomain["scandetailshost"] = scandetailshost + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="email"''', + (domain, latestdate,)) + scandetailsemail = await cursor.fetchall() + self.latestscandomain["scandetailsemail"] = scandetailsemail + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? 
AND type="ip"''', (domain, latestdate,)) + scandetailsip = await cursor.fetchall() + self.latestscandomain["scandetailsip"] = scandetailsip + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="vhost"''', + (domain, latestdate,)) + scandetailsvhost = await cursor.fetchall() + self.latestscandomain["scandetailsvhost"] = scandetailsvhost + cursor = await conn.execute('''SELECT * FROM results WHERE domain=? AND find_date=? AND type="shodan"''', + (domain, latestdate,)) + scandetailsshodan = await cursor.fetchall() + self.latestscandomain["scandetailsshodan"] = scandetailsshodan return self.latestscandomain except Exception as e: print(e) - finally: - conn.close()