Randomization

UA randomization and delays unified across plugins.
This commit is contained in:
Laramies 2018-12-19 22:41:02 +01:00
commit 1006e05b7a
28 changed files with 614 additions and 387 deletions

View file

@ -1,12 +1,13 @@
from bs4 import BeautifulSoup
import re
class parser:
def __init__(self, results):
self.results = results
self.ipaddresses = []
self.soup = BeautifulSoup(results.results,features="html.parser")
self.soup = BeautifulSoup(results.results, features="html.parser")
self.hostnames = []
self.urls = []
self.numberofpages = 0
@ -22,7 +23,7 @@ def search_hostnames(self):
def search_ipaddresses(self):
try:
ipaddresslist = self.soup.findAll('a','SearchResult__title-text')
ipaddresslist = self.soup.findAll('a', 'SearchResult__title-text')
for ipaddressitem in ipaddresslist:
self.ipaddresses.append(ipaddressitem.text.strip())
return self.ipaddresses
@ -33,11 +34,8 @@ def search_numberofpages(self):
try:
items = self.soup.findAll(href=re.compile("page"))
for item in items:
if (item.text !='next'): #to filter out pagination
self.numberofpages+=1
if (item.text != 'next'): # to filter out pagination
self.numberofpages += 1
return self.numberofpages
except Exception as e:
print("Error occurred: " + str(e))

View file

@ -1,56 +1,56 @@
Changelog in 2.6:
-----------------
usage() improvement, CameronNemo.
Added Yahoo and Baidu search engines. Thanks to Tatanus
Added check for the existence of Requests library.
Fixed email regex to provide cleaner results. Thanks to Peter McAlpine
Changelog in 2.5:
-----------------
Changelog in 2.4:
------------------
-Fixed Linkedin Parser
-Fixed 123people
-Added Dogpile Search engine (Marcus)
-PEP8 compliant (Mario)
-Fixed XML export (Marcus)
-Expanded TLD list from http://data.iana.org/TLD/tlds-alpha-by-domain.txt (Marcus)
-DNS Bruteforce fixed (Tomas)
-Added Google Custom Search Support - Need API Key to use it.
Changelog in 2.3:
--------------
-Fixed duplicates
Changelog in 2.2:
----------------
-Added Jigsaw (www.jigsaw.com)
-Added 123People (www.123people.com)
-Added limit to google searches as the maximum results we can obtain is 1000
-Removed SET, as service was discontinued by Google
-Fixed parser to remove wrong results like emails starting with @
Changelog in 2.1:
----------------
-DNS Bruteforcer
-DNS Reverse lookups
-DNS TLD Expansion
-SHODAN DB integration
-HTML report
-DNS server selection
Changelog in 2.0:
----------------
-Complete rewrite, more modular and easy to maintain
-New sources (Exalead, Google-Profiles, Bing-Api)
-Time delay between requests, to prevent search engines from blocking our IPs
-You can start the search from the results page that you want, hence you can *resume* a search
-Export to xml
-All search engines harvesting
Changelog in 2.6:
-----------------
usage() improvement, CameronNemo.
Added Yahoo and Baidu search engines. Thanks to Tatanus
Added check for the existence of Requests library.
Fixed email regex to provide cleaner results. Thanks to Peter McAlpine
Changelog in 2.5:
-----------------
Changelog in 2.4:
------------------
-Fixed Linkedin Parser
-Fixed 123people
-Added Dogpile Search engine (Marcus)
-PEP8 compliant (Mario)
-Fixed XML export (Marcus)
-Expanded TLD list from http://data.iana.org/TLD/tlds-alpha-by-domain.txt (Marcus)
-DNS Bruteforce fixed (Tomas)
-Added Google Custom Search Support - Need API Key to use it.
Changelog in 2.3:
--------------
-Fixed duplicates
Changelog in 2.2:
----------------
-Added Jigsaw (www.jigsaw.com)
-Added 123People (www.123people.com)
-Added limit to google searches as the maximum results we can obtain is 1000
-Removed SET, as service was discontinued by Google
-Fixed parser to remove wrong results like emails starting with @
Changelog in 2.1:
----------------
-DNS Bruteforcer
-DNS Reverse lookups
-DNS TLD Expansion
-SHODAN DB integration
-HTML report
-DNS server selection
Changelog in 2.0:
----------------
-Complete rewrite, more modular and easy to maintain
-New sources (Exalead, Google-Profiles, Bing-Api)
-Time delay between requests, to prevent search engines from blocking our IPs
-You can start the search from the results page that you want, hence you can *resume* a search
-Export to xml
-All search engines harvesting

View file

@ -1,6 +1,8 @@
import myparser
import re
import requests
import time
from discovery.constants import *
class search_ask:
@ -10,18 +12,18 @@ def __init__(self, word, limit):
self.totalresults = ""
self.server = "www.ask.com"
self.hostname = "www.ask.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "100"
self.limit = int(limit)
self.counter = 0
def do_search(self):
headers = {
'User-agent':self.userAgent
'User-agent': getUserAgent()
}
url = 'http://' + self.server + '/web?q=%40' + self.word \
+ "&pu=100&page=" + str(self.counter)
h = requests.get(url=url, headers=headers)
time.sleep(getDelay())
self.results = h.text
self.totalresults += self.results

View file

@ -1,6 +1,7 @@
import myparser
import time
import requests
from discovery.constants import *
class search_baidu:
@ -9,7 +10,6 @@ def __init__(self, word, limit):
self.total_results = ""
self.server = "www.baidu.com"
self.hostname = "www.baidu.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.limit = limit
self.counter = 0
@ -17,15 +17,15 @@ def do_search(self):
url = 'http://' + self.server + "/s?wd=%40" + self.word + "&pn=" + str(self.counter) + "&oq=" + self.word
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
time.sleep(getDelay())
self.total_results += h.text
def process(self):
while self.counter <= self.limit and self.counter <= 1000:
self.do_search()
time.sleep(1)
print("\tSearching " + str(self.counter) + " results...")
self.counter += 10

View file

@ -2,6 +2,7 @@
import myparser
import time
import requests
from discovery.constants import *
class search_bing:
@ -12,7 +13,6 @@ def __init__(self, word, limit, start):
self.server = "www.bing.com"
self.apiserver = "api.search.live.net"
self.hostname = "www.bing.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "50"
self.limit = int(limit)
self.bingApi = ""
@ -23,7 +23,7 @@ def do_search(self):
'Host': self.hostname,
'Cookie':'SRCHHPGUSR=ADLT=DEMOTE&NRSLT=50',
'Accept-Language': 'en-us,en',
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=('http://'+self.server + "/search?q=%40" + self.word + "&count=50&first=" + str(self.counter)),headers=headers)
self.results = h.text
@ -34,7 +34,7 @@ def do_search_api(self):
self.word + "&sources=web&web.count=40&web.offset=" + str(self.counter)
headers = {
'Host': self.apiserver,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -45,7 +45,7 @@ def do_search_vhost(self):
'Host': self.hostname,
'Cookie': 'mkt=en-US;ui=en-US;SRCHHPGUSR=NEWWND=0&ADLT=DEMOTE&NRSLT=50',
'Accept-Language': 'en-us,en',
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
url = 'http://' + self.server + "/search?q=ip:" + self.word + "&go=&count=50&FORM=QBHL&qs=n&first=" + str(self.counter)
h = requests.get(url=url, headers=headers)
@ -72,10 +72,10 @@ def process(self, api):
while (self.counter < self.limit):
if api == "yes":
self.do_search_api()
time.sleep(0.3)
time.sleep(getDelay())
else:
self.do_search()
time.sleep(1)
time.sleep(getDelay())
self.counter += 50
print("\tSearching " + str(self.counter) + " results...")

View file

@ -1,6 +1,7 @@
import random
import requests
import censysparser
import time
from discovery.constants import *
class search_censys:
@ -11,45 +12,32 @@ def __init__(self, word):
self.results = ""
self.total_results = ""
self.server = "censys.io"
self.userAgent = ["(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
,("Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Mobile Safari/537.36"),
("Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254"),
"Mozilla/5.0 (SMART-TV; X11; Linux armv7l) AppleWebKit/537.42 (KHTML, like Gecko) Chromium/25.0.1349.2 Chrome/25.0.1349.2 Safari/537.42"
,"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991"
,"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 OPR/48.0.2685.52"
,"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"]
def do_search(self):
try:
headers = {'user-agent': random.choice(self.userAgent),'Accept':'*/*','Referer':self.url}
headers = {'user-agent': getUserAgent(), 'Accept': '*/*', 'Referer': self.url}
response = requests.get(self.url, headers=headers)
self.results = response.content
print ('-')
self.results = response.text
self.total_results += self.results
print ('-')
except Exception as e:
print(e)
def process(self):
self.url="https://" + self.server + "/ipv4/_search?q=" + str(self.word) + "&page=1"
self.url = "https://" + self.server + "/ipv4/_search?q=" + str(self.word) + "&page=1"
self.do_search()
self.counter=2
self.counter = 2
pages = censysparser.parser(self)
totalpages = pages.search_numberofpages()
while self.counter <= totalpages:
try:
self.page =str(self.counter)
self.url="https://" + self.server + "/ipv4/_search?q=" + str(self.word) + "&page=" + str(self.page)
print("\tSearching Censys results page " + self.page + "...")
self.page = str(self.counter)
self.url = "https://" + self.server + "/ipv4/_search?q=" + str(self.word) + "&page=" + str(self.page)
print("\t - Searching Censys results page " + self.page + "...")
self.do_search()
time.sleep(getDelay())
except Exception as e:
print("Error occurred: " + str(e))
self.counter+=1
self.counter += 1
def get_hostnames(self):
try:
@ -64,4 +52,3 @@ def get_ipaddresses(self):
return ips.search_ipaddresses()
except Exception as e:
print("Error occurred: " + str(e))

247
discovery/constants.py Normal file
View file

@ -0,0 +1,247 @@
"""
Module that contains constants used across plugins
Contains list of user agents and function to get random delay and user agent.
From https://github.com/tamimibrahim17/List-of-user-agents
"""
import random
# Pool of real browser User-Agent strings used by getUserAgent() below.
# Sourced from https://github.com/tamimibrahim17/List-of-user-agents.
# NOTE: the upstream list contained one scraping artifact
# ("More Internet Explorer 9.0 user agents strings -->>") that was not a
# user agent at all; it has been removed so it can never be sent as a header.
user_agents = [
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0) chromeframe/10.0.648.205",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_0) AppleWebKit/537.4 (KHTML, like Gecko) Chrome/22.0.1229.79 Safari/537.4",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36",
    "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101213 Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (X11; CrOS i686 3912.101.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; chromeframe/11.0.696.57)",
    "Mozilla/5.0 (Linux; U; Android 2.3; en-us) AppleWebKit/999+ (KHTML, like Gecko) Safari/999.9",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36",
    "Opera/9.80 (X11; Linux i686; U; ja) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 1.1.4322)",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; ja) Opera 11.00",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Opera/9.80 (Windows NT 5.1; U; cs) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
    "Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
    "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.17 Safari/537.11",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.8.36217; WOW64; en-US)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
    "Mozilla/5.0 (X11; FreeBSD amd64) AppleWebKit/536.5 (KHTML like Gecko) Chrome/19.0.1084.56 Safari/1EA69",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.4 (KHTML, like Gecko) Chrome/22.0.1229.94 Safari/537.4",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10",
    "Mozilla/5.0 (Linux; U; Android 2.3.5; zh-cn; HTC_IncredibleS_S710e Build/GRJ90) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET CLR 1.1.4322; .NET4.0C; Tablet PC 2.0)",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36",
    "Mozilla/5.0 (X11; CrOS i686 1660.57.0) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.46 Safari/535.19",
    "Mozilla/5.0 (Windows NT 6.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01",
    "Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/4.0; GTB7.4; InfoPath.3; SV1; .NET CLR 3.1.76908; WOW64; en-US)",
    "Opera/9.80 (X11; Linux x86_64; U; Ubuntu/10.10 (maverick); pl) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.2; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)",
    "Mozilla/5.0 (compatible; MSIE 7.0; Windows NT 5.0; Trident/4.0; FBSMTWB; .NET CLR 2.0.34861; .NET CLR 3.0.3746.3218; .NET CLR 3.5.33652; msn OptimizedIE8;ENUS)",
    "Opera/9.80 (Windows NT 5.1; U; en) Presto/2.9.168 Version/11.51",
    "Opera/9.80 (Windows NT 6.1; U; zh-tw) Presto/2.7.62 Version/11.01",
    "Opera/9.80 (Windows NT 5.1; U; MRA 5.5 (build 02842); ru) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC_Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Opera/9.80 (Windows NT 6.1; U; cs) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00",
    "Opera/9.80 (Windows NT 5.1; U; ru) Presto/2.7.39 Version/11.00",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.3; .NET4.0C; .NET4.0E; .NET CLR 3.5.30729; .NET CLR 3.0.30729; MS-RTC LM 8)",
    "Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.1; U; nl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.01",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
    "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
    "Opera/9.80 (Windows NT 6.1; WOW64; U; pt) Presto/2.10.229 Version/11.62",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.14 (KHTML, like Gecko) Chrome/24.0.1292.0 Safari/537.14",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; yie8)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; de) Presto/2.9.168 Version/11.52",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)",
    "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.26 Safari/537.11",
    "Opera/9.80 (Windows NT 5.1; U; zh-tw) Presto/2.8.131 Version/11.10",
    "Opera/9.80 (Windows NT 6.1; U; en-US) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.0; Trident/4.0; InfoPath.1; SV1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 3.0.04506.30)",
    "Mozilla/5.0 (Linux; U; Android 2.3.4; fr-fr; HTC Desire Build/GRJ22) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Windows NT 5.1) Gecko/20100101 Firefox/14.0 Opera/12.0",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 7.1; Trident/5.0)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.60 Safari/537.17",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Opera/9.80 (X11; Linux i686; U; fr) Presto/2.7.62 Version/11.01",
    "Mozilla/4.0 (compatible; MSIE 8.0; X11; Linux x86_64; pl) Opera 11.00",
    "Opera/9.80 (X11; Linux i686; U; hu) Presto/2.9.168 Version/11.50",
    "Opera/9.80 (X11; Linux x86_64; U; bg) Presto/2.8.131 Version/11.10",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1290.1 Safari/537.13",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14",
    "Opera/9.80 (X11; Linux i686; U; it) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.11 Safari/535.19",
    "Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Opera/9.80 (X11; Linux i686; U; es-ES) Presto/2.8.131 Version/11.11",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; fr) Opera 11.00",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; InfoPath.3; MS-RTC LM 8; .NET4.0C; .NET4.0E)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.1; SV1; .NET CLR 2.8.52393; WOW64; en-US)",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1290.1 Safari/537.13",
    "Opera/9.80 (Windows NT 5.1; U; it) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; ko-kr; LG-LU3000 Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Opera/9.80 (Windows NT 6.1; U; fi) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02",
    "Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; pl) Opera 11.00",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.15 (KHTML, like Gecko) Chrome/24.0.1295.0 Safari/537.15",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; en-us; HTC_DesireS_S510e Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36",
    "Opera/9.80 (Windows NT 6.0; U; pl) Presto/2.10.229 Version/11.62",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; en-us; HTC_DesireS_S510e Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Opera/9.80 (Windows NT 6.1; Opera Tablet/15165; U; en) Presto/2.8.149 Version/11.1",
    "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00",
    "Opera/9.80 (Windows NT 6.1; U; en-GB) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; de) Opera 11.01",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; en) Opera 11.00",
    "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Opera/9.80 (Windows NT 6.1; U; ko) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1290.1 Safari/537.13",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; Media Center PC 6.0; InfoPath.3; MS-RTC LM 8; Zune 4.7",
    "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; FunWebProducts)",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36",
    "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.6.37 Version/11.00",
    "Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1290.1 Safari/537.13",
    "Mozilla/5.0 (Windows NT 6.0; U; ja; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/11.0.696.57)",
    "Opera/9.80 (X11; Linux i686; U; ru) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; chromeframe/13.0.782.215)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/19.0.1047.0 Safari/535.22",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; .NET CLR 2.7.58687; SLCC2; Media Center PC 5.0; Zune 3.4; Tablet PC 3.6; InfoPath.3)",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
    "Mozilla/5.0 (X11; NetBSD) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36",
    "Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/5.0 Opera 11.11",
    "Mozilla/5.0 (Macintosh; AMD Mac OS X 10_8_2) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/18.6.872",
    "Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
    "Mozilla/5.0 (Linux; U; Android 4.0.3; de-ch; HTC Sensation Build/IML74K) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.0) yi; AppleWebKit/345667.12221 (KHTML, like Gecko) Chrome/23.0.1271.26 Safari/453667.1221",
    "Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)",
    "Opera/9.80 (Windows NT 5.1; U;) Presto/2.7.62 Version/11.01",
    "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16",
    "Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1042.0 Safari/535.21",
    "Mozilla/5.0 (Linux; U; Android 2.3.5; en-us; HTC Vision Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F",
    "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.1; SLCC1; .NET CLR 1.1.4322)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.151 Safari/535.19",
    "Opera/9.80 (Windows NT 6.1; U; sv) Presto/2.7.62 Version/11.01",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; Media Center PC 6.0; InfoPath.2; MS-RTC LM 8)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1284.0 Safari/537.13",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Safari/535.19",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
    "Opera/9.80 (X11; Linux x86_64; U; fr) Presto/2.9.168 Version/11.50",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; Tablet PC 2.0; InfoPath.3; .NET4.0C; .NET4.0E)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; chromeframe/12.0.742.112)",
    "Mozilla/4.0 (Compatible; MSIE 8.0; Windows NT 5.2; Trident/6.0)",
    "Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00",
    "Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36",
    "Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36",
    "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.8.99 Version/11.10",
    "Opera/9.80 (Windows NT 6.0; U; en) Presto/2.7.39 Version/11.00",
    "Mozilla/5.0 (Linux; U; Android 2.3.3; zh-tw; HTC_Pyramid Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari",
    "Mozilla/5.0 (Windows NT 5.1; U; pl; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.90 Safari/537.36",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; Media Center PC 6.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; de) Opera 11.51",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.6 Safari/537.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.6 Safari/537.11",
    "Opera/9.80 (Windows NT 6.1; U; pl) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 5.2; Trident/4.0; Media Center PC 4.0; SLCC1; .NET CLR 3.0.04320)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19",
    "Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/18.6.872.0 Safari/535.2 UNTRUSTED/1.0 3gpp-gba UNTRUSTED/1.0",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36",
    "Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00",
    "Mozilla/5.0 (Linux; U; Android 2.3.4; en-us; T-Mobile myTouch 3G Slide Build/GRI40) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1041.0 Safari/535.21",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.19 (KHTML, like Gecko) Ubuntu/11.10 Chromium/18.0.1025.142 Chrome/18.0.1025.142 Safari/535.19",
    "Mozilla/5.0 (Windows NT 5.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00",
]
def getDelay():
    """Return a randomized request delay in seconds (one of 0.5, 1.5 or 2.5).

    Used by the discovery plugins between HTTP requests to avoid
    hammering the search engines with a fixed, detectable cadence.
    """
    whole_seconds = random.randint(1, 3)
    return whole_seconds - 0.5
def getUserAgent():
    """Return one User-Agent string picked at random from the module-level
    ``user_agents`` pool, so every plugin request looks like a different
    browser.
    """
    agent = random.choice(user_agents)
    return agent

View file

@ -1,7 +1,7 @@
import requests
import myparser
import time
import random
from discovery.constants import *
class search_crtsh:
@ -10,39 +10,25 @@ def __init__(self, word):
self.results = ""
self.totalresults = ""
self.server = "https://crt.sh/?q="
self.userAgent = ["(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
, ("Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Mobile Safari/537.36"),
("Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254"),
"Mozilla/5.0 (SMART-TV; X11; Linux armv7l) AppleWebKit/537.42 (KHTML, like Gecko) Chromium/25.0.1349.2 Chrome/25.0.1349.2 Safari/537.42",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 OPR/48.0.2685.52",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"]
self.quantity = "100"
self.counter = 0
def do_search(self):
try:
urly = self.server + self.word
except Exception as e:
print(e)
try:
params = {'User-Agent': random.choice(self.userAgent)}
params = {'User-Agent': getUserAgent()}
r=requests.get(urly,headers=params)
except Exception as e:
print(e)
links = self.get_info(r.text)
for link in links:
params = {'User-Agent': random.choice(self.userAgent)}
params = {'User-Agent': getUserAgent()}
print ("\t\tSearching " + link)
r = requests.get(link, headers=params)
time.sleep(1)
time.sleep(getDelay())
self.results = r.text
self.totalresults += self.results

View file

@ -1,6 +1,7 @@
import random
import requests
import cymonparser
from discovery.constants import *
import time
class search_cymon:
@ -9,23 +10,12 @@ def __init__(self, word):
self.url = ""
self.results = ""
self.server = "cymon.io"
self.userAgent = ["(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
,("Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Mobile Safari/537.36"),
("Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) " +
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254"),
"Mozilla/5.0 (SMART-TV; X11; Linux armv7l) AppleWebKit/537.42 (KHTML, like Gecko) Chromium/25.0.1349.2 Chrome/25.0.1349.2 Safari/537.42"
,"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991"
,"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 OPR/48.0.2685.52"
,"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
,"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"]
def do_search(self):
try:
headers = {'user-agent': random.choice(self.userAgent),'Accept':'*/*','Referer':self.url}
headers = {'user-agent':getUserAgent() ,'Accept':'*/*','Referer':self.url}
response = requests.get(self.url, headers=headers)
time.sleep(getDelay())
self.results = response.content
except Exception as e:
print(e)

View file

@ -1,6 +1,7 @@
import myparser
import time
import requests
from discovery.constants import *
class search_dogpile:
@ -9,7 +10,6 @@ def __init__(self, word, limit):
self.total_results = ""
self.server = "www.dogpile.com"
self.hostname = "www.dogpile.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.limit = limit
self.counter = 0
@ -19,7 +19,7 @@ def do_search(self):
+ "&q=\"%40" + self.word + "\""
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.total_results += h.text
@ -27,7 +27,7 @@ def do_search(self):
def process(self):
while self.counter <= self.limit and self.counter <= 1000:
self.do_search()
time.sleep(1)
time.sleep(getDelay())
print("\tSearching " + str(self.counter) + " results...")
self.counter += 10

View file

@ -2,6 +2,7 @@
import re
import time
import requests
from discovery.constants import *
class search_exalead:
def __init__(self, word, limit, start):
@ -11,7 +12,6 @@ def __init__(self, word, limit, start):
self.totalresults = ""
self.server = "www.exalead.com"
self.hostname = "www.exalead.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/4.0"
self.limit = limit
self.counter = start
@ -21,7 +21,7 @@ def do_search(self):
headers = {
'Host': self.hostname,
'Referer': ("http://" +self.hostname +"/search/web/results/?q=%40" +self.word),
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -33,7 +33,7 @@ def do_search_files(self, files):
headers = {
'Host': self.hostname,
'Referer': ("http://" + self.hostname + "/search/web/results/?q=%40" + self.word),
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -70,7 +70,7 @@ def process(self):
def process_files(self, files):
while self.counter < self.limit:
self.do_search_files(files)
time.sleep(1)
time.sleep(getDelay())
more = self.check_next()
if more == "1":
self.counter += 50

View file

@ -3,6 +3,7 @@
import re
import time
import requests
from discovery.constants import *
class search_googleCSE:
@ -13,7 +14,6 @@ def __init__(self, word, limit, start):
self.totalresults = ""
self.server = "www.googleapis.com"
self.hostname = "www.googleapis.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "10"
self.limit = limit
self.counter = 1
@ -31,7 +31,7 @@ def do_search(self):
"&q=%40\"" + self.word + "\""
headers = {
'Host': self.server,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -43,7 +43,7 @@ def do_search_files(self,files):
"&q=filetype:" + files + "%20site:" + self.word
headers = {
'Host': self.server,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -99,6 +99,6 @@ def store_results(self):
def process_files(self, files):
while self.counter <= self.limit:
self.do_search_files(files)
time.sleep(1)
time.sleep(getDelay())
self.counter += 100
print("\tSearching " + str(self.counter) + " results...")

View file

@ -1,9 +1,6 @@
import string
import sys
import re
import time
import requests
import json
from discovery.constants import *
class search_googlecertificates:
# https://www.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?include_expired=true&include_subdomains=true&domain=
@ -12,29 +9,29 @@ def __init__(self, word, limit, start):
self.results = ""
self.totalresults = ""
self.server = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "100"
self.limit = limit
self.counter = start
def do_search(self):
try:
urly="https://" + self.server + "/transparencyreport/api/v3/httpsreport/ct/certsearch?include_expired=true&include_subdomains=true&domain=" + self.word
urly = "https://" + self.server + "/transparencyreport/api/v3/httpsreport/ct/certsearch?include_expired=true&include_subdomains=true&domain=" + self.word
except Exception as e:
print (e)
print(e)
try:
r=requests.get(urly)
headers = {'User-Agent': getUserAgent()}
r = requests.get(urly, headers=headers)
except Exception as e:
print (e)
print(e)
self.results = r.text
self.totalresults += self.results
def get_domains(self):
domains = []
rawres = json.loads(self.totalresults.split("\n", 2)[2])
for array in rawres[0][1]:
domains.append(array[1])
return list(set(domains))
domains = []
rawres = json.loads(self.totalresults.split("\n", 2)[2])
for array in rawres[0][1]:
domains.append(array[1])
return list(set(domains))
def process(self):
self.do_search()

View file

@ -1,5 +1,7 @@
import requests
import myparser
import time
from discovery.constants import *
class search_googleplus:
@ -9,7 +11,6 @@ def __init__(self, word, limit):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "100"
self.limit = int(limit)
self.counter = 0
@ -20,7 +21,7 @@ def do_search(self):
except Exception as e:
print(e)
try:
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent': getUserAgent()}
r=requests.get(urly,headers=headers)
except Exception as e:
print(e)
@ -34,5 +35,6 @@ def get_people(self):
def process(self):
    """Page through Google+ results, 100 at a time, until ``self.limit``.

    Sleeps a randomized delay between pages (via ``getDelay``) so the
    request cadence is not fixed.
    """
    while self.counter < self.limit:
        self.do_search()
        time.sleep(getDelay())
        self.counter += 100
        print("\tSearching " + str(self.counter) + " results..")

View file

@ -1,7 +1,7 @@
import myparser
import time
import requests
import random
from discovery.constants import *
class search_google:
@ -13,43 +13,43 @@ def __init__(self, word, limit, start):
self.dorks = []
self.links = []
self.database = "https://www.google.com/search?q="
self.userAgent = ["(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36",
"Mozilla/5.0 (Linux; Android 7.0; SM-G892A Build/NRD90M; wv)",
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/60.0.3112.107 Mobile Safari/537.36",
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36 OPR/43.0.2442.991",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36 OPR/48.0.2685.52",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko"
]
self.quantity = "100"
self.limit = limit
self.counter = start
def do_search(self):
try: #do normal scraping
urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=%40\"" + self.word + "\""
try: # do normal scraping
urly = "http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(
self.counter) + "&hl=en&meta=&q=%40\"" + self.word + "\""
except Exception as e:
print(e)
try:
params = {'User-Agent': self.userAgent[0]} #select random user agent
r=requests.get(urly,params=params)
params = {'User-Agent': getUserAgent()} # select random user agent
r = requests.get(urly, params=params)
except Exception as e:
print(e)
self.results = r.text
if (self.search(self.results)):
time.sleep(getDelay() * 4) # sleep for a longer time
else:
time.sleep(getDelay())
self.totalresults += self.results
def do_search_profiles(self):
try:
urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=site:www.google.com%20intitle:\"Google%20Profile\"%20\"Companies%20I%27ve%20worked%20for\"%20\"at%20" + self.word + "\""
urly = "http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(
self.counter) + "&hl=en&meta=&q=site:www.google.com%20intitle:\"Google%20Profile\"%20\"Companies%20I%27ve%20worked%20for\"%20\"at%20" + self.word + "\""
except Exception as e:
print(e)
try:
r=requests.get(urly)
r = requests.get(urly)
except Exception as e:
print(e)
self.results = r.text
#'&hl=en&meta=&q=site:www.google.com%20intitle:"Google%20Profile"%20"Companies%20I%27ve%20worked%20for"%20"at%20' + self.word + '"')
if (self.search(self.results)):
time.sleep(getDelay() * 2) # sleep for a longer time
else:
time.sleep(getDelay())
self.totalresults += self.results
def get_emails(self):
@ -68,7 +68,7 @@ def get_profiles(self):
rawres = myparser.parser(self.totalresults, self.word)
return rawres.profiles()
def process(self,google_dorking):
def process(self, google_dorking):
if google_dorking == False:
while self.counter <= self.limit and self.counter <= 1000:
self.do_search()
@ -76,21 +76,19 @@ def process(self,google_dorking):
time.sleep(1.5)
print("\tSearching " + str(self.counter) + " results...")
self.counter += 100
else: #google dorking is true
self.counter = 0 #reset counter
else: # google dorking is true
self.counter = 0 # reset counter
print('\n')
print("[-] Searching with Google Dorks: ")
while self.counter <= self.limit and self.counter <= 200: # only 200 dorks in list
self.googledork() #call google dorking method if user wanted it!
# more = self.check_next()
time.sleep(.5)
while self.counter <= self.limit and self.counter <= 200: # only 200 dorks in list
self.googledork() # call google dorking method if user wanted it!
print("\tSearching " + str(self.counter) + " results...")
self.counter += 100
def process_profiles(self):
while self.counter < self.limit:
self.do_search_profiles()
time.sleep(0.25)
time.sleep(getDelay())
self.counter += 100
print("\tSearching " + str(self.counter) + " results...")
@ -102,7 +100,7 @@ def append_dorks(self):
print(error)
def construct_dorks(self):
#format is: site:targetwebsite.com + space + inurl:admindork
# format is: site:targetwebsite.com + space + inurl:admindork
colon = "%3A"
plus = "%2B"
space = '+'
@ -121,30 +119,40 @@ def construct_dorks(self):
# replace links with html encoding
self.links = [self.database + space + self.word + space +
str(dork).replace(':', colon).replace('+', plus).replace('.', period).replace('"', double_quote)
.replace("*", asterick).replace('[', left_bracket).replace(']', right_bracket)
.replace('?', question_mark).replace(' ', space).replace('/', slash).replace("'", single_quote)
.replace("&", ampersand).replace('(', left_peren).replace(')', right_peren).replace('|', pipe)
.replace("*", asterick).replace('[', left_bracket).replace(']', right_bracket)
.replace('?', question_mark).replace(' ', space).replace('/', slash).replace("'",single_quote)
.replace("&", ampersand).replace('(', left_peren).replace(')', right_peren).replace('|', pipe)
for dork in self.dorks]
def googledork(self):
self.append_dorks() # call functions to create list
self.construct_dorks()
if (self.counter >= 0 and self.counter <=100):
if (self.counter >= 0 and self.counter <= 100):
self.send_dork(start=0, end=100)
elif (self.counter >= 100 and self.counter <=200):
elif (self.counter >= 100 and self.counter <= 200):
self.send_dork(start=101, end=200)
else: #only 200 dorks to prevent google from blocking ip
else: # only 200 dorks to prevent google from blocking ip
pass
def send_dork(self, start, end): # helper function to minimize code reusability
params = {'User-Agent': random.choice(self.userAgent)}
def send_dork(self, start, end): # helper function to minimize code reusability
params = {'User-Agent': getUserAgent()}
# get random user agent to try and prevent google from blocking ip
for i in range(start, end):
try:
link = self.links[i] # get link from dork list
link = self.links[i] # get link from dork list
req = requests.get(link, params=params)
time.sleep(.15) # sleep for a short time
self.results = req.text
if (self.search(self.results)):
time.sleep(getDelay() * 4) # sleep for a longer time
else:
time.sleep(getDelay())
self.totalresults += self.results
except:
continue
def search(self, text):
    """Return True when *text* contains Google's automated-traffic block page.

    Helper used after each request to decide whether to back off for a
    longer delay; ``self`` is not consulted.
    """
    blocked_marker = 'This page appears when Google automatically detects requests coming from your computer network'
    return any(blocked_marker in line for line in text.strip().splitlines())

View file

@ -1,5 +1,6 @@
import myparser
import requests
from discovery.constants import *
class search_google_labs:
@ -8,7 +9,6 @@ def __init__(self, list):
self.totalresults = ""
self.server = "labs.google.com"
self.hostname = "labs.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
id = 0
self.set = ""
for x in list:
@ -22,7 +22,7 @@ def do_search(self):
url = 'http://' + self.server + "/sets?hl-en&" + self.set
headers = {
'Host': self.server,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text

View file

@ -1,6 +1,9 @@
import myparser
import re
import requests
import time
from discovery.constants import *
# http://www.jigsaw.com/SearchAcrossCompanies.xhtml?opCode=refresh&rpage=4&mode=0&cnCountry=&order=0&orderby=0&cmName=accuvant&cnDead=false&cnExOwned=false&count=0&screenNameType=0&screenName=&omitScreenNameType=0&omitScreenName=&companyId=0&estimatedCount=277&rowsPerPage=50
class search_jigsaw:
@ -11,7 +14,6 @@ def __init__(self, word, limit):
self.totalresults = ""
self.server = "www.jigsaw.com"
self.hostname = "www.jigsaw.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.quantity = "100"
self.limit = int(limit)
self.counter = 0
@ -19,7 +21,7 @@ def __init__(self, word, limit):
def do_search(self):
url = 'http://' + self.server + "/FreeTextSearch.xhtml?opCode=search&autoSuggested=True&freeText=" + self.word
headers = {
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -41,6 +43,7 @@ def get_people(self):
def process(self):
while (self.counter < self.limit):
self.do_search()
time.sleep(getDelay())
more = self.check_next()
if more == "1":
self.counter += 100

View file

@ -1,5 +1,7 @@
import requests
import myparser
from discovery.constants import *
import time
class search_linkedin:
@ -19,7 +21,8 @@ def do_search(self):
except Exception as e:
print(e)
try:
r=requests.get(urly)
headers = {'User-Agent': getUserAgent()}
r=requests.get(urly,headers=headers)
except Exception as e:
print(e)
self.results = r.text
@ -32,5 +35,6 @@ def get_people(self):
def process(self):
    """Run LinkedIn searches in pages of 100 until ``self.limit`` is reached,
    pausing a randomized delay (``getDelay``) between pages.
    """
    while self.counter < self.limit:
        self.do_search()
        time.sleep(getDelay())
        self.counter += 100
        print("\tSearching " + str(self.counter) + " results..")

View file

@ -1,5 +1,6 @@
import requests
import myparser
from discovery.constants import *
class search_netcraft:
@ -9,7 +10,6 @@ def __init__(self, word):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
self.quantity = "100"
self.counter = 0
@ -19,7 +19,7 @@ def do_search(self):
urly="https://searchdns.netcraft.com/?restriction=site+ends+with&host=" + self.word
except Exception as e:
print(e)
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent':getUserAgent()}
try:
r=requests.get(urly,headers=headers)
except Exception as e:

View file

@ -1,5 +1,6 @@
import myparser
import requests
from discovery.constants import *
class search_pgp:
@ -9,22 +10,20 @@ def __init__(self, word):
self.server = "pgp.mit.edu"
#self.server = "pgp.rediris.es"
self.hostname = "pgp.mit.edu"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
def process(self):
print("\tSearching PGP results...")
try:
url = 'http://' + self.server + "/pks/lookup?search=" + self.word + "&op=index"
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
self.results += self.results
except Exception as e:
print("Unable to connect to PGP server: ", str(e))
pass
def get_emails(self):
rawres = myparser.parser(self.results, self.word)

View file

@ -6,7 +6,6 @@ def __init__(self, host, ports):
self.threads = 25
self.host = host
self.ports = ports
self.lock = threading.BoundedSemaphore(value=self.threads)
def port_scanner(self, host, ports):

View file

@ -1,5 +1,6 @@
import requests
import myparser
from discovery.constants import *
class search_threatcrowd:
@ -9,19 +10,17 @@ def __init__(self, word):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
self.quantity = "100"
self.counter = 0
def do_search(self):
try:
urly="https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + self.word
urly = "https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=" + self.word
except Exception as e:
print(e)
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent': getUserAgent()}
try:
r=requests.get(urly,headers=headers)
r = requests.get(urly, headers=headers)
except Exception as e:
print(e)
self.results = r.text
@ -33,4 +32,4 @@ def get_hostnames(self):
def process(self):
self.do_search()
print("\tSearching Threatcrowd results..")
print("\tSearching Threatcrowd results..")

View file

@ -1,6 +1,8 @@
import requests
import myparser
import censysparser
from discovery.constants import *
import time
class search_trello:
@ -10,7 +12,6 @@ def __init__(self, word, limit):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
self.quantity = "100"
self.limit = limit
self.counter = 0
@ -20,7 +21,7 @@ def do_search(self):
urly="https://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Atrello.com%20" + self.word
except Exception as e:
print(e)
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent': getUserAgent()}
try:
r=requests.get(urly,headers=headers)
except Exception as e:
@ -32,7 +33,6 @@ def get_emails(self):
rawres = myparser.parser(self.totalresults, self.word)
return rawres.emails()
def get_urls(self):
try:
urls = myparser.parser(self.totalresults,"trello.com")
@ -43,5 +43,6 @@ def get_urls(self):
def process(self):
while (self.counter < self.limit):
self.do_search()
time.sleep(getDelay())
self.counter += 100
print("\tSearching " + str(self.counter) + " results..")

View file

@ -1,5 +1,7 @@
import requests
import myparser
from discovery.constants import *
import time
class search_twitter:
@ -9,7 +11,6 @@ def __init__(self, word, limit):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
self.quantity = "100"
self.limit = int(limit)
self.counter = 0
@ -19,7 +20,7 @@ def do_search(self):
urly="https://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Atwitter.com%20intitle%3A%22on+Twitter%22%20" + self.word
except Exception as e:
print(e)
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent':getUserAgent()}
try:
r=requests.get(urly,headers=headers)
except Exception as e:
@ -34,5 +35,6 @@ def get_people(self):
def process(self):
while (self.counter < self.limit):
self.do_search()
time.sleep(getDelay())
self.counter += 100
print("\tSearching " + str(self.counter) + " results..")

View file

@ -1,5 +1,6 @@
import requests
import myparser
from discovery.constants import *
class search_virustotal:
@ -9,7 +10,6 @@ def __init__(self, word):
self.totalresults = ""
self.server = "www.google.com"
self.hostname = "www.google.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7"
self.quantity = "100"
self.counter = 0
@ -19,7 +19,7 @@ def do_search(self):
urly="https://www.virustotal.com/en/domain/" + self.word + "/information/"
except Exception as e:
print(e)
headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'}
headers = {'User-Agent': getUserAgent()}
try:
r=requests.get(urly,headers=headers)
except Exception as e:

View file

@ -1,6 +1,7 @@
import myparser
import time
import requests
from discovery.constants import *
class search_yahoo:
@ -9,7 +10,6 @@ def __init__(self, word, limit):
self.total_results = ""
self.server = "search.yahoo.com"
self.hostname = "search.yahoo.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.limit = limit
self.counter = 0
@ -18,7 +18,7 @@ def do_search(self):
+ "\"&b=" + str(self.counter) + "&pz=10"
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.total_results += h.text
@ -26,7 +26,7 @@ def do_search(self):
def process(self):
while self.counter <= self.limit and self.counter <= 1000:
self.do_search()
time.sleep(1)
time.sleep(getDelay())
print("\tSearching " + str(self.counter) + " results...")
self.counter += 10

View file

@ -2,6 +2,8 @@
import re
import time
import requests
from discovery.constants import *
class search_yandex:
@ -11,7 +13,6 @@ def __init__(self, word, limit, start):
self.totalresults = ""
self.server = "yandex.com"
self.hostname = "yandex.com"
self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
self.limit = limit
self.counter = start
@ -19,7 +20,7 @@ def do_search(self):
url = 'http://' + self.server + "/search?text=%40" + self.word + "&numdoc=50&lr=" + str(self.counter)
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -30,7 +31,7 @@ def do_search_files(self, files): # TODO
url = 'http://' + self.server + "/search?text=%40" + self.word + "&numdoc=50&lr=" + str(self.counter)
headers = {
'Host': self.hostname,
'User-agent': self.userAgent
'User-agent': getUserAgent()
}
h = requests.get(url=url, headers=headers)
self.results = h.text
@ -67,5 +68,5 @@ def process(self):
def process_files(self, files):
while self.counter < self.limit:
self.do_search_files(files)
time.sleep(0.3)
time.sleep(getDelay())
self.counter += 50

View file

@ -38,7 +38,6 @@
def usage():
comm = os.path.basename(sys.argv[0])
if os.path.dirname(sys.argv[0]) == os.getcwd():
@ -72,7 +71,6 @@ def usage():
def start(argv):
if len(sys.argv) < 4:
usage()
sys.exit()
@ -82,7 +80,7 @@ def start(argv):
usage()
sys.exit()
try:
db=stash.stash_manager()
db = stash.stash_manager()
db.do_init()
except Exception as e:
pass
@ -103,7 +101,7 @@ def start(argv):
google_dorking = False
limit = 500
all_ip = []
full = []
full = []
dnsserver = ""
for value in enumerate(opts):
opt = value[1][0]
@ -136,10 +134,13 @@ def start(argv):
dnstld = True
elif opt == '-b':
engines = set(arg.split(','))
supportedengines = set(["baidu","bing","crtsh","censys","cymon","bingapi","dogpile","google","googleCSE","virustotal","threatcrowd","googleplus","google-profiles",'google-certificates',"linkedin","pgp","twitter","trello","vhost","yahoo","netcraft","hunter","all"])
supportedengines = set(
["baidu", "bing", "crtsh", "censys", "cymon", "bingapi", "dogpile", "google", "googleCSE", "virustotal",
"threatcrowd", "googleplus", "google-profiles", 'google-certificates', "linkedin", "pgp", "twitter",
"trello", "vhost", "yahoo", "netcraft", "hunter", "all"])
if set(engines).issubset(supportedengines):
print("found supported engines")
print(("[-] Starting harvesting process for domain: " + word + "\n"))
print(("[-] Starting harvesting process for domain: " + word + "\n"))
for engineitem in engines:
if engineitem == "google":
print("[-] Searching in Google:")
@ -149,9 +150,9 @@ def start(argv):
hosts = search.get_hostnames()
all_emails.extend(emails)
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','google')
db.store_all(word,emails,'email','google')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'google')
db.store_all(word, emails, 'email', 'google')
if engineitem == "netcraft":
print("[-] Searching in Netcraft:")
@ -159,17 +160,17 @@ def start(argv):
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','netcraft')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'netcraft')
if engineitem == "google-certificates":
print ("[-] Searching in Google Certificate transparency report..")
search = googlecertificates.search_googlecertificates(word, limit, start)
print("[-] Searching in Google Certificate transparency report..")
search = googlecertificates.search_googlecertificates(word, limit, start)
search.process()
hosts = search.get_domains()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','google-certificates')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'google-certificates')
if engineitem == "threatcrowd":
print("[-] Searching in Threatcrowd:")
@ -177,8 +178,8 @@ def start(argv):
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','threatcrowd')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'threatcrowd')
if engineitem == "virustotal":
print("[-] Searching in Virustotal:")
@ -186,8 +187,8 @@ def start(argv):
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','virustotal')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'virustotal')
if engineitem == "crtsh":
print("[-] Searching in CRT.sh:")
@ -195,8 +196,8 @@ def start(argv):
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','CRTsh')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'CRTsh')
if engineitem == "googleCSE":
print("[-] Searching in Google Custom Search:")
@ -208,9 +209,9 @@ def start(argv):
db = stash.stash_manager()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db.store_all(word,emails,'email','googleCSE')
db=stash.stash_manager()
db.store_all(word,hosts,'host','googleCSE')
db.store_all(word, emails, 'email', 'googleCSE')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'googleCSE')
elif engineitem == "bing" or engineitem == "bingapi":
print("[-] Searching in Bing:")
@ -224,9 +225,9 @@ def start(argv):
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,emails,'email','bing')
db.store_all(word,hosts,'host','bing')
db = stash.stash_manager()
db.store_all(word, emails, 'email', 'bing')
db.store_all(word, hosts, 'host', 'bing')
elif engineitem == "dogpile":
print("[-] Searching in Dogpilesearch..")
@ -236,8 +237,8 @@ def start(argv):
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db.store_all(word,emails,'email','dogpile')
db.store_all(word,hosts,'host','dogpile')
db.store_all(word, emails, 'email', 'dogpile')
db.store_all(word, hosts, 'host', 'dogpile')
elif engineitem == "pgp":
print("[-] Searching in PGP key server..")
@ -247,9 +248,9 @@ def start(argv):
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','pgp')
db.store_all(word,emails,'email','pgp')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'pgp')
db.store_all(word, emails, 'email', 'pgp')
elif engineitem == "yahoo":
print("[-] Searching in Yahoo..")
@ -259,9 +260,9 @@ def start(argv):
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','yahoo')
db.store_all(word,emails,'email','yahoo')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'yahoo')
db.store_all(word, emails, 'email', 'yahoo')
elif engineitem == "baidu":
print("[-] Searching in Baidu..")
@ -271,9 +272,9 @@ def start(argv):
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','baidu')
db.store_all(word,emails,'email','baidu')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'baidu')
db.store_all(word, emails, 'email', 'baidu')
elif engineitem == "googleplus":
print("[-] Searching in Google+ ..")
@ -282,8 +283,8 @@ def start(argv):
people = search.get_people()
print("Users from Google+:")
print("====================")
db=stash.stash_manager()
db.store_all(word,people,'name','googleplus')
db = stash.stash_manager()
db.store_all(word, people, 'name', 'googleplus')
for user in people:
print(user)
sys.exit()
@ -294,7 +295,7 @@ def start(argv):
search.process()
people = search.get_people()
db = stash.stash_manager()
db.store_all(word,people,'name','twitter')
db.store_all(word, people, 'name', 'twitter')
print("Users from Twitter:")
print("-------------------")
for user in people:
@ -307,7 +308,7 @@ def start(argv):
search.process()
people = search.get_people()
db = stash.stash_manager()
db.store_all(word,people,'name','linkedin')
db.store_all(word, people, 'name', 'linkedin')
print("Users from Linkedin:")
print("-------------------")
for user in people:
@ -320,7 +321,7 @@ def start(argv):
search.process_profiles()
people = search.get_profiles()
db = stash.stash_manager()
db.store_all(word,people,'name','google-profile')
db.store_all(word, people, 'name', 'google-profile')
print("Users from Google profiles:")
print("---------------------------")
for users in people:
@ -330,21 +331,21 @@ def start(argv):
elif engineitem == "hunter":
print("[-] Searching in Hunter:")
from discovery import huntersearch
#import locally or won't work
# import locally or won't work
search = huntersearch.search_hunter(word, limit, start)
search.process()
emails = search.get_emails()
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','hunter')
db.store_all(word,emails,'email','hunter')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'hunter')
db.store_all(word, emails, 'email', 'hunter')
elif engineitem == "censys":
print("[-] Searching in Censys:")
from discovery import censys
#import locally or won't work
# import locally or won't work
search = censys.search_censys(word)
search.process()
ips = search.get_ipaddresses()
@ -352,73 +353,73 @@ def start(argv):
all_hosts.extend(hosts)
all_ip.extend(ips)
db = stash.stash_manager()
db.store_all(word,hosts,'host','censys')
db.store_all(word,ips,'ip','censys')
db.store_all(word, hosts, 'host', 'censys')
db.store_all(word, ips, 'ip', 'censys')
elif engineitem == "cymon":
print("[-] Searching in Cymon:")
from discovery import cymon
#import locally or won't work
# import locally or won't work
search = cymon.search_cymon(word)
search.process()
ips = search.get_ipaddresses()
all_ip.extend(ips)
db = stash.stash_manager()
db.store_all(word,ips,'ip','cymon')
db.store_all(word, ips, 'ip', 'cymon')
elif engineitem == "trello":
print("[-] Searching in Trello:")
from discovery import trello
#import locally or won't work
search = trello.search_trello(word,limit)
# import locally or won't work
search = trello.search_trello(word, limit)
search.process()
emails = search.get_emails()
all_emails.extend(emails)
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','trello')
db.store_all(word,emails,'email','trello')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'trello')
db.store_all(word, emails, 'email', 'trello')
for x in all_hosts:
print (x)
print(x)
sys.exit()
elif engineitem == "all":
print(("Full harvest on " + word))
all_emails = []
all_hosts = []
print("[-] Searching in Google..")
search = googlesearch.search_google(word, limit, start)
search.process(google_dorking)
emails = search.get_emails()
hosts = search.get_hostnames()
all_emails.extend(emails)
db=stash.stash_manager()
db.store_all(word,emails,'email','google')
db = stash.stash_manager()
db.store_all(word, emails, 'email', 'google')
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','google')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'google')
print("[-] Searching in PGP Key server..")
search = pgpsearch.search_pgp(word)
search.process()
emails = search.get_emails()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','PGP')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'PGP')
all_emails.extend(emails)
db=stash.stash_manager()
db.store_all(word,emails,'email','PGP')
db = stash.stash_manager()
db.store_all(word, emails, 'email', 'PGP')
print("[-] Searching in Netcraft server..")
search = netcraft.search_netcraft(word)
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','netcraft')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'netcraft')
print("[-] Searching in ThreatCrowd server..")
try:
@ -426,25 +427,26 @@ def start(argv):
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','threatcrowd')
except Exception: pass
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'threatcrowd')
except Exception:
pass
print("[-] Searching in CRTSH server..")
search = crtsh.search_crtsh(word)
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','CRTsh')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'CRTsh')
print("[-] Searching in Virustotal server..")
search = virustotal.search_virustotal(word)
search.process()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','virustotal')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'virustotal')
print("[-] Searching in Bing..")
bingapi = "no"
@ -453,16 +455,16 @@ def start(argv):
emails = search.get_emails()
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db=stash.stash_manager()
db.store_all(word,hosts,'host','bing')
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'bing')
all_emails.extend(emails)
#Clean up email list, sort and uniq
#all_emails=sorted(set(all_emails))
db.store_all(word,emails,'email','bing')
# Clean up email list, sort and uniq
# all_emails=sorted(set(all_emails))
db.store_all(word, emails, 'email', 'bing')
print("[-] Searching in Hunter:")
from discovery import huntersearch
#import locally
# import locally
search = huntersearch.search_hunter(word, limit, start)
search.process()
emails = search.get_emails()
@ -471,17 +473,17 @@ def start(argv):
db = stash.stash_manager()
db.store_all(word, hosts, 'host', 'hunter')
all_emails.extend(emails)
#all_emails = sorted(set(all_emails))
db.store_all(word,emails,'email','hunter')
# all_emails = sorted(set(all_emails))
db.store_all(word, emails, 'email', 'hunter')
print ("[-] Searching in Google Certificate transparency report..")
print("[-] Searching in Google Certificate transparency report..")
search = googlecertificates.search_googlecertificates(word, limit, start)
search.process()
domains = search.get_domains()
all_hosts.extend(domains)
db = stash.stash_manager()
db.store_all(word, domains, 'host', 'google-certificates')
print("[-] Searching in Censys:")
from discovery import censys
search = censys.search_censys(word)
@ -491,15 +493,16 @@ def start(argv):
hosts = search.get_hostnames()
all_hosts.extend(hosts)
db = stash.stash_manager()
db.store_all(word,ips,'ip','censys')
db.store_all(word,hosts,'host','censys')
db.store_all(word, ips, 'ip', 'censys')
db.store_all(word, hosts, 'host', 'censys')
else:
usage()
print("Invalid search engine, try with: baidu, bing, bingapi, crtsh, censys, cymon, dogpile, google, googleCSE, virustotal, netcraft, googleplus, google-profiles, linkedin, pgp, twitter, vhost, yahoo, hunter, all")
print(
"Invalid search engine, try with: baidu, bing, bingapi, crtsh, censys, cymon, dogpile, google, googleCSE, virustotal, netcraft, googleplus, google-profiles, linkedin, pgp, twitter, vhost, yahoo, hunter, all")
sys.exit()
#Results############################################################
# Results############################################################
print("\n\033[1;32;40mHarvesting results")
if (len(all_ip) == 0):
print("No IP addresses found")
@ -511,7 +514,7 @@ def start(argv):
print("\n\n[+] Emails found:")
print("------------------")
#Sanity check to see if all_emails and all_hosts is defined
# Sanity check to see if all_emails and all_hosts is defined
try:
all_emails
except NameError:
@ -535,7 +538,7 @@ def start(argv):
else:
total = len(all_hosts)
print(("\nTotal hosts: " + str(total) + "\n"))
all_hosts=sorted(set(all_hosts))
all_hosts = sorted(set(all_hosts))
print("\033[94m[-] Resolving hostnames IPs...\033[1;33;40m \n ")
full_host = hostchecker.Checker(all_hosts)
full = full_host.check()
@ -548,10 +551,10 @@ def start(argv):
else:
host_ip.append(ip.lower())
db=stash.stash_manager()
db.store_all(word,host_ip,'ip','DNS-resolver')
#DNS Brute force####################################################
db = stash.stash_manager()
db.store_all(word, host_ip, 'ip', 'DNS-resolver')
# DNS Brute force####################################################
dnsres = []
if dnsbrute == True:
print("\n\033[94m[-] Starting DNS brute force: \033[1;33;40m")
@ -564,33 +567,33 @@ def start(argv):
dnsres.append(y.split(':')[0])
if y not in full:
full.append(y)
db=stash.stash_manager()
db.store_all(word,dnsres,'host','dns_bruteforce')
db = stash.stash_manager()
db.store_all(word, dnsres, 'host', 'dns_bruteforce')
#Port Scanning #################################################
# Port Scanning #################################################
if ports_scanning == True:
print("\n\n\033[1;32;40m[-] Scanning ports (active):\n")
for x in full:
host = x.split(':')[1]
domain = x.split(':')[0]
if host != "empty" :
print(("- Scanning : " + host))
ports = [80,443,22,8080,21]
try:
scan = port_scanner.port_scan(host,ports)
openports = scan.process()
if len(openports) > 1:
print(("\t\033[91m Detected open ports: " + ','.join(str(e) for e in openports) + "\033[1;32;40m"))
takeover_check = 'True'
if takeover_check == 'True':
if len(openports) > 0:
search_take = takeover.take_over(domain)
search_take.process()
except Exception as e:
print(e)
print("\n\n\033[1;32;40m[-] Scanning ports (active):\n")
for x in full:
host = x.split(':')[1]
domain = x.split(':')[0]
if host != "empty":
print(("- Scanning : " + host))
ports = [80, 443, 22, 8080, 21]
try:
scan = port_scanner.port_scan(host, ports)
openports = scan.process()
if len(openports) > 1:
print(("\t\033[91m Detected open ports: " + ','.join(
str(e) for e in openports) + "\033[1;32;40m"))
takeover_check = 'True'
if takeover_check == 'True':
if len(openports) > 0:
search_take = takeover.take_over(domain)
search_take.process()
except Exception as e:
print(e)
#DNS reverse lookup#################################################
# DNS reverse lookup#################################################
dnsrev = []
if dnslookup == True:
print("\n[+] Starting active queries:")
@ -619,8 +622,8 @@ def start(argv):
print("---------------------------------")
for xh in dnsrev:
print(xh)
#DNS TLD expansion###################################################
# DNS TLD expansion###################################################
dnstldres = []
if dnstld == True:
print("[-] Starting DNS TLD expansion:")
@ -634,7 +637,7 @@ def start(argv):
if y not in full:
full.append(y)
#Virtual hosts search###############################################
# Virtual hosts search###############################################
if virtual == "basic":
print("\n[+] Virtual hosts:")
print("------------------")
@ -643,22 +646,22 @@ def start(argv):
search.process_vhost()
res = search.get_allhostnames()
for x in res:
x = re.sub(r'[[\<\/?]*[\w]*>]*','',x)
x = re.sub('<','',x)
x = re.sub('>','',x)
x = re.sub(r'[[\<\/?]*[\w]*>]*', '', x)
x = re.sub('<', '', x)
x = re.sub('>', '', x)
print((l + "\t" + x))
vhost.append(l + ":" + x)
full.append(l + ":" + x)
vhost=sorted(set(vhost))
vhost = sorted(set(vhost))
else:
pass
#Shodan search####################################################
# Shodan search####################################################
shodanres = []
shodanvisited = []
if shodan == True:
print("\n\n\033[1;32;40m[-] Shodan DB search (passive):\n")
if full ==[]:
print ('No host to search, exiting.')
if full == []:
print('No host to search, exiting.')
sys.exit()
for x in full:
@ -669,15 +672,16 @@ def start(argv):
a = shodansearch.search_shodan(ip)
shodanvisited.append(ip)
results = a.run()
#time.sleep(2)
# time.sleep(2)
for res in results['data']:
shodanres.append(str("%s:%s - %s - %s - %s," % (res['ip_str'], res['port'],res['os'],res['isp'])))
shodanres.append(
str("%s:%s - %s - %s - %s," % (res['ip_str'], res['port'], res['os'], res['isp'])))
except Exception as e:
pass
print("\n [+] Shodan results:")
print("------------------")
for x in shodanres:
print (x)
print(x)
else:
pass
@ -697,14 +701,14 @@ def start(argv):
else:
pass
#Reporting#######################################################
# Reporting#######################################################
if filename != "":
try:
print("NEW REPORTING BEGINS:")
db = stash.stash_manager()
scanboarddata = db.getscanboarddata()
latestscanresults = db.getlatestscanresults(word)
previousscanresults = db.getlatestscanresults(word,previousday=True)
previousscanresults = db.getlatestscanresults(word, previousday=True)
latestscanchartdata = db.latestscanchartdata(word)
scanhistorydomain = db.getscanhistorydomain(word)
pluginscanstatistics = db.getpluginscanstatistics()
@ -720,12 +724,13 @@ def start(argv):
HTMLcode += graph.drawscattergraphscanhistory(word, scanhistorydomain)
HTMLcode += generator.generatepluginscanstatistics(pluginscanstatistics)
HTMLcode += generator.generatedashboardcode(scanboarddata)
HTMLcode += '<p><span style="color: #000000;">Report generated on '+ str(datetime.datetime.now())+'</span></p>'
HTMLcode +='''
</body>
</html>
'''
Html_file= open("report.html","w")
HTMLcode += '<p><span style="color: #000000;">Report generated on ' + str(
datetime.datetime.now()) + '</span></p>'
HTMLcode += '''
</body>
</html>
'''
Html_file = open("report.html", "w")
Html_file.write(HTMLcode)
Html_file.close()
print("NEW REPORTING FINISHED!")
@ -755,16 +760,15 @@ def start(argv):
for x in full:
x = x.split(":")
if len(x) == 2:
file.write('<host>' + '<ip>' + x[1] + '</ip><hostname>' + x[0] + '</hostname>' + '</host>')
file.write('<host>' + '<ip>' + x[1] + '</ip><hostname>' + x[0] + '</hostname>' + '</host>')
else:
file.write('<host>' + x + '</host>')
for x in vhost:
x = x.split(":")
if len(x) == 2:
file.write('<vhost>' + '<ip>' + x[1] + '</ip><hostname>' + x[0] + '</hostname>' + '</vhost>')
file.write('<vhost>' + '<ip>' + x[1] + '</ip><hostname>' + x[0] + '</hostname>' + '</vhost>')
else:
file.write('<vhost>' + x + '</vhost>')
if shodanres != []:
shodanalysis = []
for x in shodanres:
@ -773,29 +777,26 @@ def start(argv):
# print " res[1] " + res[1] # banner/info
# print " res[2] " + res[2] # port
file.write('<shodan>')
#page.h3(res[0])
# page.h3(res[0])
file.write('<host>' + res[0] + '</host>')
#page.a("Port :" + res[2])
# page.a("Port :" + res[2])
file.write('<port>' + res[2] + '</port>')
#page.pre(res[1])
# page.pre(res[1])
file.write('<banner><!--' + res[1] + '--></banner>')
reg_server = re.compile('Server:.*')
temp = reg_server.findall(res[1])
if temp != []:
shodanalysis.append(res[0] + ":" + temp[0])
file.write('</shodan>')
if shodanalysis != []:
shodanalysis=sorted(set(shodanalysis))
shodanalysis = sorted(set(shodanalysis))
file.write('<servers>')
for x in shodanalysis:
#page.pre(x)
# page.pre(x)
file.write('<server>' + x + '</server>')
file.write('</servers>')
file.write('</theHarvester>')
file.flush()
file.close()
@ -804,6 +805,7 @@ def start(argv):
print(("Error saving XML file: " + str(er)))
sys.exit()
if __name__ == "__main__":
try:
start(sys.argv[1:])
@ -812,4 +814,4 @@ def start(argv):
except Exception:
import traceback
print(traceback.print_exc())
sys.exit()
sys.exit()