Merge pull request #6 from cclauss/patch-3

Add functions ask(), bing(), google(), and yahoo()
This commit is contained in:
maldevel 2016-04-22 10:45:52 +03:00
commit 4b413d6ad6

View file

@ -97,14 +97,10 @@ class SearchEngine:
def do_search(self):
try:
urly = self.urlPattern.format(counter=str(self.counter), word=self.word)
headers = {
'User-Agent': self.userAgent,
}
headers = {'User-Agent': self.userAgent}
if(self.proxy):
proxies = {
self.proxy.scheme: "http://" + self.proxy.netloc
}
proxies = {self.proxy.scheme: "http://" + self.proxy.netloc}
r=requests.get(urly, headers=headers, proxies=proxies)
else:
r=requests.get(urly, headers=headers)
@ -154,14 +150,43 @@ def checkProxyUrl(url):
def limit_type(x):
    """argparse type-checker for the results limit option.

    Converts *x* to int and returns it when positive; raises
    argparse.ArgumentTypeError for zero or negative values (argparse
    reports ValueError from int() as an invalid value on its own).
    """
    x = int(x)
    if x > 0:
        return x
    raise argparse.ArgumentTypeError("Minimum results limit is 1.")
def engine_type(engine):
    """argparse type-checker for the search-engine option.

    Returns *engine* unchanged when it names a supported engine;
    raises argparse.ArgumentTypeError listing the valid choices otherwise.
    """
    engines = 'all ask bing google yahoo'.split()
    if engine in engines:
        return engine
    # Fixed: the raise below was missing its closing parenthesis
    # (SyntaxError as committed); also dropped the stale duplicate
    # definition of engine_type that the diff left above this one.
    raise argparse.ArgumentTypeError(
        "Invalid search engine, try with: {}.".format(', '.join(engines)))
def ask(domain, limit, userAgent, proxy):
    """Search ASK for email addresses on *domain* and return them."""
    print(green("[+] Searching in ASK..\n"))
    pattern = "http://www.ask.com/web?q=%40{word}"
    engine = SearchEngine(pattern, domain, limit, 0, 100, userAgent, proxy)
    engine.process()
    return engine.get_emails()
def bing(domain, limit, userAgent, proxy):
    """Search Bing for email addresses on *domain* and return them."""
    print(green("[+] Searching in Bing..\n"))
    pattern = "http://www.bing.com/search?q=%40{word}&count=50&first={counter}"
    engine = SearchEngine(pattern, domain, limit, 0, 50, userAgent, proxy)
    engine.process()
    return engine.get_emails()
def google(domain, limit, userAgent, proxy):
    """Search Google for email addresses on *domain* and return them."""
    print(green("[+] Searching in Google..\n"))
    pattern = 'http://www.google.com/search?num=100&start={counter}&hl=en&q=%40"{word}"'
    engine = SearchEngine(pattern, domain, limit, 0, 100, userAgent, proxy)
    engine.process()
    return engine.get_emails()
def yahoo(domain, limit, userAgent, proxy):
    """Search Yahoo for email addresses on *domain* and return them."""
    print(green("[+] Searching in Yahoo..\n"))
    pattern = "http://search.yahoo.com/search?p=%40{word}&n=100&ei=UTF-8&va_vt=any&vo_vt=any&ve_vt=any&vp_vt=any&vd=all&vst=0&vf=all&vm=p&fl=0&fr=yfp-t-152&xargs=0&pstart=1&b={counter}"
    engine = SearchEngine(pattern, domain, limit, 1, 100, userAgent, proxy)
    engine.process()
    return engine.get_emails()
if __name__ == '__main__':
@ -209,65 +234,32 @@ if __name__ == '__main__':
limit = args.limit
engine = args.engine
googleUrl = 'http://www.google.com/search?num=100&start={counter}&hl=en&q=%40"{word}"'
bingUrl = "http://www.bing.com/search?q=%40{word}&count=50&first={counter}"
askUrl = "http://www.ask.com/web?q=%40{word}"
yahooUrl = "http://search.yahoo.com/search?p=%40{word}&n=100&ei=UTF-8&va_vt=any&vo_vt=any&ve_vt=any&vp_vt=any&vd=all&vst=0&vf=all&vm=p&fl=0&fr=yfp-t-152&xargs=0&pstart=1&b={counter}"
if engine == "google":
print(green("[+] Searching in Google..\n"))
search = SearchEngine(googleUrl, domain, limit, 0, 100, userAgent, args.proxy)
search.process()
all_emails = search.get_emails()
elif engine == "bing":
print(green("[+] Searching in Bing..\n"))
search = SearchEngine(bingUrl, domain, limit, 0, 50, userAgent, args.proxy)
search.process()
all_emails = search.get_emails()
elif engine == "ask":
print(green("[+] Searching in ASK..\n"))
search = SearchEngine(askUrl, domain, limit, 0, 100, userAgent, args.proxy)
search.process()
all_emails = search.get_emails()
elif engine == "yahoo":
print(green("[+] Searching in Yahoo..\n"))
search = SearchEngine(yahooUrl, domain, limit, 1, 100, userAgent, args.proxy)
search.process()
all_emails = search.get_emails()
elif engine == "all":
if engine == "all":
print(green("[+] Searching everywhere..\n"))
all_emails = []
print(green("[+] Searching in Google..\n"))
search = SearchEngine(googleUrl, domain, limit, 0, 100, userAgent, args.proxy)
search.process()
all_emails.extend(search.get_emails())
print(green("\n[+] Searching in Bing..\n"))
search = SearchEngine(bingUrl, domain, limit, 0, 50, userAgent, args.proxy)
search.process()
all_emails.extend(search.get_emails())
print(green("\n[+] Searching in ASK..\n"))
search = SearchEngine(askUrl, domain, limit, 0, 100, userAgent, args.proxy)
search.process()
all_emails.extend(search.get_emails())
print(green("\n[+] Searching in Yahoo..\n"))
search = SearchEngine(yahooUrl, domain, limit, 1, 100, userAgent, args.proxy)
search.process()
all_emails.extend(search.get_emails())
all_emails = unique(all_emails)
print(green("\n\n[+] Emails found:"))
print(green("-" * 18))
all_emails = (ask(domain, limit, userAgent, args.proxy) +
bing(domain, limit, userAgent, args.proxy) +
yahoo(domain, limit, userAgent, args.proxy) +
google(domain, limit, userAgent, args.proxy))
elif engine == "ask":
all_emails = ask(domain, limit, userAgent, args.proxy)
elif engine == "bing":
all_emails = bing(domain, limit, userAgent, args.proxy)
elif engine == "yahoo":
all_emails = yahoo(domain, limit, userAgent, args.proxy)
elif engine == "google":
all_emails = google(domain, limit, userAgent, args.proxy)
all_emails = unique(all_emails)
if not all_emails:
print(red("No emails found"))
sys.exit(3)
else:
for emails in all_emails:
print(emails)
msg = "\n\n[+] {} mails found:".format(len(all_emails))
print(green(msg))
print(green("-" * len(msg)))
for emails in all_emails:
print(emails)
if filename:
try:
@ -285,8 +277,8 @@ if __name__ == '__main__':
filename = filename.split(".")[0] + ".xml"
with open(filename, 'w') as out_file:
out_file.write('<?xml version="1.0" encoding="UTF-8"?><EmailHarvester>')
for x in all_emails:
out_file.write('<email>{}</email>'.format(x))
for email in all_emails:
out_file.write('<email>{}</email>'.format(email))
out_file.write('</EmailHarvester>')
print(green("Files saved!"))
except Exception as er: