from discovery.constants import *
from lib.core import *
from parsers import myparser
import requests
import time


class search_twitter:
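    # Searches Google for Twitter profile pages ("site:twitter.com") that
    # mention the given word, then parses people out of the raw HTML results.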

    def __init__(self, word, limit):
        # Encode spaces so the word can be embedded directly in the query string.
        self.word = word.replace(' ', '%20')
        self.results = ""
        self.totalresults = ""
        self.server = "www.google.com"
        self.hostname = "www.google.com"
        self.quantity = "100"
        self.limit = int(limit)
        self.counter = 0

    def do_search(self):
        try:
            # Google dork: restrict results to twitter.com pages titled "on Twitter" that mention the word.
            urly = "https://" + self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Atwitter.com%20intitle%3A%22on+Twitter%22%20" + self.word
        except Exception as e:
            print(e)
        headers = {'User-Agent': Core.get_user_agent()}
        try:
            r = requests.get(urly, headers=headers)
        except Exception as e:
            print(e)
            return
        self.results = r.text
        self.totalresults += self.results

    def get_people(self):
        # Extract Twitter people/handles from the accumulated search results.
        rawres = myparser.Parser(self.totalresults, self.word)
        return rawres.people_twitter()

    def process(self):
        while self.counter < self.limit:
            self.do_search()
            # Pause between requests to reduce the chance of Google rate limiting.
            time.sleep(getDelay())
            self.counter += 100
            print(f'\tSearching {self.counter} results.')
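
# Example usage (illustrative sketch only; argument values are assumptions):
#   s = search_twitter('example.com', 100)
#   s.process()
#   print(s.get_people())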