Increase aiohttp max_field_size, adjust request intervals, and fix brave

The `aiohttp.ClientSession` used by `AsyncFetcher.fetch_all` is now created with `max_field_size=13000`, raising the maximum allowed size of a response header field above aiohttp's 8190-byte default. This change should help handle responses from servers that send very large headers. Additionally, the delay between asynchronous requests in bravesearch has been increased slightly, from `get_delay() + 10` to `get_delay() + 15` seconds, to reduce the risk of hitting rate limits, and the extra 80-second sleep before breaking out of the paging loop on an anti-bot response has been removed. A comment has also been reworded for better readability.
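A minimal sketch of the two adjustments in isolation, assuming a recent aiohttp (3.9+, where `ClientSession` accepts `max_field_size` directly) and a stand-in for theHarvester's `get_delay()` helper; the function name `fetch_pages`, the timeout value, and the example URL are illustrative only:

import asyncio

import aiohttp


# Illustrative stand-in for theHarvester's get_delay() helper; the real value is randomized.
def get_delay() -> float:
    return 2.0


async def fetch_pages(urls: list[str], headers: dict | None = None) -> list[str]:
    # max_field_size raises the per-header-field limit from aiohttp's 8190-byte default
    # to 13000 bytes, so responses with very long header values no longer fail to parse.
    timeout = aiohttp.ClientTimeout(total=60)
    async with aiohttp.ClientSession(
        headers=headers, timeout=timeout, max_field_size=13000
    ) as session:
        results = []
        for url in urls:
            async with session.get(url) as resp:
                results.append(await resp.text())
            # Pause between consecutive requests; the commit bumps the fixed offset
            # from +10 to +15 seconds to stay further below the rate limit.
            await asyncio.sleep(get_delay() + 15)
        return results


# asyncio.run(fetch_pages(["https://search.brave.com/search?q=example"]))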
L1ghtn1ng 2023-12-18 14:31:04 +00:00
parent 467a33fd3a
commit 82bdd0fec3
2 changed files with 3 additions and 4 deletions

@@ -19,7 +19,7 @@ async def do_search(self):
         for query in [f'"{self.word}"', f"site:{self.word}"]:
             try:
                 for offset in range(0, 50):
-                    # To reduce total number of requests only two queries are made "self.word" and site:self.word
+                    # To reduce the total number of requests, only two queries are made "self.word" and site:self.word
                     current_url = f"{self.server}{query}&offset={offset}&source=web&show_local=0&spellcheck=0"
                     resp = await AsyncFetcher.fetch_all(
                         [current_url], headers=headers, proxy=self.proxy
@@ -35,9 +35,8 @@ async def do_search(self):
                         and "robot" in resp[0]
                         or "Robot" in resp[0]
                     ):
-                        await asyncio.sleep(get_delay() + 80)
                         break
-                    await asyncio.sleep(get_delay() + 10)
+                    await asyncio.sleep(get_delay() + 15)
             except Exception as e:
                 print(f"An exception has occurred in bravesearch: {e}")
                 await asyncio.sleep(get_delay() + 80)
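Reading the two bravesearch hunks together, the paging loop now relies only on the per-request pause for pacing and breaks immediately on an anti-bot page. A simplified sketch of that flow, with the fetcher and delay helper passed in as parameters because the surrounding class is not shown here; the function name paged_search is illustrative:

import asyncio


async def paged_search(word, server, fetch, get_delay, headers=None, proxy=None):
    responses = []
    for query in [f'"{word}"', f"site:{word}"]:
        try:
            for offset in range(0, 50):
                url = f"{server}{query}&offset={offset}&source=web&show_local=0&spellcheck=0"
                resp = await fetch([url], headers=headers, proxy=proxy)
                # If Brave serves an anti-bot page, stop paging this query right away;
                # the extra 80-second sleep that used to precede the break is gone.
                if resp and ("robot" in resp[0] or "Robot" in resp[0]):
                    break
                responses.append(resp[0])
                # Slightly longer pause between offsets: +15 seconds instead of +10.
                await asyncio.sleep(get_delay() + 15)
        except Exception as e:
            print(f"An exception has occurred in bravesearch: {e}")
            await asyncio.sleep(get_delay() + 80)
    return responses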

@@ -464,7 +464,7 @@ async def fetch_all(
         if len(params) == 0:
             async with aiohttp.ClientSession(
-                headers=headers, timeout=timeout
+                headers=headers, timeout=timeout, max_field_size=13000
             ) as session:
                 if proxy:
                     return await asyncio.gather(
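For context, a minimal sketch of a fetch_all-style helper built around the new session settings; the _get_text helper, the proxy handling, the timeout value, and the text extraction are simplified assumptions rather than theHarvester's actual implementation:

import asyncio

import aiohttp


async def _get_text(session: aiohttp.ClientSession, url: str, proxy: str | None = None) -> str:
    # The session-level max_field_size applies to every request made through it.
    async with session.get(url, proxy=proxy) as resp:
        return await resp.text()


async def fetch_all(urls, headers=None, proxy: str | None = None):
    timeout = aiohttp.ClientTimeout(total=720)  # illustrative timeout, not the project's value
    async with aiohttp.ClientSession(
        headers=headers, timeout=timeout, max_field_size=13000
    ) as session:
        if proxy:
            return await asyncio.gather(*(_get_text(session, u, proxy=proxy) for u in urls))
        return await asyncio.gather(*(_get_text(session, u) for u in urls))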