Compare commits


No commits in common. "0b3a868640f58d0e8d13948d4a915fe479b261df" and "3d6a2c5ac0504ef7978851e7c6bde1a5abbbf8c7" have entirely different histories.

3 changed files with 6 additions and 9 deletions

View file

@@ -11,7 +11,8 @@ import sys
 import aiohttp
-from src.exceptions import WikiNotFoundError, WikiOnTimeout, NoAvailableWiki
+from src.exceptions import WikiOnTimeout
+from src.exceptions import WikiNotFoundError
 from src.misc import LimitedList
 from src.discord.message import DiscordMessage
 from src.config import settings
@@ -60,8 +61,7 @@ class Domain:
             "msgdelay": {"min": min(self.message_timings or [0]), "avg": int(sum(self.message_timings)/(len(self.message_timings) or 1)),
                          "max": max(self.message_timings or [0])},
             "discord_messages": self.total_discord_messages_sent,
-            "last_failure_report": self.last_failure_report,
-            "wikis_on_timeout": [name for name, value in self.wikis.items() if value.is_on_timeout()]
+            "last_failure_report": self.last_failure_report
         }
         return dict_obj
     def __repr__(self):
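
For orientation, the dictionary built in the hunk above ends up looking roughly like the sketch below; every value is invented and the trailing comments are assumptions, only the key names come from the diff. The sole difference between the two sides is that one of them also reports a wikis_on_timeout list.

```python
# Hypothetical statistics payload; all values are invented for illustration.
domain_stats = {
    "msgdelay": {"min": 0, "avg": 2, "max": 7},     # derived from self.message_timings
    "discord_messages": 1534,                       # self.total_discord_messages_sent
    "last_failure_report": 1700000000,              # self.last_failure_report
    # Present only on one side of the hunk:
    "wikis_on_timeout": ["community.example.com"],
}
```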
@@ -135,7 +135,7 @@ class Domain:
         :raises StopIteration: When None has been passed as wiki, means there are no more wikis in the queue besides timeouted ones
         """
         if wiki is None:
-            raise NoAvailableWiki
+            raise StopIteration
         try:
             await wiki.scan()
         except WikiNotFoundError as e:
@@ -233,7 +233,7 @@ class Domain:
             while True:
                 await asyncio.sleep(self.calculate_sleep_time(len(self)))  # To make sure that we don't spam domains with one wiki every second we calculate a sane timeout for domains with few wikis
                 await self.run_wiki_scan(self.find_first_not_on_timeout(), "regular check")
-        except NoAvailableWiki:
+        except StopIteration:
             logger.debug(f"Domain {self.name} received StopIteration, returning from regular_scheduler...")
             return
         except Exception as e:
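
Both sides of the two hunks above implement the same control flow: the scheduler loop keeps asking for the next wiki that is not on timeout and leaves regular_scheduler once run_wiki_scan signals, via an exception, that nothing is left to scan. The sketch below is a minimal, self-contained reading of that pattern using the dedicated-exception variant; only the names visible in the hunks (Domain, NoAvailableWiki, run_wiki_scan, find_first_not_on_timeout, regular_scheduler) come from the diff, everything else is assumed.

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


class NoAvailableWiki(Exception):
    """Assumed meaning: every wiki in this domain's queue is currently on timeout."""


class Domain:
    def __init__(self, name: str, wikis: dict):
        self.name = name
        self.wikis = wikis  # assumed: mapping of wiki name -> object with .is_on_timeout() and .scan()

    def find_first_not_on_timeout(self):
        # Assumed behaviour, inferred from the docstring in the hunk above:
        # return the first wiki that is not on timeout, or None when there is none.
        return next((wiki for wiki in self.wikis.values() if not wiki.is_on_timeout()), None)

    async def run_wiki_scan(self, wiki, reason: str):
        if wiki is None:
            # Dedicated exception rather than StopIteration; a StopIteration raised
            # inside a coroutine is replaced with RuntimeError by PEP 479 machinery.
            raise NoAvailableWiki
        await wiki.scan()

    async def regular_scheduler(self):
        try:
            while True:
                await asyncio.sleep(1.0)  # stands in for calculate_sleep_time(len(self))
                await self.run_wiki_scan(self.find_first_not_on_timeout(), "regular check")
        except NoAvailableWiki:
            logger.debug("Domain %s has nothing left to scan, returning from regular_scheduler...", self.name)
            return
```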

View file

@@ -26,9 +26,6 @@ class OtherWikiError(Exception):
 class QueueEmpty(Exception):
     pass
-class NoAvailableWiki(Exception):
-    pass
 class ListFull(Exception):
     pass

View file

@@ -486,7 +486,7 @@ class Wiki:
             request = await self.fetch_wiki(amount=amount)
             self.client.last_request = request
         except (aiohttp.ServerTimeoutError, asyncio.TimeoutError, WikiServerError, ServerError) as e:
-            self.statistics.update(Log(type=LogType.CONNECTION_ERROR, title=str(str(e.exception) if hasattr(e, "exception") else repr(e))))
+            self.statistics.update(Log(type=LogType.CONNECTION_ERROR, title=str(e.exception)))
             amount_of_failures = len(self.statistics.last_connection_failures()[0])
             if amount_of_failures < 2:
                 await asyncio.sleep(5.0)
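
The single change in this last hunk is how the connection-error log title is built: one side reads e.exception unconditionally, the other guards the attribute with hasattr and falls back to repr(e) for exceptions that do not carry it. A small sketch of the guarded variant is below; the shape of WikiServerError (wrapping the underlying error as .exception) is an assumption made for illustration, only the names come from the hunk.

```python
import asyncio

import aiohttp


class WikiServerError(Exception):
    """Assumed project wrapper that stores the underlying error as .exception."""
    def __init__(self, exception: BaseException):
        super().__init__(str(exception))
        self.exception = exception


def connection_error_title(e: BaseException) -> str:
    # Guarded variant from the hunk: only wrapper exceptions expose .exception;
    # plain aiohttp/asyncio timeouts do not, so fall back to repr() for those.
    return str(e.exception) if hasattr(e, "exception") else repr(e)


print(connection_error_title(WikiServerError(asyncio.TimeoutError("read timed out"))))
print(connection_error_title(aiohttp.ServerTimeoutError("request timed out")))
```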