Mirror of https://gitlab.com/chicken-riders/RcGcDb.git (synced 2025-02-24 01:04:09 +00:00)
Fixed anotter oopsie
Commit e2077a7ca1 (parent d6df680e92)

 src/bot.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)
--- a/src/bot.py
+++ b/src/bot.py
@@ -70,6 +70,7 @@ class RcQueue:
         group = get_domain(wiki)
         self[group]["query"] = [x for x in self[group]["query"] if x.url == wiki]
         if not self[group]["query"]:  # if there is no wiki left in the queue, get rid of the task
+            logger.debug(f"{group} no longer has any wikis queued!")
             all_wikis[wiki].rc_active = -1
             self[group]["task"].cancel()
             del self.domain_list[group]
@@ -106,12 +107,15 @@ class RcQueue:
             for db_wiki in fetch_all.fetchall():
                 domain = get_domain(db_wiki["wiki"])
                 try:
-                    all_wikis[db_wiki["wiki"]]
-                except KeyError:
+                    if db_wiki["wiki"] not in all_wikis:
+                        raise AssertionError
+                    self.to_remove.remove(db_wiki["wiki"])
+                except AssertionError:
                     all_wikis[db_wiki["wiki"]] = Wiki()
                     all_wikis[db_wiki["wiki"]].rc_active = db_wiki["rcid"]
+                except ValueError:
+                    pass
                 try:
-                    self.to_remove.remove(db_wiki["wiki"])
                     current_domain = self[domain]
                     if not db_wiki["ROWID"] < current_domain["last_rowid"]:
                         current_domain["query"].append(QueuedWiki(db_wiki["wiki"], 20))
@@ -186,6 +190,7 @@ async def generate_domain_groups():
 async def scan_group(group: str):
     rate_limiter = rcqueue[group]["rate_limiter"]
     while True:
+        try:
             async with rcqueue.retrieve_next_queued(group) as queued_wiki:  # acquire next wiki in queue
                 logger.debug("Wiki {}".format(queued_wiki.url))
                 local_wiki = all_wikis[queued_wiki.url]  # set a reference to a wiki object from memory
@@ -263,6 +268,8 @@ async def scan_group(group: str):
                 delay_between_wikis = calculate_delay_for_group(len(rcqueue[group]["query"]))  # TODO Find a way to not execute it every wiki
                 await asyncio.sleep(delay_between_wikis)
                 DBHandler.update_db()
+        except asyncio.CancelledError:
+            return
 
 
 async def wiki_scanner():
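
The second hunk replaces the old KeyError probe (`all_wikis[db_wiki["wiki"]]`) with an explicit membership check that raises AssertionError for unregistered wikis, and moves the `self.to_remove.remove()` call under the same try so a wiki that is not scheduled for removal is swallowed as ValueError instead of erroring. A minimal, self-contained sketch of that control flow; the Wiki class, all_wikis dict, to_remove list and track() helper here are stand-ins for illustration, not the project's actual objects:

class Wiki:
    def __init__(self):
        self.rc_active = 0


all_wikis = {}                      # stand-in for the module-level wiki registry
to_remove = ["old.wiki.example"]    # wikis currently scheduled for removal


def track(wiki_url, rcid):
    try:
        if wiki_url not in all_wikis:
            raise AssertionError        # unknown wiki: jump to the registration branch
        to_remove.remove(wiki_url)      # known wiki: keep it off the removal list
    except AssertionError:
        all_wikis[wiki_url] = Wiki()    # register the wiki and remember its rcid
        all_wikis[wiki_url].rc_active = rcid
    except ValueError:
        pass                            # wiki was not on the removal list; nothing to do


track("new.wiki.example", 5)   # first call registers the wiki
track("new.wiki.example", 5)   # second call hits the ValueError branch and is a no-op
print(sorted(all_wikis))       # ['new.wiki.example']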
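
The last two hunks wrap the body of scan_group's loop in try/except asyncio.CancelledError, so cancelling a group's task (as the first hunk's `self[group]["task"].cancel()` does once a queue empties) ends the coroutine with a plain return instead of propagating the cancellation; the hunk line counts suggest the accompanying whitespace-only re-indentation is hidden by the diff view, so the context lines above are shown with the new indentation. A small asyncio sketch of that pattern, using hypothetical names:

import asyncio


async def scan_group_sketch(group):
    # Hypothetical stand-in for scan_group(): loop forever, but exit quietly
    # when the surrounding task is cancelled.
    while True:
        try:
            await asyncio.sleep(1)   # placeholder for fetching and processing a queued wiki
        except asyncio.CancelledError:
            return                   # task cancelled (e.g. its queue emptied); stop scanning


async def main():
    task = asyncio.create_task(scan_group_sketch("fandom.com"))
    await asyncio.sleep(0.1)
    task.cancel()                    # counterpart of self[group]["task"].cancel()
    await task                       # completes normally because the coroutine handled the cancellation


asyncio.run(main())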