2020-07-09 22:24:23 +00:00
|
|
|
import logging.config
|
|
|
|
from src.config import settings
|
2020-07-09 23:58:25 +00:00
|
|
|
import sqlite3
|
2020-07-19 13:32:54 +00:00
|
|
|
from src.wiki import Wiki, process_cats, process_mwmsgs, essential_info
|
2020-07-10 13:38:36 +00:00
|
|
|
import asyncio, aiohttp
|
2020-07-20 00:52:02 +00:00
|
|
|
from src.misc import get_paths
|
2020-07-10 20:07:33 +00:00
|
|
|
from src.exceptions import *
|
|
|
|
from src.database import db_cursor
|
2020-07-19 23:40:20 +00:00
|
|
|
from collections import defaultdict
|
2020-07-21 12:15:40 +00:00
|
|
|
from src.queue_handler import DBHandler
|
2020-07-23 19:12:07 +00:00
|
|
|
from src.discord import DiscordMessage
|
2020-07-22 11:43:18 +00:00
|
|
|
from src.msgqueue import messagequeue
|
2020-07-23 19:12:07 +00:00
|
|
|
import requests
|
2020-07-09 22:24:23 +00:00
|
|
|
|
|
|
|
# Apply the logging configuration shipped in settings (logging.config dictConfig schema).
logging.config.dictConfig(settings["logging"])

# Module-wide logger for the bot's main loop.
logger = logging.getLogger("rcgcdb.bot")

logger.debug("Current settings: {settings}".format(settings=settings))

# Log Fail states with structure wiki_id: number of fail states
# In-memory registry of Wiki objects keyed by wiki URL; populated at startup below
# and extended lazily by wiki_scanner() when new rows appear in the database.
all_wikis: dict = {}

mw_msgs: dict = {}  # will have the type of id: tuple
|
2020-07-09 22:24:23 +00:00
|
|
|
|
2020-07-10 13:38:36 +00:00
|
|
|
# First populate the all_wikis list with every wiki
# Reasons for this: 1. we require amount of wikis to calculate the cooldown between requests
# 2. Easier to code
for wiki in db_cursor.execute('SELECT DISTINCT wiki FROM rcgcdw'):
    # Each sqlite3 row is a 1-tuple; key by the wiki URL string itself so that
    # wiki_scanner()'s string-keyed lookups (`db_wiki[3] not in all_wikis`) match
    # these entries instead of re-adding every wiki under a second (tuple) key,
    # which would double len(all_wikis) and skew calculate_delay().
    all_wikis[wiki[0]] = Wiki()
|
2020-07-09 22:24:23 +00:00
|
|
|
|
2020-07-23 19:12:07 +00:00
|
|
|
|
2020-07-09 22:24:23 +00:00
|
|
|
# Start queueing logic
|
|
|
|
|
2020-07-19 23:40:20 +00:00
|
|
|
|
2020-07-11 15:54:08 +00:00
|
|
|
def calculate_delay() -> float:
    """Return the pause, in seconds, to wait between two consecutive wiki requests.

    The base delay spreads the global request budget (max_requests_per_minute)
    evenly; when there are so few wikis that each one would be polled more often
    than minimal_cooldown_per_wiki_in_sec allows, the delay is stretched so every
    wiki still respects that per-wiki cooldown floor.
    """
    base_delay = 60 / settings["max_requests_per_minute"]
    wiki_count = len(all_wikis)
    if wiki_count * base_delay < settings["minimal_cooldown_per_wiki_in_sec"]:
        return settings["minimal_cooldown_per_wiki_in_sec"] / wiki_count
    return base_delay
|
|
|
|
|
2020-07-19 23:40:20 +00:00
|
|
|
|
2020-07-21 12:15:40 +00:00
|
|
|
def generate_targets(wiki_url: str) -> defaultdict:
    """Group the webhooks registered for *wiki_url* by their (lang, display) pair.

    Returns a defaultdict mapping (lang, display) -> list of webhook tokens, so a
    message rendered once per combination can be sent to every matching webhook.
    """
    targets = defaultdict(list)
    rows = db_cursor.execute('SELECT ROWID, * FROM rcgcdw WHERE wiki = ?', (wiki_url,))
    for row in rows:
        # Row layout: rowid, guild, configid, webhook, wiki, lang, display, rcid, wikiid, postid
        lang_display = (row[5], row[6])
        targets[lang_display].append(row[3])
    return targets
|
|
|
|
|
|
|
|
|
2020-07-19 13:32:54 +00:00
|
|
|
async def wiki_scanner():
    """Forever-loop: poll every registered wiki for recent changes and dispatch them.

    For each distinct wiki in the rcgcdw table, fetches the recentchanges API
    payload, categorizes the events, forwards anything newer than the stored
    rcid to every (lang, display) webhook target, persists the new rcid through
    DBHandler, and sleeps calc_delay seconds to respect the global rate limit.
    NOTE(review): indentation reconstructed from a blame view — statement
    nesting (esp. of update_db/sleep) should be confirmed against history.
    """
    while True:
        calc_delay = calculate_delay()
        # One row per distinct wiki URL (GROUP BY wiki collapses duplicate rows).
        fetch_all = db_cursor.execute('SELECT * FROM rcgcdw GROUP BY wiki')
        for db_wiki in fetch_all.fetchall():
            # Columns: guild, configid, webhook, wiki, lang, display, rcid, wikiid, postid
            # -> db_wiki[3] is the wiki URL, db_wiki[6] the last-seen rcid (None for new wikis).
            logger.debug("Wiki {}".format(db_wiki[3]))
            extended = False
            if db_wiki[3] not in all_wikis:
                logger.debug("New wiki: {}".format(db_wiki[3]))
                all_wikis[db_wiki[3]] = Wiki()
            local_wiki = all_wikis[db_wiki[3]]  # set a reference to a wiki object from memory
            if local_wiki.mw_messages is None:
                # No cached MediaWiki messages yet -> ask fetch_wiki for the extended payload.
                extended = True
            logger.debug("test")
            try:
                wiki_response = await local_wiki.fetch_wiki(extended, db_wiki[3])
                await local_wiki.check_status(db_wiki[3], wiki_response.status)
            except (WikiServerError, WikiError):
                logger.exception("Exeption when fetching the wiki")
                continue  # ignore this wiki if it throws errors
            try:
                recent_changes_resp = await wiki_response.json(encoding="UTF-8")
                if "error" in recent_changes_resp or "errors" in recent_changes_resp:
                    # TODO Remove on some errors (example "code": "readapidenied")
                    raise WikiError
                recent_changes = recent_changes_resp['query']['recentchanges']
                # API returns newest first; reverse so events are processed oldest-first.
                recent_changes.reverse()
            except:  # NOTE(review): bare except also swallows CancelledError — consider narrowing
                logger.exception("On loading json of response.")
                continue
            if extended:
                await process_mwmsgs(recent_changes_resp, local_wiki, mw_msgs)
            if db_wiki[6] is None:  # new wiki, just get the last rc to not spam the channel
                if len(recent_changes) > 0:
                    DBHandler.add(db_wiki[3], recent_changes[-1]["rcid"])
                    continue
                else:
                    DBHandler.add(db_wiki[3], 0)
                    continue
            categorize_events = {}
            targets = generate_targets(db_wiki[3])
            paths = get_paths(db_wiki[3], recent_changes_resp)
            # First pass only resolves categories for every change.
            for change in recent_changes:
                await process_cats(change, local_wiki, mw_msgs, categorize_events)
            for change in recent_changes:  # Yeah, second loop since the categories require to be all loaded up
                if change["rcid"] > db_wiki[6]:
                    for target in targets.items():
                        await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths,
                                             recent_changes_resp)
            if recent_changes:
                # `change` here is the last item of the second loop; persist its rcid as the new cursor.
                DBHandler.add(db_wiki[3], change["rcid"])
            DBHandler.update_db()
            await asyncio.sleep(delay=calc_delay)
|
2020-07-21 12:15:40 +00:00
|
|
|
|
2020-07-20 12:03:55 +00:00
|
|
|
|
2020-07-19 13:32:54 +00:00
|
|
|
async def message_sender():
    """Forever-loop: flush/retry queued Discord messages via the global messagequeue."""
    while True:
        await messagequeue.resend_msgs()
|
|
|
|
|
2020-07-19 13:32:54 +00:00
|
|
|
|
2020-07-23 19:12:07 +00:00
|
|
|
def global_exception_handler(loop, context):
    """Global exception handler for asyncio, lets us know when something crashes"""
    # dict.get evaluates its default eagerly, so the original always read
    # context["message"]; keep that exact lookup order.
    fallback = context["message"]
    msg = context.get("exception", fallback)
    logger.error(msg)
    # Disabled monitoring-webhook notification, kept for reference:
    #requests.post("https://discord.com/api/webhooks/" + settings["monitoring_webhook"],
    #              data=DiscordMessage("embed", "exception", None, content=
    #              "[RcGcDb] Exception detected, function might have shut down! Exception: {}".format(msg), wiki=None))
|
2020-07-23 19:12:07 +00:00
|
|
|
|
2020-07-20 12:03:55 +00:00
|
|
|
|
2020-07-19 13:32:54 +00:00
|
|
|
async def main_loop():
    """Install the global exception handler and run the scanner and sender tasks.

    Awaits both tasks sequentially; each is an infinite loop, so under normal
    operation this coroutine never returns.
    """
    # get_running_loop() is the correct call inside a coroutine; get_event_loop()
    # is deprecated in that context and may return a different loop than the one
    # asyncio.run() is driving.
    loop = asyncio.get_running_loop()
    loop.set_exception_handler(global_exception_handler)
    task1 = asyncio.create_task(wiki_scanner())
    task2 = asyncio.create_task(message_sender())
    await task1
    await task2
|
2020-07-19 13:32:54 +00:00
|
|
|
|
2020-07-21 12:15:40 +00:00
|
|
|
|
2020-07-19 13:32:54 +00:00
|
|
|
# Start the bot only when executed as a script; the guard keeps an accidental
# import of this module from spinning up the event loop as a side effect.
if __name__ == "__main__":
    asyncio.run(main_loop())
|