update database format

Markus-Rost 2020-07-27 05:16:50 +02:00
parent abd5f680f5
commit d299b515e5
3 changed files with 30 additions and 29 deletions
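In short: the queries stop relying on SELECT ROWID, * and instead name the columns they need, so every tuple index in the Python code shifts to match the SELECT order rather than the table layout. A minimal sketch of the new positions, taken only from the SELECT statements in this commit (the constant names are hypothetical, not part of the code):

# Row layout of 'SELECT webhook, wiki, lang, display, wikiid, rcid, postid FROM rcgcdw'
WEBHOOK, WIKI, LANG, DISPLAY, WIKIID, RCID, POSTID = range(7)
# e.g. db_wiki[WIKI] == db_wiki[1] is the wiki URL, db_wiki[RCID] == db_wiki[5] is the last seen rcid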

View file

@@ -18,7 +18,7 @@ logging.config.dictConfig(settings["logging"])
 logger = logging.getLogger("rcgcdb.bot")
 logger.debug("Current settings: {settings}".format(settings=settings))
-# Log Fail states with structure wiki_id: number of fail states
+# Log Fail states with structure wiki_url: number of fail states
 all_wikis: dict = {}
 mw_msgs: dict = {} # will have the type of id: tuple
@@ -43,10 +43,9 @@ def calculate_delay() -> float:
 def generate_targets(wiki_url: str) -> defaultdict:
 	combinations = defaultdict(list)
-	for webhook in db_cursor.execute('SELECT ROWID, * FROM rcgcdw WHERE wiki = ?', (wiki_url,)):
-		# rowid, guild, configid, webhook, wiki, lang, display, rcid, wikiid, postid
-		combination = (webhook[5], webhook[6]) # lang, display
-		combinations[combination].append(webhook[3])
+	for webhook in db_cursor.execute('SELECT webhook, lang, display FROM rcgcdw WHERE wiki = ?', (wiki_url,)):
+		combination = (webhook[1], webhook[2]) # lang, display
+		combinations[combination].append(webhook[0])
 	return combinations
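With the narrowed query, generate_targets reads webhook[0] as the webhook token, webhook[1] as the language and webhook[2] as the display mode. A self-contained sketch of the resulting structure, using made-up rows:

from collections import defaultdict

# made-up rows in the new (webhook, lang, display) order
rows = [("1/aaa", "en", 1), ("2/bbb", "en", 1), ("3/ccc", "de", 0)]
combinations = defaultdict(list)
for webhook in rows:
	combination = (webhook[1], webhook[2]) # lang, display
	combinations[combination].append(webhook[0])
# combinations == {("en", 1): ["1/aaa", "2/bbb"], ("de", 0): ["3/ccc"]}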
@@ -54,21 +53,22 @@ async def wiki_scanner():
 	try:
 		while True:
 			calc_delay = calculate_delay()
-			fetch_all = db_cursor.execute('SELECT * FROM rcgcdw GROUP BY wiki')
+			fetch_all = db_cursor.execute('SELECT webhook, wiki, lang, display, wikiid, rcid, postid FROM rcgcdw GROUP BY wiki')
+			# webhook, wiki, lang, display, wikiid, rcid, postid
 			for db_wiki in fetch_all.fetchall():
-				logger.debug("Wiki {}".format(db_wiki[3]))
+				logger.debug("Wiki {}".format(db_wiki[1]))
 				extended = False
-				if db_wiki[3] not in all_wikis:
-					logger.debug("New wiki: {}".format(db_wiki[3]))
-					all_wikis[db_wiki[3]] = Wiki()
-				local_wiki = all_wikis[db_wiki[3]] # set a reference to a wiki object from memory
+				if db_wiki[1] not in all_wikis:
+					logger.debug("New wiki: {}".format(db_wiki[1]))
+					all_wikis[db_wiki[1]] = Wiki()
+				local_wiki = all_wikis[db_wiki[1]] # set a reference to a wiki object from memory
 				if local_wiki.mw_messages is None:
 					extended = True
 				async with aiohttp.ClientSession(headers=settings["header"],
 						timeout=aiohttp.ClientTimeout(2.0)) as session:
 					try:
-						wiki_response = await local_wiki.fetch_wiki(extended, db_wiki[3], session)
-						await local_wiki.check_status(db_wiki[3], wiki_response.status)
+						wiki_response = await local_wiki.fetch_wiki(extended, db_wiki[1], session)
+						await local_wiki.check_status(db_wiki[1], wiki_response.status)
 					except (WikiServerError, WikiError):
 						logger.exception("Exeption when fetching the wiki")
 						continue # ignore this wiki if it throws errors
@@ -77,39 +77,39 @@ async def wiki_scanner():
 						if "error" in recent_changes_resp or "errors" in recent_changes_resp:
 							error = recent_changes_resp.get("error", recent_changes_resp["errors"])
 							if error["code"] == "readapidenied":
-								await local_wiki.fail_add(db_wiki[3], 410)
+								await local_wiki.fail_add(db_wiki[1], 410)
 								continue
 							raise WikiError
 						recent_changes = recent_changes_resp['query']['recentchanges']
 						recent_changes.reverse()
 					except aiohttp.ContentTypeError:
 						logger.exception("Wiki seems to be resulting in non-json content.")
-						await local_wiki.fail_add(db_wiki[3], 410)
+						await local_wiki.fail_add(db_wiki[1], 410)
 						continue
 					except:
 						logger.exception("On loading json of response.")
 						continue
 					if extended:
 						await process_mwmsgs(recent_changes_resp, local_wiki, mw_msgs)
-					if db_wiki[6] is None: # new wiki, just get the last rc to not spam the channel
+					if db_wiki[5] is None: # new wiki, just get the last rc to not spam the channel
 						if len(recent_changes) > 0:
-							DBHandler.add(db_wiki[3], recent_changes[-1]["rcid"])
+							DBHandler.add(db_wiki[1], recent_changes[-1]["rcid"])
 							continue
 						else:
-							DBHandler.add(db_wiki[3], 0)
+							DBHandler.add(db_wiki[1], 0)
 							continue
 					categorize_events = {}
-					targets = generate_targets(db_wiki[3])
-					paths = get_paths(db_wiki[3], recent_changes_resp)
+					targets = generate_targets(db_wiki[1])
+					paths = get_paths(db_wiki[1], recent_changes_resp)
 					for change in recent_changes:
 						await process_cats(change, local_wiki, mw_msgs, categorize_events)
 					for change in recent_changes: # Yeah, second loop since the categories require to be all loaded up
-						if change["rcid"] > db_wiki[6]:
+						if change["rcid"] > db_wiki[5]:
 							for target in targets.items():
 								await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths,
 										recent_changes_resp)
 					if recent_changes:
-						DBHandler.add(db_wiki[3], change["rcid"])
+						DBHandler.add(db_wiki[1], change["rcid"])
 			DBHandler.update_db()
 			await asyncio.sleep(delay=calc_delay)
 	except asyncio.CancelledError:
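The same remapping runs through the whole wiki_scanner loop: db_wiki[1] is now the wiki URL and db_wiki[5] the stored rcid. A made-up example row in the new order, shown as a named unpacking purely for illustration (the bot itself keeps using positional indexes):

# hypothetical row matching the new SELECT order
db_wiki = ("1/aaa", "https://example.fandom.com/", "en", 1, None, 12345, None)
webhook, wiki, lang, display, wikiid, rcid, postid = db_wiki
assert wiki == db_wiki[1] and rcid == db_wiki[5]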

View file

@@ -15,20 +15,20 @@ logger = logging.getLogger("rcgcdb.discord")
 # User facing webhook functions
-async def wiki_removal(wiki_id, status):
-	for observer in db_cursor.execute('SELECT * FROM rcgcdw WHERE wiki = ?', (wiki_id,)):
+async def wiki_removal(wiki_url, status):
+	for observer in db_cursor.execute('SELECT webhook, lang FROM rcgcdw WHERE wiki = ?', (wiki_url,)):
 		def _(string: str) -> str:
 			"""Our own translation string to make it compatible with async"""
-			return langs[observer[4]].gettext(string)
+			return langs[observer[1]].gettext(string)
 		reasons = {410: _("wiki deletion"), 404: _("wiki deletion"), 401: _("wiki becoming inaccessible"),
 				402: _("wiki becoming inaccessible"), 403: _("wiki becoming inaccessible"), 410: _("wiki becoming inaccessible")}
 		reason = reasons.get(status, _("unknown error"))
-		await send_to_discord_webhook(DiscordMessage("compact", "webhook/remove", webhook_url=[observer[2]], content=_("The webhook for {} has been removed due to {}.".format(wiki_id, reason)), wiki=None))
+		await send_to_discord_webhook(DiscordMessage("compact", "webhook/remove", webhook_url=[observer[0]], content=_("The webhook for {} has been removed due to {}.".format(wiki_url, reason)), wiki=None))
 		header = settings["header"]
 		header['Content-Type'] = 'application/json'
 		header['X-Audit-Log-Reason'] = "Wiki becoming unavailable"
 		async with aiohttp.ClientSession(headers=header, timeout=aiohttp.ClientTimeout(5.0)) as session:
-			await session.delete("https://discord.com/api/webhooks/"+observer[2])
+			await session.delete("https://discord.com/api/webhooks/"+observer[0])
 async def webhook_removal_monitor(webhook_url: list, reason: int):
@@ -102,8 +102,8 @@ class DiscordMessage:
 # Monitoring webhook functions
-async def wiki_removal_monitor(wiki_id, status):
-	await send_to_discord_webhook_monitoring(DiscordMessage("compact", "webhook/remove", content="Removing {} because {}.".format(wiki_id, status), webhook_url=[None], wiki=None))
+async def wiki_removal_monitor(wiki_url, status):
+	await send_to_discord_webhook_monitoring(DiscordMessage("compact", "webhook/remove", content="Removing {} because {}.".format(wiki_url, status), webhook_url=[None], wiki=None))
 async def send_to_discord_webhook_monitoring(data: DiscordMessage):
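The removal path follows the same pattern: with SELECT webhook, lang each observer row is a two-element tuple, so observer[0] is the webhook id/token reused both in the notification message and in the DELETE request. A made-up illustration:

observer = ("123456789/abcdef", "en") # webhook, lang
delete_url = "https://discord.com/api/webhooks/" + observer[0]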

View file

@@ -158,6 +158,7 @@ async def process_mwmsgs(wiki_response: dict, local_wiki: Wiki, mw_msgs: dict):
 		mw_msgs[key] = msgs # it may be a little bit messy for sure, however I don't expect any reason to remove mw_msgs entries by one
 	local_wiki.mw_messages = key
+# db_wiki: webhook, wiki, lang, display, wikiid, rcid, postid
 async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_wiki: tuple, target: tuple, paths: tuple, request: dict):
 	"""Prepares essential information for both embed and compact message format."""
 	def _(string: str) -> str: