Mirror of https://gitlab.com/chicken-riders/RcGcDb.git, synced 2025-02-23 00:54:09 +00:00
Fixed like entire code so it runs like 10 horses on a race
This commit is contained in:
parent f3594ac12b
commit e4ae528e70
@@ -48,8 +9,9 @@ def generate_targets(wiki_url: str) -> defaultdict:
 async def wiki_scanner():
     while True:
         calc_delay = calculate_delay()
-        # db_cursor.execute('SELECT DISTINCT wiki FROM rcgcdw'):
-        for db_wiki in db_cursor.execute('SELECT * FROM rcgcdw GROUP BY wiki'):
+        fetch_all = db_cursor.execute('SELECT * FROM rcgcdw GROUP BY wiki')
+        for db_wiki in fetch_all.fetchall():
+            logger.debug("Wiki {}".format(db_wiki[3]))
             extended = False
             if db_wiki[3] not in all_wikis:
                 logger.debug("New wiki: {}".format(db_wiki[3]))
@@ -86,13 +87,13 @@ async def wiki_scanner():
                 for change in recent_changes:
                     await process_cats(change, local_wiki, mw_msgs, categorize_events)
                 for change in recent_changes:  # Yeah, second loop since the categories require to be all loaded up
-                    if change["rcid"] < db_wiki[6]:
+                    if change["rcid"] > db_wiki[6]:
                         for target in targets.items():
                             await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths, recent_changes_resp)
                 if recent_changes:
                     DBHandler.add(db_wiki[3], change["rcid"])
+        DBHandler.update_db()
         await asyncio.sleep(delay=calc_delay)
-        DBHandler.update_db()


 async def message_sender():
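The move to `fetch_all.fetchall()` matters because the same `db_cursor` is reused inside the loop (for example by the per-wiki queries and `DBHandler.add`), and calling `execute()` again on a sqlite3 cursor resets an iteration that is still in progress. A minimal sketch of the pattern, using a made-up two-column table rather than the real `rcgcdw` schema:

import sqlite3

# Hypothetical, simplified schema; the real rcgcdw table has more columns.
db_connection = sqlite3.connect(":memory:")
db_cursor = db_connection.cursor()
db_cursor.execute("CREATE TABLE rcgcdw (wiki TEXT, rcid INTEGER)")
db_cursor.executemany("INSERT INTO rcgcdw VALUES (?, ?)",
                      [("wiki.a", 1), ("wiki.b", 5)])

# Materialize the rows first, then the cursor is free to run other statements.
fetch_all = db_cursor.execute("SELECT * FROM rcgcdw GROUP BY wiki")
for db_wiki in fetch_all.fetchall():
    db_cursor.execute("UPDATE rcgcdw SET rcid = ? WHERE wiki = ?", (99, db_wiki[0]))
db_connection.commit()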
@@ -1,16 +1,41 @@
 import json, random, math, logging
 from collections import defaultdict

 from src.config import settings
 from src.database import db_cursor
+from src.misc import logger
+from src.config import settings
+from src.database import db_cursor
+from src.i18n import langs
+import aiohttp, gettext

 logger = logging.getLogger("rcgcdb.discord")

 # General functions


+# User facing webhook functions
+def wiki_removal(wiki_id, status):
+    for observer in db_cursor.execute('SELECT * FROM rcgcdw WHERE wikiid = ?', (wiki_id,)):
+        def _(string: str) -> str:
+            """Our own translation string to make it compatible with async"""
+            return langs[observer[4]].gettext(string)
+        reasons = {410: _("wiki deletion"), 404: _("wiki deletion"), 401: _("wiki becoming inaccessible"),
+                   402: _("wiki becoming inaccessible"), 403: _("wiki becoming inaccessible")}
+        reason = reasons.get(status, _("unknown error"))
+        send_to_discord_webhook(DiscordMessage("compact", "webhook/remove", webhook_url=[observer[2]], content=_("The webhook for {} has been removed due to {}.".format(wiki_id, reason)), wiki=None))
+
+async def webhook_removal_monitor(webhook_url: list, reason: int):
+    await send_to_discord_webhook_monitoring(DiscordMessage("compact", "webhook/remove", None, content="The webhook {} has been removed due to {}.".format("https://discord.com/api/webhooks/" + webhook_url[0], reason), wiki=None),
+                                             aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(4.0)))
+
+
 class DiscordMessage():
     """A class defining a typical Discord JSON representation of webhook payload."""
-    def __init__(self, message_type: str, event_type: str, webhook_url: str, content=None):
+    def __init__(self, message_type: str, event_type: str, webhook_url: list, wiki, content=None):
         self.webhook_object = dict(allowed_mentions={"parse": []})
         self.webhook_url = webhook_url
+        self.wiki = wiki

         if message_type == "embed":
             self.__setup_embed()
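The nested `_()` closure exists so each observer row gets the removal notice in its own language: `observer[4]` is read as a language code and looked up in `langs` from `src.i18n`. A rough sketch of the mapping shape this assumes; the language codes and catalogue name below are placeholders:

import gettext

# Assumed shape of src.i18n.langs: language code -> gettext translations object.
langs = {
    "en": gettext.NullTranslations(),
    "de": gettext.translation("rcgcdb", localedir="locale", languages=["de"], fallback=True),
}

def translate_for(lang_code: str, string: str) -> str:
    # NullTranslations simply returns the original string when no catalogue exists.
    return langs.get(lang_code, gettext.NullTranslations()).gettext(string)

print(translate_for("de", "wiki deletion"))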
@@ -47,7 +72,10 @@ class DiscordMessage():

     def finish_embed(self):
         if self.embed["color"] is None:
-            self.embed["color"] = random.randrange(1, 16777215)
+            if settings["appearance"]["embed"].get(self.event_type, {"color": None})["color"] is None:
+                self.embed["color"] = random.randrange(1, 16777215)
+            else:
+                self.embed["color"] = settings["appearance"]["embed"][self.event_type]["color"]
         else:
             self.embed["color"] = math.floor(self.embed["color"])

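finish_embed now prefers a per-event colour from the configuration and only falls back to a random one when none is set. A hedged illustration of the settings layout the lookup expects; the event names and colour values below are invented for the example:

import random

# Invented example configuration; only the nesting mirrors the lookup in finish_embed above.
settings = {"appearance": {"embed": {
    "edit": {"color": 65280},           # a configured colour is used as-is
    "delete/delete": {"color": None},   # None falls back to a random colour
}}}

def resolve_color(event_type, current=None):
    if current is not None:
        return int(current)
    configured = settings["appearance"]["embed"].get(event_type, {"color": None})["color"]
    return configured if configured is not None else random.randrange(1, 16777215)

print(resolve_color("edit"), resolve_color("unknown/event"))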
@@ -68,14 +96,54 @@ class DiscordMessage():
         self.webhook_object["username"] = name


-# User facing webhook functions
-def wiki_removal(wiki_id, status): # TODO Add lang selector
-    reasons = {410: _("wiki deletion"), 404: _("wiki deletion"), 401: _("wiki becoming inaccessible"),
-               402: _("wiki becoming inaccessible"), 403: _("wiki becoming inaccessible")}
-    reason = reasons.get(status, _("unknown error"))
-    for observer in db_cursor.execute('SELECT * FROM observers WHERE wiki_id = ?', wiki_id):
-        DiscordMessage("compact", "webhook/remove", webhook_url=observer[4], content=_("The webhook for {} has been removed due to {}.".format(reason))) # TODO

 # Monitoring webhook functions
 def wiki_removal_monitor(wiki_id, status):
     pass


+async def send_to_discord_webhook_monitoring(data: DiscordMessage, session: aiohttp.ClientSession):
+    header = settings["header"]
+    header['Content-Type'] = 'application/json'
+    try:
+        result = await session.post("https://discord.com/api/webhooks/"+settings["monitoring_webhook"], data=repr(data),
+                                    headers=header)
+    except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
+        logger.exception("Could not send the message to Discord")
+        return 3
+
+
+async def send_to_discord_webhook(data: DiscordMessage, session: aiohttp.ClientSession):
+    header = settings["header"]
+    header['Content-Type'] = 'application/json'
+    for webhook in data.webhook_url:
+        try:
+            result = await session.post("https://discord.com/api/webhooks/"+webhook, data=repr(data),
+                                        headers=header)
+        except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
+            logger.exception("Could not send the message to Discord")
+            return 3
+    return await handle_discord_http(result.status, repr(data), await result.text(), data)
+
+
+async def handle_discord_http(code, formatted_embed, result, dmsg):
+    if 300 > code > 199:  # message went through
+        return 0
+    elif code == 400:  # HTTP BAD REQUEST result.status_code, data, result, header
+        logger.error(
+            "Following message has been rejected by Discord, please submit a bug on our bugtracker adding it:")
+        logger.error(formatted_embed)
+        logger.error(result.text)
+        return 1
+    elif code == 401 or code == 404:  # HTTP UNAUTHORIZED AND NOT FOUND
+        logger.error("Webhook URL is invalid or no longer in use, please replace it with proper one.")
+        db_cursor.execute("DELETE FROM rcgcdw WHERE webhook = ?", (dmsg.webhook_url[0],))
+        await webhook_removal_monitor(dmsg.webhook_url, code)
+        return 1
+    elif code == 429:
+        logger.error("We are sending too many requests to the Discord, slowing down...")
+        return 2
+    elif 499 < code < 600:
+        logger.error(
+            "Discord have trouble processing the event, and because the HTTP code returned is {} it means we blame them.".format(
+                code))
+        return 3
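The numeric return values of `handle_discord_http` (0 delivered, 1 rejected or dead webhook, 2 rate limited, 3 connection or Discord-side failure) are what callers such as the message queue can branch on. A hypothetical usage sketch, assuming `send_to_discord_webhook` from the hunk above is in scope and `message` stands in for a prepared `DiscordMessage`:

import asyncio
import aiohttp

async def deliver(message):
    # `message` is assumed to already carry a filled-in webhook_url list.
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=6.0)) as session:
        code = await send_to_discord_webhook(message, session)
        if code == 2:
            await asyncio.sleep(20)  # arbitrary back-off before a retry
        return code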
@@ -4,7 +4,8 @@ import gettext
 from urllib.parse import quote_plus

 from src.config import settings
-from src.misc import DiscordMessage, send_to_discord, escape_formatting
+from src.misc import send_to_discord, escape_formatting
+from discord import DiscordMessage
 from src.i18n import disc

 _ = disc.gettext
@@ -5,7 +5,8 @@ import time
 import logging
 import base64
 from src.config import settings
-from src.misc import link_formatter, create_article_path, LinkParser, profile_field_name, ContentParser, DiscordMessage, safe_read
+from src.misc import link_formatter, create_article_path, parse_link, profile_field_name, ContentParser, safe_read
+from src.discord import DiscordMessage
 from urllib.parse import quote_plus
 from src.msgqueue import send_to_discord
 # from html.parser import HTMLParser
@@ -23,14 +24,12 @@ logger = logging.getLogger("rcgcdw.rc_formatters")


 async def compact_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths,
                             additional_data=None):
-    global LinkParser
     if additional_data is None:
         additional_data = {"namespaces": {}, "tags": {}}
     WIKI_API_PATH = paths[0]
     WIKI_SCRIPT_PATH = paths[1]
     WIKI_ARTICLE_PATH = paths[2]
     WIKI_JUST_DOMAIN = paths[3]
-    LinkParser = LinkParser(paths[3])
     if action != "suppressed":
         author_url = link_formatter(create_article_path("User:{user}".format(user=change["user"]), WIKI_ARTICLE_PATH))
         author = change["user"]
@@ -280,21 +279,15 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c
         link = link_formatter(create_article_path(change["title"], WIKI_ARTICLE_PATH))
         content = _("[{author}]({author_url}) edited the slice for [{article}]({article_url})").format(author=author, author_url=author_url, article=change["title"], article_url=link)
     elif action == "cargo/createtable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = LinkParser.new_string
-        LinkParser.new_string = ""
+        table = parse_link(paths[3], change["logparams"]["0"])
         content = _("[{author}]({author_url}) created the Cargo table \"{table}\"").format(author=author, author_url=author_url, table=table)
     elif action == "cargo/deletetable":
         content = _("[{author}]({author_url}) deleted the Cargo table \"{table}\"").format(author=author, author_url=author_url, table=change["logparams"]["0"])
     elif action == "cargo/recreatetable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = LinkParser.new_string
-        LinkParser.new_string = ""
+        table = parse_link(paths[3], change["logparams"]["0"])
         content = _("[{author}]({author_url}) recreated the Cargo table \"{table}\"").format(author=author, author_url=author_url, table=table)
     elif action == "cargo/replacetable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = LinkParser.new_string
-        LinkParser.new_string = ""
+        table = parse_link(paths[3], change["logparams"]["0"])
         content = _("[{author}]({author_url}) replaced the Cargo table \"{table}\"").format(author=author, author_url=author_url, table=table)
     elif action == "managetags/create":
         link = link_formatter(create_article_path("Special:Tags", WIKI_ARTICLE_PATH))
@@ -319,14 +312,12 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c


 async def embed_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths, additional_data=None):
-    global LinkParser
     if additional_data is None:
         additional_data = {"namespaces": {}, "tags": {}}
     WIKI_API_PATH = paths[0]
     WIKI_SCRIPT_PATH = paths[1]
     WIKI_ARTICLE_PATH = paths[2]
     WIKI_JUST_DOMAIN = paths[3]
-    LinkParser = LinkParser(paths[3])
     embed = DiscordMessage("embed", action, target[1], wiki=WIKI_SCRIPT_PATH)
     if parsed_comment is None:
         parsed_comment = _("No description provided")
@@ -358,7 +349,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         embed["title"] = "{redirect}{article} ({new}{minor}{bot}{space}{editsize})".format(redirect="⤷ " if "redirect" in change else "", article=change["title"], editsize="+" + str(
             editsize) if editsize > 0 else editsize, new=_("(N!) ") if action == "new" else "",
             minor=_("m") if action == "edit" and "minor" in change else "", bot=_('b') if "bot" in change else "", space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
-        if target[1] == 3:
+        if target[0][1] == 3:
             if action == "new":
                 changed_content = await safe_read(await recent_changes.safe_request(
                     "{wiki}?action=compare&format=json&fromtext=&torev={diff}&topst=1&prop=diff".format(
@@ -432,7 +423,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
             embed["title"] = _("Uploaded {name}").format(name=change["title"])
         if additional_info_retrieved:
             embed.add_field(_("Options"), _("([preview]({link}))").format(link=image_direct_url))
-            if target[1] > 1:
+            if target[0][1] > 1:
                 embed["image"]["url"] = image_direct_url
     elif action == "delete/delete":
         link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
@@ -643,9 +634,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
         embed["title"] = _("Edited the slice for {article}").format(article=change["title"])
     elif action == "cargo/createtable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", LinkParser.new_string)
-        LinkParser.new_string = ""
+        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", parse_link(paths[3], change["logparams"]["0"]))
         link = table.group(2)
         embed["title"] = _("Created the Cargo table \"{table}\"").format(table=table.group(1))
         parsed_comment = None
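The `re.search` call assumes `parse_link` returns a Markdown-style link whose label is the table name and whose target is wrapped in angle brackets, so group 1 is the name and group 2 the URL. A standalone check of that pattern against an invented link:

import re

sample = "[Monsters](<https://example.org/wiki/Special:CargoTables/Monsters>)"
table = re.search(r"\[(.*?)\]\(<(.*?)>\)", sample)
print(table.group(1))  # Monsters
print(table.group(2))  # https://example.org/wiki/Special:CargoTables/Monsters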
@@ -654,16 +643,12 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         embed["title"] = _("Deleted the Cargo table \"{table}\"").format(table=change["logparams"]["0"])
         parsed_comment = None
     elif action == "cargo/recreatetable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", LinkParser.new_string)
-        LinkParser.new_string = ""
+        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", parse_link(paths[3], change["logparams"]["0"]))
         link = table.group(2)
         embed["title"] = _("Recreated the Cargo table \"{table}\"").format(table=table.group(1))
         parsed_comment = None
     elif action == "cargo/replacetable":
-        LinkParser.feed(change["logparams"]["0"])
-        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", LinkParser.new_string)
-        LinkParser.new_string = ""
+        table = re.search(r"\[(.*?)\]\(<(.*?)>\)", parse_link(paths[3], change["logparams"]["0"]))
         link = table.group(2)
         embed["title"] = _("Replaced the Cargo table \"{table}\"").format(table=table.group(1))
         parsed_comment = None
@@ -710,4 +695,4 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         del_cat = (_("**Removed**: ") + ", ".join(list(categories["removed"])[0:16]) + ("" if len(categories["removed"])<=15 else _(" and {} more").format(len(categories["removed"])-15))) if categories["removed"] else ""
         embed.add_field(_("Changed categories"), new_cat + del_cat)
     embed.finish_embed()
     await send_to_discord(embed)
 src/misc.py | 125
@@ -1,116 +1,12 @@
 from html.parser import HTMLParser
 import base64, re
-from src.config import settings
-import json
 import logging
-from collections import defaultdict
-import random
 from urllib.parse import urlparse, urlunparse
-import math
 import aiohttp

 logger = logging.getLogger("rcgcdw.misc")


-class DiscordMessage():
-    """A class defining a typical Discord JSON representation of webhook payload."""
-    def __init__(self, message_type: str, event_type: str, webhook_url: list, wiki, content=None):
-        self.webhook_object = dict(allowed_mentions={"parse": []})
-        self.webhook_url = webhook_url
-        self.wiki = wiki
-
-        if message_type == "embed":
-            self.__setup_embed()
-        elif message_type == "compact":
-            self.webhook_object["content"] = content
-
-        self.event_type = event_type
-
-    def __setitem__(self, key, value):
-        """Set item is used only in embeds."""
-        try:
-            self.embed[key] = value
-        except NameError:
-            raise TypeError("Tried to assign a value when message type is plain message!")
-
-    def __getitem__(self, item):
-        return self.embed[item]
-
-    def __repr__(self):
-        """Return the Discord webhook object ready to be sent"""
-        return json.dumps(self.webhook_object)
-
-    def __setup_embed(self):
-        self.embed = defaultdict(dict)
-        if "embeds" not in self.webhook_object:
-            self.webhook_object["embeds"] = [self.embed]
-        else:
-            self.webhook_object["embeds"].append(self.embed)
-        self.embed["color"] = None
-
-    def add_embed(self):
-        self.finish_embed()
-        self.__setup_embed()
-
-    def finish_embed(self):
-        if self.embed["color"] is None:
-            if settings["appearance"]["embed"].get(self.event_type, {"color": None})["color"] is None:
-                self.embed["color"] = random.randrange(1, 16777215)
-            else:
-                self.embed["color"] = settings["appearance"]["embed"][self.event_type]["color"]
-        else:
-            self.embed["color"] = math.floor(self.embed["color"])
-
-    def set_author(self, name, url, icon_url=""):
-        self.embed["author"]["name"] = name
-        self.embed["author"]["url"] = url
-        self.embed["author"]["icon_url"] = icon_url
-
-    def add_field(self, name, value, inline=False):
-        if "fields" not in self.embed:
-            self.embed["fields"] = []
-        self.embed["fields"].append(dict(name=name, value=value, inline=inline))
-
-    def set_avatar(self, url):
-        self.webhook_object["avatar_url"] = url
-
-    def set_name(self, name):
-        self.webhook_object["username"] = name
-
-
-async def send_to_discord_webhook(data: DiscordMessage, session: aiohttp.ClientSession):
-    header = settings["header"]
-    header['Content-Type'] = 'application/json'
-    for webhook in data.webhook_url:
-        try:
-            result = await session.post("https://discord.com/api/webhooks/"+webhook, data=repr(data),
-                                        headers=header)
-        except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
-            logger.exception("Could not send the message to Discord")
-            return 3
-    return await handle_discord_http(result.status, repr(data), await result.text())
-
-
-async def handle_discord_http(code, formatted_embed, result):
-    if 300 > code > 199:  # message went through
-        return 0
-    elif code == 400:  # HTTP BAD REQUEST result.status_code, data, result, header
-        logger.error(
-            "Following message has been rejected by Discord, please submit a bug on our bugtracker adding it:")
-        logger.error(formatted_embed)
-        logger.error(result.text)
-        return 1
-    elif code == 401 or code == 404:  # HTTP UNAUTHORIZED AND NOT FOUND
-        logger.error("Webhook URL is invalid or no longer in use, please replace it with proper one.")
-
-        return 1
-    elif code == 429:
-        logger.error("We are sending too many requests to the Discord, slowing down...")
-        return 2
-    elif 499 < code < 600:
-        logger.error(
-            "Discord have trouble processing the event, and because the HTTP code returned is {} it means we blame them.".format(
-                code))
-        return 3
-
-
 def get_paths(wiki: str, request) -> tuple:
     parsed_url = urlparse(wiki)
@@ -125,10 +21,7 @@ class LinkParser(HTMLParser):

     new_string = ""
     recent_href = ""
-    def __init__(self, domain):
-        self.WIKI_JUST_DOMAIN = domain
-        super().__init__()
+    WIKI_JUST_DOMAIN = ""

     def handle_starttag(self, tag, attrs):
         for attr in attrs:
@@ -159,6 +52,18 @@ class LinkParser(HTMLParser):
         pass


+LinkParse = LinkParser()
+
+def parse_link(domain: str, to_parse: str) -> str:
+    """Because I have strange issues using the LinkParser class myself, this is a helper function
+    to utilize the LinkParser properly"""
+    LinkParse.WIKI_JUST_DOMAIN = domain
+    LinkParse.feed(to_parse)
+    LinkParse.new_string = ""
+    LinkParse.recent_href = ""
+    return LinkParse.new_string
+
+
 def link_formatter(link: str) -> str:
     """Formats a link to not embed it"""
     return "<" + re.sub(r"([)])", "\\\\\\1", link).replace(" ", "_") + ">"
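As displayed in this hunk, `parse_link` clears `new_string` and `recent_href` between `feed()` and the `return`, so the value handed back is the freshly emptied buffer rather than the parsed text. A sketch of a reset-before-feed variant, assuming the intent is to return the Markdown text that `LinkParser` accumulates in `new_string`; it reuses the module-level `LinkParse` instance introduced above:

def parse_link(domain: str, to_parse: str) -> str:
    """Reset the shared parser state first, then feed, then return the result."""
    LinkParse.WIKI_JUST_DOMAIN = domain
    LinkParse.new_string = ""
    LinkParse.recent_href = ""
    LinkParse.feed(to_parse)
    return LinkParse.new_string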
@@ -1,5 +1,5 @@
 import asyncio, logging, aiohttp
-from src.misc import send_to_discord_webhook
+from src.discord import send_to_discord_webhook
 from src.config import settings
 logger = logging.getLogger("rcgcdw.msgqueue")

@@ -35,8 +35,7 @@ class MessageQueue:
         await self.create_session()
         if self._queue:
             logger.info(
-                "{} messages waiting to be delivered to Discord due to Discord throwing errors/no connection to Discord servers.".format(
-                    len(self._queue)))
+                "{} messages waiting to be delivered to Discord.".format(len(self._queue)))
             for num, item in enumerate(self._queue):
                 logger.debug(
                     "Trying to send a message to Discord from the queue with id of {} and content {}".format(str(num),
@@ -4,7 +4,7 @@ import logging, aiohttp
 from src.exceptions import *
 from src.database import db_cursor, db_connection
 from src.formatters.rc import embed_formatter, compact_formatter
-from src.misc import LinkParser
+from src.misc import parse_link
 from src.i18n import langs
 import src.discord
 from src.config import settings
@@ -157,7 +157,6 @@ async def process_mwmsgs(wiki_response: dict, local_wiki: Wiki, mw_msgs: dict):
     local_wiki.mw_messages = key


 async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_wiki: tuple, target: tuple, paths: tuple, request: dict):
-    global LinkParser
     """Prepares essential information for both embed and compact message format."""
     def _(string: str) -> str:
         """Our own translation string to make it compatible with async"""
@@ -166,16 +165,13 @@ async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_
     lang = langs[target[0][0]]
     ngettext = lang.ngettext
     # recent_changes = RecentChangesClass() # TODO Look into replacing RecentChangesClass with local_wiki
-    LinkParser = LinkParser(paths[3])
     logger.debug(change)
     appearance_mode = embed_formatter if target[0][1] > 0 else compact_formatter
     if ("actionhidden" in change or "suppressed" in change):  # if event is hidden using suppression
         await appearance_mode("suppressed", change, "", changed_categories, local_wiki, target, _, ngettext, paths)
         return
     if "commenthidden" not in change:
-        LinkParser.feed(change["parsedcomment"])
-        parsed_comment = LinkParser.new_string
-        LinkParser.new_string = ""
+        parsed_comment = parse_link(paths[3], change["parsedcomment"])
         parsed_comment = re.sub(r"(`|_|\*|~|{|}|\|\|)", "\\\\\\1", parsed_comment, 0)
     else:
         parsed_comment = _("~~hidden~~")
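After the comment has gone through `parse_link`, the retained `re.sub` call backslash-escapes Discord markdown control characters so a user-written edit summary cannot break the message formatting. A small standalone check of that exact pattern:

import re

comment = "Use *bold* and `code` {{template}}"
escaped = re.sub(r"(`|_|\*|~|{|}|\|\|)", "\\\\\\1", comment, 0)
print(escaped)  # Use \*bold\* and \`code\` \{\{template\}\}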