Committing broken code

Frisk 2020-07-21 14:15:40 +02:00
parent 217c30d096
commit 76ef334843
No known key found for this signature in database
GPG key ID: 213F7C15068AF8AC
9 changed files with 158 additions and 82 deletions

View file

@@ -7,7 +7,7 @@ from src.misc import get_paths
from src.exceptions import *
from src.database import db_cursor
from collections import defaultdict
from queue_handler import DBHandler
from src.queue_handler import DBHandler
logging.config.dictConfig(settings["logging"])
logger = logging.getLogger("rcgcdb.bot")
@@ -35,7 +35,7 @@ def calculate_delay() -> float:
return min_delay
def generate_targets(wiki_url: str) -> defaultdict[list]:
def generate_targets(wiki_url: str) -> defaultdict:
combinations = defaultdict(list)
for webhook in db_cursor.execute('SELECT ROWID, * FROM rcgcdw WHERE wiki = ?', wiki_url):
# rowid, guild, configid, webhook, wiki, lang, display, rcid, wikiid, postid
@@ -50,13 +50,14 @@ async def wiki_scanner():
for db_wiki in db_cursor.execute('SELECT * FROM rcgcdw GROUP BY wiki'):
extended = False
if db_wiki[3] not in all_wikis:
logger.debug("New wiki: {}".format(wiki[1]))
logger.debug("New wiki: {}".format(db_wiki[3]))
all_wikis[db_wiki[3]] = Wiki()
local_wiki = all_wikis[db_wiki[3]] # set a reference to a wiki object from memory
if local_wiki.mw_messages is None:
extended = True
logger.debug("test")
try:
wiki_response = await local_wiki.fetch_wiki(extended, db_wiki[0])
wiki_response = await local_wiki.fetch_wiki(extended, db_wiki[3])
await local_wiki.check_status(wiki[3], wiki_response.status)
except (WikiServerError, WikiError):
continue # ignore this wiki if it throws errors
@@ -83,9 +84,13 @@ async def wiki_scanner():
for change in recent_changes: # Yeah, second loop since the categories require to be all loaded up
if change["rcid"] < db_wiki[6]:
for target in targets.items():
await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths)
await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths, recent_changes_resp)
if recent_changes:
DBHandler.add(db_wiki[3], change["rcid"])
await asyncio.sleep(delay=calc_delay)
DBHandler.update_db()
async def message_sender():
pass
@@ -95,4 +100,5 @@ async def main_loop():
task1 = asyncio.create_task(wiki_scanner())
task2 = asyncio.create_task(message_sender())
asyncio.run(main_loop())
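The return annotation change above from defaultdict[list] to defaultdict is significant at runtime: annotations are evaluated when the function is defined, and collections.defaultdict does not support subscripting before Python 3.9, so the old annotation raises TypeError on import. A minimal sketch of the grouping pattern generate_targets appears to implement (the row layout is taken from the comment in the diff; the sample rows are hypothetical):

from collections import defaultdict

def generate_targets(rows) -> defaultdict:
    # Group webhook identifiers by (lang, display) so each combination is handled once.
    # Assumed row layout: rowid, guild, configid, webhook, wiki, lang, display, rcid, wikiid, postid
    combinations = defaultdict(list)
    for row in rows:
        combinations[(row[5], row[6])].append(row[3])
    return combinations

# Hypothetical rows, for illustration only
rows = [
    (1, "guild1", 0, "id/token1", "https://example.org/", "en", 0, 0, None, None),
    (2, "guild2", 0, "id/token2", "https://example.org/", "en", 0, 0, None, None),
]
print(dict(generate_targets(rows)))  # {('en', 0): ['id/token1', 'id/token2']}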

View file

@@ -1,7 +1,7 @@
import json, sys, logging
try: # load settings
with open("../settings.json") as sfile:
with open("settings.json") as sfile:
settings = json.load(sfile)
if "user-agent" in settings["header"]:
settings["header"]["user-agent"] = settings["header"]["user-agent"].format(version="1.0") # set the version in the useragent

View file

@@ -5,8 +5,6 @@ from src.database import db_cursor
logger = logging.getLogger("rcgcdb.discord")
reasons = {410: _("wiki deletion"), 404: _("wiki deletion"), 401: _("wiki becoming inaccessible"), 402: _("wiki becoming inaccessible"), 403: _("wiki becoming inaccessible")}
# General functions
class DiscordMessage():
"""A class defining a typical Discord JSON representation of webhook payload."""
@@ -71,7 +69,9 @@ class DiscordMessage():
# User facing webhook functions
def wiki_removal(wiki_id, status):
def wiki_removal(wiki_id, status): # TODO Add lang selector
reasons = {410: _("wiki deletion"), 404: _("wiki deletion"), 401: _("wiki becoming inaccessible"),
402: _("wiki becoming inaccessible"), 403: _("wiki becoming inaccessible")}
reason = reasons.get(status, _("unknown error"))
for observer in db_cursor.execute('SELECT * FROM observers WHERE wiki_id = ?', wiki_id):
DiscordMessage("compact", "webhook/remove", webhook_url=observer[4], content=_("The webhook for {} has been removed due to {}.".format(reason))) # TODO

View file

@@ -4,9 +4,10 @@ import re
import time
import logging
import base64
from config import settings
from src.config import settings
from src.misc import link_formatter, create_article_path, LinkParser, profile_field_name, ContentParser, DiscordMessage, safe_read
from urllib.parse import quote_plus
from src.msgqueue import send_to_discord
# from html.parser import HTMLParser
from bs4 import BeautifulSoup
@@ -20,7 +21,10 @@ from src.i18n import langs
logger = logging.getLogger("rcgcdw.rc_formatters")
#from src.rcgcdw import recent_changes, ngettext, logger, profile_field_name, LinkParser, pull_comment
async def compact_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths):
async def compact_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths,
additional_data=None):
if additional_data is None:
additional_data = {"namespaces": {}, "tags": {}}
WIKI_API_PATH = paths[0]
WIKI_SCRIPT_PATH = paths[1]
WIKI_ARTICLE_PATH = paths[2]
@@ -121,8 +125,8 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c
else:
restriction_description = _(" on namespaces: ")
for namespace in change["logparams"]["restrictions"]["namespaces"]:
if str(namespace) in recent_changes.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
namespaces.append("*{ns}*".format(ns=recent_changes.namespaces[str(namespace)]["*"]))
if str(namespace) in additional_data.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
namespaces.append("*{ns}*".format(ns=additional_data.namespaces[str(namespace)]["*"]))
else:
namespaces.append("*{ns}*".format(ns=namespace))
restriction_description = restriction_description + ", ".join(namespaces)
@@ -177,7 +181,7 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c
content = _("[{author}]({author_url}) edited the {field} on {target} profile. *({desc})*").format(author=author,
author_url=author_url,
target=target,
field=profile_field_name(change["logparams"]['4:section'], False),
field=profile_field_name(change["logparams"]['4:section'], False, _),
desc=BeautifulSoup(change["parsedcomment"], "lxml").get_text())
elif action in ("rights/rights", "rights/autopromote"):
link = link_formatter(create_article_path("User:{user}".format(user=change["title"].split(":")[1]), WIKI_ARTICLE_PATH))
@@ -310,16 +314,18 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c
else:
logger.warning("No entry for {event} with params: {params}".format(event=action, params=change))
return
send_to_discord(DiscordMessage("compact", action, target[1], content=content))
await send_to_discord(DiscordMessage("compact", action, target[1], content=content, wiki=WIKI_SCRIPT_PATH))
async def embed_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths):
async def embed_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths, additional_data=None):
if additional_data is None:
additional_data = {"namespaces": {}, "tags": {}}
WIKI_API_PATH = paths[0]
WIKI_SCRIPT_PATH = paths[1]
WIKI_ARTICLE_PATH = paths[2]
WIKI_JUST_DOMAIN = paths[3]
LinkParser = LinkParser()
embed = DiscordMessage("embed", action, target[1])
embed = DiscordMessage("embed", action, target[1], wiki=WIKI_SCRIPT_PATH)
if parsed_comment is None:
parsed_comment = _("No description provided")
if action != "suppressed":
@@ -350,7 +356,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
embed["title"] = "{redirect}{article} ({new}{minor}{bot}{space}{editsize})".format(redirect="" if "redirect" in change else "", article=change["title"], editsize="+" + str(
editsize) if editsize > 0 else editsize, new=_("(N!) ") if action == "new" else "",
minor=_("m") if action == "edit" and "minor" in change else "", bot=_('b') if "bot" in change else "", space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
if settings["appearance"]["embed"]["show_edit_changes"]:
if target[1] == 3:
if action == "new":
changed_content = await safe_read(await recent_changes.safe_request(
"{wiki}?action=compare&format=json&fromtext=&torev={diff}&topst=1&prop=diff".format(
@@ -362,7 +368,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
wiki=WIKI_API_PATH, diff=change["revid"],oldrev=change["old_revid"]
)), "compare", "*")
if changed_content:
EditDiff = ContentParser()
EditDiff = ContentParser(_)
EditDiff.feed(changed_content)
if EditDiff.small_prev_del:
if EditDiff.small_prev_del.replace("~~", "").isspace():
@@ -422,37 +428,9 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
embed["title"] = _("Reverted a version of {name}").format(name=change["title"])
else:
embed["title"] = _("Uploaded {name}").format(name=change["title"])
if settings["license_detection"]:
article_content = await safe_read(await recent_changes.safe_request(
"{wiki}?action=query&format=json&prop=revisions&titles={article}&rvprop=content".format(
wiki=WIKI_API_PATH, article=quote_plus(change["title"], safe=''))), "query", "pages")
if article_content is None:
logger.warning("Something went wrong when getting license for the image")
return 0
if "-1" not in article_content:
content = list(article_content.values())[0]['revisions'][0]['*']
try:
matches = re.search(re.compile(settings["license_regex"], re.IGNORECASE), content)
if matches is not None:
license = matches.group("license")
else:
if re.search(re.compile(settings["license_regex_detect"], re.IGNORECASE), content) is None:
license = _("**No license!**")
else:
license = "?"
except IndexError:
logger.error(
"Given regex for the license detection is incorrect. It does not have a capturing group called \"license\" specified. Please fix license_regex value in the config!")
license = "?"
except re.error:
logger.error(
"Given regex for the license detection is incorrect. Please fix license_regex or license_regex_detect values in the config!")
license = "?"
if license is not None:
parsed_comment += _("\nLicense: {}").format(license)
if additional_info_retrieved:
embed.add_field(_("Options"), _("([preview]({link}))").format(link=image_direct_url))
if settings["appearance"]["embed"]["embed_images"]:
if target[1] > 1:
embed["image"]["url"] = image_direct_url
elif action == "delete/delete":
link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
@@ -506,8 +484,8 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
else:
restriction_description = _("Blocked from editing pages on following namespaces: ")
for namespace in change["logparams"]["restrictions"]["namespaces"]:
if str(namespace) in recent_changes.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
namespaces.append("*{ns}*".format(ns=recent_changes.namespaces[str(namespace)]["*"]))
if str(namespace) in additional_data.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
namespaces.append("*{ns}*".format(ns=additional_data.namespaces[str(namespace)]["*"]))
else:
namespaces.append("*{ns}*".format(ns=namespace))
restriction_description = restriction_description + ", ".join(namespaces)
@@ -527,22 +505,22 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
embed["title"] = _("Unblocked {blocked_user}").format(blocked_user=user)
elif action == "curseprofile/comment-created":
if settings["appearance"]["embed"]["show_edit_changes"]:
parsed_comment = recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_ARTICLE_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]))
parsed_comment = await recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_API_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]), WIKI_ARTICLE_PATH)
embed["title"] = _("Left a comment on {target}'s profile").format(target=change["title"].split(':')[1]) if change["title"].split(':')[1] != \
change["user"] else _(
"Left a comment on their own profile")
elif action == "curseprofile/comment-replied":
if settings["appearance"]["embed"]["show_edit_changes"]:
parsed_comment = recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_ARTICLE_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]))
parsed_comment = await recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_API_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]), WIKI_ARTICLE_PATH)
embed["title"] = _("Replied to a comment on {target}'s profile").format(target=change["title"].split(':')[1]) if change["title"].split(':')[1] != \
change["user"] else _(
"Replied to a comment on their own profile")
elif action == "curseprofile/comment-edited":
if settings["appearance"]["embed"]["show_edit_changes"]:
parsed_comment = recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_ARTICLE_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]))
parsed_comment = await recent_changes.pull_comment(change["logparams"]["4:comment_id"], WIKI_API_PATH)
link = create_article_path("Special:CommentPermalink/{commentid}".format(commentid=change["logparams"]["4:comment_id"]), WIKI_ARTICLE_PATH)
embed["title"] = _("Edited a comment on {target}'s profile").format(target=change["title"].split(':')[1]) if change["title"].split(':')[1] != \
change["user"] else _(
"Edited a comment on their own profile")
@@ -716,11 +694,11 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
if "tags" in change and change["tags"]:
tag_displayname = []
for tag in change["tags"]:
if tag in recent_changes.tags:
if recent_changes.tags[tag] is None:
if tag in additional_data.tags:
if additional_data.tags[tag] is None:
continue # Ignore hidden tags
else:
tag_displayname.append(recent_changes.tags[tag])
tag_displayname.append(additional_data.tags[tag])
else:
tag_displayname.append(tag)
embed.add_field(_("Tags"), ", ".join(tag_displayname))
@@ -730,4 +708,4 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
del_cat = (_("**Removed**: ") + ", ".join(list(categories["removed"])[0:16]) + ("" if len(categories["removed"])<=15 else _(" and {} more").format(len(categories["removed"])-15))) if categories["removed"] else ""
embed.add_field(_("Changed categories"), new_cat + del_cat)
embed.finish_embed()
send_to_discord(embed)
await send_to_discord(embed)
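Both formatters now take the cached namespace and tag names through an additional_data argument that defaults to {"namespaces": {}, "tags": {}} and is filled in essential_info, replacing lookups on the old recent_changes object. Note that the committed lines read additional_data.namespaces and additional_data.tags, attribute access a plain dict does not support, which fits the commit title. A dict-based lookup sketch using the same "*" key the diff relies on:

def namespace_label(namespace_id, additional_data: dict) -> str:
    # Prefer the cached namespace name; fall back to the raw namespace number
    namespaces = additional_data.get("namespaces", {})
    if str(namespace_id) in namespaces:
        return "*{ns}*".format(ns=namespaces[str(namespace_id)]["*"])
    return "*{ns}*".format(ns=namespace_id)

additional_data = {"namespaces": {"10": {"id": 10, "*": "Template"}}, "tags": {}}
print(namespace_label(10, additional_data))  # *Template*
print(namespace_label(99, additional_data))  # *99*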

View file

@@ -9,14 +9,14 @@ import random
from urllib.parse import urlparse, urlunparse
import math
import aiohttp
profile_fields = {"profile-location": _("Location"), "profile-aboutme": _("About me"), "profile-link-google": _("Google link"), "profile-link-facebook":_("Facebook link"), "profile-link-twitter": _("Twitter link"), "profile-link-reddit": _("Reddit link"), "profile-link-twitch": _("Twitch link"), "profile-link-psn": _("PSN link"), "profile-link-vk": _("VK link"), "profile-link-xbl": _("XBL link"), "profile-link-steam": _("Steam link"), "profile-link-discord": _("Discord handle"), "profile-link-battlenet": _("Battle.net handle")}
logger = logging.getLogger("rcgcdw.misc")
class DiscordMessage():
"""A class defining a typical Discord JSON representation of webhook payload."""
def __init__(self, message_type: str, event_type: str, webhook_url: list, content=None):
def __init__(self, message_type: str, event_type: str, webhook_url: list, wiki, content=None):
self.webhook_object = dict(allowed_mentions={"parse": []}, avatar_url=settings["avatars"].get(message_type, ""))
self.webhook_url = webhook_url
self.wiki = wiki
if message_type == "embed":
self.__setup_embed()
@@ -76,9 +76,46 @@ class DiscordMessage():
def set_name(self, name):
self.webhook_object["username"] = name
async def send_to_discord_webhook(data: DiscordMessage, session: aiohttp.ClientSession):
header = settings["header"]
header['Content-Type'] = 'application/json'
for webhook in data.webhook_url:
try:
result = await session.post("https://discord.com/api/webhooks/"+webhook, data=repr(data),
headers=header)
except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
logger.exception("Could not send the message to Discord")
return 3
return await handle_discord_http(result.status, repr(data), await result.text())
async def handle_discord_http(code, formatted_embed, result):
if 300 > code > 199: # message went through
return 0
elif code == 400: # HTTP BAD REQUEST result.status_code, data, result, header
logger.error(
"Following message has been rejected by Discord, please submit a bug on our bugtracker adding it:")
logger.error(formatted_embed)
logger.error(result.text)
return 1
elif code == 401 or code == 404: # HTTP UNAUTHORIZED AND NOT FOUND
logger.error("Webhook URL is invalid or no longer in use, please replace it with proper one.")
return 1
elif code == 429:
logger.error("We are sending too many requests to the Discord, slowing down...")
return 2
elif 499 < code < 600:
logger.error(
"Discord have trouble processing the event, and because the HTTP code returned is {} it means we blame them.".format(
code))
return 3
def get_paths(wiki: str, request) -> tuple:
parsed_url = urlparse(wiki)
WIKI_API_PATH = wiki + request["query"]["general"]["scriptpath"] + "/api.php"
WIKI_API_PATH = wiki + request["query"]["general"]["scriptpath"] + "api.php"
WIKI_SCRIPT_PATH = wiki
WIKI_ARTICLE_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + request["query"]["general"]["articlepath"]
WIKI_JUST_DOMAIN = urlunparse((*parsed_url[0:2], "", "", "", ""))
@@ -140,7 +177,15 @@ def create_article_path(article: str, WIKI_ARTICLE_PATH: str) -> str:
return WIKI_ARTICLE_PATH.replace("$1", article)
def profile_field_name(name, embed):
def profile_field_name(name, embed, _):
profile_fields = {"profile-location": _("Location"), "profile-aboutme": _("About me"),
"profile-link-google": _("Google link"), "profile-link-facebook": _("Facebook link"),
"profile-link-twitter": _("Twitter link"), "profile-link-reddit": _("Reddit link"),
"profile-link-twitch": _("Twitch link"), "profile-link-psn": _("PSN link"),
"profile-link-vk": _("VK link"), "profile-link-xbl": _("XBL link"),
"profile-link-steam": _("Steam link"), "profile-link-discord": _("Discord handle"),
"profile-link-battlenet": _("Battle.net handle")}
try:
return profile_fields[name]
except KeyError:
@@ -151,14 +196,17 @@ def profile_field_name(name, embed):
class ContentParser(HTMLParser):
more = _("\n__And more__")
current_tag = ""
small_prev_ins = ""
small_prev_del = ""
ins_length = len(more)
del_length = len(more)
added = False
def __init__(self, _):
super().__init__()
self.more = _("\n__And more__")
self.ins_length = len(self.more)
self.del_length = len(self.more)
def handle_starttag(self, tagname, attribs):
if tagname == "ins" or tagname == "del":
self.current_tag = tagname
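The new send_to_discord_webhook and handle_discord_http map each delivery attempt onto a small return-code scheme that MessageQueue.resend_msgs later checks with < 2: 0 means delivered, 1 a permanently rejected message or dead webhook, 2 a rate limit, 3 connection or Discord-side trouble. A condensed sketch of that mapping with aiohttp, assuming the same codes:

import aiohttp

async def post_webhook(session: aiohttp.ClientSession, webhook: str, payload: str) -> int:
    try:
        resp = await session.post("https://discord.com/api/webhooks/" + webhook,
                                  data=payload, headers={"Content-Type": "application/json"})
    except (aiohttp.ClientConnectionError, aiohttp.ServerConnectionError):
        return 3  # could not reach Discord at all
    if 200 <= resp.status < 300:
        return 0  # message went through
    if resp.status in (400, 401, 404):
        return 1  # rejected payload or invalid webhook URL: drop the message
    if resp.status == 429:
        return 2  # rate limited: slow down before retrying
    return 3  # 5xx and anything else: Discord-side trouble, retry later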

View file

@@ -1,10 +1,13 @@
import asyncio, logging
import asyncio, logging, aiohttp
from src.misc import send_to_discord_webhook
from src.config import settings
logger = logging.getLogger("rcgcdw.msgqueue")
class MessageQueue:
"""Message queue class for undelivered messages"""
def __init__(self):
self._queue = []
self.session = None
def __repr__(self):
return self._queue
@@ -24,7 +27,12 @@ class MessageQueue:
def cut_messages(self, item_num):
self._queue = self._queue[item_num:]
async def create_session(self):
self.session = aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(5.0))
async def resend_msgs(self):
if self.session is None:
await self.create_session()
if self._queue:
logger.info(
"{} messages waiting to be delivered to Discord due to Discord throwing errors/no connection to Discord servers.".format(
@@ -33,9 +41,9 @@ class MessageQueue:
logger.debug(
"Trying to send a message to Discord from the queue with id of {} and content {}".format(str(num),
str(item)))
if send_to_discord_webhook(item) < 2:
if await send_to_discord_webhook(item, self.session) < 2:
logger.debug("Sending message succeeded")
await asyncio.sleep(2.5)
await asyncio.sleep(1.5)
else:
logger.debug("Sending message failed")
break
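resend_msgs now builds the aiohttp.ClientSession lazily through create_session instead of relying on a session made at import time; a ClientSession should be created while the event loop is running, so deferring construction to the first coroutine call avoids tying it to the wrong loop. A minimal sketch of that lazy pattern:

import aiohttp

class MessageQueue:
    def __init__(self):
        self._queue = []
        self.session = None  # created lazily, once an event loop is running

    async def _ensure_session(self) -> aiohttp.ClientSession:
        if self.session is None:
            self.session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5.0))
        return self.session

    async def resend_msgs(self):
        session = await self._ensure_session()
        # ... deliver queued messages with `session` here ...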

View file

@@ -1,4 +1,5 @@
import logging
from src.database import db_cursor, db_connection
logger = logging.getLogger("rcgcdb.queue_handler")
@@ -14,6 +15,9 @@ class UpdateDB():
def update_db(self):
for update in self.updated:
db_cursor.execute("UPDATE rcgcdw SET rcid = ? WHERE wiki = ?", update[1], update[0])
db_connection.commit()
self.clear_list()
DBHandler = UpdateDB()
DBHandler = UpdateDB()
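The added db_connection.commit() is what actually persists the buffered rcid updates. Separately, sqlite3 expects query parameters as a single sequence, so execute("UPDATE ... ?", update[1], update[0]) passes them as two positional arguments, again in line with the commit title. A minimal sketch of the intended flush:

import sqlite3

def flush_updates(db_connection: sqlite3.Connection, updated: list):
    cur = db_connection.cursor()
    for wiki, rcid in updated:
        # Both placeholders are bound from one parameter tuple
        cur.execute("UPDATE rcgcdw SET rcid = ? WHERE wiki = ?", (rcid, wiki))
    db_connection.commit()  # persist every buffered update at once
    updated.clear()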

View file

@@ -1,4 +1,8 @@
import aiohttp
from src.config import settings
session = None
session = aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(5.0))
async def start_session():
global session
session = aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(5.0))
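Replacing the module-level ClientSession with a start_session() coroutine has the same motivation: the shared session must be created inside the running event loop. A usage sketch, assuming the bot awaits it during startup before any request is made:

import asyncio
from src.session import start_session

async def main_loop():
    await start_session()  # create the shared ClientSession inside the running loop
    # ... then start wiki_scanner() and message_sender() tasks ...

asyncio.run(main_loop())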

View file

@@ -1,26 +1,32 @@
from dataclasses import dataclass
from src.session import session
import re
import logging, aiohttp
from src.exceptions import *
from src.database import db_cursor, db_connection
from src.formatters.rc import embed_formatter, compact_formatter
from src.misc import LinkParser, RecentChangesClass
from i18n import langs
from src.misc import LinkParser
from src.i18n import langs
import src.discord
from src.config import settings
from bs4 import BeautifulSoup
logger = logging.getLogger("rcgcdb.wiki")
supported_logs = ["protect/protect", "protect/modify", "protect/unprotect", "upload/overwrite", "upload/upload", "delete/delete", "delete/delete_redir", "delete/restore", "delete/revision", "delete/event", "import/upload", "import/interwiki", "merge/merge", "move/move", "move/move_redir", "protect/move_prot", "block/block", "block/unblock", "block/reblock", "rights/rights", "rights/autopromote", "abusefilter/modify", "abusefilter/create", "interwiki/iw_add", "interwiki/iw_edit", "interwiki/iw_delete", "curseprofile/comment-created", "curseprofile/comment-edited", "curseprofile/comment-deleted", "curseprofile/comment-purged", "curseprofile/profile-edited", "curseprofile/comment-replied", "contentmodel/change", "sprite/sprite", "sprite/sheet", "sprite/slice", "managetags/create", "managetags/delete", "managetags/activate", "managetags/deactivate", "tag/update", "cargo/createtable", "cargo/deletetable", "cargo/recreatetable", "cargo/replacetable", "upload/revert"]
@dataclass
class Wiki:
mw_messages: int = None
fail_times: int = 0 # corresponding to amount of times connection with wiki failed for client reasons (400-499)
session: aiohttp.ClientSession = None
async def create_session(self):
self.session = aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(5.0))
async def fetch_wiki(self, extended, script_path) -> aiohttp.ClientResponse:
if self.session is None:
await self.create_session()
url_path = script_path + "api.php"
amount = 20
if extended:
@@ -38,16 +44,15 @@ class Wiki:
"rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user",
"rclimit": amount, "rctype": "edit|new|log|external", "siprop": "namespaces|general"}
try:
response = await session.get(url_path, params=params)
response = await self.session.get(url_path, params=params)
except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
logger.exception("A connection error occurred while requesting {}".format(url_path))
raise WikiServerError
return response
@staticmethod
async def safe_request(url):
async def safe_request(self, url):
try:
request = await session.get(url, timeout=5, allow_redirects=False)
request = await self.session.get(url, timeout=5, allow_redirects=False)
request.raise_for_status()
except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
logger.exception("Reached connection error for request on link {url}".format(url=url))
@@ -76,6 +81,23 @@ class Wiki:
logger.warning("{} rows affected by DELETE FROM rcgcdw WHERE wiki = {}".format(db_cursor.rowcount, wiki_id))
db_connection.commit()
async def pull_comment(self, comment_id, WIKI_API_PATH):
try:
comment = await self.safe_request(
"{wiki}?action=comment&do=getRaw&comment_id={comment}&format=json".format(wiki=WIKI_API_PATH,
comment=comment_id)).json()[
"text"]
logger.debug("Got the following comment from the API: {}".format(comment))
except (TypeError, AttributeError):
logger.exception("Could not resolve the comment text.")
except KeyError:
logger.exception("CurseProfile extension API did not respond with a valid comment content.")
else:
if len(comment) > 1000:
comment = comment[0:1000] + "…"
return comment
return ""
async def process_cats(event: dict, local_wiki: Wiki, category_msgs: dict, categorize_events: dict):
if event["type"] == "categorize":
@@ -134,7 +156,7 @@ async def process_mwmsgs(wiki_response: dict, local_wiki: Wiki, mw_msgs: dict):
mw_msgs[key] = msgs # it may be a little bit messy for sure, however I don't expect any reason to remove mw_msgs entries by one
local_wiki.mw_messages = key
async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_wiki: tuple, target: tuple, paths: tuple):
async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_wiki: tuple, target: tuple, paths: tuple, request: dict):
"""Prepares essential information for both embed and compact message format."""
def _(string: str) -> str:
"""Our own translation string to make it compatible with async"""
@@ -175,4 +197,10 @@ async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_
else:
logger.warning("This event is not implemented in the script. Please make an issue on the tracker attaching the following info: wiki url, time, and this information: {}".format(change))
return
await appearance_mode(identification_string, change, parsed_comment, changed_categories, local_wiki, target, _, ngettext, paths)
additional_data = {"namespaces": request["query"]["namespaces"], "tags": {}}
for tag in request["query"]["tags"]:
try:
additional_data["tags"][tag["name"]] = (BeautifulSoup(tag["displayname"], "lxml")).get_text()
except KeyError:
additional_data["tags"][tag["name"]] = None # Tags with no displ
await appearance_mode(identification_string, change, parsed_comment, changed_categories, local_wiki, target, _, ngettext, paths, additional_data=additional_data)
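essential_info now receives the raw recentchanges response and builds additional_data from it: namespace data straight from query.namespaces, and tag display names stripped of HTML with BeautifulSoup, with hidden tags (no displayname key) stored as None so the formatters can skip them. A standalone sketch of that extraction; the API fragment is made up and the lxml parser mirrors the one used in the diff:

from bs4 import BeautifulSoup

def build_additional_data(request: dict) -> dict:
    additional_data = {"namespaces": request["query"]["namespaces"], "tags": {}}
    for tag in request["query"]["tags"]:
        try:
            # displayname can contain markup; keep only its text
            additional_data["tags"][tag["name"]] = BeautifulSoup(tag["displayname"], "lxml").get_text()
        except KeyError:
            additional_data["tags"][tag["name"]] = None  # hidden tag, no display name
    return additional_data

# Hypothetical API fragment, for illustration only
request = {"query": {"namespaces": {"10": {"id": 10, "*": "Template"}},
                     "tags": [{"name": "mw-new-redirect", "displayname": "<i>New redirect</i>"},
                              {"name": "hidden-tag"}]}}
print(build_additional_data(request)["tags"])  # {'mw-new-redirect': 'New redirect', 'hidden-tag': None}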