Mirror of https://gitlab.com/chicken-riders/RcGcDb.git (synced 2025-02-23 00:54:09 +00:00)
I don't believe this either but code has been added

parent 6caa03153a
commit 217c30d096
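Read together, the hunks below thread a per-target webhook (target[1]) through the formatters instead of the global settings["webhookURL"], and turn the HTTP helpers into coroutines (safe_read in src/misc.py, Wiki.safe_request in src/wiki.py). A minimal sketch of how a target item appears to be shaped; the concrete layout is inferred from the subscripting in the diff and is an illustration, not code from the repo:

# Inferred from `for target in targets.items()`, `langs[target[0][0]]`,
# `target[0][1] > 0` and `DiscordMessage(..., target[1])` in the hunks below.
targets = {
    ("en", 1): ["https://discord.com/api/webhooks/111/aaa"],  # (language, display mode) -> webhook URL(s)
    ("de", 0): ["https://discord.com/api/webhooks/222/bbb"],
}

for target in targets.items():
    lang_code = target[0][0]      # selects the translation via langs[...]
    display_mode = target[0][1]   # > 0 picks embed_formatter, otherwise compact_formatter
    webhooks = target[1]          # handed straight to DiscordMessage (its hint changes to list below)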
@@ -84,13 +84,13 @@ async def wiki_scanner():
             if change["rcid"] < db_wiki[6]:
                 for target in targets.items():
                     await essential_info(change, categorize_events, local_wiki, db_wiki, target, paths)
 
 
         await asyncio.sleep(delay=calc_delay)
 
 
 async def message_sender():
     pass
 
 
 async def main_loop():
     task1 = asyncio.create_task(wiki_scanner())
     task2 = asyncio.create_task(message_sender())
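The hunk above only shows the task creation in main_loop; how the two tasks are awaited and how the loop is started is not visible here. A rough sketch of a typical driver for this pattern, reusing the coroutine names from the hunk (the gather/run wiring is an assumption, not part of the commit):

import asyncio

async def run_both():
    scanner = asyncio.create_task(wiki_scanner())   # polls wikis and dispatches events per target
    sender = asyncio.create_task(message_sender())  # drains the queued Discord messages
    await asyncio.gather(scanner, sender)

if __name__ == "__main__":
    asyncio.run(run_both())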
@@ -5,7 +5,7 @@ import time
 import logging
 import base64
 from config import settings
-from src.misc import link_formatter, create_article_path, LinkParser, profile_field_name, ContentParser, DiscordMessage
+from src.misc import link_formatter, create_article_path, LinkParser, profile_field_name, ContentParser, DiscordMessage, safe_read
 from urllib.parse import quote_plus
 # from html.parser import HTMLParser
 
@@ -20,7 +20,7 @@ from src.i18n import langs
 logger = logging.getLogger("rcgcdw.rc_formatters")
 #from src.rcgcdw import recent_changes, ngettext, logger, profile_field_name, LinkParser, pull_comment
 
-async def compact_formatter(action, change, parsed_comment, categories, recent_changes, _, ngettext, paths):
+async def compact_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths):
     WIKI_API_PATH = paths[0]
     WIKI_SCRIPT_PATH = paths[1]
     WIKI_ARTICLE_PATH = paths[2]
@@ -310,16 +310,16 @@ async def compact_formatter(action, change, parsed_comment, categories, recent_c
     else:
         logger.warning("No entry for {event} with params: {params}".format(event=action, params=change))
         return
-    send_to_discord(DiscordMessage("compact", action, settings["webhookURL"], content=content))
+    send_to_discord(DiscordMessage("compact", action, target[1], content=content))
 
 
-async def embed_formatter(action, change, parsed_comment, categories, recent_changes, _, ngettext, paths):
+async def embed_formatter(action, change, parsed_comment, categories, recent_changes, target, _, ngettext, paths):
     WIKI_API_PATH = paths[0]
     WIKI_SCRIPT_PATH = paths[1]
     WIKI_ARTICLE_PATH = paths[2]
     WIKI_JUST_DOMAIN = paths[3]
     LinkParser = LinkParser()
-    embed = DiscordMessage("embed", action, settings["webhookURL"])
+    embed = DiscordMessage("embed", action, target[1])
     if parsed_comment is None:
         parsed_comment = _("No description provided")
     if action != "suppressed":
@@ -352,12 +352,12 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
             minor=_("m") if action == "edit" and "minor" in change else "", bot=_('b') if "bot" in change else "", space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
         if settings["appearance"]["embed"]["show_edit_changes"]:
             if action == "new":
-                changed_content = safe_read(recent_changes.safe_request(
+                changed_content = await safe_read(await recent_changes.safe_request(
                     "{wiki}?action=compare&format=json&fromtext=&torev={diff}&topst=1&prop=diff".format(
                         wiki=WIKI_API_PATH, diff=change["revid"]
                     )), "compare", "*")
             else:
-                changed_content = safe_read(recent_changes.safe_request(
+                changed_content = await safe_read(await recent_changes.safe_request(
                     "{wiki}?action=compare&format=json&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
                         wiki=WIKI_API_PATH, diff=change["revid"],oldrev=change["old_revid"]
                     )), "compare", "*")
@@ -383,7 +383,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
             logger.warning("Unable to download data on the edit content!")
     elif action in ("upload/overwrite", "upload/upload", "upload/revert"): # sending files
         license = None
-        urls = safe_read(recent_changes.safe_request(
+        urls = await safe_read(await recent_changes.safe_request(
             "{wiki}?action=query&format=json&prop=imageinfo&list=&meta=&titles={filename}&iiprop=timestamp%7Curl%7Carchivename&iilimit=5".format(
                 wiki=WIKI_API_PATH, filename=change["title"])), "query", "pages")
         link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
@@ -423,7 +423,7 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         else:
             embed["title"] = _("Uploaded {name}").format(name=change["title"])
             if settings["license_detection"]:
-                article_content = safe_read(recent_changes.safe_request(
+                article_content = await safe_read(await recent_changes.safe_request(
                     "{wiki}?action=query&format=json&prop=revisions&titles={article}&rvprop=content".format(
                         wiki=WIKI_API_PATH, article=quote_plus(change["title"], safe=''))), "query", "pages")
                 if article_content is None:
src/misc.py (19 changes)
@@ -8,12 +8,13 @@ from collections import defaultdict
 import random
 from urllib.parse import urlparse, urlunparse
 import math
+import aiohttp
 profile_fields = {"profile-location": _("Location"), "profile-aboutme": _("About me"), "profile-link-google": _("Google link"), "profile-link-facebook":_("Facebook link"), "profile-link-twitter": _("Twitter link"), "profile-link-reddit": _("Reddit link"), "profile-link-twitch": _("Twitch link"), "profile-link-psn": _("PSN link"), "profile-link-vk": _("VK link"), "profile-link-xbl": _("XBL link"), "profile-link-steam": _("Steam link"), "profile-link-discord": _("Discord handle"), "profile-link-battlenet": _("Battle.net handle")}
 logger = logging.getLogger("rcgcdw.misc")
 
 class DiscordMessage():
     """A class defining a typical Discord JSON representation of webhook payload."""
-    def __init__(self, message_type: str, event_type: str, webhook_url: str, content=None):
+    def __init__(self, message_type: str, event_type: str, webhook_url: list, content=None):
         self.webhook_object = dict(allowed_mentions={"parse": []}, avatar_url=settings["avatars"].get(message_type, ""))
         self.webhook_url = webhook_url
 
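Since the __init__ hint above now reads webhook_url: list, a single DiscordMessage can presumably carry several webhooks for one event. A hypothetical construction (URLs and event type are placeholders):

msg = DiscordMessage("embed", "edit", ["https://discord.com/api/webhooks/111/aaa",
                                       "https://discord.com/api/webhooks/222/bbb"])
send_to_discord(msg)  # queued through MessageQueue, see the send_to_discord hunk further down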
@@ -218,6 +219,22 @@ class ContentParser(HTMLParser):
         self.current_tag = ""
 
 
+async def safe_read(request: aiohttp.ClientResponse, *keys):
+    if request is None:
+        return None
+    try:
+        request = await request.json(encoding="UTF-8")
+        for item in keys:
+            request = request[item]
+    except KeyError:
+        logger.warning(
+            "Failure while extracting data from request on key {key} in {change}".format(key=item, change=request))
+        return None
+    except aiohttp.ClientResponseError:
+        logger.warning("Failure while extracting data from request in {change}".format(change=request))
+        return None
+    return request
+
 # class RecentChangesClass():
 # 	"""Store verious data and functions related to wiki and fetching of Recent Changes"""
 # 	def __init__(self):
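A minimal usage sketch (not part of the commit) of how the formatters consume the new coroutine; the endpoint is illustrative and `wiki` stands in for an object exposing the new safe_request helper:

from src.misc import safe_read

async def example(wiki, api_url):
    # safe_request returns an aiohttp.ClientResponse (or None on connection errors);
    # safe_read then parses the JSON body and walks the given keys, returning None
    # if the response is missing, malformed, or lacks one of the keys.
    return await safe_read(await wiki.safe_request(
        "{wiki}?action=query&format=json&meta=siteinfo".format(wiki=api_url)), "query", "general")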
@@ -47,3 +47,7 @@ class MessageQueue:
 
 
 messagequeue = MessageQueue()
+
+
+async def send_to_discord(msg):
+    messagequeue.add_message(msg)
src/wiki.py (17 changes)
@@ -44,6 +44,17 @@ class Wiki:
             raise WikiServerError
         return response
 
+    @staticmethod
+    async def safe_request(url):
+        try:
+            request = await session.get(url, timeout=5, allow_redirects=False)
+            request.raise_for_status()
+        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
+            logger.exception("Reached connection error for request on link {url}".format(url=url))
+            return None
+        else:
+            return request
+
     async def check_status(self, wiki_url, status):
         if 199 < status < 300:
             self.fail_times = 0
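A short usage sketch for the new static helper; the query string is illustrative and the import path is assumed from the file header above. Note that safe_request relies on a `session` object (an aiohttp client session) that is not defined in this hunk:

from src.wiki import Wiki  # assumed import path (src/wiki.py per the header above)

async def fetch_recent_changes(api_url):
    response = await Wiki.safe_request(
        "{wiki}?action=query&format=json&list=recentchanges&rclimit=20".format(wiki=api_url))
    if response is None:  # connection error or timeout, already logged by safe_request
        return None
    return await response.json(encoding="UTF-8")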
@@ -131,12 +142,12 @@ async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_
 
     lang = langs[target[0][0]]
     ngettext = lang.ngettext
-    recent_changes = RecentChangesClass() # TODO Look into replacing RecentChangesClass with local_wiki
+    # recent_changes = RecentChangesClass() # TODO Look into replacing RecentChangesClass with local_wiki
     LinkParser = LinkParser("domain")
     logger.debug(change)
     appearance_mode = embed_formatter if target[0][1] > 0 else compact_formatter
     if ("actionhidden" in change or "suppressed" in change): # if event is hidden using suppression
-        await appearance_mode("suppressed", change, "", changed_categories, recent_changes, target, _, ngettext, paths)
+        await appearance_mode("suppressed", change, "", changed_categories, local_wiki, target, _, ngettext, paths)
         return
     if "commenthidden" not in change:
         LinkParser.feed(change["parsedcomment"])
@@ -164,4 +175,4 @@ async def essential_info(change: dict, changed_categories, local_wiki: Wiki, db_
     else:
         logger.warning("This event is not implemented in the script. Please make an issue on the tracker attaching the following info: wiki url, time, and this information: {}".format(change))
         return
-    await appearance_mode(identification_string, change, parsed_comment, changed_categories, recent_changes, target, _, ngettext, paths)
+    await appearance_mode(identification_string, change, parsed_comment, changed_categories, local_wiki, target, _, ngettext, paths)