mirror of https://gitlab.com/chicken-riders/RcGcDb.git
synced 2025-02-23 00:54:09 +00:00

Added Miraheze IRC quirk from main branch, small fixes and more debug info

This commit is contained in:
parent bf09459ba6
commit 1d0fd1ceea

@@ -77,9 +77,9 @@ class DiscussionsFromHellParser:
		logger.debug(item["attrs"]["id"])
		if item["attrs"]["id"] is not None:
			self.markdown_text = "{old}{img_url}\n".format(old=self.markdown_text, img_url=
				self.post["_embedded"]["contentImages"][int(item["attrs"]["id"])]["url"])
			self.image_last = self.post["_embedded"]["contentImages"][int(item["attrs"]["id"])]["url"]
-	except (IndexError, ValueError):
+	except (IndexError, ValueError, TypeError):
		logger.warning("Image {} not found.".format(item["attrs"]["id"]))
	logger.debug(self.markdown_text)
elif item["type"] == "code_block":
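
The widened except clause matters because int(None) raises TypeError rather than ValueError, so a post whose image id is missing no longer crashes the parser. A minimal standalone sketch of the three failure modes (made-up data, not repository code):

content_images = [{"url": "https://static.example/image0.png"}]

for attr_id in ("0", "7", "not-a-number", None):
    try:
        # IndexError if the id is out of range, ValueError if it is not numeric,
        # TypeError if it is None
        print(attr_id, "->", content_images[int(attr_id)]["url"])
    except (IndexError, ValueError, TypeError) as exc:
        print(attr_id, "->", type(exc).__name__)
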
@@ -73,7 +73,7 @@ def embed_managewiki_delete_group(ctx: Context, change: dict) -> DiscordMessage:
	embed_helper(ctx, embed, change)
	embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
	group = change["title"].split("/")[-1]
-	embed["title"] = ctx._("Deleted a \"{group}\" user group").format(wiki=group)
+	embed["title"] = ctx._("Deleted a \"{group}\" user group").format(group=group)
	return embed
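
The one-keyword fix above works because str.format only fills placeholders whose names match the keyword arguments; passing the value as wiki= leaves {group} unfilled and raises KeyError. A quick plain-Python illustration, unrelated to the bot's Context:

template = 'Deleted a "{group}" user group'
print(template.format(group="bots"))   # Deleted a "bots" user group
try:
    template.format(wiki="bots")       # {group} never receives a value
except KeyError as exc:
    print("KeyError:", exc)            # KeyError: 'group'
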
@@ -35,7 +35,7 @@ from src.api.hook import post_hook

def add_button(message: DiscordMessage, custom_id: str, label, style=2, emoji: Optional[dict] = None):
-	if len(custom_id) > 100 or not len(label):
+	if len(custom_id) > 100 or not len(label) or message is None:
		return
	if "components" not in message.webhook_object:
		message.webhook_object["components"] = [{"type": 1, "components": []}]
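
The new message is None check covers post hooks that produced no Discord message at all; without it, the later message.webhook_object access would raise AttributeError on None. A rough standalone sketch of the same guard on a plain payload dict (hypothetical helper, not the bot's DiscordMessage class):

from typing import Optional

def add_button_sketch(webhook_object: Optional[dict], custom_id: str, label: str) -> None:
    # Discord rejects custom_ids longer than 100 characters and empty labels,
    # and a hook may legitimately have no message to attach a button to.
    if webhook_object is None or len(custom_id) > 100 or not label:
        return
    # Buttons have to sit inside an action row (component type 1).
    row = webhook_object.setdefault("components", [{"type": 1, "components": []}])[0]
    row["components"].append({"type": 2, "custom_id": custom_id, "label": label, "style": 2})

payload: dict = {}
add_button_sketch(payload, "rcgcdb_revert", "Revert")
add_button_sketch(None, "ignored", "Ignored")  # None message: returns quietly, as in the fix above
print(payload)
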
src/bot.py (117 lines changed)

@@ -82,121 +82,6 @@ async def message_sender():
		await send_exception_to_monitoring(ex)

-# async def discussion_handler():
-# await asyncio.sleep(3.0) # Make some time before IRC code is executed, happens only once and saves if inside
-# try:
-# while True:
-# async with db.pool().acquire() as connection:
-# async with connection.transaction():
-# async for db_wiki in connection.cursor("SELECT DISTINCT wiki, rcid, postid FROM rcgcdw WHERE postid != '-1' OR postid IS NULL"):
-# try:
-# local_wiki = all_wikis[db_wiki["wiki"]] # set a reference to a wiki object from memory
-# except KeyError:
-# local_wiki = all_wikis[db_wiki["wiki"]] = Wiki()
-# local_wiki.rc_active = db_wiki["rcid"]
-# if db_wiki["wiki"] not in rcqueue.irc_mapping["fandom.com"].updated_discussions and \
-# local_wiki.last_discussion_check+settings["irc_overtime"] > time.time(): # I swear if another wiki farm ever starts using Fandom discussions I'm gonna use explosion magic
-# continue
-# else:
-# try:
-# rcqueue.irc_mapping["fandom.com"].updated_discussions.remove(db_wiki["wiki"])
-# except KeyError:
-# pass # to be expected
-# header = settings["header"]
-# header["Accept"] = "application/hal+json"
-# async with aiohttp.ClientSession(headers=header,
-# timeout=aiohttp.ClientTimeout(6.0)) as session:
-# try:
-# feeds_response = await local_wiki.fetch_feeds(db_wiki["wiki"], session)
-# except (WikiServerError, WikiError):
-# continue # ignore this wiki if it throws errors
-# try:
-# discussion_feed_resp = await feeds_response.json(encoding="UTF-8")
-# if "error" in discussion_feed_resp:
-# error = discussion_feed_resp["error"]
-# if error == "NotFoundException": # Discussions disabled
-# if db_wiki["rcid"] != -1: # RC feed is disabled
-# await connection.execute("UPDATE rcgcdw SET postid = $1 WHERE wiki = $2", "-1", db_wiki["wiki"])
-# else:
-# await local_wiki.remove(db_wiki["wiki"], 1000)
-# continue
-# raise WikiError
-# discussion_feed = discussion_feed_resp["_embedded"]["doc:posts"]
-# discussion_feed.reverse()
-# except aiohttp.ContentTypeError:
-# logger.exception("Wiki seems to be resulting in non-json content.")
-# continue
-# except asyncio.TimeoutError:
-# logger.debug("Timeout on reading JSON of discussion post feeed.")
-# continue
-# except:
-# logger.exception("On loading json of response.")
-# continue
-# if db_wiki["postid"] is None: # new wiki, just get the last post to not spam the channel
-# if len(discussion_feed) > 0:
-# DBHandler.add(db_wiki["wiki"], discussion_feed[-1]["id"], True)
-# else:
-# DBHandler.add(db_wiki["wiki"], "0", True)
-# continue
-# comment_events = []
-# targets = await generate_targets(db_wiki["wiki"], "AND NOT postid = '-1'")
-# for post in discussion_feed:
-# if post["_embedded"]["thread"][0]["containerType"] == "ARTICLE_COMMENT" and post["id"] > db_wiki["postid"]:
-# comment_events.append(post["forumId"])
-# comment_pages: dict = {}
-# if comment_events:
-# try:
-# comment_pages = await local_wiki.safe_request(
-# "{wiki}wikia.php?controller=FeedsAndPosts&method=getArticleNamesAndUsernames&stablePageIds={pages}&format=json".format(
-# wiki=db_wiki["wiki"], pages=",".join(comment_events)
-# ), RateLimiter(), "articleNames")
-# except aiohttp.ClientResponseError: # Fandom can be funny sometimes... See #30
-# comment_pages = None
-# except:
-# if command_line_args.debug:
-# logger.exception("Exception on Feeds article comment request")
-# shutdown(loop=asyncio.get_event_loop())
-# else:
-# logger.exception("Exception on Feeds article comment request")
-# await generic_msg_sender_exception_logger(traceback.format_exc(),
-# "Exception on Feeds article comment request",
-# Post=str(post)[0:1000], Wiki=db_wiki["wiki"])
-# message_list = defaultdict(list)
-# for post in discussion_feed: # Yeah, second loop since the comments require an extra request
-# if post["id"] > db_wiki["postid"]:
-# for target in targets.items():
-# try:
-# message = await essential_feeds(post, comment_pages, db_wiki, target)
-# if message is not None:
-# message_list[target[0]].append(message)
-# except asyncio.CancelledError:
-# raise
-# except:
-# if command_line_args.debug:
-# logger.exception("Exception on Feeds formatter")
-# shutdown(loop=asyncio.get_event_loop())
-# else:
-# logger.exception("Exception on Feeds formatter")
-# await generic_msg_sender_exception_logger(traceback.format_exc(), "Exception in feed formatter", Post=str(post)[0:1000], Wiki=db_wiki["wiki"])
-# # Lets stack the messages
-# for messages in message_list.values():
-# messages = stack_message_list(messages)
-# for message in messages:
-# await send_to_discord(message)
-# if discussion_feed:
-# DBHandler.add(db_wiki["wiki"], post["id"], True)
-# await asyncio.sleep(delay=2.0) # hardcoded really doesn't need much more
-# await asyncio.sleep(delay=1.0) # Avoid lock on no wikis
-# except asyncio.CancelledError:
-# pass
-# except:
-# if command_line_args.debug:
-# raise # reraise the issue
-# else:
-# logger.exception("Exception on Feeds formatter")
-# await generic_msg_sender_exception_logger(traceback.format_exc(), "Discussion handler task exception", Wiki=db_wiki["wiki"])


def shutdown(loop, signal=None):
	global main_tasks
	loop.remove_signal_handler(signal)

@@ -210,7 +95,9 @@ def shutdown(loop, signal=None):
		logger.debug("Killing task {}".format(task.get_name()))
		task.cancel()
	try:
+		logger.debug("Awaiting for all of the task to return...")
		loop.run_until_complete(asyncio.gather(*asyncio.all_tasks(loop)))
+		logger.debug("All tasks returned. Shutting down logging and finishing shutdown function.")
	except asyncio.CancelledError:
		loop.stop()
	logger.info("Script has shut down due to signal {}.".format(signal))
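
The new debug lines bracket the point where shutdown waits for every cancelled task to actually finish. A generic sketch of that cancel-and-gather pattern (standalone asyncio, not the bot's shutdown function):

import asyncio

async def worker(name: str) -> None:
    try:
        while True:
            await asyncio.sleep(3600)
    except asyncio.CancelledError:
        print(name, "cancelled")
        raise

async def main() -> None:
    tasks = [asyncio.create_task(worker(f"task-{i}")) for i in range(3)]
    await asyncio.sleep(0)  # let the workers start
    for task in tasks:
        task.cancel()  # request cancellation, as the loop above does
    # gather the cancelled tasks so we only move on once they have all returned;
    # return_exceptions keeps one CancelledError from hiding the others
    await asyncio.gather(*tasks, return_exceptions=True)
    print("all tasks returned")

asyncio.run(main())
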
@@ -107,6 +107,7 @@ class DomainManager:
				for json_part in self.chunkstring(json_string, 7950):
					await connection.execute("select pg_notify('debugresponse', 'DUMP ' || $1 || ' ' || $2);", req_id, json_part)
				await connection.execute("select pg_notify('debugresponse', 'DUMP END ' || $1);", req_id)
				await connection.execute("select pg_notify('debugresponse', 'DUMP ' || $1);", json.dumps(json_object))  # TODO Remove legacy option
			elif split_payload[1] == "SITE" and len(split_payload) > 3:
				logger.info(f"Received {' '.join(split_payload)} on pub/sub. Preparing JSON with data...")
				req_id: str = split_payload[2]
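
The chunked notify exists because PostgreSQL limits a NOTIFY payload to just under 8000 bytes, so the JSON dump is sent as sequential DUMP pieces and closed with a DUMP END marker the listener waits for; the single-payload notify is kept only as the legacy path. A rough sketch of that framing (the chunkstring body here is a guess from the call site, not the DomainManager implementation):

import json
from typing import Iterator

def chunkstring(text: str, length: int) -> Iterator[str]:
    # Yield consecutive slices no longer than `length` characters.
    for start in range(0, len(text), length):
        yield text[start:start + length]

def build_notifications(req_id: str, json_object: dict) -> list[str]:
    payloads = [f"DUMP {req_id} {part}" for part in chunkstring(json.dumps(json_object), 7950)]
    payloads.append(f"DUMP END {req_id}")  # tells the listener the dump is complete
    return payloads

for payload in build_notifications("42", {"wikis": ["https://minecraft.wiki/"]}):
    print(payload[:60])
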
@@ -1,6 +1,7 @@
from __future__ import annotations

import asyncio
+import re
import time
import types
@@ -55,7 +56,7 @@ class AioIRCCat(irc.client_aio.AioSimpleIRCClient):
	def on_pubmsg(self, connection, event):
		self.last_msg = time.time()
		if event.target == self.targets["rc"]:
-			self.parse_fandom_message(' '.join(event.arguments))
+			self.parse_edit_message(' '.join(event.arguments))
		elif event.target == self.targets["discussion"]:
			self.parse_fandom_discussion(' '.join(event.arguments))
@@ -66,9 +67,11 @@ class AioIRCCat(irc.client_aio.AioSimpleIRCClient):
		self.connect(*self.connection_details[0], **self.connection_details[1])
		pass

-	def parse_fandom_message(self, message: str):
+	def parse_edit_message(self, message: str):
		message = message.split("\x035*\x03")
		# print(asyncio.all_tasks())
+		if self.targets["rc"] == "#miraheze-feed":
+			message[0] = re.sub(r"^(\w+)wiki $", "\x0302https://\\1.miraheze.org/w/", message[0])  # Convert miraheze database name to wiki script path
		half = message[0].find("\x0302http")
		if half == -1:
			return
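
This is the Miraheze IRC quirk from the commit title: Fandom's feed already carries a colour-coded URL (the \x0302http marker the next lines search for), while Miraheze's #miraheze-feed channel starts the line with a database name such as testwiki, so the handler rewrites that prefix into a script path first. A quick check of the rewrite (the sample inputs are made up; the regex and colour prefix are the ones used above):

import re

def normalise_wiki_prefix(first_chunk: str) -> str:
    # "testwiki " becomes "\x0302https://test.miraheze.org/w/" so the rest of
    # parse_edit_message can look for the coloured "\x0302http" marker as usual.
    return re.sub(r"^(\w+)wiki $", "\x0302https://\\1.miraheze.org/w/", first_chunk)

print(normalise_wiki_prefix("testwiki ").encode())
print(normalise_wiki_prefix("\x0302https://community.fandom.com/wiki/").encode())  # left untouched
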