RcGcDb (mirror of https://gitlab.com/chicken-riders/RcGcDb.git)

Finished DUMP functionality
Commit d5d0c0a6e6, parent 0ac5f8b824
@@ -23,7 +23,6 @@ from src.api.context import Context
 from src.discord.message import DiscordMessage, DiscordMessageMetadata
 from src.api import formatter
-
 
 logger = logging.getLogger("rcgcdw.discussion_formatter")
 
 
@@ -62,7 +61,8 @@ class DiscussionsFromHellParser:
                                                  suf=suffix)
            else:
                if ctype == "code_block":
-                    self.markdown_text += item["text"]  # ignore formatting on preformatted text which cannot have additional formatting anyways
+                    self.markdown_text += item[
+                        "text"]  # ignore formatting on preformatted text which cannot have additional formatting anyways
                else:
                    self.markdown_text += sanitize_to_markdown(item["text"])
        elif item["type"] == "paragraph":
@@ -132,6 +132,7 @@ def common_discussions(post: dict, embed: DiscordMessage, ctx: Context):
     embed["timestamp"] = datetime.datetime.fromtimestamp(post["creationDate"]["epochSecond"],
                                                           tz=datetime.timezone.utc).isoformat()
 
+
 # discussion/forum - Discussions on the "forum" available via "Discuss" button
 
 @formatter.embed(event="discussion/forum")
@@ -143,7 +144,7 @@ def embed_discussion_forum(ctx: Context, post: dict):
     author = post["createdBy"]["name"]
     embed.set_author(author, "{url}f/u/{creatorId}".format(url=ctx.client.WIKI_SCRIPT_PATH,
                                                            creatorId=post["creatorId"]),
                      icon_url=post["createdBy"]["avatarUrl"])
     if not post["isReply"]:
         embed["url"] = "{url}f/p/{threadId}".format(url=ctx.client.WIKI_SCRIPT_PATH,
                                                     threadId=post["threadId"])
@@ -180,10 +181,13 @@ def embed_discussion_forum(ctx: Context, post: dict):
        if post["_embedded"]["thread"][0]["tags"]:
            tag_displayname = []
            for tag in post["_embedded"]["thread"][0]["tags"]:
-                tag_displayname.append("[{title}]({url})".format(title=tag["articleTitle"], url=ctx.client.create_article_path(
-                    quote_plus(tag["articleTitle"].replace(" ", "_"), "/:?=&"))))
+                tag_displayname.append(
+                    "[{title}]({url})".format(title=tag["articleTitle"], url=ctx.client.create_article_path(
+                        quote_plus(tag["articleTitle"].replace(" ", "_"), "/:?=&"))))
            if len(", ".join(tag_displayname)) > 1000:
-                embed.add_field(ctx.pgettext("Fandom discussions Tags/Forums", "Tags"), ctx.pgettext("Fandom discussions amount of Tags/Forums", "{} tags").format(len(post["_embedded"]["thread"][0]["tags"])))
+                embed.add_field(ctx.pgettext("Fandom discussions Tags/Forums", "Tags"),
+                                ctx.pgettext("Fandom discussions amount of Tags/Forums", "{} tags").format(
+                                    len(post["_embedded"]["thread"][0]["tags"])))
            else:
                embed.add_field(ctx.pgettext("Fandom discussions Tags/Forums", "Tags"), ", ".join(tag_displayname))
        else:
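The length check above guards Discord's per-field size limit (1024 characters per embed field value): when the joined tag links would exceed the 1000-character safety margin, the formatter falls back to a plain tag count. A standalone sketch of that decision, with made-up tag links:

    # Hedged sketch of the tag-field fallback above; 1000 is the safety margin from
    # the diff (Discord's hard limit per embed field value is 1024 characters).
    tag_displayname = ["[Tag {i}](https://example.fandom.com/wiki/Tag_{i})".format(i=i) for i in range(40)]
    joined = ", ".join(tag_displayname)
    if len(joined) > 1000:
        field_value = "{} tags".format(len(tag_displayname))
    else:
        field_value = joined
    print(field_value)  # prints "40 tags" because the joined links exceed the margin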
@@ -230,6 +234,7 @@ def compact_discussion_forum(ctx: Context, post: dict):
                            forumName=post["forumName"])
     return DiscordMessage("compact", event_type, ctx.webhook_url, content=message)
 
+
 # discussion/wall - Wall posts/replies
 
 
@@ -246,7 +251,7 @@ def compact_author_discussions(post: dict, ctx: Context):
        author_url = clean_link(ctx.client.create_article_path("User:{user}".format(user=author)))
    else:
        author_url = "<{url}f/u/{creatorId}>".format(url=ctx.client.WIKI_SCRIPT_PATH,
                                                     creatorId=post["creatorId"])
    return author, author_url
 
 
@@ -282,7 +287,8 @@ def embed_discussion_wall(ctx: Context, post: dict):
         embed["url"] = "{url}wiki/Message_Wall:{user_wall}?threadId={threadId}".format(
             url=ctx.client.WIKI_SCRIPT_PATH, user_wall=quote_plus(user_wall.replace(" ", "_")),
             threadId=post["threadId"])
-        embed["title"] = ctx._("Created \"{title}\" on {user}'s Message Wall").format(title=post["title"], user=user_wall)
+        embed["title"] = ctx._("Created \"{title}\" on {user}'s Message Wall").format(title=post["title"],
+                                                                                      user=user_wall)
     else:
         embed.event_type = "discussion/wall/reply"
         embed["url"] = "{url}wiki/Message_Wall:{user_wall}?threadId={threadId}#{replyId}".format(
@@ -314,6 +320,7 @@ def compact_discussion_wall(ctx: Context, post: dict):
             user_wall=quote_plus(user_wall.replace(" ", "_")), threadId=post["threadId"], replyId=post["id"])
     return DiscordMessage("compact", event_type, ctx.webhook_url, content=message)
 
+
 # discussion/article_comment - Article comments
 
 
@@ -345,7 +352,7 @@ def compact_discussion_article_comment(ctx: Context, post: dict):
     article_paths = ctx.comment_page
     if article_paths is None:
         article_paths = {"title": ctx._("unknown"), "fullUrl": ctx.client.WIKI_SCRIPT_PATH}  # No page known
-    article_paths["fullUrl"] = article_paths["fullUrl"].replace(")", "\)").replace("()", "\(")
+    article_paths["fullUrl"] = article_paths["fullUrl"].replace(")", "\\)").replace("()", "\\(")
     if not post["isReply"]:
         event_type = "discussion/comment/post"
         message = ctx._(
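The functional part of this hunk is the escape change: in Python source, "\)" and "\\)" denote the same two-character string (the backslash is kept because \) is not a recognized escape), so the rewrite only avoids the invalid-escape-sequence warning that newer CPython versions emit. A minimal, standalone check:

    # Both spellings yield a backslash followed by ")"; only the warning differs.
    old_style = "\)"     # triggers SyntaxWarning/DeprecationWarning on modern CPython
    new_style = "\\)"    # explicit backslash, no warning
    assert old_style == new_style == chr(92) + ")"
    print(new_style)     # \)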
@@ -44,6 +44,9 @@ class DiscordMessageMetadata:
         self.time_of_change = time_of_change
         self.domain = domain
 
+    def __str__(self):
+        return f"<DiscordMessageMetadata page_id={self.page_id} log_id={self.log_id} rev_id={self.rev_id}>"
+
     def matches(self, other: dict):
         for key, value in other.items():
             if self.__dict__[key] != value:
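The new __str__ gives a queued message a readable identity; the DomainManager DUMP handler further down stringifies it via str(message.discord_message.metadata). A standalone sketch of the resulting format (attribute values are invented, the real object is built elsewhere in RcGcDb):

    # Sketch only: reproduces the f-string format added above for made-up IDs.
    page_id, log_id, rev_id = 12345, None, 67890
    print(f"<DiscordMessageMetadata page_id={page_id} log_id={log_id} rev_id={rev_id}>")
    # -> <DiscordMessageMetadata page_id=12345 log_id=None rev_id=67890>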
@@ -1,5 +1,5 @@
 # This file is part of Recent changes Goat compatible Discord webhook (RcGcDw).
-
+import datetime
 # RcGcDw is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
@@ -168,6 +168,7 @@ class MessageQueue:
         message = None
         for message in msg.message_list:
             if message.metadata.domain is not None and message.metadata.time_of_change is not None:
+                logger.debug(f"Initial time {message.metadata.time_of_change} and send time {datetime.datetime.now(tz=datetime.timezone.utc)} for diff {message.metadata.rev_id} on {message.wiki.script_url}")
                 message.metadata.domain.register_message_timing_report(message.metadata.time_of_change)
         if message and message.metadata.domain is not None:
             message.metadata.domain.discord_message_registration()
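The new debug line pairs the edit's original timestamp with the send time; register_message_timing_report presumably feeds Domain.message_timings, which the msgdelay block in the next hunk aggregates. A hedged sketch of how such a delay could be computed (the helper name is hypothetical, not part of RcGcDb):

    import datetime

    # Hypothetical helper: if time_of_change is a timezone-aware datetime, as the
    # debug log above suggests, the send delay in seconds could be derived like this.
    def message_delay_seconds(time_of_change: datetime.datetime) -> float:
        now = datetime.datetime.now(tz=datetime.timezone.utc)
        return (now - time_of_change).total_seconds()

    example = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(seconds=42)
    print(round(message_delay_seconds(example)))  # ~42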
@@ -56,11 +56,12 @@ class Domain:
             "wikis": [x for x in self.wikis.keys()],
             "irc": self.irc.connection.connected if self.irc else False,
             "delay": self.calculate_sleep_time(len(self)) if not self.irc else 'handled by IRC scheduler',
-            "msgdelay": {"min": min(self.message_timings), "avg": int(sum(self.message_timings)/len(self.message_timings)),
-                         "max": max(self.message_timings)},
+            "msgdelay": {"min": min(self.message_timings or [0]), "avg": int(sum(self.message_timings)/(len(self.message_timings) or 1)),
+                         "max": max(self.message_timings or [0])},
             "discord_messages": self.total_discord_messages_sent,
             "last_failure_report": self.last_failure_report
         }
+        return dict_obj
     def __repr__(self):
         return self.__str__()
 
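The `or [0]` / `or 1` fallbacks keep json() from raising when a domain has not recorded any message timings yet: min() and max() on an empty list raise ValueError, and the average would divide by zero. The guarded expressions in isolation:

    # Same guarded aggregation as in Domain.json() above, for an empty and a
    # non-empty timing list.
    for message_timings in ([], [3, 7, 20]):
        msgdelay = {"min": min(message_timings or [0]),
                    "avg": int(sum(message_timings) / (len(message_timings) or 1)),
                    "max": max(message_timings or [0])}
        print(msgdelay)
    # {'min': 0, 'avg': 0, 'max': 0}
    # {'min': 3, 'avg': 10, 'max': 20}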
@@ -7,6 +7,8 @@ from urllib.parse import urlparse, urlunparse
 import logging
 import asyncpg
 import asyncio
+
+from discord.queue import messagequeue
 from src.exceptions import NoDomain
 from src.config import settings
 from src.domain import Domain
@@ -77,15 +79,18 @@ class DomainManager:
                    "wiki_count": sum([len(x.wikis) for x in self.domains.values()]),
                    "tasks": {},
                    "domains": {},
+                    "queued_messages": [],
                    "total_discord_messages_sent": sum([x.total_discord_messages_sent for x in self.domains.values()])
                }
                for task in asyncio.all_tasks():
                    json_object["tasks"][task.get_name()] = {"done": task.done(), "result": task.result() if task.done() else None}
                for name, domain in self.domains.items():
-                    json_object[name] = domain.json()
-                await connection.execute("""select pg_notify('webhookupdates', %(jsondump)s);""", {'jsondump': json.dumps(json_object)})
-                # we need: dict/list of tasks, dict of domains,
+                    json_object["domains"][name] = domain.json()
+                for message in messagequeue._queue:
+                    json_object["queued_messages"].append({"metadata": str(message.discord_message.metadata), "url": message.wiki.script_url})
+                await connection.execute("select pg_notify('webhookupdates', 'DEBUG RESPONSE ' || $1);", json.dumps(json_object))
+            elif split_payload[1] == "RESPONSE":
+                return
            else:
                raise ValueError("Unknown pub/sub command! Payload: {}".format(payload))
 
-
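Taken together, the DUMP branch now publishes one JSON document over Postgres NOTIFY on the webhookupdates channel, prefixed with "DEBUG RESPONSE" so the new RESPONSE branch can ignore the bot's own reply. A rough sketch of the payload shape (keys mirror the json_object built above and the Domain.json() hunk; all values here are placeholders):

    import json

    # Placeholder values only; per-domain entries follow Domain.json() from the
    # earlier hunk, queued_messages entries follow the loop over messagequeue._queue.
    json_object = {
        "wiki_count": 2,
        "tasks": {"example.com_IRC_activity_tester": {"done": False, "result": None}},
        "domains": {
            "fandom.com": {"wikis": ["https://example.fandom.com/"], "irc": True,
                           "delay": "handled by IRC scheduler",
                           "msgdelay": {"min": 0, "avg": 0, "max": 0},
                           "discord_messages": 0, "last_failure_report": None},
        },
        "queued_messages": [{"metadata": "<DiscordMessageMetadata page_id=1 log_id=None rev_id=2>",
                             "url": "https://example.fandom.com/"}],
        "total_discord_messages_sent": 0,
    }
    print("DEBUG RESPONSE " + json.dumps(json_object))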
@@ -39,7 +39,7 @@ class AioIRCCat(irc.client_aio.AioSimpleIRCClient):
         self.connection.buffer_class.errors = "replace"  # Ignore encoding errors
         self.connection_details = None
         self.last_msg = time.time()
-        self.activity_tester = asyncio.get_event_loop().create_task(self.testactivity())
+        self.activity_tester = asyncio.get_event_loop().create_task(self.testactivity(), name="{}_IRC_activity_tester".format(domain_object.name))
 
     def __str__(self):
         return self.__repr__()
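Giving the activity-tester task an explicit name means it shows up under a stable key in the DUMP output's tasks section, which is keyed by task.get_name(). A small standalone example of the naming pattern (not the RcGcDb classes):

    import asyncio

    async def testactivity():
        await asyncio.sleep(0)

    async def main():
        # Named task, mirroring the "{domain}_IRC_activity_tester" pattern above.
        task = asyncio.create_task(testactivity(), name="example.com_IRC_activity_tester")
        for t in asyncio.all_tasks():
            print(t.get_name(), t.done())  # e.g. "example.com_IRC_activity_tester False"
        await task

    asyncio.run(main())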