Mirror of https://gitlab.com/chicken-riders/RcGcDw.git (synced 2025-02-23 00:24:09 +00:00)
Finished #139
commit 2a57b0e772
parent f6ecee6583
@@ -124,11 +124,11 @@ def advanced():
     def set_cooldown():
-        option = default_or_custom(input("Interval for fetching recent changes in seconds (min. 10, default 30).\n"), 30)
+        option = default_or_custom(input("Interval for fetching recent changes in seconds (min. 30, default 60).\n"), 60)
         try:
             option = int(option)
-            if option < 10:
-                print("Please give a value higher than 9!")
+            if option < 29:
+                print("Please give a value higher than 30!")
                 return False
             else:
                 settings["cooldown"] = option
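Note on the hunk above: the commit raises the minimum fetch interval from 10 to 30 seconds and the default from 30 to 60, but the committed guard is `option < 29`, so 29 still passes even though the prompt and the error message ask for at least 30. A minimal standalone sketch of the prompt flow after this change, assuming default_or_custom simply returns the default on empty input (its real implementation lives elsewhere in the wizard):

    def default_or_custom(value, default):
        # Assumed behaviour of the wizard helper: fall back to the default on empty input.
        return default if value.strip() == "" else value

    def set_cooldown(settings: dict) -> bool:
        option = default_or_custom(input("Interval for fetching recent changes in seconds (min. 30, default 60).\n"), 60)
        try:
            option = int(option)
        except ValueError:
            print("Please give a whole number!")
            return False
        if option < 29:  # committed check; 30 would match the prompt exactly
            print("Please give a value higher than 30!")
            return False
        settings["cooldown"] = option
        return True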
@@ -13,7 +13,7 @@ _ = discussion_formatters.gettext
 discussion_logger = logging.getLogger("rcgcdw.discussion_formatter")


-def compact_formatter(post_type, post):
+def compact_formatter(post_type, post, article_paths):
     """Compact formatter for Fandom discussions."""
     message = None
     if post_type == "FORUM":
@@ -48,12 +48,18 @@ def compact_formatter(post_type, post):
         else:
             message = _("[{author}]({author_url}) created a [reply](<{url}wiki/Message_Wall:{user_wall}?threadId={threadId}#{replyId}>) to [{title}](<{url}wiki/Message_Wall:{user_wall}?threadId={threadId}>) on [{user}'s Message Wall](<{url}wiki/Message_Wall:{user_wall}>)").format(author=author, author_url=author_url, url=settings["fandom_discussions"]["wiki_url"], title=post["_embedded"]["thread"][0]["title"], user=user_wall, user_wall=quote_plus(user_wall.replace(" ", "_")), threadId=post["threadId"], replyId=post["id"])
     elif post_type == "ARTICLE_COMMENT":
-        discussion_logger.warning("Article comments are not yet implemented. For reasons see https://gitlab.com/piotrex43/RcGcDw/-/issues/126#note_366480037")
-        article_page = _("unknown") # No page known
+        if article_paths is None:
+            article_paths = {"title": _("unknown"), "fullUrl": "{wiki}wiki/{article}".format(wiki=settings["fandom_discussions"]["wiki_url"], article=_("unknown").replace(" ", "_"))} # No page known
         if not post["isReply"]:
-            message = _("[{author}]({author_url}) created a [comment](<{url}wiki/{article}?commentId={commentId}>) on [{article}](<{url}wiki/{article}>)").format(author=author, author_url=author_url, url=settings["fandom_discussions"]["wiki_url"], article=article_page, commentId=post["threadId"])
+            message = _(
+                "[{author}]({author_url}) created a [comment](<{url}?commentId={commentId}>) on [{article}](<{url}>)").format(
+                author=author, author_url=author_url, url=article_paths["fullUrl"], article=article_paths["title"],
+                commentId=post["threadId"])
         else:
-            message = _("[{author}]({author_url}) created a [reply](<{url}wiki/{article}?threadId={threadId}) to a [comment](<{url}wiki/{article}?commentId={commentId}&replyId={replyId}>) on [{article}](<{url}wiki/{article}>)").format(author=author, author_url=author_url, url=settings["fandom_discussions"]["wiki_url"], article=article_page, commentId=post["threadId"], replyId=post["id"])
+            message = _(
+                "[{author}]({author_url}) created a [reply](<{url}?commentId={commentId}&replyId={replyId}>) to a [comment](<{url}?commentId={commentId}>) on [{article}](<{url}>)").format(
+                author=author, author_url=author_url, url=article_paths["fullUrl"], article=article_paths["title"],
+                commentId=post["threadId"], replyId=post["id"])
     else:
         discussion_logger.warning("No entry for {event} with params: {params}".format(event=post_type, params=post))
     if not settings["support"]:
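The compact formatter now receives article_paths for ARTICLE_COMMENT posts; judging by the fallback assignment above, it is expected to be a dict with "title" and "fullUrl" keys. A small sketch of the link composition the new messages rely on, with made-up sample values (the real dict comes from the page lookup added in the discussions fetcher below):

    # Hypothetical values for illustration only.
    article_paths = {"title": "Lua scripting", "fullUrl": "https://example.fandom.com/wiki/Lua_scripting"}
    post = {"threadId": "4400000000000012345", "id": "4500000000000067890", "isReply": True}

    comment_url = "{url}?commentId={commentId}".format(url=article_paths["fullUrl"], commentId=post["threadId"])
    reply_url = "{url}?commentId={commentId}&replyId={replyId}".format(url=article_paths["fullUrl"], commentId=post["threadId"], replyId=post["id"])
    # comment_url -> https://example.fandom.com/wiki/Lua_scripting?commentId=4400000000000012345
    # reply_url   -> https://example.fandom.com/wiki/Lua_scripting?commentId=4400000000000012345&replyId=4500000000000067890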
@@ -64,7 +70,7 @@ def compact_formatter(post_type, post):
     send_to_discord(DiscordMessage("compact", "discussion", settings["fandom_discussions"]["webhookURL"], content=message))


-def embed_formatter(post_type, post):
+def embed_formatter(post_type, post, article_paths):
     """Embed formatter for Fandom discussions."""
     embed = DiscordMessage("embed", "discussion", settings["fandom_discussions"]["webhookURL"])
     if post_type == "FORUM":
@@ -138,17 +144,18 @@ def embed_formatter(post_type, post):
             embed["url"] = "{url}wiki/Message_Wall:{user_wall}?threadId={threadId}#{replyId}".format(url=settings["fandom_discussions"]["wiki_url"], user_wall=quote_plus(user_wall.replace(" ", "_")), threadId=post["threadId"], replyId=post["id"])
             embed["title"] = _("Replied to \"{title}\" on {user}'s Message Wall").format(title=post["_embedded"]["thread"][0]["title"], user=user_wall)
     elif post_type == "ARTICLE_COMMENT":
-        discussion_logger.warning("Article comments are not yet implemented. For reasons see https://gitlab.com/piotrex43/RcGcDw/-/issues/126#note_366480037")
-        article_page = _("unknown") # No page known
+        if article_paths is None:
+            article_page = {"title": _("unknown"), "fullUrl": "{wiki}wiki/{article}".format(wiki=settings["fandom_discussions"]["wiki_url"], article=_(
+                "unknown").replace(" ", "_"))} # No page known
         if not post["isReply"]:
             embed.event_type = "discussion/comment/post"
-            # embed["url"] = "{url}wiki/{article}?commentId={commentId}".format(url=settings["fandom_discussions"]["wiki_url"], article=quote_plus(article_page.replace(" ", "_")), commentId=post["threadId"])
-            embed["title"] = _("Commented on {article}").format(article=article_page)
+            embed["url"] = "{url}?commentId={commentId}".format(url=article_paths["fullUrl"], commentId=post["threadId"])
+            embed["title"] = _("Commented on {article}").format(article=article_paths["title"])
         else:
             embed.event_type = "discussion/comment/reply"
-            # embed["url"] = "{url}wiki/{article}?commentId={commentId}&replyId={replyId}".format(url=settings["fandom_discussions"]["wiki_url"], article=quote_plus(article_page.replace(" ", "_")), commentId=post["threadId"], replyId=post["id"])
-            embed["title"] = _("Replied to a comment on {article}").format(article=article_page)
-            embed["footer"]["text"] = article_page
+            embed["url"] = "{url}?commentId={commentId}&replyId={replyId}".format(url=article_paths["fullUrl"], commentId=post["threadId"], replyId=post["id"])
+            embed["title"] = _("Replied to a comment on {article}").format(article=article_paths["title"])
+            embed["footer"]["text"] = article_paths["title"]
     else:
         discussion_logger.warning("No entry for {event} with params: {params}".format(event=post_type, params=post))
         embed["title"] = _("Unknown event `{event}`").format(event=post_type)
@@ -17,11 +17,14 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import logging, schedule, requests
 from typing import Dict, Any

 from src.configloader import settings
 from src.discussion_formatters import embed_formatter, compact_formatter
-from src.misc import datafile, messagequeue
+from src.misc import datafile, messagequeue, prepare_paths
 from src.session import session
+from src.exceptions import ArticleCommentError

 # Create a custom logger
@@ -36,7 +39,7 @@ if "discussion_id" not in datafile.data:
 storage = datafile.data

 fetch_url = "https://services.fandom.com/discussion/{wikiid}/posts?sortDirection=descending&sortKey=creation_date&limit={limit}".format(wikiid=settings["fandom_discussions"]["wiki_id"], limit=settings["fandom_discussions"]["limit"])
+domain = prepare_paths(settings["fandom_discussions"]["wiki_url"], dry=True) # Shutdown if the path for discussions is wrong

 def fetch_discussions():
     messagequeue.resend_msgs()
@@ -53,14 +56,35 @@ def fetch_discussions():
             return None
         else:
             if request_json:
+                comment_pages: dict = {}
+                comment_events: list = [post["forumId"] for post in request_json if post["_embedded"]["thread"][0]["containerType"] == "ARTICLE_COMMENT" and int(post["id"]) > storage["discussion_id"]]
+                if comment_events:
+                    comment_pages = safe_request(
+                        "{wiki}wikia.php?controller=FeedsAndPosts&method=getArticleNamesAndUsernames&stablePageIds={pages}&format=json".format(
+                            wiki=settings["fandom_discussions"]["wiki_url"], pages=",".join(comment_events)
+                        ))
+                    if comment_pages:
+                        try:
+                            comment_pages = comment_pages.json()["articleNames"]
+                        except ValueError:
+                            discussion_logger.warning("ValueError in fetching discussions")
+                            return None
+                        except KeyError:
+                            discussion_logger.warning("Wiki returned %s" % (request_json.json()))
+                            return None
+                    else:
+                        return None
                 for post in request_json:
                     if int(post["id"]) > storage["discussion_id"]:
-                        parse_discussion_post(post)
+                        try:
+                            parse_discussion_post(post, comment_pages)
+                        except ArticleCommentError:
+                            return None
                 if int(post["id"]) > storage["discussion_id"]:
                     storage["discussion_id"] = int(post["id"])
                     datafile.save_datafile()


-def parse_discussion_post(post):
+def parse_discussion_post(post, comment_pages):
     """Initial post recognition & handling"""
     post_type = post["_embedded"]["thread"][0]["containerType"]
     # Filter posts by forum
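The hunk above batches the forumIds of all new article-comment posts and resolves them to page names in a single request to Fandom's FeedsAndPosts controller. A hedged sketch of that lookup outside the script, using requests directly and made-up IDs; the mapping shape shown in the comment is only inferred from the keys the code reads later ("articleNames", then "title" and "relativeUrl" per forumId):

    import requests

    wiki = "https://example.fandom.com/"  # assumed value of settings["fandom_discussions"]["wiki_url"]
    comment_events = ["4400000000000012345", "4400000000000067890"]  # made-up forumIds

    response = requests.get(
        "{wiki}wikia.php?controller=FeedsAndPosts&method=getArticleNamesAndUsernames"
        "&stablePageIds={pages}&format=json".format(wiki=wiki, pages=",".join(comment_events)),
        timeout=10)
    comment_pages = response.json()["articleNames"]
    # Expected shape (inferred): {"4400000000000012345": {"title": "Lua scripting", "relativeUrl": "/wiki/Lua_scripting"}, ...}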
@@ -68,7 +92,14 @@ def parse_discussion_post(post):
         if not post["forumName"] in settings["fandom_discussions"]["show_forums"]:
             discussion_logger.debug(f"Ignoring post as it's from {post['forumName']}.")
             return
-    formatter(post_type, post)
+    comment_page = None
+    if post_type == "ARTICLE_COMMENT":
+        try:
+            comment_page = {**comment_pages[post["forumId"]], "fullUrl": domain + comment_pages[post["forumId"]]["relativeUrl"]}
+        except KeyError:
+            discussion_logger.error("Could not parse paths for article comment, here is the content of comment_pages: {}, ignoring...".format(comment_pages))
+            raise ArticleCommentError
+    formatter(post_type, post, comment_page)


 def safe_request(url):
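For each article comment, parse_discussion_post now merges the looked-up entry with an absolute URL before handing it to the formatter. A short sketch of that merge with made-up values; domain is whatever prepare_paths(..., dry=True) returned at startup:

    comment_pages = {"4400000000000012345": {"title": "Lua scripting", "relativeUrl": "/wiki/Lua_scripting"}}  # made-up lookup result
    domain = "https://example.fandom.com"  # assumed return value of prepare_paths(..., dry=True)
    entry = comment_pages["4400000000000012345"]
    comment_page = {**entry, "fullUrl": domain + entry["relativeUrl"]}
    # -> {"title": "Lua scripting", "relativeUrl": "/wiki/Lua_scripting", "fullUrl": "https://example.fandom.com/wiki/Lua_scripting"}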
@@ -1,2 +1,5 @@
 class MWError(Exception):
     pass
+
+class ArticleCommentError(Exception):
+    pass
src/misc.py (23 changed lines)
@@ -253,7 +253,7 @@ def add_to_dict(dictionary, key):
         dictionary[key] = 1
     return dictionary

-def prepare_paths():
+def prepare_paths(path, dry=False):
     global WIKI_API_PATH
     global WIKI_ARTICLE_PATH
     global WIKI_SCRIPT_PATH
@@ -276,24 +276,27 @@ def prepare_paths():
         except (KeyError, requests.exceptions.ConnectionError):
             return False
     try:
-        parsed_url = urlparse(settings["wiki_url"])
+        parsed_url = urlparse(path)
     except KeyError:
         misc_logger.critical("wiki_url is not specified in the settings. Please provide the wiki url in the settings and start the script again.")
         sys.exit(1)
-    for url_scheme in (settings["wiki_url"], settings["wiki_url"].split("wiki")[0], urlunparse((*parsed_url[0:2], "", "", "", ""))): # check different combinations, it's supposed to be idiot-proof
+    for url_scheme in (path, path.split("wiki")[0], urlunparse((*parsed_url[0:2], "", "", "", ""))): # check different combinations, it's supposed to be idiot-proof
         tested = quick_try_url(url_scheme + "/api.php?action=query&format=json&meta=siteinfo")
         if tested:
-            WIKI_API_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/api.php"
-            WIKI_SCRIPT_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/"
-            WIKI_ARTICLE_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["articlepath"]
-            WIKI_JUST_DOMAIN = urlunparse((*parsed_url[0:2], "", "", "", ""))
-            break
+            if not dry:
+                WIKI_API_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/api.php"
+                WIKI_SCRIPT_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/"
+                WIKI_ARTICLE_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["articlepath"]
+                WIKI_JUST_DOMAIN = urlunparse((*parsed_url[0:2], "", "", "", ""))
+                break
+            return urlunparse((*parsed_url[0:2], "", "", "", ""))
     else:
-        misc_logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URL in settings.json and your Internet connection is working.")
+        misc_logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URLs in settings.json ({path} should be script path to your wiki) and your Internet connection is working.".format(path=path))
         sys.exit(1)


-prepare_paths()
+prepare_paths(settings["wiki_url"])


 def create_article_path(article: str) -> str:
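With this change prepare_paths is called twice: at the bottom of src/misc.py for the main wiki, where it fills in the WIKI_* globals as before, and in the discussions module with dry=True, where it only verifies the URL, returns the bare scheme plus domain, and exits the script if verification fails. A hedged sketch of the two call sites side by side (the URL in the comment is made up):

    from src.configloader import settings

    # Main wiki (module level in src/misc.py): sets WIKI_API_PATH, WIKI_SCRIPT_PATH,
    # WIKI_ARTICLE_PATH and WIKI_JUST_DOMAIN.
    prepare_paths(settings["wiki_url"])

    # Discussions wiki (module level in the discussions module): no globals are touched,
    # only the domain comes back, e.g. "https://example.fandom.com".
    domain = prepare_paths(settings["fandom_discussions"]["wiki_url"], dry=True)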