Merge branch 'formatversion2' into testing
commit eb7bd5a112
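The diff below switches RcGcDw's MediaWiki API requests to formatversion=2 and updates the code that reads the responses. As a minimal illustration (not part of the commit; the payloads below are made up), the two format versions expose the same data under different shapes, which is why the diff replaces "*"-style keys with named keys and key-presence checks with .get() boolean lookups:

# Illustrative sketch only (hypothetical payloads, not real API output):
resp_v1 = {"compare": {"*": "<tr>...</tr>"}}     # formatversion=1 keeps content under "*"
resp_v2 = {"compare": {"body": "<tr>...</tr>"}}  # formatversion=2 uses named keys such as "body"

diff_html_old = resp_v1["compare"]["*"]          # old access pattern
diff_html_new = resp_v2["compare"]["body"]       # new access pattern

# Flags such as "minor", "bot" or "redirect" arrive as real booleans in
# formatversion=2, so presence checks become .get() lookups with a default:
change = {"minor": False, "bot": True}
is_minor = change.get("minor", False)            # instead of: "minor" in change
is_bot = change.get("bot", False)                # instead of: "bot" in change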
@@ -29,9 +29,13 @@ from src.exceptions import *
 logger = logging.getLogger("extensions.base")
 
 if 1 == 2: # additional translation strings in unreachable code
+    # noinspection PyUnreachableCode
     _ = lambda a: a
+    # noinspection PyUnreachableCode
     print(_("director"), _("bot"), _("editor"), _("directors"), _("sysop"), _("bureaucrat"), _("reviewer"),
-          _("autoreview"), _("autopatrol"), _("wiki_guardian"))
+          _("autoreview"), _("autopatrol"), _("wiki_guardian"), _.pgettext("Removal of content field in Revision Delete", "content"),
+          _.pgettext("Removal of comment field in Revision Delete", "comment"),
+          _.pgettext("Removal of user field in Revision Delete", "user"))
 
 
 # Page edit - event edit, New - page creation
@@ -60,23 +64,23 @@ def embed_edit(ctx: Context, change: dict) -> DiscordMessage:
         article=sanitize_to_url(change["title"])
     )
     embed["title"] = "{redirect}{article} ({new}{minor}{bot}{space}{editsize})".format(
-        redirect="⤷ " if "redirect" in change else "",
+        redirect="⤷ " if change.get("redirect", False) else "",
         article=sanitize_to_markdown(change["title"]),
         editsize="+" + str(editsize) if editsize > 0 else editsize,
         new=ctx._("(N!) ") if action == "new" else "",
-        minor=ctx._("m") if action == "edit" and "minor" in change else "",
-        bot=ctx._('b') if "bot" in change else "",
-        space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
+        minor=ctx._("m") if action == "edit" and change.get("minor", False) else "",
+        bot=ctx._('b') if change.get("bot", False) else "",
+        space=" " if "bot" in change or (action == "edit" and change.get("minor", False)) or action == "new" else "")
     if ctx.settings["appearance"]["embed"]["show_edit_changes"]:
         try:
             if action == "new":
                 changed_content = ctx.client.make_api_request(
-                    "?action=compare&format=json&fromslots=main&torev={diff}&fromtext-main=&topst=1&prop=diff".format(
-                        diff=change["revid"]), "compare", "*")
+                    "?action=compare&format=json&formatversion=2&fromslots=main&torev={diff}&fromtext-main=&topst=1&prop=diff".format(
+                        diff=change["revid"]), "compare", "body")
             else:
                 changed_content = ctx.client.make_api_request(
-                    "?action=compare&format=json&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
-                        diff=change["revid"], oldrev=change["old_revid"]), "compare", "*")
+                    "?action=compare&format=json&formatversion=2&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
+                        diff=change["revid"], oldrev=change["old_revid"]), "compare", "body")
         except (ServerError, MediaWikiError):
             changed_content = None
         if changed_content:
@@ -150,8 +154,8 @@ def embed_upload_upload(ctx: Context, change: dict) -> DiscordMessage:
             "Couldn't retrieve more information about the image {} because of unknown error".format(
                 change["title"]))
     else:
-        if "-1" not in request_for_image_data: # Image still exists and not removed
-            image_data = next(iter(request_for_image_data.values()))
+        if not request_for_image_data[0].get("missing", False): # Image still exists and not removed
+            image_data = request_for_image_data[0]
         else:
             logger.warning("Request for additional image information have failed. The preview will not be shown.")
             request_for_image_data = None
@@ -202,7 +206,7 @@ def embed_upload_upload(ctx: Context, change: dict) -> DiscordMessage:
     embed["title"] = ctx._("Uploaded {name}").format(name=sanitize_to_markdown(change["title"]))
     if ctx.settings["license_detection"] and image_direct_url:
         try:
-            content = image_data['revisions'][0]["slots"]["main"]['*']
+            content = image_data['revisions'][0]["slots"]["main"]['content']
             matches = re.search(re.compile(ctx.settings["license_regex"], re.IGNORECASE), content)
             if matches is not None:
                 license = matches.group("license")
@@ -373,6 +377,13 @@ def embed_delete_revision(ctx: Context, change: dict) -> DiscordMessage:
     embed["title"] = ctx.ngettext("Changed visibility of revision on page {article} ",
                                   "Changed visibility of {amount} revisions on page {article} ", amount).format(
         article=sanitize_to_markdown(change["title"]), amount=amount)
+    embed["description"] += ctx.parsedcomment + ctx._("\nContents have been suppressed")
+    old_hidden_fields = {ctx._(key) for key, value in change["logparams"]["old"].items() if (key in ("content", "comment", "user") and value is True)}
+    new_hidden_fields = {ctx._(key) for key, value in change["logparams"]["new"].items() if (key in ("content", "comment", "user") and value is True)}
+    if new_hidden_fields - old_hidden_fields:
+        embed.add_field(ctx._("Hidden fields"), ctx._(", ").join(new_hidden_fields - old_hidden_fields))
+    if old_hidden_fields - new_hidden_fields:
+        embed.add_field(ctx._("Revealed fields"), ctx._(", ").join(old_hidden_fields - new_hidden_fields))
     return embed
 
 
@@ -399,9 +410,8 @@ def embed_move_move(ctx: Context, change: dict) -> DiscordMessage:
     embed_helper(ctx, embed, change, set_desc=False)
     embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title']))
     embed["description"] = "{supress}. {desc}".format(desc=ctx.parsedcomment,
-        supress=ctx._("No redirect has been made") if "suppressredirect" in
-        change["logparams"] else ctx._("A redirect has been made"))
-    embed["title"] = ctx._("Moved {redirect}{article} to {target}").format(redirect="⤷ " if "redirect" in change else "",
+        supress=ctx._("No redirect has been made") if change["logparams"].get("suppressredirect", False) else ctx._("A redirect has been made"))
+    embed["title"] = ctx._("Moved {redirect}{article} to {target}").format(redirect="⤷ " if change.get("redirect", False) else "",
         article=sanitize_to_markdown(change["title"]),
         target=sanitize_to_markdown(
             change["logparams"]['target_title']))
@@ -412,12 +422,12 @@ def embed_move_move(ctx: Context, change: dict) -> DiscordMessage:
 def compact_move_move(ctx: Context, change: dict) -> DiscordMessage:
     author, author_url = compact_author(ctx, change)
     link = clean_link(ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title'])))
-    redirect_status = ctx._("without making a redirect") if "suppressredirect" in change["logparams"] else ctx._(
+    redirect_status = ctx._("without making a redirect") if change["logparams"].get("suppressredirect", False) else ctx._(
         "with a redirect")
     parsed_comment = compact_summary(ctx)
     content = ctx._(
         "[{author}]({author_url}) moved {redirect}*{article}* to [{target}]({target_url}) {made_a_redirect}{comment}").format(
-        author=author, author_url=author_url, redirect="⤷ " if "redirect" in change else "", article=sanitize_to_markdown(change["title"]),
+        author=author, author_url=author_url, redirect="⤷ " if change.get("redirect", False) else "", article=sanitize_to_markdown(change["title"]),
         target=sanitize_to_markdown(change["logparams"]['target_title']), target_url=link, comment=parsed_comment,
         made_a_redirect=redirect_status)
     return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -432,10 +442,9 @@ def embed_move_move_redir(ctx: Context, change: dict) -> DiscordMessage:
     embed_helper(ctx, embed, change, set_desc=False)
     embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title']))
     embed["description"] = "{supress}. {desc}".format(desc=ctx.parsedcomment,
-        supress=ctx._("No redirect has been made") if "suppressredirect" in
-        change["logparams"] else ctx._("A redirect has been made"))
+        supress=ctx._("No redirect has been made") if change["logparams"].get("suppressredirect", False) else ctx._("A redirect has been made"))
     embed["title"] = ctx._("Moved {redirect}{article} to {title} over redirect").format(
-        redirect="⤷ " if "redirect" in change else "", article=sanitize_to_markdown(change["title"]),
+        redirect="⤷ " if change.get("redirect", False) else "", article=sanitize_to_markdown(change["title"]),
         title=sanitize_to_markdown(change["logparams"]["target_title"]))
     return embed
 
@@ -444,12 +453,12 @@ def embed_move_move_redir(ctx: Context, change: dict) -> DiscordMessage:
 def compact_move_move_redir(ctx: Context, change: dict) -> DiscordMessage:
     author, author_url = compact_author(ctx, change)
     link = clean_link(ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title'])))
-    redirect_status = ctx._("without making a redirect") if "suppressredirect" in change["logparams"] else ctx._(
+    redirect_status = ctx._("without making a redirect") if change["logparams"].get("suppressredirect", False) else ctx._(
         "with a redirect")
     parsed_comment = compact_summary(ctx)
     content = ctx._(
         "[{author}]({author_url}) moved {redirect}*{article}* over redirect to [{target}]({target_url}) {made_a_redirect}{comment}").format(
-        author=author, author_url=author_url, redirect="⤷ " if "redirect" in change else "",
+        author=author, author_url=author_url, redirect="⤷ " if change.get("redirect", False) else "",
         article=sanitize_to_markdown(change["title"]),
         target=sanitize_to_markdown(change["logparams"]['target_title']), target_url=link, comment=parsed_comment,
         made_a_redirect=redirect_status)
@@ -465,7 +474,7 @@ def embed_protect_move_prot(ctx: Context, change: dict):
     embed_helper(ctx, embed, change)
     embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]["oldtitle_title"]))
     embed["title"] = ctx._("Moved protection settings from {redirect}{article} to {title}").format(
-        redirect="⤷ " if "redirect" in change else "",
+        redirect="⤷ " if change.get("redirect", False) else "",
         article=sanitize_to_markdown(change["logparams"]["oldtitle_title"]),
         title=sanitize_to_markdown(change["title"]))
     return embed
@@ -478,7 +487,7 @@ def compact_protect_move_prot(ctx: Context, change: dict):
     parsed_comment = compact_summary(ctx)
     content = ctx._(
         "[{author}]({author_url}) moved protection settings from {redirect}*{article}* to [{target}]({target_url}){comment}").format(
-        author=author, author_url=author_url, redirect="⤷ " if "redirect" in change else "",
+        author=author, author_url=author_url, redirect="⤷ " if change.get("redirect", False) else "",
         article=sanitize_to_markdown(change["logparams"]["oldtitle_title"]),
         target=sanitize_to_markdown(change["title"]), target_url=link, comment=parsed_comment)
     return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -495,7 +504,7 @@ def embed_protect_protect(ctx: Context, change: dict):
     embed["title"] = ctx._("Protected {target}").format(target=sanitize_to_markdown(change["title"]))
     embed["description"] = "{settings}{cascade} | {reason}".format(
         settings=sanitize_to_markdown(change["logparams"].get("description", "")),
-        cascade=ctx._(" [cascading]") if "cascade" in change["logparams"] else "",
+        cascade=ctx._(" [cascading]") if change["logparams"].get("cascade", False) else "",
         reason=ctx.parsedcomment)
     return embed
 
@@ -510,7 +519,7 @@ def compact_protect_protect(ctx: Context, change: dict):
         author=author, author_url=author_url,
         article=sanitize_to_markdown(change["title"]), article_url=link,
         settings=change["logparams"].get("description", "") + (
-            ctx._(" [cascading]") if "cascade" in change["logparams"] else ""),
+            ctx._(" [cascading]") if change["logparams"].get("cascade", False) else ""),
         comment=parsed_comment)
     return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 
@@ -526,7 +535,7 @@ def embed_protect_modify(ctx: Context, change: dict):
     embed["title"] = ctx._("Changed protection level for {article}").format(article=sanitize_to_markdown(change["title"]))
     embed["description"] = "{settings}{cascade} | {reason}".format(
         settings=sanitize_to_markdown(change["logparams"].get("description", "")),
-        cascade=ctx._(" [cascading]") if "cascade" in change["logparams"] else "",
+        cascade=ctx._(" [cascading]") if change["logparams"].get("cascade", False) else "",
         reason=ctx.parsedcomment)
     return embed
 
@@ -541,7 +550,7 @@ def compact_protect_modify(ctx: Context, change: dict):
         author=author, author_url=author_url,
         article=sanitize_to_markdown(change["title"]), article_url=link,
         settings=sanitize_to_markdown(change["logparams"].get("description", "")) + (
-            ctx._(" [cascading]") if "cascade" in change["logparams"] else ""),
+            ctx._(" [cascading]") if change["logparams"].get("cascade", False) else ""),
         comment=parsed_comment)
     return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 
@@ -607,7 +616,7 @@ def embed_block_block(ctx: Context, change: dict):
         embed["url"] = ctx.client.create_article_path("Special:Contributions/{user}".format(user=user))
     except ValueError:
         embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
-    if "sitewide" not in change["logparams"]:
+    if change["logparams"].get("sitewide", False) is False:
         restriction_description = ""
         if "restrictions" in change["logparams"]:
             if "pages" in change["logparams"]["restrictions"] and change["logparams"]["restrictions"]["pages"]:
@@ -625,7 +634,7 @@ def embed_block_block(ctx: Context, change: dict):
                 if str(namespace) == "0":
                     namespaces.append("*{ns}*".format(ns=ctx._("(Main)")))
                 elif str(namespace) in ctx.client.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
-                    namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["*"]))
+                    namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["name"]))
                 else:
                     namespaces.append("*{ns}*".format(ns=namespace))
             restriction_description = restriction_description + ", ".join(namespaces)
@@ -673,7 +682,7 @@ def compact_block_block(ctx: Context, change: dict):
                 if str(namespace) == "0":
                     namespaces.append("*{ns}*".format(ns=ctx._("(Main)")))
                 elif str(namespace) in ctx.client.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
-                    namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["*"]))
+                    namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["name"]))
                 else:
                     namespaces.append("*{ns}*".format(ns=namespace))
             restriction_description = restriction_description + ", ".join(namespaces)
@@ -53,7 +53,7 @@ talk_notify = settings.get("hooks", {}).get("talk_notify", {})
 def talk_notify_hook(context: Context, change: dict):
     if not talk_notify.get("default", []) or context.event not in ("edit", "new"):
         return
-    if "minor" in change or change["ns"] == 3:
+    if change.get("minor", False) or change["ns"] == 3:
         return
     ignore = change["ns"] % 2 == 0
     if ignore and talk_notify.get("extra_pages", {}):

src/misc.py (10 changed lines)
@@ -247,14 +247,14 @@ def safe_read(request, *keys):
 def parse_mw_request_info(request_data: dict, url: str):
     """A function parsing request JSON message from MediaWiki logging all warnings and raising on MediaWiki errors"""
     # any([True for k in request_data.keys() if k in ("error", "errors")])
-    errors: list = request_data.get("errors", {}) # Is it ugly? I don't know tbh
+    errors: dict = request_data.get("errors", {}) # Is it ugly? I don't know tbh
     if errors:
         raise MediaWikiError(str(errors))
-    warnings: list = request_data.get("warnings", {})
+    warnings: dict = request_data.get("warnings", {})
     if warnings:
-        for warning in warnings:
-            misc_logger.warning("MediaWiki returned the following warning: {code} - {text} on {url}.".format(
-                code=warning["code"], text=warning.get("text", warning.get("*", "")), url=url
+        for module, warning_data in warnings.items():
+            misc_logger.warning("MediaWiki returned the following warning on module {module}: {text} on {url}.".format(
+                module=module, text=warning_data.get("warnings", ""), url=url
             ))
     return request_data
 

src/wiki.py (22 changed lines)
@@ -82,11 +82,11 @@ class Wiki(object):
         try:
             response = self.handle_mw_errors(
                 self.session.post(WIKI_API_PATH,
-                    data={'action': 'query', 'format': 'json', 'utf8': '', 'meta': 'tokens',
+                    data={'action': 'query', 'format': 'json', 'formatversion': '2', 'utf8': '', 'meta': 'tokens',
                           'type': 'login'}))
             response = self.handle_mw_errors(
                 self.session.post(WIKI_API_PATH,
-                    data={'action': 'login', 'format': 'json', 'utf8': '',
+                    data={'action': 'login', 'format': 'json', 'formatversion': '2', 'utf8': '',
                           'lgname': settings["wiki_bot_login"],
                           'lgpassword': settings["wiki_bot_password"],
                           'lgtoken': response.json()['query']['tokens']['logintoken']}))
@@ -137,7 +137,7 @@ class Wiki(object):
 
     def construct_params(self, amount):
         """Constructs GET parameters for recentchanges/abuselog fetching feature"""
-        params = OrderedDict(action="query", format="json")
+        params = OrderedDict(action="query", format="json", formatversion=2)
         params["list"] = "recentchanges|abuselog" if settings.get("show_abuselog", False) else "recentchanges"
         params["rcshow"] = "" if settings.get("show_bots", False) else "!bot"
         params["rcprop"] = "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user|userid" + ( "|patrolled" if settings.get("show_patrolled", False) else "" )
@@ -259,10 +259,10 @@ class Wiki(object):
             abuselog = request_json["query"]["abuselog"] # While LYBL approach would be more performant when abuselog is not in request body, I prefer this approach for its clarity
         except KeyError:
             if "warnings" in request_json:
-                warnings = request_json.get("warnings", {"query": {"*": ""}})
-                if "Unrecognized value for parameter \"list\": abuselog." in warnings["query"]["*"]:
+                warnings = request_json.get("warnings", {"query": {"warnings": ""}})
+                if "Unrecognized value for parameter \"list\": abuselog." in warnings["query"]["warnings"]:
                     settings["show_abuselog"] = False
-                    logger.warning("AbuseLog extension is not enabled on the wiki. Disabling the function...")
+                    logger.warning("AbuseLog extension is not enabled on the wiki. Disabling the function for this session...")
         else:
             abuselog_last_id = self.prepare_abuse_log(abuselog)
         return rc_last_id, abuselog_last_id
@@ -427,18 +427,18 @@ class Wiki(object):
 
     def init_info(self):
         startup_info = safe_read(self._safe_request(
-            "{wiki}?action=query&format=json&uselang=content&list=tags&meta=allmessages%7Csiteinfo&utf8=1&tglimit=max&tgprop=displayname&ammessages=recentchanges-page-added-to-category%7Crecentchanges-page-removed-from-category%7Crecentchanges-page-added-to-category-bundled%7Crecentchanges-page-removed-from-category-bundled&amenableparser=1&amincludelocal=1&siprop=namespaces".format(
+            "{wiki}?action=query&format=json&formatversion=2&uselang=content&list=tags&meta=allmessages%7Csiteinfo&utf8=1&tglimit=max&tgprop=displayname&ammessages=recentchanges-page-added-to-category%7Crecentchanges-page-removed-from-category%7Crecentchanges-page-added-to-category-bundled%7Crecentchanges-page-removed-from-category-bundled&amenableparser=1&amincludelocal=1&siprop=namespaces".format(
                 wiki=WIKI_API_PATH)), "query")
         if startup_info:
             if "tags" in startup_info and "allmessages" in startup_info:
                 for tag in startup_info["tags"]:
-                    try:
+                    if tag["displayname"]:
                         self.tags[tag["name"]] = (BeautifulSoup(tag["displayname"], "lxml")).get_text()
-                    except KeyError:
+                    else:
                         self.tags[tag["name"]] = None # Tags with no display name are hidden and should not appear on RC as well
                 for message in startup_info["allmessages"]:
-                    if "missing" not in message: # ignore missing strings
-                        self.mw_messages[message["name"]] = message["*"]
+                    if message.get("missing", False) is False: # ignore missing strings
+                        self.mw_messages[message["name"]] = message["content"]
                     else:
                         logging.warning("Could not fetch the MW message translation for: {}".format(message["name"]))
                 for key, message in self.mw_messages.items():