Compare commits

..

2 commits

9 changed files with 143 additions and 94 deletions

View file

@@ -24,9 +24,9 @@ import extensions.base.discussions
 import extensions.base.curseprofile
 import extensions.base.interwiki
 import extensions.base.renameuser
-import extensions.base.migrators
 import extensions.base.approvedrevs
 import extensions.base.templateclassification
 import extensions.base.comments
 import extensions.base.rcgcdb
 import extensions.base.globalblocking
+import extensions.base.migrators

View file

@@ -63,6 +63,8 @@ def embed_abuselog(ctx: Context, change: dict):
 embed.add_field(ctx._("Title"), "[{target}]({target_url})".format(target=change.get("title", ctx._("Unknown")),
 target_url=clean_link(ctx.client.create_article_path(sanitize_to_url(change.get("title", ctx._("Unknown")))))), inline=True)
 embed.add_field(ctx._("Performed"), abusefilter_actions(change["action"], ctx._, change["action"]), inline=True)
+if change.get("wiki", None):
+embed.add_field(ctx._("Wiki"), change["wiki"], inline=True)
 embed.add_field(ctx._("Action taken"), "\n".join([abusefilter_results(result, ctx._, result) for result in results]))
 embed_helper(ctx, embed, change, is_anon=abuse_filter_is_ip(change), set_desc=False)
 return embed
@@ -73,8 +75,12 @@ def compact_abuselog(ctx: Context, change: dict):
 results = change["result"].split(",")
 action = abuselog_action(results)
 author, author_url = compact_author(ctx, change, is_anon=abuse_filter_is_ip(change))
-message = ctx._("[{author}]({author_url}) triggered *[{abuse_filter}]({details_url})*, performing the action \"{action}\" on *[{target}]({target_url})* - action taken: {result}.").format(
-author=author, author_url=author_url, abuse_filter=sanitize_to_markdown(change["filter"]),
+if change.get("wiki", None):
+message = ctx._("[{author}]({author_url}) triggered *[{abuse_filter}]({details_url})*, performing the action \"{action}\" on wiki \"{wiki}\" on *[{target}]({target_url})* - action taken: {result}.")
+else:
+message = ctx._("[{author}]({author_url}) triggered *[{abuse_filter}]({details_url})*, performing the action \"{action}\" on *[{target}]({target_url})* - action taken: {result}.")
+message = message.format(
+author=author, author_url=author_url, abuse_filter=sanitize_to_markdown(change["filter"]), wiki=change.get("wiki", None),
 details_url=clean_link(ctx.client.create_article_path("Special:AbuseLog/{entry}".format(entry=change["id"]))),
 action=abusefilter_actions(change["action"], ctx._, change["action"]), target=change.get("title", ctx._("Unknown")),
 target_url=clean_link(ctx.client.create_article_path(sanitize_to_url(change.get("title", ctx._("Unknown"))))),
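
Reviewer note: the new "wiki" handling above only switches message templates on the presence of the key; a minimal sketch of the selection logic, assuming a hypothetical change payload in which "wiki" is set when entries come from several wikis (this helper and the payload are illustrative, not part of the diff):

    # Illustrative only: which compact template gets picked depending on
    # whether the abuse log entry carries a "wiki" field.
    def pick_template(change: dict) -> str:
        if change.get("wiki", None):
            return ('[{author}]({author_url}) triggered *[{abuse_filter}]({details_url})*, '
                    'performing the action "{action}" on wiki "{wiki}" on *[{target}]({target_url})* '
                    '- action taken: {result}.')
        return ('[{author}]({author_url}) triggered *[{abuse_filter}]({details_url})*, '
                'performing the action "{action}" on *[{target}]({target_url})* '
                '- action taken: {result}.')

    assert "on wiki" in pick_template({"wiki": "examplewiki"})
    assert "on wiki" not in pick_template({})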

View file

@@ -22,14 +22,13 @@ from src.misc import LinkParser
 # Approved Revs - https://mediawiki.org/wiki/Extension:Approved_Revs
 # approval/approvefile
 @formatter.embed(event="approval/approvefile")
 def embed_approval_approvefile(ctx: Context, change: dict):
 embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
 embed_helper(ctx, embed, change)
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
 embed["title"] = ctx._("Approved a file revision of {file}").format(file=sanitize_to_markdown(change["title"]))
-link_parser_object = ctx.client.LinkParser(ctx.client.WIKI_JUST_DOMAIN)
+link_parser_object = LinkParser(ctx.client.WIKI_JUST_DOMAIN)
 link_parser_object.feed(change.get("logparams", {}).get("0", ""))
 embed["description"] = ctx._("File version from {time} got approved").format(name=change["title"], time=link_parser_object.new_string)
 # TODO Make timestamp more user friendly? Getting user who uploaded will be a pain though, same with approval/approve
@@ -47,7 +46,6 @@ def compact_approval_approvefile(ctx: Context, change: dict):
 file_url=link)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 # approval/approve
 @formatter.embed(event="approval/approve")
 def embed_approval_approve(ctx: Context, change: dict):
@@ -55,7 +53,7 @@ def embed_approval_approve(ctx: Context, change: dict):
 embed_helper(ctx, embed, change)
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
 embed["title"] = ctx._("Approved a revision of {article}").format(article=sanitize_to_markdown(change["title"]))
-link_parser_object = ctx.client.LinkParser(ctx.client.WIKI_JUST_DOMAIN)
+link_parser_object = LinkParser(ctx.client.WIKI_JUST_DOMAIN)
 link_parser_object.feed(change.get("logparams", {}).get("0", ""))
 embed["description"] = ctx._("Revision number {revision_id} got approved").format(name=change["title"], time=link_parser_object.new_string)
 return embed
@@ -72,7 +70,6 @@ def compact_approval_approve(ctx: Context, change: dict):
 article_url=link)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 # approval/unapprove
 @formatter.embed(event="approval/unapprove")
 def embed_approval_approve(ctx: Context, change: dict):

View file

@@ -36,10 +36,10 @@ def embed_cargo_createtable(ctx: Context, change: dict):
 @formatter.compact(event="cargo/createtable")
 def compact_cargo_createtable(ctx: Context, change: dict):
 author, author_url = compact_author(ctx, change)
-table = re.search(r"\[(.*?)]\(<(.*?)>\)", ctx.client.parse_links(change["logparams"]["0"]))
-content = ctx._("[{author}]({author_url}) created the Cargo table \"{table}\"").format(author=author,
+table_link = ctx.client.parse_links(change["logparams"]["0"])
+content = ctx._("[{author}]({author_url}) created the Cargo table {table_link}").format(author=author,
 author_url=author_url,
-table=table)
+table_link=table_link)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -59,10 +59,10 @@ def embed_cargo_recreatetable(ctx: Context, change: dict):
 @formatter.compact(event="cargo/recreatetable")
 def compact_cargo_recreatetable(ctx: Context, change: dict):
 author, author_url = compact_author(ctx, change)
-table = re.search(r"\[(.*?)]\(<(.*?)>\)", ctx.client.parse_links(change["logparams"]["0"]))
-content = ctx._("[{author}]({author_url}) recreated the Cargo table \"{table}\"").format(author=author,
+table_link = ctx.client.parse_links(change["logparams"]["0"])
+content = ctx._("[{author}]({author_url}) recreated the Cargo table {table_link}").format(author=author,
 author_url=author_url,
-table=table)
+table_link=table_link)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -82,10 +82,10 @@ def embed_cargo_replacetable(ctx: Context, change: dict):
 @formatter.compact(event="cargo/replacetable")
 def compact_cargo_replacetable(ctx: Context, change: dict):
 author, author_url = compact_author(ctx, change)
-table = re.search(r"\[(.*?)]\(<(.*?)>\)", ctx.client.parse_links(change["logparams"]["0"]))
-content = ctx._("[{author}]({author_url}) replaced the Cargo table \"{table}\"").format(author=author,
+table_link = ctx.client.parse_links(change["logparams"]["0"])
+content = ctx._("[{author}]({author_url}) replaced the Cargo table {table_link}").format(author=author,
 author_url=author_url,
-table=table)
+table_link=table_link)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
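
Reviewer note: the Cargo change above also fixes a formatting bug, since the old code put the re.search result (a re.Match object, or None) straight into str.format, so the posted message contained the match object's repr instead of the table link. A minimal sketch of both behaviours, assuming ctx.client.parse_links returns a Markdown-style link for logparams["0"] (the input string is illustrative):

    import re

    # Assumed output of ctx.client.parse_links for logparams["0"]:
    parsed = "[Books](<https://example.org/wiki/Special:CargoTables/Books>)"

    # Old behaviour: the re.Match object ends up inside the message text.
    table = re.search(r"\[(.*?)]\(<(.*?)>\)", parsed)
    old_message = 'created the Cargo table "{table}"'.format(table=table)

    # New behaviour: the already-formatted link is used as-is.
    new_message = "created the Cargo table {table_link}".format(table_link=parsed)

    print(old_message)  # ...the Cargo table "<re.Match object; ...>"
    print(new_message)  # ...the Cargo table [Books](<https://example.org/wiki/Special:CargoTables/Books>)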

View file

@@ -14,7 +14,6 @@
 # along with RcGcDw. If not, see <http://www.gnu.org/licenses/>.
-import logging
 from src.discord.message import DiscordMessage
 from src.api import formatter
 from src.api.context import Context
@@ -23,8 +22,6 @@ from src.api.util import embed_helper, compact_author, sanitize_to_markdown, san
 # DataDumps - https://www.mediawiki.org/wiki/Extension:DataDump
 # datadump/generate - Generating a dump of wiki
 @formatter.embed(event="datadump/generate")
 def embed_datadump_generate(ctx: Context, change: dict) -> DiscordMessage:
 embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
@@ -44,9 +41,8 @@ def compact_datadump_generate(ctx: Context, change: dict):
 )
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 # datadump/delete - Deleting a dump of a wiki
 @formatter.embed(event="datadump/delete")
 def embed_datadump_delete(ctx: Context, change: dict) -> DiscordMessage:
 embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
@@ -65,3 +61,37 @@ def compact_datadump_delete(ctx: Context, change: dict) -> DiscordMessage:
 comment=parsed_comment
 )
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
+# datadump/generate-in-progress - Datadump generation is in progress
+@formatter.embed(event="datadump/generate-in-progress")
+def embed_datadump_generate_in_progress(ctx: Context, change: dict) -> DiscordMessage:
+embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
+embed_helper(ctx, embed, change)
+embed["title"] = ctx._("Generation of a backup file ”{file}” has been initiated").format(file=sanitize_to_markdown(change["logparams"]["filename"]))
+embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
+return embed
+@formatter.compact(event="datadump/generate-in-progress")
+def compact_datadump_generate_in_progress(ctx: Context, change: dict) -> DiscordMessage:
+content = ctx._("Generation of backup file ”*{file}*” has been initiated").format(file=sanitize_to_markdown(change["logparams"]["filename"])
+)
+return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
+# datadump/generate-completed - Datadump generation is completed
+@formatter.embed(event="datadump/generate-completed")
+def embed_datadump_generate_completed(ctx: Context, change: dict) -> DiscordMessage:
+embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
+embed_helper(ctx, embed, change)
+embed["title"] = ctx._("Generation of backup file ”{file}” has been completed").format(file=sanitize_to_markdown(change["logparams"]["filename"]))
+embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
+return embed
+@formatter.compact(event="datadump/generate-completed")
+def compact_datadump_generate_completed(ctx: Context, change: dict) -> DiscordMessage:
+content = ctx._("Generation of backup file ”*{file}*” has been completed").format(file=sanitize_to_markdown(change["logparams"]["filename"])
+)
+return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)

View file

@@ -64,30 +64,6 @@ def compact_managewiki_delete(ctx: Context, change: dict):
 comment=parsed_comment)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
-# managewiki/delete-group - Deleting a group
-@formatter.embed(event="managewiki/delete-group")
-def embed_managewiki_delete_group(ctx: Context, change: dict) -> DiscordMessage:
-embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
-embed_helper(ctx, embed, change)
-embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
-group = change["title"].split("/")[-1]
-embed["title"] = ctx._("Deleted a \"{group}\" user group").format(group=group)
-return embed
-@formatter.compact(event="managewiki/delete-group")
-def compact_managewiki_delete_group(ctx: Context, change: dict) -> DiscordMessage:
-author, author_url = compact_author(ctx, change)
-parsed_comment = compact_summary(ctx)
-group = change["title"].split("/")[-1]
-content = ctx._("[{author}]({author_url}) deleted a usergroup *{group}*{comment}").format(author=author,
-author_url=author_url,
-group=group,
-comment=parsed_comment)
-return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 # managewiki/lock - Locking a wiki
@@ -180,6 +156,29 @@ def compact_managewiki_rights(ctx: Context, change: dict):
 )
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
+# managewiki/delete-group - Deleting user groups
+@formatter.embed(event="managewiki/delete-group")
+def embed_managewiki_delete_group(ctx: Context, change: dict):
+embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
+embed_helper(ctx, embed, change)
+embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
+group_name = change["title"].split("/permissions/", 1)[1]
+embed["title"] = ctx._("Deleted \"{usergroup_name}\" usergroup").format(usergroup_name=group_name)
+return embed
+@formatter.compact(event="managewiki/delete-group")
+def compact_managewiki_delete_group(ctx: Context, change: dict):
+author, author_url = compact_author(ctx, change)
+parsed_comment = compact_summary(ctx)
+group_name = change["title"].split("/permissions/", 1)[1]
+content = ctx._("[{author}]({author_url}) deleted user group *{group_name}*{comment}").format(
+author=author, author_url=author_url, group_name=group_name, comment=parsed_comment
+)
+return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
 # managewiki/undelete - Restoring a wiki
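
Reviewer note: in the re-added delete-group formatters the group name is now everything after the "/permissions/" segment of the log title rather than the last "/"-separated component; the two only differ when the group name itself contains a slash. A small sketch with a hypothetical log title (the exact title format is an assumption, not taken from this diff):

    # Hypothetical ManageWiki log title for deleting a user group.
    title = "Special:ManageWiki/permissions/wiki guardian/test"

    old_group = title.split("/")[-1]                # -> "test"
    new_group = title.split("/permissions/", 1)[1]  # -> "wiki guardian/test"
    print(old_group, new_group, sep=" | ")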

View file

@@ -29,9 +29,13 @@ from src.exceptions import *
 logger = logging.getLogger("extensions.base")
 if 1 == 2: # additional translation strings in unreachable code
+# noinspection PyUnreachableCode
 _ = lambda a: a
+# noinspection PyUnreachableCode
 print(_("director"), _("bot"), _("editor"), _("directors"), _("sysop"), _("bureaucrat"), _("reviewer"),
-_("autoreview"), _("autopatrol"), _("wiki_guardian"))
+_("autoreview"), _("autopatrol"), _("wiki_guardian"), _.pgettext("Removal of content field in Revision Delete", "content"),
+_.pgettext("Removal of comment field in Revision Delete", "comment"),
+_.pgettext("Removal of user field in Revision Delete", "user"))
 # Page edit - event edit, New - page creation
@@ -60,26 +64,27 @@ def embed_edit(ctx: Context, change: dict) -> DiscordMessage:
 article=sanitize_to_url(change["title"])
 )
 embed["title"] = "{redirect}{article} ({new}{minor}{bot}{space}{editsize})".format(
-redirect="" if "redirect" in change else "",
+redirect="" if change.get("redirect", False) else "",
 article=sanitize_to_markdown(change["title"]),
 editsize="+" + str(editsize) if editsize > 0 else editsize,
 new=ctx._("(N!) ") if action == "new" else "",
-minor=ctx._("m") if action == "edit" and "minor" in change else "",
-bot=ctx._('b') if "bot" in change else "",
-space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
+minor=ctx._("m") if action == "edit" and change.get("minor", False) else "",
+bot=ctx._('b') if change.get("bot", False) else "",
+space=" " if "bot" in change or (action == "edit" and change.get("minor", False)) or action == "new" else "")
 if ctx.settings["appearance"]["embed"]["show_edit_changes"]:
 try:
 if action == "new":
 changed_content = ctx.client.make_api_request(
-"?action=compare&format=json&fromslots=main&torev={diff}&fromtext-main=&topst=1&prop=diff".format(
-diff=change["revid"]), "compare", "*")
+"?action=compare&format=json&formatversion=2&fromslots=main&torev={diff}&fromtext-main=&topst=1&prop=diff".format(
+diff=change["revid"]), "compare", "body")
 else:
 changed_content = ctx.client.make_api_request(
-"?action=compare&format=json&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
-diff=change["revid"], oldrev=change["old_revid"]), "compare", "*")
+"?action=compare&format=json&formatversion=2&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
+diff=change["revid"], oldrev=change["old_revid"]), "compare", "body")
 except (ServerError, MediaWikiError):
 changed_content = None
 if changed_content:
+ctx.changed_content = changed_content
 parse_mediawiki_changes(ctx, changed_content, embed)
 else:
 logger.warning("Unable to download data on the edit content!")
@@ -128,6 +133,7 @@ def embed_upload_upload(ctx: Context, change: dict) -> DiscordMessage:
 params = OrderedDict()
 params["action"] = "query"
 params["format"] = "json"
+params["formatversion"] = 2
 if ctx.settings["license_detection"] and action == "upload/upload":
 params["prop"] = "imageinfo|revisions"
 params["rvprop"] = "content"
@@ -149,8 +155,8 @@
 "Couldn't retrieve more information about the image {} because of unknown error".format(
 change["title"]))
 else:
-if "-1" not in request_for_image_data: # Image still exists and not removed
-image_data = next(iter(request_for_image_data.values()))
+if not request_for_image_data[0].get("missing", False): # Image still exists and not removed
+image_data = request_for_image_data[0]
 else:
 logger.warning("Request for additional image information have failed. The preview will not be shown.")
 request_for_image_data = None
@@ -164,7 +170,7 @@
 for num, revision in enumerate(urls):
 if revision["timestamp"] == change["logparams"][
 "img_timestamp"]: # find the correct revision corresponding for this log entry
-image_direct_url = "{rev}{c}rcgcdb={cache}".format(rev=revision["url"],
+image_direct_url = "{rev}{c}rcgcdw={cache}".format(rev=revision["url"],
 c="&" if "?" in revision["url"] else "?",
 cache=int(time.time() * 5)) # cachebusting
 break
@@ -176,15 +182,19 @@
 if action in ("upload/overwrite", "upload/revert"):
 if image_direct_url:
 try:
-revision = image_data["imageinfo"][num + 1]
+archive_name = image_data["imageinfo"][num + 1]["archivename"]
 except IndexError:
 logger.exception(
 "Could not analize the information about the image (does it have only one version when expected more in overwrite?) which resulted in no Options field: {}".format(
 image_data["imageinfo"]))
+except KeyError:
+logger.exception(
+"Could not analize the information about the image, one of its revisions might have been deleted: {}".format(
+image_data["imageinfo"]))
 else:
 undolink = "{wiki}index.php?title={filename}&action=revert&oldimage={archiveid}".format(
 wiki=ctx.client.WIKI_SCRIPT_PATH, filename=sanitize_to_url(change["title"]),
-archiveid=revision["archivename"])
+archiveid=archive_name)
 embed.add_field(ctx._("Options"), ctx._("([preview]({link}) | [undo]({undolink}))").format(
 link=image_direct_url, undolink=undolink))
 if ctx.settings["appearance"]["embed"]["embed_images"]:
@@ -197,7 +207,7 @@
 embed["title"] = ctx._("Uploaded {name}").format(name=sanitize_to_markdown(change["title"]))
 if ctx.settings["license_detection"] and image_direct_url:
 try:
-content = image_data['revisions'][0]["slots"]["main"]['*']
+content = image_data['revisions'][0]["slots"]["main"]['content']
 matches = re.search(re.compile(ctx.settings["license_regex"], re.IGNORECASE), content)
 if matches is not None:
 license = matches.group("license")
@@ -368,6 +378,13 @@ def embed_delete_revision(ctx: Context, change: dict) -> DiscordMessage:
 embed["title"] = ctx.ngettext("Changed visibility of revision on page {article} ",
 "Changed visibility of {amount} revisions on page {article} ", amount).format(
 article=sanitize_to_markdown(change["title"]), amount=amount)
+embed["description"] += ctx.parsedcomment + ctx._("\nContents have been suppressed")
+old_hidden_fields = {ctx._(key) for key, value in change["logparams"]["old"].items() if (key in ("content", "comment", "user") and value is True)}
+new_hidden_fields = {ctx._(key) for key, value in change["logparams"]["new"].items() if (key in ("content", "comment", "user") and value is True)}
+if new_hidden_fields - old_hidden_fields:
+embed.add_field(ctx._("Hidden fields"), ctx._(", ").join(new_hidden_fields - old_hidden_fields))
+if old_hidden_fields - new_hidden_fields:
+embed.add_field(ctx._("Revealed fields"), ctx._(", ").join(old_hidden_fields - new_hidden_fields))
 return embed
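
Reviewer note: the hidden/revealed fields added above boil down to a set difference over the content/comment/user visibility flags in logparams; a minimal sketch with hypothetical revision-delete log parameters (the payload shape is an assumption, not quoted from the diff):

    # Hypothetical logparams for a revision-delete event: "old" and "new"
    # carry boolean visibility flags for each field.
    logparams = {
        "old": {"content": False, "comment": False, "user": False},
        "new": {"content": True, "comment": True, "user": False},
    }

    tracked = ("content", "comment", "user")
    old_hidden = {key for key, value in logparams["old"].items() if key in tracked and value is True}
    new_hidden = {key for key, value in logparams["new"].items() if key in tracked and value is True}

    print("Hidden fields:", ", ".join(sorted(new_hidden - old_hidden)))    # comment, content
    print("Revealed fields:", ", ".join(sorted(old_hidden - new_hidden)))  # (nothing)
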
@@ -394,9 +411,8 @@ def embed_move_move(ctx: Context, change: dict) -> DiscordMessage:
 embed_helper(ctx, embed, change, set_desc=False)
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title']))
 embed["description"] = "{supress}. {desc}".format(desc=ctx.parsedcomment,
-supress=ctx._("No redirect has been made") if "suppressredirect" in
-change["logparams"] else ctx._("A redirect has been made"))
-embed["title"] = ctx._("Moved {redirect}{article} to {target}").format(redirect="" if "redirect" in change else "",
+supress=ctx._("No redirect has been made") if change["logparams"].get("suppressredirect", False) else ctx._("A redirect has been made"))
+embed["title"] = ctx._("Moved {redirect}{article} to {target}").format(redirect="" if change.get("redirect", False) else "",
 article=sanitize_to_markdown(change["title"]),
 target=sanitize_to_markdown(
 change["logparams"]['target_title']))
@@ -407,12 +423,12 @@
 def compact_move_move(ctx: Context, change: dict) -> DiscordMessage:
 author, author_url = compact_author(ctx, change)
 link = clean_link(ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title'])))
-redirect_status = ctx._("without making a redirect") if "suppressredirect" in change["logparams"] else ctx._(
+redirect_status = ctx._("without making a redirect") if change["logparams"].get("suppressredirect", False) else ctx._(
 "with a redirect")
 parsed_comment = compact_summary(ctx)
 content = ctx._(
 "[{author}]({author_url}) moved {redirect}*{article}* to [{target}]({target_url}) {made_a_redirect}{comment}").format(
-author=author, author_url=author_url, redirect="" if "redirect" in change else "", article=sanitize_to_markdown(change["title"]),
+author=author, author_url=author_url, redirect="" if change.get("redirect", False) else "", article=sanitize_to_markdown(change["title"]),
 target=sanitize_to_markdown(change["logparams"]['target_title']), target_url=link, comment=parsed_comment,
 made_a_redirect=redirect_status)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -427,10 +443,9 @@ def embed_move_move_redir(ctx: Context, change: dict) -> DiscordMessage:
 embed_helper(ctx, embed, change, set_desc=False)
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title']))
 embed["description"] = "{supress}. {desc}".format(desc=ctx.parsedcomment,
-supress=ctx._("No redirect has been made") if "suppressredirect" in
-change["logparams"] else ctx._("A redirect has been made"))
+supress=ctx._("No redirect has been made") if change["logparams"].get("suppressredirect", False) else ctx._("A redirect has been made"))
 embed["title"] = ctx._("Moved {redirect}{article} to {title} over redirect").format(
-redirect="" if "redirect" in change else "", article=sanitize_to_markdown(change["title"]),
+redirect="" if change.get("redirect", False) else "", article=sanitize_to_markdown(change["title"]),
 title=sanitize_to_markdown(change["logparams"]["target_title"]))
 return embed
@@ -439,12 +454,12 @@
 def compact_move_move_redir(ctx: Context, change: dict) -> DiscordMessage:
 author, author_url = compact_author(ctx, change)
 link = clean_link(ctx.client.create_article_path(sanitize_to_url(change["logparams"]['target_title'])))
-redirect_status = ctx._("without making a redirect") if "suppressredirect" in change["logparams"] else ctx._(
+redirect_status = ctx._("without making a redirect") if change["logparams"].get("suppressredirect", False) else ctx._(
 "with a redirect")
 parsed_comment = compact_summary(ctx)
 content = ctx._(
 "[{author}]({author_url}) moved {redirect}*{article}* over redirect to [{target}]({target_url}) {made_a_redirect}{comment}").format(
-author=author, author_url=author_url, redirect="" if "redirect" in change else "",
+author=author, author_url=author_url, redirect="" if change.get("redirect", False) else "",
 article=sanitize_to_markdown(change["title"]),
 target=sanitize_to_markdown(change["logparams"]['target_title']), target_url=link, comment=parsed_comment,
 made_a_redirect=redirect_status)
@@ -460,7 +475,7 @@ def embed_protect_move_prot(ctx: Context, change: dict):
 embed_helper(ctx, embed, change)
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["logparams"]["oldtitle_title"]))
 embed["title"] = ctx._("Moved protection settings from {redirect}{article} to {title}").format(
-redirect="" if "redirect" in change else "",
+redirect="" if change.get("redirect", False) else "",
 article=sanitize_to_markdown(change["logparams"]["oldtitle_title"]),
 title=sanitize_to_markdown(change["title"]))
 return embed
@@ -473,7 +488,7 @@ def compact_protect_move_prot(ctx: Context, change: dict):
 parsed_comment = compact_summary(ctx)
 content = ctx._(
 "[{author}]({author_url}) moved protection settings from {redirect}*{article}* to [{target}]({target_url}){comment}").format(
-author=author, author_url=author_url, redirect="" if "redirect" in change else "",
+author=author, author_url=author_url, redirect="" if change.get("redirect", False) else "",
 article=sanitize_to_markdown(change["logparams"]["oldtitle_title"]),
 target=sanitize_to_markdown(change["title"]), target_url=link, comment=parsed_comment)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -490,7 +505,7 @@ def embed_protect_protect(ctx: Context, change: dict):
 embed["title"] = ctx._("Protected {target}").format(target=sanitize_to_markdown(change["title"]))
 embed["description"] = "{settings}{cascade} | {reason}".format(
 settings=sanitize_to_markdown(change["logparams"].get("description", "")),
-cascade=ctx._(" [cascading]") if "cascade" in change["logparams"] else "",
+cascade=ctx._(" [cascading]") if change["logparams"].get("cascade", False) else "",
 reason=ctx.parsedcomment)
 return embed
@@ -505,7 +520,7 @@ def compact_protect_protect(ctx: Context, change: dict):
 author=author, author_url=author_url,
 article=sanitize_to_markdown(change["title"]), article_url=link,
 settings=change["logparams"].get("description", "") + (
-ctx._(" [cascading]") if "cascade" in change["logparams"] else ""),
+ctx._(" [cascading]") if change["logparams"].get("cascade", False) else ""),
 comment=parsed_comment)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -521,7 +536,7 @@ def embed_protect_modify(ctx: Context, change: dict):
 embed["title"] = ctx._("Changed protection level for {article}").format(article=sanitize_to_markdown(change["title"]))
 embed["description"] = "{settings}{cascade} | {reason}".format(
 settings=sanitize_to_markdown(change["logparams"].get("description", "")),
-cascade=ctx._(" [cascading]") if "cascade" in change["logparams"] else "",
+cascade=ctx._(" [cascading]") if change["logparams"].get("cascade", False) else "",
 reason=ctx.parsedcomment)
 return embed
@@ -536,7 +551,7 @@ def compact_protect_modify(ctx: Context, change: dict):
 author=author, author_url=author_url,
 article=sanitize_to_markdown(change["title"]), article_url=link,
 settings=sanitize_to_markdown(change["logparams"].get("description", "")) + (
-ctx._(" [cascading]") if "cascade" in change["logparams"] else ""),
+ctx._(" [cascading]") if change["logparams"].get("cascade", False) else ""),
 comment=parsed_comment)
 return DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url, content=content)
@@ -597,11 +612,12 @@ def embed_block_block(ctx: Context, change: dict):
 embed = DiscordMessage(ctx.message_type, ctx.event, ctx.webhook_url)
 user = change["title"].split(':', 1)[1]
 try:
+if "/" not in user:
 ipaddress.ip_address(user)
 embed["url"] = ctx.client.create_article_path("Special:Contributions/{user}".format(user=user))
 except ValueError:
 embed["url"] = ctx.client.create_article_path(sanitize_to_url(change["title"]))
-if "sitewide" not in change["logparams"]:
+if change["logparams"].get("sitewide", False) is False:
 restriction_description = ""
 if "restrictions" in change["logparams"]:
 if "pages" in change["logparams"]["restrictions"] and change["logparams"]["restrictions"]["pages"]:
@@ -619,7 +635,7 @@ def embed_block_block(ctx: Context, change: dict):
 if str(namespace) == "0":
 namespaces.append("*{ns}*".format(ns=ctx._("(Main)")))
 elif str(namespace) in ctx.client.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
-namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["*"]))
+namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["name"]))
 else:
 namespaces.append("*{ns}*".format(ns=namespace))
 restriction_description = restriction_description + ", ".join(namespaces)
@@ -632,7 +648,7 @@ def embed_block_block(ctx: Context, change: dict):
 if block_flags:
 embed.add_field(ctx._("Block flags"), ", ".join(
 block_flags)) # TODO Translate flags into MW messages, this requires making additional request in init_request since we want to get all messages with prefix (amprefix) block-log-flags- and that parameter is exclusive with ammessages
-embed["title"] = ctx._("Blocked {blocked_user} {time}").format(blocked_user=user, time=block_expiry(change, ctx))
+embed["title"] = ctx._("Blocked {blocked_user} {time}").format(blocked_user=sanitize_to_markdown(user), time=block_expiry(change, ctx))
 embed_helper(ctx, embed, change)
 return embed
@@ -644,6 +660,7 @@ def compact_block_block(ctx: Context, change: dict):
 author, author_url = compact_author(ctx, change)
 parsed_comment = compact_summary(ctx)
 try:
+if "/" not in user:
 ipaddress.ip_address(user)
 link = clean_link(ctx.client.create_article_path("Special:Contributions/{user}".format(user=user)))
 except ValueError:
@@ -666,7 +683,7 @@ def compact_block_block(ctx: Context, change: dict):
 if str(namespace) == "0":
 namespaces.append("*{ns}*".format(ns=ctx._("(Main)")))
 elif str(namespace) in ctx.client.namespaces: # if we have cached namespace name for given namespace number, add its name to the list
-namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["*"]))
+namespaces.append("*{ns}*".format(ns=ctx.client.namespaces[str(namespace)]["name"]))
 else:
 namespaces.append("*{ns}*".format(ns=namespace))
 restriction_description = restriction_description + ", ".join(namespaces)
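
Reviewer note: several edits in this file follow from adding formatversion=2 to the API requests: action=compare returns its HTML under "body" instead of "*", query.pages becomes a list whose entries carry a boolean "missing" flag instead of a "-1" page key, and revision slot text moves from "*" to "content". A rough sketch of the old and new response shapes as assumed here (illustrative values, not copied from a real wiki):

    # formatversion=1 (old) vs formatversion=2 (new) response fragments.
    compare_v1 = {"compare": {"*": "<tr>...</tr>"}}
    compare_v2 = {"compare": {"body": "<tr>...</tr>"}}

    pages_v1 = {"query": {"pages": {"-1": {"title": "File:Example.png", "missing": ""}}}}
    pages_v2 = {"query": {"pages": [{"title": "File:Example.png", "missing": True}]}}

    # The new code indexes the page list directly and checks the boolean flag:
    page = pages_v2["query"]["pages"][0]
    if not page.get("missing", False):
        print("image still exists")
    else:
        print("image has been removed, skipping the preview")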

View file

@@ -161,7 +161,7 @@ class DomainManager:
 wiki_json = wiki.json()
 try:
 wiki.statistics.update(Log(type=LogType.SCAN_REASON, title="Debug request for the wiki"))
-params = OrderedDict({"action": "query", "format": "json", "uselang": "content", "list": "tags|recentchanges",
+params = OrderedDict({"action": "query", "format": "json", "formatversion": "2", "uselang": "content", "list": "tags|recentchanges",
 "meta": "siteinfo", "utf8": 1, "rcshow": "!bot",
 "rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user|userid",
 "rclimit": 500, "rctype": "edit|new|log|categorize", "siprop": "namespaces|general"})

View file

@@ -324,7 +324,7 @@ class Wiki:
 if warnings:
 for warning in warnings:
 logger.warning("MediaWiki returned the following warning: {code} - {text} on {url}.".format(
-code=warning["code"], text=warning.get("text", warning.get("*", "")), url=url
+code=warning["code"], text=warning.get("text", ""), url=url
 ))
 return request_data
@@ -445,7 +445,7 @@ class Wiki:
 async def fetch_wiki(self, amount=10) -> dict:
 if self.mw_messages is None or self.recache_requested:
-params = OrderedDict({"action": "query", "format": "json", "uselang": "content", "list": "tags|recentchanges",
+params = OrderedDict({"action": "query", "format": "json", "formatversion": "2", "uselang": "content", "list": "tags|recentchanges",
 "meta": "allmessages|siteinfo",
 "utf8": 1, "tglimit": "max", "tgprop": "displayname",
 "rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user|userid",
@@ -453,7 +453,7 @@
 "ammessages": "recentchanges-page-added-to-category|recentchanges-page-removed-from-category|recentchanges-page-added-to-category-bundled|recentchanges-page-removed-from-category-bundled",
 "amenableparser": 1, "amincludelocal": 1, "siprop": "namespaces|general"})
 else:
-params = OrderedDict({"action": "query", "format": "json", "uselang": "content", "list": "recentchanges",
+params = OrderedDict({"action": "query", "format": "json", "formatversion": "2", "uselang": "content", "list": "recentchanges",
 "meta": "siteinfo", "utf8": 1, "rcshow": "!bot",
 "rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user|userid",
 "rclimit": amount, "rctype": "edit|new|log|categorize", "siprop": "namespaces|general"})
@@ -613,15 +613,15 @@ def process_cachable(response: dict, wiki_object: Wiki) -> None:
 mw_messages = response.get("query", {}).get("allmessages", [])
 final_mw_messages = dict()
 for msg in mw_messages:
-if "missing" not in msg: # ignore missing strings
-final_mw_messages[msg["name"]] = re.sub(r'\[\[.*?]]', '', msg["*"])
+if not msg.get("missing", False): # ignore missing strings
+final_mw_messages[msg["name"]] = re.sub(r'\[\[.*?]]', '', msg["content"])
 else:
 logger.warning("Could not fetch the MW message translation for: {}".format(msg["name"]))
 wiki_object.mw_messages = MWMessages(final_mw_messages)
 for tag in response["query"]["tags"]:
-try:
+if tag["displayname"]:
 wiki_object.tags[tag["name"]] = (BeautifulSoup(tag["displayname"], "lxml")).get_text()
-except KeyError:
+else:
 wiki_object.tags[tag["name"]] = None
 wiki_object.namespaces = response["query"]["namespaces"]
 wiki_object.recache_requested = False
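
Reviewer note: the process_cachable changes track the same formatversion=2 switch; allmessages entries expose their text under "content" rather than "*", and a missing message is flagged with a boolean "missing". A small sketch of the shape this code now expects (illustrative values, not taken from a real wiki):

    import re

    # allmessages fragment as assumed under formatversion=2.
    allmessages = [
        {"name": "recentchanges-page-added-to-category", "content": "[[$1]] added to category"},
        {"name": "no-such-message", "missing": True},
    ]

    final_mw_messages = {}
    for msg in allmessages:
        if not msg.get("missing", False):  # ignore missing strings
            final_mw_messages[msg["name"]] = re.sub(r'\[\[.*?]]', '', msg["content"])
    print(final_mw_messages)  # {'recentchanges-page-added-to-category': ' added to category'}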