Mirror of https://gitlab.com/chicken-riders/RcGcDw.git, synced 2025-02-23 00:24:09 +00:00
Merge branch '102-add-support-for-fandom-discussions' into 'testing'
Resolve "Add support for Fandom discussions" See merge request piotrex43/RcGcDw!65
Commit a0efa067f3
@@ -10,3 +10,4 @@ try:  # load settings
 except FileNotFoundError:
     logging.critical("No config file could be found. Please make sure settings.json is in the directory.")
     sys.exit(1)
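The hunk above shows only the error branch of the settings loader (presumably configloader.py, which the new discussions.py imports settings from); the try block itself, and the one line this hunk adds, are not visible in this view. A minimal sketch of what such a loader looks like, assuming settings.json holds a single JSON object:

import json, logging, sys

try:  # load settings (sketch only; the actual loader is not shown in this diff)
    with open("settings.json", encoding="utf-8") as settings_file:
        settings = json.load(settings_file)
except FileNotFoundError:
    logging.critical("No config file could be found. Please make sure settings.json is in the directory.")
    sys.exit(1)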
discussions.pot (new file, 42 lines)
@@ -0,0 +1,42 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-04-06 18:55+0200\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: 8bit\n"

#: discussions.py:53
#, python-brace-format
msgid "Replied to \"{title}\""
msgstr ""

#: discussions.py:56
#, python-brace-format
msgid "Created \"{title}\""
msgstr ""

#: discussions.py:73
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created [{title}](<{url}f/p/{threadId}>) "
"in {forumName}"
msgstr ""

#: discussions.py:76
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created a [reply](<{url}f/p/{threadId}/r/"
"{postId}>) to [{title}](<{url}f/p/{threadId}>) in {forumName}"
msgstr ""
discussions.py (new file, 124 lines)
@@ -0,0 +1,124 @@
# -*- coding: utf-8 -*-

# Recent changes Goat compatible Discord webhook is a project for using a webhook as recent changes page from MediaWiki.
# Copyright (C) 2020 Frisk

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import logging, gettext, schedule, requests, json, datetime
from collections import defaultdict
from configloader import settings
from misc import datafile, WIKI_SCRIPT_PATH, send_to_discord
from session import session

# Initialize translation
t = gettext.translation('discussions', localedir='locale', languages=[settings["lang"]])
_ = t.gettext

# Create a custom logger
discussion_logger = logging.getLogger("rcgcdw.disc")

# Create a variable in datafile if it doesn't exist yet (in files <1.10)
if "discussion_id" not in datafile.data:
    datafile.data["discussion_id"] = 0
    datafile.save_datafile()

storage = datafile.data

fetch_url = "https://services.fandom.com/discussion/{wikiid}/posts?sortDirection=descending&sortKey=creation_date&limit={limit}".format(wikiid=settings["fandom_discussions"]["wiki_id"], limit=settings["fandom_discussions"]["limit"])


def embed_formatter(post):
    """Embed formatter for Fandom discussions."""
    embed = defaultdict(dict)
    data = {"embeds": []}
    embed["author"]["name"] = post["createdBy"]["name"]
    embed["author"]["icon_url"] = post["createdBy"]["avatarUrl"]
    embed["author"]["url"] = "{wikiurl}f/u/{creatorId}".format(wikiurl=WIKI_SCRIPT_PATH, creatorId=post["creatorId"])
    if post["isReply"]:
        embed["title"] = _("Replied to \"{title}\"").format(title=post["_embedded"]["thread"][0]["title"])
        embed["url"] = "{wikiurl}f/p/{threadId}/r/{postId}".format(wikiurl=WIKI_SCRIPT_PATH, threadId=post["threadId"], postId=post["id"])
    else:
        embed["title"] = _("Created \"{title}\"").format(title=post["title"])
        embed["url"] = "{wikiurl}f/p/{threadId}".format(wikiurl=WIKI_SCRIPT_PATH, threadId=post["threadId"])
    if settings["fandom_discussions"]["appearance"]["embed"]["show_content"]:
        embed["description"] = post["rawContent"] if len(post["rawContent"]) < 2000 else post["rawContent"][0:2000] + "…"
    embed["footer"]["text"] = post["forumName"]
    embed["timestamp"] = datetime.datetime.fromtimestamp(post["creationDate"]["epochSecond"], tz=datetime.timezone.utc).isoformat()
    data["embeds"].append(dict(embed))
    data['avatar_url'] = settings["avatars"]["embed"]
    data['allowed_mentions'] = {'parse': []}
    formatted_embed = json.dumps(data, indent=4)
    send_to_discord(formatted_embed)


def compact_formatter(post):
    """Compact formatter for Fandom discussions."""
    message = None
    if not post["isReply"]:
        message = _("[{author}](<{url}f/u/{creatorId}>) created [{title}](<{url}f/p/{threadId}>) in {forumName}").format(
            author=post["createdBy"]["name"], url=WIKI_SCRIPT_PATH, creatorId=post["creatorId"], title=post["title"], threadId=post["threadId"], forumName=post["forumName"])
    else:
        message = _("[{author}](<{url}f/u/{creatorId}>) created a [reply](<{url}f/p/{threadId}/r/{postId}>) to [{title}](<{url}f/p/{threadId}>) in {forumName}").format(
            author=post["createdBy"]["name"], url=WIKI_SCRIPT_PATH, creatorId=post["creatorId"], threadId=post["threadId"], postId=post["id"], title=post["_embedded"]["thread"][0]["title"], forumName=post["forumName"]
        )
    send_to_discord(json.dumps({'content': message, 'allowed_mentions': {'parse': []}}))


def fetch_discussions():
    request = safe_request(fetch_url)
    if request:
        try:
            request_json = request.json()["_embedded"]["doc:posts"]
            request_json.reverse()
        except ValueError:
            discussion_logger.warning("ValueError in fetching discussions")
            return None
        except KeyError:
            discussion_logger.warning("Wiki returned %s" % (request_json.json()))
            return None
        else:
            if request_json:
                for post in request_json:
                    if int(post["id"]) > storage["discussion_id"]:
                        formatter(post)
                if int(post["id"]) > storage["discussion_id"]:
                    storage["discussion_id"] = int(post["id"])
                    datafile.save_datafile()


def safe_request(url):
    """Function to assure safety of request, and do not crash the script on exceptions,"""
    try:
        request = session.get(url, timeout=10, allow_redirects=False, headers={"Accept": "application/hal+json"})
    except requests.exceptions.Timeout:
        discussion_logger.warning("Reached timeout error for request on link {url}".format(url=url))
        return None
    except requests.exceptions.ConnectionError:
        discussion_logger.warning("Reached connection error for request on link {url}".format(url=url))
        return None
    except requests.exceptions.ChunkedEncodingError:
        discussion_logger.warning("Detected faulty response from the web server for request on link {url}".format(url=url))
        return None
    else:
        if 499 < request.status_code < 600:
            return None
        return request


formatter = embed_formatter if settings["fandom_discussions"]["appearance"]["mode"] == "embed" else compact_formatter

schedule.every(settings["fandom_discussions"]["cooldown"]).seconds.do(fetch_discussions)
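discussions.py reads everything it needs from the fandom_discussions section of settings.json; that file is not part of this diff, so the following is only a sketch assembled from the lookups in the code above. The key names come from the code, the values are invented examples:

# Hypothetical fandom_discussions section of settings.json, written as a Python dict for illustration.
fandom_discussions = {
    "enabled": True,                 # rcgcdw.py imports this module only when True
    "wiki_id": 1234567,              # numeric Fandom site id inserted into the services.fandom.com URL
    "limit": 50,                     # how many newest posts to request per poll
    "cooldown": 60,                  # seconds between fetch_discussions() runs
    "appearance": {
        "mode": "embed",             # "embed" or "compact", selects embed_formatter or compact_formatter
        "embed": {"show_content": True}  # include post["rawContent"] in the embed description
    }
}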
locale/en/LC_MESSAGES/discussions.mo (new binary file, diff not shown)

locale/en/LC_MESSAGES/discussions.po (new file, 37 lines)
@@ -0,0 +1,37 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
msgid ""
msgstr ""
"Project-Id-Version: \n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-04-05 22:10+0200\n"
"PO-Revision-Date: 2020-04-06 19:24+0200\n"
"Language-Team: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: Poedit 2.3\n"
"Last-Translator: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"Language: en\n"

#: discussions.py:53
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created [{title}](<{url}f/p/{threadId}>) "
"in ${forumName}"
msgstr ""
"[{author}](<{url}f/u/{creatorId}>) created [{title}](<{url}f/p/{threadId}>) "
"in ${forumName}"

#: discussions.py:56
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created a [reply](<{url}f/p/{threadId}/r/"
"{postId}>) to [{title}](<{url}f/p/{threadId}>) in {forumName}"
msgstr ""
"[{author}](<{url}f/u/{creatorId}>) created a [reply](<{url}f/p/{threadId}/r/"
"{postId}>) to [{title}](<{url}f/p/{threadId}>) in {forumName}"
locale/pl/LC_MESSAGES/discussions.mo (new binary file, diff not shown)

locale/pl/LC_MESSAGES/discussions.po (new file, 49 lines)
@@ -0,0 +1,49 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
msgid ""
msgstr ""
"Project-Id-Version: \n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-04-06 18:55+0200\n"
"PO-Revision-Date: 2020-04-06 18:56+0200\n"
"Last-Translator: \n"
"Language-Team: \n"
"Language: pl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: Poedit 2.3\n"
"Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<12 "
"|| n%100>14) ? 1 : 2);\n"

#: discussions.py:53
#, python-brace-format
msgid "Replied to \"{title}\""
msgstr "Odpowiedział(a) w „{title}”"

#: discussions.py:56
#, python-brace-format
msgid "Created \"{title}\""
msgstr "Utworzył(a) „{title}”"

#: discussions.py:73
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created [{title}](<{url}f/p/{threadId}>) "
"in {forumName}"
msgstr ""
"[{author}](<{url}f/u/{creatorId}>) utworzył(a) [{title}](<{url}f/p/{threadId}"
">) w {forumName}"

#: discussions.py:76
#, python-brace-format
msgid ""
"[{author}](<{url}f/u/{creatorId}>) created a [reply](<{url}f/p/{threadId}/r/"
"{postId}>) to [{title}](<{url}f/p/{threadId}>) in {forumName}"
msgstr ""
"[{author}](<{url}f/u/{creatorId}>) utworzył(a) [odpowiedź](<{url}f/p/"
"{threadId}/r/{postId}>) pod tematem [{title}](<{url}f/p/{threadId}>) w "
"{forumName}"
misc.pot (4 lines changed)
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2020-03-17 20:53+0100\n"
+"POT-Creation-Date: 2020-04-06 18:47+0200\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -17,7 +17,7 @@ msgstr ""
 "Content-Type: text/plain; charset=CHARSET\n"
 "Content-Transfer-Encoding: 8bit\n"

-#: misc.py:82
+#: misc.py:120
 msgid ""
 "\n"
 "__And more__"
misc.py (181 lines changed)
@@ -16,8 +16,11 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-import json, logging, sys, re
+import json, logging, sys, re, time
 from html.parser import HTMLParser
+from urllib.parse import urlparse, urlunparse
+import requests
+
 from configloader import settings
 import gettext
@@ -30,43 +33,78 @@ _ = t.gettext

 misc_logger = logging.getLogger("rcgcdw.misc")

-data_template = {"rcid": 99999999999,
+data_template = {"rcid": 99999999999, "discussion_id": 0,
                  "daily_overview": {"edits": None, "new_files": None, "admin_actions": None, "bytes_changed": None,
                                     "new_articles": None, "unique_editors": None, "day_score": None, "days_tracked": 0}}

+WIKI_API_PATH: str = ""
+WIKI_ARTICLE_PATH: str = ""
+WIKI_SCRIPT_PATH: str = ""
+WIKI_JUST_DOMAIN: str = ""
+
-def generate_datafile():
-    """Generate a data.json file from a template."""
-    try:
-        with open("data.json", 'w') as data:
-            data.write(json.dumps(data_template, indent=4))
-    except PermissionError:
-        misc_logger.critical("Could not create a data file (no permissions). No way to store last edit.")
-        sys.exit(1)
-
-
-def load_datafile() -> dict:
-    """Read a data.json file and return a dictionary with contents
-    :rtype: dict
-    """
-    try:
-        with open("data.json") as data:
-            return json.loads(data.read())
-    except FileNotFoundError:
-        generate_datafile()
-        misc_logger.info("The data file could not be found. Generating a new one...")
-        return data_template
-
-
-def save_datafile(data):
-    """Overwrites the data.json file with given dictionary"""
-    try:
-        with open("data.json", "w") as data_file:
-            data_file.write(json.dumps(data, indent=4))
-    except PermissionError:
-        misc_logger.critical("Could not modify a data file (no permissions). No way to store last edit.")
-        sys.exit(1)
+class DataFile:
+    """Data class which instance of is shared by multiple modules to remain consistent and do not cause too many IO operations."""
+    def __init__(self):
+        self.data = self.load_datafile()
+
+    @staticmethod
+    def generate_datafile():
+        """Generate a data.json file from a template."""
+        try:
+            with open("data.json", 'w') as data:
+                data.write(json.dumps(data_template, indent=4))
+        except PermissionError:
+            misc_logger.critical("Could not create a data file (no permissions). No way to store last edit.")
+            sys.exit(1)
+
+    def load_datafile(self) -> dict:
+        """Read a data.json file and return a dictionary with contents
+        :rtype: dict
+        """
+        try:
+            with open("data.json") as data:
+                return json.loads(data.read())
+        except FileNotFoundError:
+            self.generate_datafile()
+            misc_logger.info("The data file could not be found. Generating a new one...")
+            return data_template
+
+    def save_datafile(self):
+        """Overwrites the data.json file with given dictionary"""
+        try:
+            with open("data.json", "w") as data_file:
+                data_file.write(json.dumps(self.data, indent=4))
+        except PermissionError:
+            misc_logger.critical("Could not modify a data file (no permissions). No way to store last edit.")
+            sys.exit(1)
+
+
+class MessageQueue:
+    """Message queue class for undelivered messages"""
+    def __init__(self):
+        self._queue = []
+
+    def __repr__(self):
+        return self._queue
+
+    def __len__(self):
+        return len(self._queue)
+
+    def __iter__(self):
+        return self._queue
+
+    def clear(self):
+        self._queue.clear()
+
+    def add_message(self, message):
+        self._queue.append(message)
+
+    def cut_messages(self, item_num):
+        self._queue = self._queue[item_num:]
+
+
+messagequeue = MessageQueue()
+datafile = DataFile()

 def weighted_average(value, weight, new_value):
     """Calculates weighted average of value number with weight weight and new_value with weight 1"""
@@ -191,4 +229,85 @@ def add_to_dict(dictionary, key):
         dictionary[key] += 1
     else:
         dictionary[key] = 1
     return dictionary
+
+
+def prepare_paths():
+    global WIKI_API_PATH
+    global WIKI_ARTICLE_PATH
+    global WIKI_SCRIPT_PATH
+    global WIKI_JUST_DOMAIN
+    """Set the URL paths for article namespace and script namespace
+    WIKI_API_PATH will be: WIKI_DOMAIN/api.php
+    WIKI_ARTICLE_PATH will be: WIKI_DOMAIN/articlepath/$1 where $1 is the replaced string
+    WIKI_SCRIPT_PATH will be: WIKI_DOMAIN/
+    WIKI_JUST_DOMAIN will be: WIKI_DOMAIN"""
+    def quick_try_url(url):
+        """Quickly test if URL is the proper script path,
+        False if it appears invalid
+        dictionary when it appears valid"""
+        try:
+            request = requests.get(url, timeout=5)
+            if request.status_code == requests.codes.ok:
+                if request.json()["query"]["general"] is not None:
+                    return request
+            return False
+        except (KeyError, requests.exceptions.ConnectionError):
+            return False
+    try:
+        parsed_url = urlparse(settings["wiki_url"])
+    except KeyError:
+        misc_logger.critical("wiki_url is not specified in the settings. Please provide the wiki url in the settings and start the script again.")
+        sys.exit(1)
+    for url_scheme in (settings["wiki_url"], settings["wiki_url"].split("wiki")[0], urlunparse((*parsed_url[0:2], "", "", "", ""))):  # check different combinations, it's supposed to be idiot-proof
+        tested = quick_try_url(url_scheme + "/api.php?action=query&format=json&meta=siteinfo")
+        if tested:
+            WIKI_API_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/api.php"
+            WIKI_SCRIPT_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/"
+            WIKI_ARTICLE_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["articlepath"]
+            WIKI_JUST_DOMAIN = urlunparse((*parsed_url[0:2], "", "", "", ""))
+            break
+    else:
+        misc_logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URL in settings.json and your Internet connection is working.")
+        sys.exit(1)
+
+
+prepare_paths()
+
+
+def create_article_path(article: str) -> str:
+    """Takes the string and creates an URL with it as the article name"""
+    return WIKI_ARTICLE_PATH.replace("$1", article)
+
+
+def send_to_discord_webhook(data):
+    header = settings["header"]
+    if isinstance(data, str):
+        header['Content-Type'] = 'application/json'
+    else:
+        header['Content-Type'] = 'application/x-www-form-urlencoded'
+    try:
+        result = requests.post(settings["webhookURL"], data=data,
+                               headers=header, timeout=10)
+    except requests.exceptions.Timeout:
+        misc_logger.warning("Timeouted while sending data to the webhook.")
+        return 3
+    except requests.exceptions.ConnectionError:
+        misc_logger.warning("Connection error while sending the data to a webhook")
+        return 3
+    else:
+        return handle_discord_http(result.status_code, data, result)
+
+
+def send_to_discord(data):
+    if messagequeue:
+        messagequeue.add_message(data)
+    else:
+        code = send_to_discord_webhook(data)
+        if code == 3:
+            messagequeue.add_message(data)
+        elif code == 2:
+            time.sleep(5.0)
+            messagequeue.add_message(data)
+        elif code < 2:
+            time.sleep(2.0)
+            pass
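misc.py now exposes two module-level singletons, datafile and messagequeue, which rcgcdw.py and discussions.py share instead of passing dictionaries around. A short usage sketch (the values are invented; importing misc also runs prepare_paths(), so a valid settings.json is assumed):

from misc import datafile, messagequeue, send_to_discord

last_id = datafile.data["discussion_id"]      # read a field persisted in data.json
datafile.data["discussion_id"] = last_id + 1
datafile.save_datafile()                      # write the whole dict back to data.json

send_to_discord('{"content": "example payload"}')  # hypothetical payload; parked in messagequeue when Discord is unreachable
print(len(messagequeue))                      # 0 once everything has been delivered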
rcgcdw.pot (394 lines changed; diff suppressed because it is too large)
rcgcdw.py (122 lines changed)
@@ -26,9 +26,15 @@ from html.parser import HTMLParser
 import misc
 from bs4 import BeautifulSoup
 from collections import defaultdict, Counter
-from urllib.parse import quote_plus, urlparse, urlunparse
+from urllib.parse import quote_plus
 from configloader import settings
-from misc import link_formatter, ContentParser, safe_read, handle_discord_http, add_to_dict, misc_logger
+from misc import link_formatter, ContentParser, safe_read, add_to_dict, datafile, \
+    WIKI_API_PATH, WIKI_SCRIPT_PATH, WIKI_JUST_DOMAIN, create_article_path, messagequeue, send_to_discord_webhook, \
+    send_to_discord
+from session import session
+
+if settings["fandom_discussions"]["enabled"]:
+    import discussions

 if __name__ != "__main__":  # return if called as a module
     logging.critical("The file is being executed as a module. Please execute the script using the console.")
@@ -53,7 +59,7 @@ except FileNotFoundError:
 lang.install()
 ngettext = lang.ngettext

-storage = misc.load_datafile()
+storage = datafile.data

 # Remove previous data holding file if exists and limitfetch allows
@@ -61,7 +67,7 @@ if settings["limitrefetch"] != -1 and os.path.exists("lastchange.txt") is True:
     with open("lastchange.txt", 'r') as sfile:
         logger.info("Converting old lastchange.txt file into new data storage data.json...")
         storage["rcid"] = int(sfile.read().strip())
-        misc.save_datafile(storage)
+        datafile.save_datafile()
         os.remove("lastchange.txt")

 # A few initial vars
@@ -69,10 +75,6 @@ if settings["limitrefetch"] != -1 and os.path.exists("lastchange.txt") is True:
 logged_in = False
 supported_logs = ["protect/protect", "protect/modify", "protect/unprotect", "upload/overwrite", "upload/upload", "delete/delete", "delete/delete_redir", "delete/restore", "delete/revision", "delete/event", "import/upload", "import/interwiki", "merge/merge", "move/move", "move/move_redir", "protect/move_prot", "block/block", "block/unblock", "block/reblock", "rights/rights", "rights/autopromote", "abusefilter/modify", "abusefilter/create", "interwiki/iw_add", "interwiki/iw_edit", "interwiki/iw_delete", "curseprofile/comment-created", "curseprofile/comment-edited", "curseprofile/comment-deleted", "curseprofile/comment-purged", "curseprofile/profile-edited", "curseprofile/comment-replied", "contentmodel/change", "sprite/sprite", "sprite/sheet", "sprite/slice", "managetags/create", "managetags/delete", "managetags/activate", "managetags/deactivate", "tag/update", "cargo/createtable", "cargo/deletetable", "cargo/recreatetable", "cargo/replacetable", "upload/revert"]
 profile_fields = {"profile-location": _("Location"), "profile-aboutme": _("About me"), "profile-link-google": _("Google link"), "profile-link-facebook":_("Facebook link"), "profile-link-twitter": _("Twitter link"), "profile-link-reddit": _("Reddit link"), "profile-link-twitch": _("Twitch link"), "profile-link-psn": _("PSN link"), "profile-link-vk": _("VK link"), "profile-link-xbl": _("XBL link"), "profile-link-steam": _("Steam link"), "profile-link-discord": _("Discord handle"), "profile-link-battlenet": _("Battle.net handle")}
-WIKI_API_PATH: str = ""
-WIKI_ARTICLE_PATH: str = ""
-WIKI_SCRIPT_PATH: str = ""
-WIKI_JUST_DOMAIN: str = ""

 class LinkParser(HTMLParser):
@@ -112,48 +114,6 @@ LinkParser = LinkParser()
 class MWError(Exception):
     pass

-def prepare_paths():
-    global WIKI_API_PATH
-    global WIKI_ARTICLE_PATH
-    global WIKI_SCRIPT_PATH
-    global WIKI_JUST_DOMAIN
-    """Set the URL paths for article namespace and script namespace
-    WIKI_API_PATH will be: WIKI_DOMAIN/api.php
-    WIKI_ARTICLE_PATH will be: WIKI_DOMAIN/articlepath/$1 where $1 is the replaced string
-    WIKI_SCRIPT_PATH will be: WIKI_DOMAIN/
-    WIKI_JUST_DOMAIN will be: WIKI_DOMAIN"""
-    def quick_try_url(url):
-        """Quickly test if URL is the proper script path,
-        False if it appears invalid
-        dictionary when it appears valid"""
-        try:
-            request = requests.get(url, timeout=5)
-            if request.status_code == requests.codes.ok:
-                if request.json()["query"]["general"] is not None:
-                    return request
-            return False
-        except (KeyError, requests.exceptions.ConnectionError):
-            return False
-    try:
-        parsed_url = urlparse(settings["wiki_url"])
-    except KeyError:
-        logger.critical("wiki_url is not specified in the settings. Please provide the wiki url in the settings and start the script again.")
-        sys.exit(1)
-    for url_scheme in (settings["wiki_url"], settings["wiki_url"].split("wiki")[0], urlunparse((*parsed_url[0:2], "", "", "", ""))):  # check different combinations, it's supposed to be idiot-proof
-        tested = quick_try_url(url_scheme + "/api.php?action=query&format=json&meta=siteinfo")
-        if tested:
-            WIKI_API_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/api.php"
-            WIKI_SCRIPT_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["scriptpath"] + "/"
-            WIKI_ARTICLE_PATH = urlunparse((*parsed_url[0:2], "", "", "", "")) + tested.json()["query"]["general"]["articlepath"]
-            WIKI_JUST_DOMAIN = urlunparse((*parsed_url[0:2], "", "", "", ""))
-            break
-    else:
-        logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URL in settings.json and your Internet connection is working.")
-        sys.exit(1)
-
-def create_article_path(article: str) -> str:
-    """Takes the string and creates an URL with it as the article name"""
-    return WIKI_ARTICLE_PATH.replace("$1", article)
-
 def send(message, name, avatar):
     dictionary_creator = {"content": message}
@@ -173,39 +133,6 @@ def profile_field_name(name, embed):
     else:
         return _("unknown")

-def send_to_discord_webhook(data):
-    header = settings["header"]
-    if isinstance(data, str):
-        header['Content-Type'] = 'application/json'
-    else:
-        header['Content-Type'] = 'application/x-www-form-urlencoded'
-    try:
-        result = requests.post(settings["webhookURL"], data=data,
-                               headers=header, timeout=10)
-    except requests.exceptions.Timeout:
-        logger.warning("Timeouted while sending data to the webhook.")
-        return 3
-    except requests.exceptions.ConnectionError:
-        logger.warning("Connection error while sending the data to a webhook")
-        return 3
-    else:
-        return handle_discord_http(result.status_code, data, result)
-
-
-def send_to_discord(data):
-    if recent_changes.unsent_messages:
-        recent_changes.unsent_messages.append(data)
-    else:
-        code = send_to_discord_webhook(data)
-        if code == 3:
-            recent_changes.unsent_messages.append(data)
-        elif code == 2:
-            time.sleep(5.0)
-            recent_changes.unsent_messages.append(data)
-        elif code < 2:
-            time.sleep(2.0)
-            pass
-
 def pull_comment(comment_id):
     try:
@@ -226,7 +153,7 @@ def pull_comment(comment_id):

 def compact_formatter(action, change, parsed_comment, categories):
     if action != "suppressed":
-        author_url = link_formatter(create_article_path("User:{user}".format( user=change["user"])))
+        author_url = link_formatter(create_article_path("User:{user}".format(user=change["user"])))
         author = change["user"]
         parsed_comment = "" if parsed_comment is None else " *("+parsed_comment+")*"
         parsed_comment = re.sub(r"([^<]|\A)(http(s)://.*?)( |\Z)", "\\1<\\2>\\4", parsed_comment)  # see #97
@@ -437,7 +364,8 @@ def compact_formatter(action, change, parsed_comment, categories):
         link = link_formatter(create_article_path("Special:AbuseFilter/history/{number}/diff/prev/{historyid}".format(number=change["logparams"]['newId'], historyid=change["logparams"]["historyId"])))
         content = _("[{author}]({author_url}) edited abuse filter [number {number}]({filter_url})").format(author=author, author_url=author_url, number=change["logparams"]['newId'], filter_url=link)
     elif action == "abusefilter/create":
-        link = link_formatter(create_article_path("Special:AbuseFilter/{number}".format(number=change["logparams"]['newId'])))
+        link = link_formatter(
+            create_article_path("Special:AbuseFilter/{number}".format(number=change["logparams"]['newId'])))
         content = _("[{author}]({author_url}) created abuse filter [number {number}]({filter_url})").format(author=author, author_url=author_url, number=change["logparams"]['newId'], filter_url=link)
     elif action == "merge/merge":
         link = link_formatter(create_article_path(change["title"]))
@@ -846,7 +774,7 @@ def embed_formatter(action, change, parsed_comment, categories):
         link = create_article_path("Special:AbuseFilter/history/{number}/diff/prev/{historyid}".format(number=change["logparams"]['newId'], historyid=change["logparams"]["historyId"]))
         embed["title"] = _("Edited abuse filter number {number}").format(number=change["logparams"]['newId'])
     elif action == "abusefilter/create":
-        link = create_article_path("Special:AbuseFilter/{number}".format( number=change["logparams"]['newId']))
+        link = create_article_path("Special:AbuseFilter/{number}".format(number=change["logparams"]['newId']))
         embed["title"] = _("Created abuse filter number {number}").format(number=change["logparams"]['newId'])
     elif action == "merge/merge":
         link = create_article_path(change["title"].replace(" ", "_"))
@@ -1069,7 +997,7 @@ def daily_overview_sync(edits, files, admin, changed_bytes, new_articles, unique
     storage["daily_overview"].update({"edits": edits_avg, "new_files": files_avg, "admin_actions": admin_avg, "bytes_changed": changed_bytes_avg,
                                       "new_articles": new_articles_avg, "unique_editors": unique_contributors_avg, "day_score": day_score_avg})
     storage["daily_overview"]["days_tracked"] += 1
-    misc.save_datafile(storage)
+    datafile.save_datafile()
     return edits, files, admin, changed_bytes, new_articles, unique_contributors, day_score

 def day_overview():
@@ -1170,11 +1098,9 @@ class Recent_Changes_Class(object):
         self.tags = {}
         self.groups = {}
         self.streak = -1
-        self.unsent_messages = []
         self.mw_messages = {}
         self.namespaces = None
-        self.session = requests.Session()
-        self.session.headers.update(settings["header"])
+        self.session = session
         if settings["limitrefetch"] != -1:
             self.file_id = storage["rcid"]
         else:
@@ -1234,11 +1160,11 @@ class Recent_Changes_Class(object):
             self.ids.pop(0)

     def fetch(self, amount=settings["limit"]):
-        if self.unsent_messages:
+        if messagequeue:
             logger.info(
                 "{} messages waiting to be delivered to Discord due to Discord throwing errors/no connection to Discord servers.".format(
-                    len(self.unsent_messages)))
-            for num, item in enumerate(self.unsent_messages):
+                    len(messagequeue)))
+            for num, item in enumerate(messagequeue):
                 logger.debug(
                     "Trying to send a message to Discord from the queue with id of {} and content {}".format(str(num),
                                                                                                               str(item)))
@@ -1249,10 +1175,10 @@ class Recent_Changes_Class(object):
                     logger.debug("Sending message failed")
                     break
             else:
-                self.unsent_messages = []
+                messagequeue.clear()
                 logger.debug("Queue emptied, all messages delivered")
-            self.unsent_messages = self.unsent_messages[num:]
-            logger.debug(self.unsent_messages)
+            messagequeue.cut_messages(num)
+            logger.debug(messagequeue)
         last_check = self.fetch_changes(amount=amount)
         # If the request succeeds the last_check will be the last rcid from recentchanges query
         if last_check is not None:
@@ -1261,7 +1187,7 @@ class Recent_Changes_Class(object):
         if settings["limitrefetch"] != -1 and self.recent_id != self.file_id and self.recent_id != 0:  # if saving to database is disabled, don't save the recent_id
             self.file_id = self.recent_id
             storage["rcid"] = self.recent_id
-            misc.save_datafile(storage)
+            datafile.save_datafile()
         logger.debug("Most recent rcid is: {}".format(self.recent_id))
         return self.recent_id
@@ -1453,7 +1379,6 @@ else:
     sys.exit(1)

 # Log in and download wiki information
-prepare_paths()
 try:
     if settings["wiki_bot_login"] and settings["wiki_bot_password"]:
         recent_changes.log_in()
@@ -1468,6 +1393,7 @@ recent_changes.fetch(amount=settings["limitrefetch"] if settings["limitrefetch"]

 schedule.every(settings["cooldown"]).seconds.do(recent_changes.fetch)
 if 1 == 2:  # additional translation strings in unreachable code
+    # noinspection PyUnreachableCode
     print(_("director"), _("bot"), _("editor"), _("directors"), _("sysop"), _("bureaucrat"), _("reviewer"),
           _("autoreview"), _("autopatrol"), _("wiki_guardian"), ngettext("second", "seconds", 1), ngettext("minute", "minutes", 1), ngettext("hour", "hours", 1), ngettext("day", "days", 1), ngettext("week", "weeks", 1), ngettext("month", "months",1), ngettext("year", "years", 1), ngettext("millennium", "millennia", 1), ngettext("decade", "decades", 1), ngettext("century", "centuries", 1))
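Both recent_changes.fetch and the new fetch_discussions are only registered as schedule jobs in this diff; the loop that actually runs them lives elsewhere in the script. With the schedule library the driving loop conventionally looks like this (a sketch, not necessarily the exact loop RcGcDw uses):

import time
import schedule

while True:
    schedule.run_pending()  # runs recent_changes.fetch and fetch_discussions when their cooldowns elapse
    time.sleep(1.0)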
session.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-

# Recent changes Goat compatible Discord webhook is a project for using a webhook as recent changes page from MediaWiki.
# Copyright (C) 2020 Frisk

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import requests
from configloader import settings

session = requests.Session()
session.headers.update(settings["header"])
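session.py gives every module one shared requests.Session preconfigured with the header from settings, so rcgcdw.py (self.session = session) and discussions.py reuse the same headers and connection pool instead of creating their own. A sketch of the pattern with a placeholder URL:

from session import session

response = session.get("https://example.org/api.php", timeout=10)  # placeholder URL; settings["header"] is sent automatically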