mirror of
https://gitlab.com/chicken-riders/RcGcDw.git
synced 2025-02-23 00:24:09 +00:00
Initial commit; discussions are a work in progress. Split off the most important functions from rcgcdw.py, since we need them for modularization.
This commit is contained in:
parent
c5f215abc7
commit
ed111974df
|
@ -10,3 +10,4 @@ try: # load settings
|
|||
except FileNotFoundError:
|
||||
logging.critical("No config file could be found. Please make sure settings.json is in the directory.")
|
||||
sys.exit(1)
|
||||
|
||||
|
|
35
discussions.py
Normal file
35
discussions.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-

# Recent changes Goat compatible Discord webhook is a project for using a webhook as recent changes page from MediaWiki.
# Copyright (C) 2020 Frisk

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import logging, gettext

from configloader import settings
from misc import datafile, WIKI_SCRIPT_PATH

# Set up gettext translation for this module's user-facing strings,
# using the language configured in settings.
t = gettext.translation('discussions', localedir='locale', languages=[settings["lang"]])
_ = t.gettext

# Child logger dedicated to Fandom discussions handling.
discussion_logger = logging.getLogger("rcgcdw.disc")

# Endpoint that lists the newest discussion posts for the configured wiki,
# sorted by creation date, capped at the configured limit.
fetch_url = "https://services.fandom.com/discussion/{wikiid}/posts?sortDirection=descending&sortKey=creation_date&limit={limit}".format(
	wikiid=settings["fandom_discussions"]["wiki_id"], limit=settings["fandom_discussions"]["limit"])


def fetch_discussions():
	"""Fetch new discussion posts from the wiki. (Stub — not implemented yet.)"""
	pass
|
118
misc.py
118
misc.py
|
@ -18,6 +18,9 @@
|
|||
|
||||
import json, logging, sys, re
|
||||
from html.parser import HTMLParser
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
import requests
|
||||
|
||||
from configloader import settings
|
||||
import gettext
|
||||
|
||||
|
@ -30,43 +33,53 @@ _ = t.gettext
|
|||
|
||||
# Logger for the misc helper module.
misc_logger = logging.getLogger("rcgcdw.misc")

# Default shape of data.json: last processed recent-changes id, last processed
# discussion id, and the accumulated daily-overview averages.
data_template = {
	"rcid": 99999999999, "discussion_id": 0,
	"daily_overview": {"edits": None, "new_files": None, "admin_actions": None, "bytes_changed": None,
	                   "new_articles": None, "unique_editors": None, "day_score": None, "days_tracked": 0}}

# Wiki URL fragments; filled in at runtime by prepare_paths().
WIKI_API_PATH: str = ""
WIKI_ARTICLE_PATH: str = ""
WIKI_SCRIPT_PATH: str = ""
WIKI_JUST_DOMAIN: str = ""
|
||||
|
||||
def generate_datafile():
	"""Write a fresh data.json from the default template.

	Exits the process when the file cannot be created due to missing
	filesystem permissions, since without it there is no way to persist
	the last processed edit."""
	try:
		with open("data.json", 'w') as fp:
			fp.write(json.dumps(data_template, indent=4))
	except PermissionError:
		misc_logger.critical("Could not create a data file (no permissions). No way to store last edit.")
		sys.exit(1)
|
||||
class DataFile:
	"""Shared holder for the contents of data.json.

	One instance is shared by multiple modules so everyone works on the same
	in-memory state and disk I/O stays to a minimum."""

	def __init__(self):
		# Load (or create) data.json as soon as the instance is built.
		self.data = self.load_datafile()

	@staticmethod
	def generate_datafile():
		"""Generate a data.json file from a template."""
		try:
			with open("data.json", 'w') as fp:
				fp.write(json.dumps(data_template, indent=4))
		except PermissionError:
			misc_logger.critical("Could not create a data file (no permissions). No way to store last edit.")
			sys.exit(1)

	def load_datafile(self) -> dict:
		"""Read a data.json file and return a dictionary with contents

		:rtype: dict
		"""
		try:
			with open("data.json") as fp:
				return json.loads(fp.read())
		except FileNotFoundError:
			# No file yet — create one and fall back to the in-memory template.
			self.generate_datafile()
			misc_logger.info("The data file could not be found. Generating a new one...")
			return data_template

	def save_datafile(self):
		"""Overwrites the data.json file with given dictionary"""
		try:
			with open("data.json", "w") as fp:
				fp.write(json.dumps(self.data, indent=4))
		except PermissionError:
			misc_logger.critical("Could not modify a data file (no permissions). No way to store last edit.")
			sys.exit(1)
|
||||
|
||||
|
||||
def load_datafile() -> dict:
	"""Read a data.json file and return a dictionary with contents

	Falls back to generating the file from the template (and returning the
	template) when data.json does not exist yet.

	:rtype: dict
	"""
	try:
		with open("data.json") as fp:
			return json.loads(fp.read())
	except FileNotFoundError:
		generate_datafile()
		misc_logger.info("The data file could not be found. Generating a new one...")
		return data_template
|
||||
|
||||
|
||||
def save_datafile(data):
	"""Overwrites the data.json file with given dictionary

	Exits the process when the file cannot be written for permission
	reasons, since the last processed edit would otherwise be lost."""
	try:
		with open("data.json", "w") as fp:
			fp.write(json.dumps(data, indent=4))
	except PermissionError:
		misc_logger.critical("Could not modify a data file (no permissions). No way to store last edit.")
		sys.exit(1)
|
||||
|
||||
datafile = DataFile()
|
||||
|
||||
def weighted_average(value, weight, new_value):
|
||||
"""Calculates weighted average of value number with weight weight and new_value with weight 1"""
|
||||
|
@ -191,4 +204,45 @@ def add_to_dict(dictionary, key):
|
|||
dictionary[key] += 1
|
||||
else:
|
||||
dictionary[key] = 1
|
||||
return dictionary
|
||||
return dictionary
|
||||
|
||||
def prepare_paths():
	"""Set the URL paths for article namespace and script namespace

	WIKI_API_PATH will be: WIKI_DOMAIN/api.php
	WIKI_ARTICLE_PATH will be: WIKI_DOMAIN/articlepath/$1 where $1 is the replaced string
	WIKI_SCRIPT_PATH will be: WIKI_DOMAIN/
	WIKI_JUST_DOMAIN will be: WIKI_DOMAIN

	Exits the process when wiki_url is missing from the settings or when no
	candidate URL responds with a valid siteinfo query."""
	# NOTE: the docstring used to sit *after* the global statements, which made
	# it a dead string expression rather than the function docstring.
	global WIKI_API_PATH
	global WIKI_ARTICLE_PATH
	global WIKI_SCRIPT_PATH
	global WIKI_JUST_DOMAIN

	def quick_try_url(url):
		"""Quickly test if URL is the proper script path,
		False if it appears invalid
		dictionary when it appears valid"""
		try:
			request = requests.get(url, timeout=5)
			if request.status_code == requests.codes.ok:
				if request.json()["query"]["general"] is not None:
					return request
			return False
		except (KeyError, requests.exceptions.ConnectionError):
			return False

	try:
		parsed_url = urlparse(settings["wiki_url"])
	except KeyError:
		misc_logger.critical("wiki_url is not specified in the settings. Please provide the wiki url in the settings and start the script again.")
		sys.exit(1)
	# scheme://netloc with path/query stripped; hoisted so it is computed once
	# instead of being rebuilt for every assignment below.
	wiki_domain = urlunparse((*parsed_url[0:2], "", "", "", ""))
	for url_scheme in (settings["wiki_url"], settings["wiki_url"].split("wiki")[0], wiki_domain):  # check different combinations, it's supposed to be idiot-proof
		tested = quick_try_url(url_scheme + "/api.php?action=query&format=json&meta=siteinfo")
		if tested:
			# Parse the siteinfo response once and reuse the "general" section.
			general = tested.json()["query"]["general"]
			WIKI_API_PATH = wiki_domain + general["scriptpath"] + "/api.php"
			WIKI_SCRIPT_PATH = wiki_domain + general["scriptpath"] + "/"
			WIKI_ARTICLE_PATH = wiki_domain + general["articlepath"]
			WIKI_JUST_DOMAIN = wiki_domain
			break
	else:
		misc_logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URL in settings.json and your Internet connection is working.")
		sys.exit(1)
|
||||
|
||||
prepare_paths()
|
60
rcgcdw.py
60
rcgcdw.py
|
@ -26,9 +26,13 @@ from html.parser import HTMLParser
|
|||
import misc
|
||||
from bs4 import BeautifulSoup
|
||||
from collections import defaultdict, Counter
|
||||
from urllib.parse import quote_plus, urlparse, urlunparse
|
||||
from urllib.parse import quote_plus
|
||||
from configloader import settings
|
||||
from misc import link_formatter, ContentParser, safe_read, handle_discord_http, add_to_dict, misc_logger
|
||||
from misc import link_formatter, ContentParser, safe_read, handle_discord_http, add_to_dict, datafile, \
|
||||
WIKI_API_PATH, WIKI_ARTICLE_PATH, WIKI_SCRIPT_PATH, WIKI_JUST_DOMAIN
|
||||
|
||||
if settings["fandom_discussions"]["enabled"]:
|
||||
pass
|
||||
|
||||
if __name__ != "__main__": # return if called as a module
|
||||
logging.critical("The file is being executed as a module. Please execute the script using the console.")
|
||||
|
@ -53,7 +57,7 @@ except FileNotFoundError:
|
|||
lang.install()
|
||||
ngettext = lang.ngettext
|
||||
|
||||
storage = misc.load_datafile()
|
||||
storage = datafile.data
|
||||
|
||||
# Remove previous data holding file if exists and limitfetch allows
|
||||
|
||||
|
@ -61,7 +65,7 @@ if settings["limitrefetch"] != -1 and os.path.exists("lastchange.txt") is True:
|
|||
with open("lastchange.txt", 'r') as sfile:
|
||||
logger.info("Converting old lastchange.txt file into new data storage data.json...")
|
||||
storage["rcid"] = int(sfile.read().strip())
|
||||
misc.save_datafile(storage)
|
||||
datafile.save_datafile()
|
||||
os.remove("lastchange.txt")
|
||||
|
||||
# A few initial vars
|
||||
|
@ -69,10 +73,6 @@ if settings["limitrefetch"] != -1 and os.path.exists("lastchange.txt") is True:
|
|||
logged_in = False
|
||||
supported_logs = ["protect/protect", "protect/modify", "protect/unprotect", "upload/overwrite", "upload/upload", "delete/delete", "delete/delete_redir", "delete/restore", "delete/revision", "delete/event", "import/upload", "import/interwiki", "merge/merge", "move/move", "move/move_redir", "protect/move_prot", "block/block", "block/unblock", "block/reblock", "rights/rights", "rights/autopromote", "abusefilter/modify", "abusefilter/create", "interwiki/iw_add", "interwiki/iw_edit", "interwiki/iw_delete", "curseprofile/comment-created", "curseprofile/comment-edited", "curseprofile/comment-deleted", "curseprofile/comment-purged", "curseprofile/profile-edited", "curseprofile/comment-replied", "contentmodel/change", "sprite/sprite", "sprite/sheet", "sprite/slice", "managetags/create", "managetags/delete", "managetags/activate", "managetags/deactivate", "tag/update", "cargo/createtable", "cargo/deletetable", "cargo/recreatetable", "cargo/replacetable", "upload/revert"]
|
||||
profile_fields = {"profile-location": _("Location"), "profile-aboutme": _("About me"), "profile-link-google": _("Google link"), "profile-link-facebook":_("Facebook link"), "profile-link-twitter": _("Twitter link"), "profile-link-reddit": _("Reddit link"), "profile-link-twitch": _("Twitch link"), "profile-link-psn": _("PSN link"), "profile-link-vk": _("VK link"), "profile-link-xbl": _("XBL link"), "profile-link-steam": _("Steam link"), "profile-link-discord": _("Discord handle"), "profile-link-battlenet": _("Battle.net handle")}
|
||||
WIKI_API_PATH: str = ""
|
||||
WIKI_ARTICLE_PATH: str = ""
|
||||
WIKI_SCRIPT_PATH: str = ""
|
||||
WIKI_JUST_DOMAIN: str = ""
|
||||
|
||||
|
||||
class LinkParser(HTMLParser):
|
||||
|
@ -112,44 +112,6 @@ LinkParser = LinkParser()
|
|||
class MWError(Exception):
|
||||
pass
|
||||
|
||||
def prepare_paths():
	global WIKI_API_PATH
	global WIKI_ARTICLE_PATH
	global WIKI_SCRIPT_PATH
	global WIKI_JUST_DOMAIN
	"""Set the URL paths for article namespace and script namespace
	WIKI_API_PATH will be: WIKI_DOMAIN/api.php
	WIKI_ARTICLE_PATH will be: WIKI_DOMAIN/articlepath/$1 where $1 is the replaced string
	WIKI_SCRIPT_PATH will be: WIKI_DOMAIN/
	WIKI_JUST_DOMAIN will be: WIKI_DOMAIN"""
	def quick_try_url(candidate):
		"""Probe a candidate script path; returns the response object when the
		siteinfo query succeeds and looks sane, False otherwise."""
		try:
			response = requests.get(candidate, timeout=5)
			if response.status_code == requests.codes.ok and response.json()["query"]["general"] is not None:
				return response
			return False
		except (KeyError, requests.exceptions.ConnectionError):
			return False
	try:
		parsed_url = urlparse(settings["wiki_url"])
	except KeyError:
		logger.critical("wiki_url is not specified in the settings. Please provide the wiki url in the settings and start the script again.")
		sys.exit(1)
	# scheme://netloc of the configured wiki, used as the base of every path.
	domain = urlunparse((*parsed_url[0:2], "", "", "", ""))
	candidates = (settings["wiki_url"], settings["wiki_url"].split("wiki")[0], domain)  # check different combinations, it's supposed to be idiot-proof
	for url_scheme in candidates:
		tested = quick_try_url(url_scheme + "/api.php?action=query&format=json&meta=siteinfo")
		if tested:
			general = tested.json()["query"]["general"]
			WIKI_API_PATH = domain + general["scriptpath"] + "/api.php"
			WIKI_SCRIPT_PATH = domain + general["scriptpath"] + "/"
			WIKI_ARTICLE_PATH = domain + general["articlepath"]
			WIKI_JUST_DOMAIN = domain
			break
	else:
		logger.critical("Could not verify wikis paths. Please make sure you have given the proper wiki URL in settings.json and your Internet connection is working.")
		sys.exit(1)
|
||||
|
||||
def create_article_path(article: str) -> str:
|
||||
"""Takes the string and creates an URL with it as the article name"""
|
||||
|
@ -1069,7 +1031,7 @@ def daily_overview_sync(edits, files, admin, changed_bytes, new_articles, unique
|
|||
storage["daily_overview"].update({"edits": edits_avg, "new_files": files_avg, "admin_actions": admin_avg, "bytes_changed": changed_bytes_avg,
|
||||
"new_articles": new_articles_avg, "unique_editors": unique_contributors_avg, "day_score": day_score_avg})
|
||||
storage["daily_overview"]["days_tracked"] += 1
|
||||
misc.save_datafile(storage)
|
||||
datafile.save_datafile()
|
||||
return edits, files, admin, changed_bytes, new_articles, unique_contributors, day_score
|
||||
|
||||
def day_overview():
|
||||
|
@ -1261,7 +1223,7 @@ class Recent_Changes_Class(object):
|
|||
if settings["limitrefetch"] != -1 and self.recent_id != self.file_id and self.recent_id != 0: # if saving to database is disabled, don't save the recent_id
|
||||
self.file_id = self.recent_id
|
||||
storage["rcid"] = self.recent_id
|
||||
misc.save_datafile(storage)
|
||||
datafile.save_datafile()
|
||||
logger.debug("Most recent rcid is: {}".format(self.recent_id))
|
||||
return self.recent_id
|
||||
|
||||
|
@ -1453,7 +1415,6 @@ else:
|
|||
sys.exit(1)
|
||||
|
||||
# Log in and download wiki information
|
||||
prepare_paths()
|
||||
try:
|
||||
if settings["wiki_bot_login"] and settings["wiki_bot_password"]:
|
||||
recent_changes.log_in()
|
||||
|
@ -1468,6 +1429,7 @@ recent_changes.fetch(amount=settings["limitrefetch"] if settings["limitrefetch"]
|
|||
|
||||
schedule.every(settings["cooldown"]).seconds.do(recent_changes.fetch)
|
||||
if 1 == 2: # additional translation strings in unreachable code
|
||||
# noinspection PyUnreachableCode
|
||||
print(_("director"), _("bot"), _("editor"), _("directors"), _("sysop"), _("bureaucrat"), _("reviewer"),
|
||||
_("autoreview"), _("autopatrol"), _("wiki_guardian"), ngettext("second", "seconds", 1), ngettext("minute", "minutes", 1), ngettext("hour", "hours", 1), ngettext("day", "days", 1), ngettext("week", "weeks", 1), ngettext("month", "months",1), ngettext("year", "years", 1), ngettext("millennium", "millennia", 1), ngettext("decade", "decades", 1), ngettext("century", "centuries", 1))
|
||||
|
||||
|
|
Loading…
Reference in a new issue