mirror of https://gitlab.com/chicken-riders/RcGcDw.git
synced 2025-02-23 00:24:09 +00:00

Merge remote-tracking branch 'origin/testing' into testing

commit 9775caece9
@@ -196,7 +196,7 @@ def yes_no(answer):
 print("Welcome in RcGcDw config builder! This script is still work in progress so beware! You can accept the default value if provided in the question by using Enter key without providing any other input.\nWARNING! Your current settings.json will be overwritten if you continue!")
 try: # load settings
-    with open("../settings.json.example") as sfile:
+    with open("../settings.json.example", encoding="utf-8") as sfile:
         settings = json.load(sfile)
 except FileNotFoundError:
     if yes_no(default_or_custom(input("Template config (settings.json.example) could not be found. Download the most recent stable one from master branch? (https://gitlab.com/piotrex43/RcGcDw/raw/master/settings.json.example)? (Y/n)"), "y")):
@@ -468,7 +468,7 @@ class AdvancedSettings:
 try:
     BasicSettings()
     shutil.copy("settings.json", "settings.json.bak")
-    with open("settings.json", "w") as settings_file:
+    with open("settings.json", "w", encoding="utf-8") as settings_file:
         settings_file.write(json.dumps(settings, indent=4))
     if "--advanced" in sys.argv:
         print("Basic part of the config has been completed. Starting the advanced part...")
@@ -478,5 +478,5 @@ except KeyboardInterrupt:
     if not yes_no(default_or_custom(input("\nSave the config before exiting? (y/N)"),"n")):
         sys.exit(0)
     else:
-        with open("settings.json", "w") as settings_file:
+        with open("settings.json", "w", encoding="utf-8") as settings_file:
             settings_file.write(json.dumps(settings, indent=4))
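
The functional change in all three hunks above is the explicit encoding="utf-8" on each open() call; the same fix repeats in the data-file handling further down. As a minimal illustration outside the commit of why this matters: without an explicit encoding, Python falls back to the platform's locale encoding (often cp1252 on Windows), which can reject or corrupt non-ASCII text in settings.json.

import json

# Illustration only, not code from this commit: a config value with non-ASCII
# text survives the round trip on every platform when the encoding is pinned.
settings = {"wiki_url": "https://przykład.example/wiki/", "cooldown": 30}

with open("settings.json", "w", encoding="utf-8") as settings_file:
    settings_file.write(json.dumps(settings, indent=4))

with open("settings.json", encoding="utf-8") as sfile:
    print(json.load(sfile)["wiki_url"])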
src/configloader.py

@@ -1,22 +1,33 @@
-import json, sys, logging
+import json
+import logging
+import sys
 
 global settings
 
+
+def load_settings():
+    global settings
+    try: # load settings
+        with open("settings.json", encoding="utf8") as sfile:
+            settings = json.load(sfile)
+        if settings["limitrefetch"] < settings["limit"] and settings["limitrefetch"] != -1:
+            settings["limitrefetch"] = settings["limit"]
+        if "user-agent" in settings["header"]:
+            settings["header"]["user-agent"] = settings["header"]["user-agent"].format(version="1.13.1.2") # set the version in the useragent
+    except FileNotFoundError:
+        logging.critical("No config file could be found. Please make sure settings.json is in the directory.")
+        sys.exit(1)
+    # Set the cooldown to 15 seconds if it's a wiki farm like Fandom or Gamepedia and the cooldown is even lower than that.
+    # Look, it's unreasonable to have even higher refresh rate than that, seriously. Setting it even lower can cause issues
+    # for all users of the script for high usage of farm's servers. So please, do not remove this code unless you absolutely
+    # know what you are doing <3
+    if any(("fandom.com" in settings["wiki_url"], "gamepedia.com" in settings["wiki_url"])):
+        if settings["cooldown"] < 15:
+            settings["cooldown"] = 15
+        if settings["fandom_discussions"]["cooldown"] < 15:
+            settings["fandom_discussions"]["cooldown"] = 15
+
+
+load_settings()
-try: # load settings
-    with open("settings.json", encoding="utf8") as sfile:
-        settings = json.load(sfile)
-    if settings["limitrefetch"] < settings["limit"] and settings["limitrefetch"] != -1:
-        settings["limitrefetch"] = settings["limit"]
-    if "user-agent" in settings["header"]:
-        settings["header"]["user-agent"] = settings["header"]["user-agent"].format(version="1.13.1") # set the version in the useragent
-except FileNotFoundError:
-    logging.critical("No config file could be found. Please make sure settings.json is in the directory.")
-    sys.exit(1)
-
-# Set the cooldown to 15 seconds if it's a wiki farm like Fandom or Gamepedia and the cooldown is even lower than that.
-# Look, it's unreasonable to have even higher refresh rate than that, seriously. Setting it even lower can cause issues
-# for all users of the script for high usage of farm's servers. So please, do not remove this code unless you absolutely
-# know what you are doing <3
-if any(("fandom.com" in settings["wiki_url"], "gamepedia.com" in settings["wiki_url"])):
-    if settings["cooldown"] < 15:
-        settings["cooldown"] = 15
-    if settings["fandom_discussions"]["cooldown"] < 15:
-        settings["fandom_discussions"]["cooldown"] = 15
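
Wrapping the parsing in load_settings(), instead of running it once at import time, is what lets settings.json be re-read after another module rewrites it; the migration added below does exactly that. A minimal sketch of the call pattern, assuming the package layout shown in this commit (apply_change_and_reload is a hypothetical helper, not part of the code):

import json

from src import configloader

def apply_change_and_reload(new_settings: dict):
    # Persist the modified settings, then re-parse them so that
    # configloader.settings reflects the change for later readers.
    with open("settings.json", "w", encoding="utf-8") as settings_file:
        settings_file.write(json.dumps(new_settings, indent=4))
    configloader.load_settings()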
@@ -150,7 +150,7 @@ def embed_formatter(post_type, post, article_paths):
     if post["_embedded"]["thread"][0]["tags"]:
         tag_displayname = []
         for tag in post["_embedded"]["thread"][0]["tags"]:
-            tag_displayname.append("[{title}]({url})".format(title=tag["articleTitle"], url=create_article_path(tag["articleTitle"])))
+            tag_displayname.append("[{title}]({url})".format(title=tag["articleTitle"], url=create_article_path(quote_plus(tag["articleTitle"].replace(" ", "_"), "/:?=&"))))
         if len(", ".join(tag_displayname)) > 1000:
             embed.add_field(_("Tags"), _("{} tags").format(len(post["_embedded"]["thread"][0]["tags"])))
         else:
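
The replacement line percent-encodes the tag title before it is placed inside a Markdown link, while leaving /:?=& untouched so the generated article URL stays readable. A quick standard-library illustration of what that expression produces (the example title is made up):

from urllib.parse import quote_plus

title = "Nether update (1.16)"
# Spaces become underscores first; everything else is percent-encoded except
# the characters passed as "safe" ("/:?=&").
encoded = quote_plus(title.replace(" ", "_"), "/:?=&")
print(encoded)  # Nether_update_%281.16%29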
src/migrations/11311.py (Normal file, 46 lines)
@@ -0,0 +1,46 @@
+from src.configloader import settings, load_settings
+import logging
+import shutil
+import time
+import json
+import sys
+
+from src.migrations.utils import return_example_file
+
+logger = logging.getLogger("rcgcdw.migrations.1.13.1.1")
+base_file = return_example_file()
+new_settings = settings.copy()
+
+
+def run():
+    global base_file
+    if "event_appearance" not in settings:
+        logger.info("Running migration 1.13.1.1")
+        if "event_appearance" not in base_file: # if local base file is outdated, download from repo
+            base_file = return_example_file(force=True)
+        try:
+            struct = settings['appearance']['embed']
+            new_settings["event_appearance"] = {}
+            keys = []
+            for key, value in struct.items():
+                if key not in ("show_edit_changes", "show_footer", "embed_images"):
+                    new_settings["event_appearance"][key] = value
+                    try:
+                        new_settings["event_appearance"][key]["emoji"] = base_file["event_appearance"][key]["emoji"]
+                    except KeyError:
+                        new_settings["event_appearance"][key]["emoji"] = ""
+                    keys.append(key)
+            for item in keys:
+                del new_settings['appearance']['embed'][item]
+        except KeyError:
+            logger.exception("Failed to migrate appearance embed.")
+            sys.exit(1)
+        shutil.copy("settings.json", "settings.json.{}.bak".format(int(time.time())))
+        with open("settings.json", "w", encoding="utf-8") as new_write:
+            new_write.write(json.dumps(new_settings, indent=4))
+        load_settings()
+        logger.info("Migration 1.13.1.1 has been successful.")
+    else:
+        logger.debug("Ignoring migration 1.13.1.1")
+
+
+run()
src/migrations/__init__.py (Normal file, 1 line)
@@ -0,0 +1 @@
+__all__ = ["11311"]
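
The package __init__.py only lists the migration modules; the trigger is the "from src.migrations import *" line added to the main script further down, which imports every module named in __all__, and 11311.py runs its migration at import time by calling run() at module level. A sketch of how a later migration could plug into the same pattern (the module name 11312 and its body are hypothetical, not part of this commit):

# src/migrations/__init__.py (sketch)
__all__ = ["11311", "11312"]  # "11312" is a hypothetical future migration

# src/migrations/11312.py (sketch)
import logging

logger = logging.getLogger("rcgcdw.migrations.1.13.1.2")

def run():
    # Hypothetical body: inspect settings, rewrite settings.json, back it up,
    # then call load_settings(), following the same steps as 11311.py.
    logger.debug("Nothing to migrate in this sketch.")

run()  # executed as a side effect of "from src.migrations import *"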
src/migrations/utils.py (Normal file, 20 lines)
@@ -0,0 +1,20 @@
+import requests
+import logging
+import json
+
+discussion_logger = logging.getLogger("rcgcdw.migrations.utils")
+
+
+def return_example_file(force=False) -> dict:
+    try:
+        if force:
+            raise FileNotFoundError
+        with open('settings.json.example', 'r', encoding="utf-8") as example_file:
+            return json.loads(example_file.read())
+    except FileNotFoundError:
+        try:
+            f = requests.get("https://gitlab.com/piotrex43/RcGcDw/-/raw/master/settings.json.example")
+        except:
+            raise
+        return json.loads(f.text)
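
return_example_file() prefers the local settings.json.example and only downloads the copy from the master branch when the local file is missing or force=True is passed. A short usage sketch mirroring how 11311.py uses it:

from src.migrations.utils import return_example_file

# Take the local template; re-fetch it from the master branch only when it
# predates the "event_appearance" section this migration needs.
base_file = return_example_file()
if "event_appearance" not in base_file:
    base_file = return_example_file(force=True)
for event, appearance in base_file.get("event_appearance", {}).items():
    print(event, appearance.get("emoji", ""))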
@@ -54,7 +54,7 @@ class DataFile:
     def generate_datafile():
         """Generate a data.json file from a template."""
         try:
-            with open("data.json", 'w') as data:
+            with open("data.json", 'w', encoding="utf-8") as data:
                 data.write(json.dumps(data_template, indent=4))
         except PermissionError:
             misc_logger.critical("Could not create a data file (no permissions). No way to store last edit.")
@@ -65,7 +65,7 @@ class DataFile:
         :rtype: dict
         """
         try:
-            with open("data.json") as data:
+            with open("data.json", encoding="utf-8") as data:
                 return json.loads(data.read())
         except FileNotFoundError:
             self.generate_datafile()
@@ -77,7 +77,7 @@ class DataFile:
         if self.changed is False: # don't cause unnecessary write operations
             return
         try:
-            with open("data.json", "w") as data_file:
+            with open("data.json", "w", encoding="utf-8") as data_file:
                 data_file.write(json.dumps(self.data, indent=4))
             self.changed = False
         except PermissionError:
src/rc.py (24 lines changed)
@@ -63,6 +63,7 @@ class Recent_Changes_Class(object):
         self.session = session
         self.logged_in = False
         self.initial_run_complete = False
+        self.memory_id = None # Used only when limitrefetch is set to -1 to avoid reading from storage
 
     @staticmethod
     def handle_mw_errors(request):
@@ -114,9 +115,12 @@ class Recent_Changes_Class(object):
         messagequeue.resend_msgs()
         last_check = self.fetch_changes(amount=amount)
         if last_check is not None:
-            storage["rcid"] = last_check[0] if last_check[0] else storage["rcid"]
-            storage["abuse_log_id"] = last_check[1] if last_check[1] else storage["abuse_log_id"]
-            storage.save_datafile()
+            if settings["limitrefetch"] != -1:
+                storage["rcid"] = last_check[0] if last_check[0] else storage["rcid"]
+                storage["abuse_log_id"] = last_check[1] if last_check[1] else storage["abuse_log_id"]
+                storage.save_datafile()
+            else:
+                self.memory_id = last_check
         self.initial_run_complete = True
 
     def fetch_recentchanges_request(self, amount):
@@ -155,8 +159,11 @@ class Recent_Changes_Class(object):
         categorize_events = {}
         new_events = 0
         changes.reverse()
-        highest_id = recent_id = storage["rcid"]
-        dry_run = True if recent_id is None else False
+        if settings["limitrefetch"] == -1 and self.memory_id is not None:
+            highest_id = recent_id = self.memory_id[0]
+        else:
+            highest_id = recent_id = storage["rcid"]
+        dry_run = True if recent_id is None or (self.memory_id is None and settings["limitrefetch"] == -1) else False
         for change in changes:
             if not dry_run and not (change["rcid"] <= recent_id):
                 new_events += 1
@@ -217,8 +224,11 @@ class Recent_Changes_Class(object):
         if not abuse_log:
             return None
         abuse_log.reverse()
-        recent_id = storage["abuse_log_id"]
-        dryrun = True if recent_id is None else False
+        if self.memory_id is not None and settings["limitrefetch"] == -1:
+            recent_id = self.memory_id[1]
+        else:
+            recent_id = storage["abuse_log_id"]
+        dryrun = True if recent_id is None or (self.initial_run_complete is False and settings["limitrefetch"] == -1) else False
         for entry in abuse_log:
             if dryrun:
                 continue
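
Taken together, the rc.py hunks make limitrefetch = -1 work without touching data.json: the (rcid, abuse_log_id) pair returned by fetch_changes() is kept in self.memory_id instead of being persisted, and processing stays a dry run until that pair exists. A stripped-down sketch of the idea (the class and method names here are simplified stand-ins, not the module's real interface beyond what the diff shows):

class IDTracker:
    """Sketch of the in-memory fallback used when limitrefetch == -1."""

    def __init__(self, limitrefetch, storage):
        self.limitrefetch = limitrefetch
        self.storage = storage      # dict-like stand-in for data.json
        self.memory_id = None       # (rcid, abuse_log_id) once known

    def remember(self, last_check):
        if self.limitrefetch != -1:  # normal mode: persist to storage
            self.storage["rcid"] = last_check[0] or self.storage.get("rcid")
            self.storage["abuse_log_id"] = last_check[1] or self.storage.get("abuse_log_id")
        else:                        # limitrefetch == -1: keep only in memory
            self.memory_id = last_check

    def starting_rcid(self):
        if self.limitrefetch == -1 and self.memory_id is not None:
            return self.memory_id[0]
        return self.storage.get("rcid")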
@@ -46,13 +46,13 @@ TESTING = True if "--test" in sys.argv else False # debug mode, pipeline testin
 logging.config.dictConfig(settings["logging"])
 logger = logging.getLogger("rcgcdw")
 logger.debug("Current settings: {settings}".format(settings=settings))
 
+from src.migrations import * # migrations after logging
 storage = datafile
 
 # Remove previous data holding file if exists and limitfetch allows
 
 if settings["limitrefetch"] != -1 and os.path.exists("lastchange.txt") is True:
-    with open("lastchange.txt", 'r') as sfile:
+    with open("lastchange.txt", 'r', encoding="utf-8") as sfile:
         logger.info("Converting old lastchange.txt file into new data storage data.json...")
         storage["rcid"] = int(sfile.read().strip())
         datafile.save_datafile()