Mirror of https://gitlab.com/chicken-riders/RcGcDb.git, synced 2025-02-23 00:54:09 +00:00
Added some code
parent dafc849321
commit 415c278778

src/bot.py | 21 lines changed
@@ -2,6 +2,7 @@ import logging.config
from src.config import settings
import sqlite3
from src.wiki import Wiki
import asyncio, aiohttp

logging.config.dictConfig(settings["logging"])
logger = logging.getLogger("rcgcdb.bot")
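
The module configures logging with logging.config.dictConfig(settings["logging"]), so settings["logging"] has to be a standard dictConfig dictionary. That part of the configuration is not shown in this commit; below is a minimal sketch of what it could look like, with every name in it (the console handler, the format string, the DEBUG level) chosen purely for illustration.

import logging.config

settings = {
    "logging": {
        "version": 1,  # required by the dictConfig schema
        "formatters": {
            "standard": {"format": "%(name)s - %(levelname)s: %(message)s"}
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "standard"}
        },
        "loggers": {
            "rcgcdb": {"level": "DEBUG", "handlers": ["console"]}
        }
    }
}

logging.config.dictConfig(settings["logging"])
logging.getLogger("rcgcdb.bot").debug("logging configured")
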
@@ -10,12 +11,26 @@ logger.debug("Current settings: {settings}".format(settings=settings))
conn = sqlite3.connect('rcgcdb.db')
c = conn.cursor()

# Fetch basic information about all of the wikis in the database
# Log fail states with the structure wiki_id: number of fail states
all_wikis = {}
mw_msgs = {}  # will have the type of id: tuple

for wiki in c.execute('SELECT * FROM wikis'):
    all_wikis[wiki[0]] = Wiki()  # assign cached information
# First populate the all_wikis list with every wiki
# Reasons for this: 1. we require the amount of wikis to calculate the cooldown between requests
#                   2. it is easier to code

for wiki in c.execute('SELECT ROWID, * FROM wikis'):
    all_wikis[wiki[0]] = Wiki()

# Start queueing logic

async def main_loop():
    for db_wiki in c.execute('SELECT ROWID, * FROM wikis'):
        extended = False
        if db_wiki[0] not in all_wikis:
            logger.debug("New wiki: {}".format(db_wiki[1]))
            all_wikis[db_wiki[0]] = Wiki()
        local_wiki = all_wikis[db_wiki[0]]  # set a reference to a wiki object from memory
        if local_wiki.mw_messages is None:
            extended = True
        wiki_response = await local_wiki.fetch_wiki(extended)
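
Nothing in this commit actually runs main_loop; the coroutine is only defined. A minimal sketch of a driver follows, assuming a plain asyncio.run entry point and a fixed cooldown between passes. run_forever and the 60-second value are not part of the commit, and the per-wiki cooldown calculation mentioned in the comment above is not implemented yet.

import asyncio

from src.bot import main_loop  # the coroutine defined in this diff

async def run_forever():
    # Assumed polling driver: repeat the pass over every wiki with a fixed delay.
    while True:
        await main_loop()
        await asyncio.sleep(60)  # placeholder; the real cooldown should depend on the number of wikis

if __name__ == "__main__":
    asyncio.run(run_forever())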

src/session.py | 4 lines (new file)

@@ -0,0 +1,4 @@
import aiohttp
from src.config import settings

session = aiohttp.ClientSession(headers=settings["header"])
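
src/session.py creates one shared aiohttp.ClientSession at import time, carrying the headers from settings["header"]. aiohttp generally expects sessions to be created from within running async code, so depending on the aiohttp and Python versions this module-level construction may emit a warning. Below is a hedged alternative sketch that creates the session lazily; get_session and _session are names invented for the sketch, not part of the commit.

import aiohttp

from src.config import settings

_session = None

async def get_session() -> aiohttp.ClientSession:
    # Create the shared session on first use, once an event loop is running.
    global _session
    if _session is None:
        _session = aiohttp.ClientSession(headers=settings["header"])
    return _session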

src/wiki.py | 23 lines changed

@@ -1,5 +1,28 @@
from dataclasses import dataclass
from src.session import session


@dataclass
class Wiki:
    mw_messages: int = None
    fail_times: int = 0  # the amount of times the connection with the wiki failed for client reasons (400-499)

    async def fetch_wiki(self, extended, api_path):
        url_path = api_path
        amount = 20
        if extended:
            params = {"action": "query", "format": "json", "uselang": "content", "list": "tags|recentchanges",
                      "meta": "allmessages|siteinfo",
                      "utf8": 1, "tglimit": "max", "tgprop": "displayname",
                      "rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user",
                      "rclimit": amount, "rctype": "edit|new|log|external",
                      "ammessages": "recentchanges-page-added-to-category|recentchanges-page-removed-from-category|recentchanges-page-added-to-category-bundled|recentchanges-page-removed-from-category-bundled",
                      "amenableparser": 1, "amincludelocal": 1, "siprop": "namespaces"}
        else:
            params = {"action": "query", "format": "json", "uselang": "content", "list": "tags|recentchanges",
                      "utf8": 1,
                      "tglimit": "max", "tgprop": "displayname",
                      "rcprop": "title|redirect|timestamp|ids|loginfo|parsedcomment|sizes|flags|tags|user",
                      "rclimit": amount, "rctype": "edit|new|log|external", "siprop": "namespaces"}
        try:
            await session.get(url_path, params=params)
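
The params dictionaries describe a MediaWiki Action API query: recent changes with tag display names, plus namespaces, site info and a few interface messages in extended mode. The diff view is cut off inside the try block, so the response handling is not visible here. The sketch below only shows what such a request and its JSON shape look like against a public api.php endpoint; the Wikipedia URL, the demo coroutine and the reduced parameter set are illustration only, not taken from the commit.

import asyncio

import aiohttp

async def demo():
    params = {"action": "query", "format": "json", "list": "recentchanges",
              "rcprop": "title|timestamp|ids", "rclimit": 5}
    async with aiohttp.ClientSession() as session:
        async with session.get("https://en.wikipedia.org/w/api.php", params=params) as resp:
            data = await resp.json()
            # list=recentchanges comes back under data["query"]["recentchanges"]
            for change in data["query"]["recentchanges"]:
                print(change["title"], change["timestamp"])

asyncio.run(demo())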