Mirror of https://gitlab.com/chicken-riders/RcGcDb.git
Improvements to handling some requests
commit 833c7650c8, parent 2e4b015e1f
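In short (summarized from the diffs below): the safe_read() helper is removed from src/misc.py and its JSON decoding and key extraction move into Wiki.safe_request() in src/wiki.py, which becomes a static coroutine that takes the request URL plus the keys to extract and returns None on any failure. asyncio timeout errors are now caught alongside the aiohttp connection errors, fetch_wiki() gains a type annotation on its session parameter, the compare, imageinfo and comment call sites pass their keys directly to safe_request(), and the message queue's resend loop sleeps 0.1 s between passes instead of 4 s. A self-contained sketch of the resulting request pattern follows the src/wiki.py diff at the end.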
(embed formatter module; file header not visible in this view)

@@ -5,7 +5,7 @@ import time
 import logging
 import base64
-from src.misc import link_formatter, create_article_path, parse_link, profile_field_name, ContentParser, safe_read
+from src.misc import link_formatter, create_article_path, parse_link, profile_field_name, ContentParser
 from src.discord import DiscordMessage
 from urllib.parse import quote_plus
 from src.msgqueue import send_to_discord
@@ -356,15 +356,15 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
                         minor=_("m") if action == "edit" and "minor" in change else "", bot=_('b') if "bot" in change else "", space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
     if target[0][1] == 3:
         if action == "new":
-            changed_content = await safe_read(await recent_changes.safe_request(
+            changed_content = await recent_changes.safe_request(
                 "{wiki}?action=compare&format=json&fromtext=&torev={diff}&topst=1&prop=diff".format(
                     wiki=WIKI_API_PATH, diff=change["revid"]
-                )), "compare", "*")
+                ), "compare", "*")
         else:
-            changed_content = await safe_read(await recent_changes.safe_request(
+            changed_content = await recent_changes.safe_request(
                 "{wiki}?action=compare&format=json&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
                     wiki=WIKI_API_PATH, diff=change["revid"], oldrev=change["old_revid"]
-                )), "compare", "*")
+                ), "compare", "*")
         if changed_content:
             EditDiff = ContentParser(_)
             EditDiff.feed(changed_content)
@@ -387,9 +387,9 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
             logger.warning("Unable to download data on the edit content!")
     elif action in ("upload/overwrite", "upload/upload", "upload/revert"): # sending files
         license = None
-        urls = await safe_read(await recent_changes.safe_request(
+        urls = await recent_changes.safe_request(
             "{wiki}?action=query&format=json&prop=imageinfo&list=&meta=&titles={filename}&iiprop=timestamp%7Curl%7Carchivename&iilimit=5".format(
-                wiki=WIKI_API_PATH, filename=change["title"])), "query", "pages")
+                wiki=WIKI_API_PATH, filename=change["title"]), "query", "pages")
         link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
         additional_info_retrieved = False
         if urls is not None:
src/misc.py (17 changed lines)
@@ -168,20 +168,3 @@ class ContentParser(HTMLParser):
             self.current_tag = "afterdel"
         else:
             self.current_tag = ""
-
-
-async def safe_read(request: aiohttp.ClientResponse, *keys):
-    if request is None:
-        return None
-    try:
-        request = await request.json(encoding="UTF-8")
-        for item in keys:
-            request = request[item]
-    except KeyError:
-        logger.warning(
-            "Failure while extracting data from request on key {key} in {change}".format(key=item, change=request))
-        return None
-    except aiohttp.ClientResponseError:
-        logger.warning("Failure while extracting data from request in {change}".format(change=request))
-        return None
-    return request
(message queue module; file header not visible in this view)

@@ -61,7 +61,7 @@ class MessageQueue:
                 tasks_to_run.append(self.send_msg_set(set_msgs))
             await asyncio.gather(*tasks_to_run)
             logger.debug(self._queue)
-            await asyncio.sleep(4.0)
+            await asyncio.sleep(0.1)


 messagequeue = MessageQueue()
src/wiki.py (23 changed lines)
@@ -23,7 +23,7 @@ class Wiki:
     session: aiohttp.ClientSession = None


-    async def fetch_wiki(self, extended, script_path, session) -> aiohttp.ClientResponse:
+    async def fetch_wiki(self, extended, script_path, session: aiohttp.ClientSession) -> aiohttp.ClientResponse:
         url_path = script_path + "api.php"
         amount = 20
         if extended:
@@ -42,21 +42,29 @@ class Wiki:
                       "rclimit": amount, "rctype": "edit|new|log|external", "siprop": "namespaces|general"}
         try:
             response = await session.get(url_path, params=params)
-        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
+        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError, asyncio.exceptions.TimeoutError):
             logger.exception("A connection error occurred while requesting {}".format(url_path))
             raise WikiServerError
         return response

-    async def safe_request(self, url):
+    @staticmethod
+    async def safe_request(url, *keys):
         try:
             async with aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(2.0)) as session:
                 request = await session.get(url, timeout=5, allow_redirects=False)
                 request.raise_for_status()
-        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
+                json_request = await request.json(encoding="UTF-8")
+        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError, asyncio.exceptions.TimeoutError):
             logger.exception("Reached connection error for request on link {url}".format(url=url))
             return None
-        else:
-            return request
+        try:
+            for item in keys:
+                json_request = json_request[item]
+        except KeyError:
+            logger.warning(
+                "Failure while extracting data from request on key {key} in {change}".format(key=item, change=request))
+            return None
+        return json_request

     async def fail_add(self, wiki_url, status):
         logger.debug("Increasing fail_times to {}".format(self.fail_times+3))
@@ -87,8 +95,7 @@ class Wiki:
         try:
             comment = await self.safe_request(
                 "{wiki}?action=comment&do=getRaw&comment_id={comment}&format=json".format(wiki=WIKI_API_PATH,
-                                                                                          comment=comment_id)).json()[
-                "text"]
+                                                                                          comment=comment_id), "text")
             logger.debug("Got the following comment from the API: {}".format(comment))
         except (TypeError, AttributeError):
             logger.exception("Could not resolve the comment text.")
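For illustration, here is a minimal, self-contained sketch of the request pattern the reworked safe_request() follows: perform the GET with aiohttp, treat connection problems and timeouts as a soft failure, decode the JSON, then walk the requested keys. This is written for this page rather than taken from the repository; the helper name and the example URL are placeholders, the "compare"/"*" keys are the ones the formatter itself requests, and the except clause is slightly broader than the exact tuple used in the commit.

import asyncio
import logging

import aiohttp

logger = logging.getLogger(__name__)


async def fetch_json(url, *keys):
    """GET a URL, decode the JSON body, then drill into the given keys.

    Returns None on connection errors, timeouts, bad HTTP status or a missing
    key, which mirrors how the reworked safe_request reports failures.
    """
    try:
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=5)) as session:
            response = await session.get(url, allow_redirects=False)
            response.raise_for_status()
            data = await response.json(encoding="UTF-8")
    except (aiohttp.ClientError, asyncio.TimeoutError):
        # Broader than the commit's exception tuple, but the effect is the same:
        # log the failure and hand the caller None instead of raising.
        logger.exception("Request to %s failed", url)
        return None
    try:
        for key in keys:
            data = data[key]
    except KeyError:
        logger.warning("Key %s missing in the response from %s", key, url)
        return None
    return data


async def main():
    # Placeholder call: fetch the rendered diff between two revisions of a
    # MediaWiki page, the same action=compare request the formatter uses.
    diff_html = await fetch_json(
        "https://en.wikipedia.org/w/api.php?action=compare&format=json&fromrev=1&torev=2&prop=diff",
        "compare", "*",
    )
    print(diff_html)


if __name__ == "__main__":
    asyncio.run(main())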