Improvements to handling some requests

Frisk 2020-07-28 03:11:27 +02:00
parent 2e4b015e1f
commit 833c7650c8
No known key found for this signature in database
GPG key ID: 213F7C15068AF8AC
4 changed files with 24 additions and 34 deletions

View file

@@ -5,7 +5,7 @@ import time
 import logging
 import base64
 from src.config import settings
-from src.misc import link_formatter, create_article_path, parse_link, profile_field_name, ContentParser, safe_read
+from src.misc import link_formatter, create_article_path, parse_link, profile_field_name, ContentParser
 from src.discord import DiscordMessage
 from urllib.parse import quote_plus
 from src.msgqueue import send_to_discord
@@ -356,15 +356,15 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
         minor=_("m") if action == "edit" and "minor" in change else "", bot=_('b') if "bot" in change else "", space=" " if "bot" in change or (action == "edit" and "minor" in change) or action == "new" else "")
     if target[0][1] == 3:
         if action == "new":
-            changed_content = await safe_read(await recent_changes.safe_request(
+            changed_content = await recent_changes.safe_request(
                 "{wiki}?action=compare&format=json&fromtext=&torev={diff}&topst=1&prop=diff".format(
                     wiki=WIKI_API_PATH, diff=change["revid"]
-                )), "compare", "*")
+                ), "compare", "*")
         else:
-            changed_content = await safe_read(await recent_changes.safe_request(
+            changed_content = await recent_changes.safe_request(
                 "{wiki}?action=compare&format=json&fromrev={oldrev}&torev={diff}&topst=1&prop=diff".format(
                     wiki=WIKI_API_PATH, diff=change["revid"],oldrev=change["old_revid"]
-                )), "compare", "*")
+                ), "compare", "*")
         if changed_content:
             EditDiff = ContentParser(_)
             EditDiff.feed(changed_content)
@@ -387,9 +387,9 @@ async def embed_formatter(action, change, parsed_comment, categories, recent_cha
             logger.warning("Unable to download data on the edit content!")
     elif action in ("upload/overwrite", "upload/upload", "upload/revert"):  # sending files
         license = None
-        urls = await safe_read(await recent_changes.safe_request(
+        urls = await recent_changes.safe_request(
             "{wiki}?action=query&format=json&prop=imageinfo&list=&meta=&titles={filename}&iiprop=timestamp%7Curl%7Carchivename&iilimit=5".format(
-                wiki=WIKI_API_PATH, filename=change["title"])), "query", "pages")
+                wiki=WIKI_API_PATH, filename=change["title"]), "query", "pages")
         link = create_article_path(change["title"].replace(" ", "_"), WIKI_ARTICLE_PATH)
         additional_info_retrieved = False
         if urls is not None:
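Across both hunks above, the call pattern changes the same way: instead of awaiting safe_request for a raw response and then awaiting the safe_read helper to decode it, callers now hand the JSON keys directly to safe_request, which fetches, parses, and drills into the payload itself. A runnable toy of the new call shape, where safe_request is a hypothetical stub rather than the real method:

import asyncio

# Hypothetical stand-in for Wiki.safe_request, showing only the new call shape:
# one await, the URL first, then the JSON keys to drill into.
async def safe_request(url, *keys):
    payload = {"compare": {"*": "<td>diff table html</td>"}}  # stubbed API reply
    for key in keys:
        payload = payload[key]
    return payload

async def main():
    changed_content = await safe_request("https://example.org/api.php?action=compare", "compare", "*")
    print(changed_content)  # -> <td>diff table html</td>

asyncio.run(main())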

View file

@@ -168,20 +168,3 @@ class ContentParser(HTMLParser):
                 self.current_tag = "afterdel"
             else:
                 self.current_tag = ""
-
-
-async def safe_read(request: aiohttp.ClientResponse, *keys):
-    if request is None:
-        return None
-    try:
-        request = await request.json(encoding="UTF-8")
-        for item in keys:
-            request = request[item]
-    except KeyError:
-        logger.warning(
-            "Failure while extracting data from request on key {key} in {change}".format(key=item, change=request))
-        return None
-    except aiohttp.ClientResponseError:
-        logger.warning("Failure while extracting data from request in {change}".format(change=request))
-        return None
-    return request
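safe_read leaves src/misc.py entirely; its JSON decoding and key traversal move into Wiki.safe_request further below. One detail visible in the diffs: the deleted helper also caught aiohttp.ClientResponseError while decoding, an exception the merged method's except clause does not list. For reference, a self-contained stub of the contract the helper enforced (made-up data, no network):

import logging

logging.basicConfig()
logger = logging.getLogger(__name__)

# None in -> None out, then JSON-key traversal with a logged warning on a miss;
# this is the behaviour that now lives inside Wiki.safe_request.
def read_keys(payload, *keys):
    if payload is None:
        return None
    try:
        for item in keys:
            payload = payload[item]
    except KeyError:
        logger.warning("Failure while extracting data on key %s in %s", item, payload)
        return None
    return payload

print(read_keys({"query": {"pages": {}}}, "query", "pages"))  # -> {}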

View file

@@ -61,7 +61,7 @@ class MessageQueue:
             tasks_to_run.append(self.send_msg_set(set_msgs))
             await asyncio.gather(*tasks_to_run)
             logger.debug(self._queue)
-            await asyncio.sleep(4.0)
+            await asyncio.sleep(0.1)
 
 
 messagequeue = MessageQueue()
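The only change here is the idle delay of MessageQueue's resend loop, cut from 4.0 s to 0.1 s, so queued Discord messages are picked up roughly forty times as often. A minimal polling-loop sketch showing where the constant sits; the real loop also groups messages per webhook before sleeping:

import asyncio

class TinyQueue:
    """Toy stand-in for MessageQueue; only the loop timing is the point here."""
    def __init__(self):
        self._queue = []

    async def resend_msgs(self):
        while True:
            # Drain whatever has accumulated, then yield to the event loop briefly.
            while self._queue:
                print("sending", self._queue.pop(0))
            await asyncio.sleep(0.1)  # was 4.0 before this commit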

View file

@@ -23,7 +23,7 @@ class Wiki:
     session: aiohttp.ClientSession = None
 
-    async def fetch_wiki(self, extended, script_path, session) -> aiohttp.ClientResponse:
+    async def fetch_wiki(self, extended, script_path, session: aiohttp.ClientSession) -> aiohttp.ClientResponse:
         url_path = script_path + "api.php"
         amount = 20
         if extended:
@@ -42,21 +42,29 @@ class Wiki:
                   "rclimit": amount, "rctype": "edit|new|log|external", "siprop": "namespaces|general"}
         try:
             response = await session.get(url_path, params=params)
-        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
+        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError, asyncio.exceptions.TimeoutError):
             logger.exception("A connection error occurred while requesting {}".format(url_path))
             raise WikiServerError
         return response
 
-    async def safe_request(self, url):
+    @staticmethod
+    async def safe_request(url, *keys):
         try:
             async with aiohttp.ClientSession(headers=settings["header"], timeout=aiohttp.ClientTimeout(2.0)) as session:
                 request = await session.get(url, timeout=5, allow_redirects=False)
                 request.raise_for_status()
-        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError):
+                json_request = await request.json(encoding="UTF-8")
+        except (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError, asyncio.exceptions.TimeoutError):
             logger.exception("Reached connection error for request on link {url}".format(url=url))
+            return None
         else:
-            return request
+            try:
+                for item in keys:
+                    json_request = json_request[item]
+            except KeyError:
+                logger.warning(
+                    "Failure while extracting data from request on key {key} in {change}".format(key=item, change=request))
+                return None
+            return json_request
 
     async def fail_add(self, wiki_url, status):
         logger.debug("Increasing fail_times to {}".format(self.fail_times+3))
@@ -87,8 +95,7 @@ class Wiki:
         try:
             comment = await self.safe_request(
                 "{wiki}?action=comment&do=getRaw&comment_id={comment}&format=json".format(wiki=WIKI_API_PATH,
-                                                                                          comment=comment_id)).json()[
-                "text"]
+                                                                                          comment=comment_id), "text")
             logger.debug("Got the following comment from the API: {}".format(comment))
         except (TypeError, AttributeError):
             logger.exception("Could not resolve the comment text.")