Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 6 additions & 25 deletions cogs/check_su_platform_authorisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,19 @@
from enum import Enum
from typing import TYPE_CHECKING, override

import aiohttp
import bs4
import discord
from discord.ext import tasks

from config import settings
from utils import GLOBAL_SSL_CONTEXT, CommandChecks, TeXBotBaseCog
from utils import CommandChecks, TeXBotBaseCog
from utils.error_capture_decorators import (
capture_guild_does_not_exist_error,
)
from utils.msl import fetch_url_content_with_session

if TYPE_CHECKING:
from collections.abc import Iterable, Mapping, Sequence
from collections.abc import Iterable, Sequence
from collections.abc import Set as AbstractSet
from logging import Logger
from typing import Final
Expand All @@ -31,15 +31,6 @@

logger: "Final[Logger]" = logging.getLogger("TeX-Bot")

REQUEST_HEADERS: "Final[Mapping[str, str]]" = {
"Cache-Control": "no-cache",
"Pragma": "no-cache",
"Expires": "0",
}

REQUEST_COOKIES: "Final[Mapping[str, str]]" = {
".AspNet.SharedCookie": settings["SU_PLATFORM_ACCESS_COOKIE"]
}

SU_PLATFORM_PROFILE_URL: "Final[str]" = "https://guildofstudents.com/profile"
SU_PLATFORM_ORGANISATION_URL: "Final[str]" = (
Expand Down Expand Up @@ -76,20 +67,10 @@ class SUPlatformAccessCookieStatus(Enum):
class CheckSUPlatformAuthorisationBaseCog(TeXBotBaseCog):
"""Cog class that defines the base functionality for cookie authorisation checks."""

async def _fetch_url_content_with_session(self, url: str) -> str:
"""Fetch the HTTP content at the given URL, using a shared aiohttp session."""
async with (
aiohttp.ClientSession(
headers=REQUEST_HEADERS, cookies=REQUEST_COOKIES
) as http_session,
http_session.get(url=url, ssl=GLOBAL_SSL_CONTEXT) as http_response,
):
return await http_response.text()

async def get_su_platform_access_cookie_status(self) -> SUPlatformAccessCookieStatus:
"""Retrieve the current validity status of the SU platform access cookie."""
response_object: bs4.BeautifulSoup = bs4.BeautifulSoup(
await self._fetch_url_content_with_session(SU_PLATFORM_PROFILE_URL), "html.parser"
await fetch_url_content_with_session(SU_PLATFORM_PROFILE_URL), "html.parser"
)
page_title: bs4.Tag | bs4.NavigableString | None = response_object.find("title")
if not page_title or "Login" in str(page_title):
Expand All @@ -99,7 +80,7 @@ async def get_su_platform_access_cookie_status(self) -> SUPlatformAccessCookieSt
organisation_admin_url: str = (
f"{SU_PLATFORM_ORGANISATION_URL}/{settings['ORGANISATION_ID']}"
)
response_html: str = await self._fetch_url_content_with_session(organisation_admin_url)
response_html: str = await fetch_url_content_with_session(organisation_admin_url)

if "admin tools" in response_html.lower():
return SUPlatformAccessCookieStatus.AUTHORISED
Expand All @@ -115,7 +96,7 @@ async def get_su_platform_access_cookie_status(self) -> SUPlatformAccessCookieSt
async def get_su_platform_organisations(self) -> "Iterable[str]":
"""Retrieve the MSL organisations the current SU platform cookie has access to."""
response_object: bs4.BeautifulSoup = bs4.BeautifulSoup(
await self._fetch_url_content_with_session(SU_PLATFORM_PROFILE_URL), "html.parser"
await fetch_url_content_with_session(SU_PLATFORM_PROFILE_URL), "html.parser"
)

page_title: bs4.Tag | bs4.NavigableString | None = response_object.find("title")
Expand Down
2 changes: 2 additions & 0 deletions utils/msl/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from .memberships import (
fetch_community_group_members_count,
fetch_community_group_members_list,
fetch_url_content_with_session,
is_id_a_community_group_member,
)

Expand All @@ -14,5 +15,6 @@
# Public API of the utils.msl package: the membership-query helpers plus the
# shared session-based URL fetcher re-exported from .memberships.
__all__: "Sequence[str]" = (
    "fetch_community_group_members_count",
    "fetch_community_group_members_list",
    "fetch_url_content_with_session",
    "is_id_a_community_group_member",
)
43 changes: 30 additions & 13 deletions utils/msl/memberships.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,27 +14,28 @@

if TYPE_CHECKING:
from collections.abc import Mapping, Sequence
from http.cookies import Morsel
from logging import Logger
from typing import Final


__all__: "Sequence[str]" = (
"fetch_community_group_members_count",
"fetch_community_group_members_list",
"fetch_url_content_with_session",
"is_id_a_community_group_member",
)


logger: "Final[Logger]" = logging.getLogger("TeX-Bot")


# Headers sent with every SU platform request: disable caching at every layer
# (HTTP/1.1, HTTP/1.0 and response expiry) so each check sees live content --
# cookie validity can change between requests.
BASE_SU_PLATFORM_WEB_HEADERS: "Final[Mapping[str, str]]" = {
    "Cache-Control": "no-cache",
    "Pragma": "no-cache",
    "Expires": "0",
}

# NOTE: deliberately NOT Final -- fetch_url_content_with_session() rebinds this
# mapping (via `global`) when the server rotates the .AspNet.SharedCookie value.
BASE_SU_PLATFORM_WEB_COOKIES: "Mapping[str, str]" = {
    ".AspNet.SharedCookie": settings["SU_PLATFORM_ACCESS_COOKIE"],
}

Expand All @@ -43,21 +44,37 @@
_membership_list_cache: set[int] = set()


async def fetch_community_group_members_list() -> set[int]:
"""
Make a web request to fetch your community group's full membership list.

Returns a set of IDs.
"""
async def fetch_url_content_with_session(url: str) -> str:
    """
    Fetch the HTTP body at the given URL, using a shared aiohttp session.

    The request carries the module-level no-cache headers and the current
    ``.AspNet.SharedCookie`` SU platform access cookie. If the server's
    response sets a different ``.AspNet.SharedCookie`` value, the module-level
    cookie mapping is rebound so that subsequent requests use the rotated
    cookie instead of the stale configured one.

    Returns the decoded response body as text.
    """
    global BASE_SU_PLATFORM_WEB_COOKIES  # noqa: PLW0603
    async with (
        aiohttp.ClientSession(
            headers=BASE_SU_PLATFORM_WEB_HEADERS, cookies=BASE_SU_PLATFORM_WEB_COOKIES
        ) as http_session,
        # Fetch the caller-supplied URL (not a hard-coded endpoint), so this
        # helper is usable for every SU platform page.
        http_session.get(url=url, ssl=GLOBAL_SSL_CONTEXT) as http_response,
    ):
        returned_asp_cookie: Morsel[str] | None = http_response.cookies.get(
            ".AspNet.SharedCookie"
        )
        if returned_asp_cookie is not None and (
            returned_asp_cookie.value != BASE_SU_PLATFORM_WEB_COOKIES[".AspNet.SharedCookie"]
        ):
            logger.info("SU platform access cookie was updated by the server; updating local.")
            BASE_SU_PLATFORM_WEB_COOKIES = {
                ".AspNet.SharedCookie": returned_asp_cookie.value,
            }
        # Read the body exactly once, inside the response context manager.
        return await http_response.text()

parsed_html: BeautifulSoup = BeautifulSoup(markup=response_html, features="html.parser")

async def fetch_community_group_members_list() -> set[int]:
"""
Make a web request to fetch your community group's full membership list.

Returns a set of IDs.
"""
parsed_html: BeautifulSoup = BeautifulSoup(
markup=await fetch_url_content_with_session(MEMBERS_LIST_URL), features="html.parser"
)

member_ids: set[int] = set()

Expand All @@ -72,7 +89,7 @@ async def fetch_community_group_members_list() -> set[int]:

if filtered_table is None:
logger.warning("Membership table with ID %s could not be found.", table_id)
logger.debug(response_html)
logger.debug(parsed_html)
continue

if isinstance(filtered_table, bs4.NavigableString):
Expand All @@ -97,7 +114,7 @@ async def fetch_community_group_members_list() -> set[int]:
if not member_ids: # NOTE: this should never be possible, because to fetch the page you need to have admin access, which requires being a member.
NO_MEMBERS_MESSAGE: Final[str] = "No members were found in either membership table."
logger.warning(NO_MEMBERS_MESSAGE)
logger.debug(response_html)
logger.debug(parsed_html)
raise MSLMembershipError(message=NO_MEMBERS_MESSAGE)

_membership_list_cache.clear()
Expand Down
Loading