diff --git a/scraper/maiia/maiia.py b/scraper/maiia/maiia.py index 346de69cc17..6f65c90610d 100644 --- a/scraper/maiia/maiia.py +++ b/scraper/maiia/maiia.py @@ -109,7 +109,7 @@ def _fetch(self, request: ScraperRequest, creneau_q=DummyQueue()) -> Optional[st return None center_id = url_query["centerid"][0] - reasons = get_reasons(center_id, self._client, request=request) + reasons = get_reasons(center_id, client=self._client, request=request) if not reasons: return None self.lieu = Lieu( diff --git a/tests/test_maiia.py b/tests/test_maiia.py index 0269b30980b..c37d1d41dd7 100644 --- a/tests/test_maiia.py +++ b/tests/test_maiia.py @@ -1,6 +1,7 @@ import json import pytest import logging +from urllib.parse import parse_qs from scraper.pattern.center_info import CenterInfo from utils.vmd_utils import DummyQueue import httpx @@ -25,6 +26,11 @@ def app(request: httpx.Request) -> httpx.Response: try: + qs = parse_qs( + request.url._uri_reference.query + ) # we don't access request.url.query because it returns query.encode('ascii'), which would crash + if int(qs.get("page", ["0"])[0]) >= 1: + return httpx.Response(200, json={"items": [], "total": 0}) slug = request.url.path.split("/")[-1] endpoint = slug.split("?")[0] path = Path("tests", "fixtures", "maiia", f"{endpoint}.json") @@ -128,7 +134,9 @@ def test_get_first_availability(): assert first_availability.isoformat() == "2021-05-13T13:40:00+00:00" -@pytest.mark.skip(reason="je n'ai aucune connaissance de ce scrapper et je dois supprimer les artifacts qui encombrent gitlab :D") +@pytest.mark.skip( + reason="je n'ai aucune connaissance de ce scrapper et je dois supprimer les artifacts qui encombrent gitlab :D" +) def test_fetch_slots(): # Oops I forgot centerid