diff --git a/backend/btrixcloud/pages.py b/backend/btrixcloud/pages.py index 7cadfcb0c..fd8428bc1 100644 --- a/backend/btrixcloud/pages.py +++ b/backend/btrixcloud/pages.py @@ -201,6 +201,28 @@ async def get_page( page_raw = await self.get_page_raw(page_id, oid, crawl_id) return Page.from_dict(page_raw) + async def get_page_out( + self, + page_id: UUID, + oid: UUID, + crawl_id: Optional[str] = None, + qa_run_id: Optional[str] = None, + ) -> Union[PageOut, PageOutWithSingleQA]: + """Return PageOut or PageOutWithSingleQA for page""" + page_raw = await self.get_page_raw(page_id, oid, crawl_id) + if qa_run_id: + qa = page_raw.get("qa") + if qa and qa.get(qa_run_id): + page_raw["qa"] = qa.get(qa_run_id) + else: + print( + f"Error: Page {page_id} does not have data from QA run {qa_run_id}", + flush=True, + ) + page_raw["qa"] = None + return PageOutWithSingleQA.from_dict(page_raw) + return PageOut.from_dict(page_raw) + async def add_qa_run_for_page( self, page_id: UUID, oid: UUID, qa_run_id: str, compare: PageQACompare ) -> bool: @@ -522,7 +544,7 @@ async def re_add_crawl_pages( @app.get( "/orgs/{oid}/crawls/{crawl_id}/pages/{page_id}", tags=["pages"], - response_model=Page, + response_model=PageOut, ) async def get_page( crawl_id: str, @@ -530,7 +552,21 @@ async def get_page( org: Organization = Depends(org_crawl_dep), ): """GET single page""" - return await ops.get_page(page_id, org.id, crawl_id) + return await ops.get_page_out(page_id, org.id, crawl_id) + + @app.get( + "/orgs/{oid}/crawls/{crawl_id}/qa/{qa_run_id}/pages/{page_id}", + tags=["pages", "qa"], + response_model=PageOutWithSingleQA, + ) + async def get_page_with_qa( + crawl_id: str, + qa_run_id: str, + page_id: UUID, + org: Organization = Depends(org_crawl_dep), + ): + """GET single page with QA details for specified QA run""" + return await ops.get_page_out(page_id, org.id, crawl_id, qa_run_id=qa_run_id) @app.patch( "/orgs/{oid}/crawls/{crawl_id}/pages/{page_id}", diff --git a/backend/test/test_qa.py b/backend/test/test_qa.py index 
602954184..b8f15bb99 100644 --- a/backend/test/test_qa.py +++ b/backend/test/test_qa.py @@ -129,6 +129,27 @@ def test_qa_page_data(crawler_crawl_id, crawler_auth_headers, default_org_id): data = r.json() assert len(data["items"]) == 1 page = data["items"][0] + + page_id = page["id"] + assert page_id + + assert page["title"] == "Webrecorder" + assert page["url"] == "https://webrecorder.net/" + assert page["qa"]["textMatch"] == 1.0 + assert page["qa"]["screenshotMatch"] == 1.0 + assert page["qa"]["resourceCounts"] == { + "crawlGood": 15, + "crawlBad": 0, + "replayGood": 15, + "replayBad": 1, + } + + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawler_crawl_id}/qa/{qa_run_id}/pages/{page_id}", + headers=crawler_auth_headers, + ) + page = r.json() + assert page["id"] assert page["title"] == "Webrecorder" assert page["url"] == "https://webrecorder.net/" assert page["qa"]["textMatch"] == 1.0