diff --git a/backend/btrixcloud/pages.py b/backend/btrixcloud/pages.py
index f18cfa37ab..a190e14ded 100644
--- a/backend/btrixcloud/pages.py
+++ b/backend/btrixcloud/pages.py
@@ -546,8 +546,10 @@ async def list_pages(
         if is_seed in (True, False):
             query["isSeed"] = is_seed
 
-        if depth:
+        # Check the type rather than truthiness so that a depth of 0
+        # is still applied to the query instead of being skipped
+        if isinstance(depth, int):
             query["depth"] = depth
 
         if reviewed:
             query["$or"] = [
@@ -697,8 +699,10 @@ async def list_collection_pages(
         if is_seed in (True, False):
            query["isSeed"] = is_seed
 
-        if depth:
+        # Check the type rather than truthiness so that a depth of 0
+        # is still applied to the query instead of being skipped
+        if isinstance(depth, int):
             query["depth"] = depth
 
         aggregate = [{"$match": query}]
 
diff --git a/backend/test/test_run_crawl.py b/backend/test/test_run_crawl.py
index b30d421f16..47ba0a3e38 100644
--- a/backend/test/test_run_crawl.py
+++ b/backend/test/test_run_crawl.py
@@ -872,13 +872,13 @@ def test_crawl_pages_qa_filters(crawler_auth_headers, default_org_id, crawler_cr
     assert r.status_code == 200
     assert r.json()["total"] == 2
 
-    # Test reviewed filter (page now approved so should show up in True)
+    # Test reviewed filter (approved page shows up in True; unreviewed pages show up in False)
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawler_crawl_id}/pages?reviewed=False",
         headers=crawler_auth_headers,
     )
     assert r.status_code == 200
-    assert r.json()["total"] == 0
+    assert r.json()["total"] == 2
 
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawler_crawl_id}/pages?reviewed=True",
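
Background on the depth filter change: Python's truthiness check `if depth:` evaluates to False when depth is 0, so a request for pages at depth 0 (seed pages) silently drops the filter and matches every page. Guarding on the value's type keeps 0 as a valid filter while still skipping None. Below is a minimal sketch of the idea in isolation; `build_page_query` is a hypothetical helper for illustration, not the actual list_pages signature.

    from typing import Optional

    def build_page_query(depth: Optional[int] = None) -> dict:
        """Build a MongoDB-style filter dict for listing pages.

        A plain `if depth:` would drop the filter when depth == 0; checking
        the type keeps 0 as a valid filter value while still skipping None.
        """
        query: dict = {}
        if isinstance(depth, int):
            query["depth"] = depth
        return query

    assert build_page_query() == {}             # no filter when depth is omitted
    assert build_page_query(0) == {"depth": 0}  # depth 0 is preserved
    assert build_page_query(3) == {"depth": 3}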