From 992d8ad0d6d16afdba64acba5fb9576acef0138e Mon Sep 17 00:00:00 2001 From: Chris Wilcox Date: Mon, 6 Apr 2020 14:31:48 -0700 Subject: [PATCH] test: add new test class for query offset --- tests/system/test_system.py | 74 +++++++++++++++++++----- tests/system/utils/populate_datastore.py | 57 ++++++++++++++++++ 2 files changed, 115 insertions(+), 16 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 617ec2e7..29f89cee 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -14,6 +14,7 @@ import datetime import os +import string import unittest import requests @@ -429,23 +430,8 @@ def test_query_paginate_with_offset(self): self.assertEqual(entities[1]["name"], "Jon Snow") self.assertEqual(entities[2]["name"], "Arya") - def test_query_paginate_with_large_offset(self): - page_query = self._base_query() - page_query.order = "appearances" - offset = 21 # This is greater than the number of entries. - limit = 3 - iterator = page_query.fetch(limit=limit, offset=offset) + - # Fetch characters. - page = six.next(iterator.pages) - entities = list(page) - cursor = iterator.next_page_token - self.assertEqual(len(entities), 0) - - # Fetch next set of characters. 
-        new_iterator = page_query.fetch(limit=limit, offset=0, start_cursor=cursor)
-        entities = list(new_iterator)
-        self.assertEqual(len(entities), 0)
 
     def test_query_paginate_with_start_cursor(self):
         page_query = self._base_query()
@@ -482,6 +468,62 @@ def test_query_distinct_on(self):
         self.assertEqual(entities[0]["name"], "Catelyn")
         self.assertEqual(entities[1]["name"], "Arya")
 
+class TestDatastoreQueryOffsets(TestDatastore):
+    TOTAL_OBJECTS = 1500
+    NAMESPACE = "LargeCharacterEntity"
+    KIND = "LargeCharacter"
+
+    @classmethod
+    def setUpClass(cls):
+        cls.CLIENT = clone_client(Config.CLIENT)
+        # Set the namespace on the cloned client, since these
+        # query tests rely on entities already stored under it
+        cls.CLIENT.namespace = cls.NAMESPACE
+
+        # Populating the datastore if necessary.
+        populate_datastore.add_large_character_entities(client=cls.CLIENT)
+
+    @classmethod
+    def tearDownClass(cls):
+        # In the emulator, destroy the query entities.
+        if os.getenv(GCD_DATASET) is not None:
+            # Use the client for this test instead of the global.
+            clear_datastore.remove_all_entities(client=cls.CLIENT)
+
+    def _base_query(self):
+        # Use the client for this test instead of the global.
+        return self.CLIENT.query(
+            kind=self.KIND,
+            namespace=self.NAMESPACE
+        )
+
+    def _verify(self, limit, offset, expected):
+        # Query used for all tests
+        page_query = self._base_query()
+        page_query.add_filter("family", "=", "Stark")
+        page_query.add_filter("alive", "=", False)
+
+        iterator = page_query.fetch(limit=limit, offset=offset)
+        entities = [e for e in iterator]
+        if len(entities) != expected:
+            self.fail(f"{limit}, {offset}, {expected}. Returned: {len(entities)}")
+
+    def test_query_in_bounds_offsets(self):
+        # Verify that with no offset there are the correct # of results
+        self._verify(limit=None, offset=None, expected=self.TOTAL_OBJECTS)
+
+        # Verify that with no limit there are results (offset provided)
+        self._verify(limit=None, offset=900, expected=self.TOTAL_OBJECTS-900)
+
+        # Offset within range; verify the limit caps results at 200 items
+        self._verify(limit=200, offset=1100, expected=200)
+
+    def test_query_out_of_bounds_offsets(self):
+        # Offset within range, expect 50 despite larger limit
+        self._verify(limit=100, offset=self.TOTAL_OBJECTS-50, expected=50)
+
+        # Offset beyond the total item count; verify no items found
+        self._verify(limit=200, offset=self.TOTAL_OBJECTS+1000, expected=0)
 
 class TestDatastoreTransaction(TestDatastore):
     def test_transaction_via_with_statement(self):
diff --git a/tests/system/utils/populate_datastore.py b/tests/system/utils/populate_datastore.py
index 2c266a8a..14565101 100644
--- a/tests/system/utils/populate_datastore.py
+++ b/tests/system/utils/populate_datastore.py
@@ -18,6 +18,7 @@
 from __future__ import print_function
 
 import os
+import string
 import sys
 import time
 import uuid
@@ -61,6 +62,61 @@ def print_func(message):
     if os.getenv("GOOGLE_CLOUD_NO_PRINT") != "true":
         print(message)
 
+def add_large_character_entities(client=None):
+    TOTAL_OBJECTS = 1500
+    NAMESPACE="LargeCharacterEntity"
+    KIND="LargeCharacter"
+    MAX_STRING = (string.ascii_lowercase * 58)[:1500]
+
+    client.namespace = NAMESPACE
+
+    # Query used for all tests
+    page_query = client.query(
+        kind=KIND,
+        namespace=NAMESPACE,
+    )
+
+    def put_objects(count):
+        remaining = count
+        current=0
+
+        # Can only do 500 operations in a transaction with an overall
+        # size limit.
+        ENTITIES_TO_BATCH = 25
+        while current < count:
+            start = current
+            end = min(current + ENTITIES_TO_BATCH, count)
+            with client.transaction() as xact:
+                # The name/ID for the new entity
+                for i in range(start,end):
+                    name = f'character{i:05d}'
+                    # The Cloud Datastore key for the new entity
+                    task_key = client.key(KIND, name)
+
+                    # Prepares the new entity
+                    task = datastore.Entity(key=task_key)
+                    task['name'] = f"{i:05d}"
+                    task['family'] = 'Stark'
+                    task['alive'] = False
+
+                    for i in string.ascii_lowercase:
+                        task[f'space-{i}'] = MAX_STRING
+
+                    # Saves the entity
+                    xact.put(task)
+            current += ENTITIES_TO_BATCH
+
+    # Ensure we have 1500 entities for tests. If not, clean up the kind and add
+    # new entities equal to TOTAL_OBJECTS
+    all_entities = [e for e in page_query.fetch()]
+    if len(all_entities) != TOTAL_OBJECTS:
+        # Cleanup Collection if not an exact match
+        while all_entities:
+            entities = all_entities[:500]
+            all_entities = all_entities[500:]
+            client.delete_multi([e.key for e in entities])
+        # Put objects
+        put_objects(TOTAL_OBJECTS)
 
 
 def add_characters(client=None):
     if client is None: