Skip to content

Commit

Permalink
add null and memory implementations
Browse files Browse the repository at this point in the history
  • Loading branch information
Ariana Barzinpour committed Apr 24, 2024
1 parent 26fda40 commit 1013a6b
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 6 deletions.
18 changes: 12 additions & 6 deletions datacube/index/memory/_datasets.py
Expand Up @@ -281,9 +281,8 @@ def restore(self, ids: Iterable[DSID]) -> None:
def purge(self, ids: Iterable[DSID]) -> None:
for id_ in ids:
id_ = dsid_to_uuid(id_)
if id_ in self._archived_by_id:
ds = self._archived_by_id.pop(id_)
del self._by_id[id_]
if id_ in self._by_id:
ds = self._by_id.pop(id_)
if id_ in self._derived_from:
for classifier, src_id in self._derived_from[id_].items():
del self._derivations[src_id][classifier]
Expand All @@ -292,13 +291,20 @@ def purge(self, ids: Iterable[DSID]) -> None:
for classifier, child_id in self._derivations[id_].items():
del self._derived_from[child_id][classifier]
del self._derivations[id_]
self._archived_by_product[ds.product.name].remove(id_)
if id_ in self._archived_by_id:
del self._archived_by_id[id_]
self._archived_by_product[ds.product.name].remove(id_)
else:
del self._active_by_id[id_]
self._by_product[ds.product.name].remove(id_)

def get_all_dataset_ids(self, archived: bool) -> Iterable[UUID]:
def get_all_dataset_ids(self, archived: bool | None = False) -> Iterable[UUID]:
    """Return an iterator over dataset ids held by this index.

    :param archived: ``True`` for archived datasets only, ``False`` for
        active datasets only, ``None`` for all datasets regardless of state.
    """
    if archived is None:
        # No state filter: walk the full id table.
        source = self._by_id
    elif archived:
        source = self._archived_by_id
    else:
        source = self._active_by_id
    return (dataset_id for dataset_id in source)

@deprecat(
reason="Multiple locations per dataset are now deprecated. Please use the 'get_location' method.",
Expand Down
7 changes: 7 additions & 0 deletions datacube/index/memory/_products.py
Expand Up @@ -115,6 +115,13 @@ def update(self, product: Product,
self.by_name[persisted.name] = persisted
return cast(Product, self.get_by_name(product.name))

def delete(self, product: Product):
    """Remove a product from the index, purging all of its datasets first.

    :param product: the product to delete (looked up by name and id).
    """
    # Purge every dataset of this product — archived=None means no
    # archive-state filter, i.e. active and archived alike.
    doomed: Iterable[UUID] = self._index.datasets.search_returning(
        archived=None, product=product.name,
    )
    self._index.datasets.purge(doomed)

    # Drop the product from both in-memory lookup tables.
    self.by_id.pop(product.id)
    self.by_name.pop(product.name)

def get_unsafe(self, id_: int) -> Product:
return self.clone(self.by_id[id_])

Expand Down
3 changes: 3 additions & 0 deletions datacube/index/null/_products.py
Expand Up @@ -23,6 +23,9 @@ def can_update(self, product, allow_unsafe_updates=False, allow_table_lock=False
def update(self, product: Product, allow_unsafe_updates=False, allow_table_lock=False):
raise NotImplementedError()

def delete(self, product: Product):
    # NOTE(review): this resource appears to be the no-op "null" index
    # backend — siblings here also raise; deletion is unsupported by design.
    """Product deletion is not supported by this index driver.

    :raises NotImplementedError: always.
    """
    raise NotImplementedError()

def get_unsafe(self, id_):
raise KeyError(id_)

Expand Down
4 changes: 4 additions & 0 deletions integration_tests/index/test_memory_index.py
Expand Up @@ -156,6 +156,7 @@ def test_mem_product_resource(mem_index_fresh: Datacube,
assert unmatched == {}
else:
assert unmatched["platform"] == 'landsat-8'
# Test search_by_metadata
lds = list(mem_index_fresh.index.products.search_by_metadata({"product_family": "ard"}))
assert len(lds) == 0
lds = list(mem_index_fresh.index.products.search_by_metadata({"odc:product_family": "ard"}))
Expand All @@ -164,6 +165,9 @@ def test_mem_product_resource(mem_index_fresh: Datacube,
assert len(lds) == 0
lds = list(mem_index_fresh.index.products.search_by_metadata({"eo:platform": "landsat-8"}))
assert len(lds) == 1
# Test delete
mem_index_fresh.index.products.delete(ls8_fresh)
assert mem_index_fresh.index.products.get_by_name("ga_ls8c_ard_3") is None


# Hand crafted tests with recent real-world eo3 examples
Expand Down
2 changes: 2 additions & 0 deletions integration_tests/index/test_null_index.py
Expand Up @@ -80,6 +80,8 @@ def test_null_product_resource(null_config: ODCEnvironment):
dc.index.products.can_update(MagicMock())
with pytest.raises(NotImplementedError) as e:
dc.index.products.update(MagicMock())
with pytest.raises(NotImplementedError) as e:
dc.index.products.delete(MagicMock())


def test_null_dataset_resource(null_config: ODCEnvironment):
Expand Down

0 comments on commit 1013a6b

Please sign in to comment.