Skip to content

Commit

Permalink
Improve argument naming after group discussion
Browse files Browse the repository at this point in the history
  • Loading branch information
omad committed Jan 5, 2017
1 parent b2b2893 commit 7ebf45c
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 11 deletions.
8 changes: 4 additions & 4 deletions datacube/api/core.py
Expand Up @@ -473,7 +473,7 @@ def product_data(*args, **kwargs):
return Datacube.load_data(*args, **kwargs)

@staticmethod
def load_data(sources, geobox, measurements, fuse_func=None, dask_chunks=None, ignore_errors=False):
def load_data(sources, geobox, measurements, fuse_func=None, dask_chunks=None, skip_broken_datasets=False):
"""
Load data from :meth:`group_datasets` into an :class:`xarray.Dataset`.
Expand Down Expand Up @@ -505,7 +505,7 @@ def data_func(measurement):
data = numpy.full(sources.shape + geobox.shape, measurement['nodata'], dtype=measurement['dtype'])
for index, datasets in numpy.ndenumerate(sources.values):
_fuse_measurement(data[index], datasets, geobox, measurement, fuse_func=fuse_func,
ignore_errors=ignore_errors)
skip_broken_datasets=skip_broken_datasets)
return data
else:
def data_func(measurement):
Expand Down Expand Up @@ -562,15 +562,15 @@ def fuse_lazy(datasets, geobox, measurement, fuse_func=None, prepend_dims=0):
return data.reshape(prepend_shape + geobox.shape)


def _fuse_measurement(dest, datasets, geobox, measurement, ignore_errors=False, fuse_func=None):
def _fuse_measurement(dest, datasets, geobox, measurement, skip_broken_datasets=False, fuse_func=None):
reproject_and_fuse([DatasetSource(dataset, measurement['name']) for dataset in datasets],
dest,
geobox.affine,
geobox.crs,
dest.dtype.type(measurement['nodata']),
resampling=measurement.get('resampling_method', 'nearest'),
fuse_func=fuse_func,
ignore_errors=ignore_errors)
skip_broken_datasets=skip_broken_datasets)


def get_bounds(datasets, crs):
Expand Down
4 changes: 2 additions & 2 deletions datacube/api/grid_workflow.py
Expand Up @@ -304,7 +304,7 @@ def list_tiles(self, cell_index=None, **query):
return self.tile_sources(observations, query_group_by(**query))

@staticmethod
def load(tile, measurements=None, dask_chunks=None, fuse_func=None, resampling=None, ignore_errors=False):
def load(tile, measurements=None, dask_chunks=None, fuse_func=None, resampling=None, skip_broken_datasets=False):
"""
Load data for a cell/tile.
Expand Down Expand Up @@ -348,7 +348,7 @@ def load(tile, measurements=None, dask_chunks=None, fuse_func=None, resampling=N
measurements = set_resampling_method(measurements, resampling)

dataset = Datacube.load_data(tile.sources, tile.geobox, measurements.values(), dask_chunks=dask_chunks,
fuse_func=fuse_func, ignore_errors=ignore_errors)
fuse_func=fuse_func, skip_broken_datasets=skip_broken_datasets)

return dataset

Expand Down
8 changes: 4 additions & 4 deletions datacube/storage/storage.py
Expand Up @@ -142,15 +142,15 @@ def ignore_if(ignore_errors):


def reproject_and_fuse(sources, destination, dst_transform, dst_projection, dst_nodata,
resampling='nearest', fuse_func=None, ignore_errors=False):
resampling='nearest', fuse_func=None, skip_broken_datasets=False):
"""
Reproject and fuse `sources` into a 2D numpy array `destination`.
:param List[BaseRasterDataSource] sources: Data sources to open and read from
:param numpy.ndarray destination: ndarray of appropriate size to read data into
:type resampling: str
:type fuse_func: callable or None
:param bool ignore_errors: Carry on in the face of adversity and failing reads.
:param bool skip_broken_datasets: Carry on in the face of adversity and failing reads.
"""
assert len(destination.shape) == 2

Expand All @@ -169,14 +169,14 @@ def copyto_fuser(dest, src):
if len(sources) == 0:
return destination
elif len(sources) == 1:
with ignore_if(ignore_errors):
with ignore_if(skip_broken_datasets):
read_from_source(sources[0], destination, dst_transform, dst_nodata, dst_projection, resampling)
return destination
else:
        # Multiple sources, we need to fuse them together into a single array
buffer_ = numpy.empty(destination.shape, dtype=destination.dtype)
for source in sources:
with ignore_if(ignore_errors):
with ignore_if(skip_broken_datasets):
read_from_source(source, buffer_, dst_transform, dst_nodata, dst_projection, resampling)
fuse_func(destination, buffer_)

Expand Down
2 changes: 1 addition & 1 deletion tests/storage/test_storage.py
Expand Up @@ -124,7 +124,7 @@ def test_read_from_broken_source():

# Check can ignore errors
reproject_and_fuse(sources, output_data, dst_transform=identity,
dst_projection=crs, dst_nodata=no_data, ignore_errors=True)
dst_projection=crs, dst_nodata=no_data, skip_broken_datasets=True)

assert (output_data == [[2, 2], [2, 2]]).all()

Expand Down

0 comments on commit 7ebf45c

Please sign in to comment.