Skip to content

Commit

Permalink
Merge pull request #58 from openearth/feature/interpolation-mask
Browse files Browse the repository at this point in the history
implement a better feature interpolation mask
  • Loading branch information
SiggyF committed Jan 10, 2019
2 parents 739fdd5 + 3028024 commit dda44f0
Show file tree
Hide file tree
Showing 4 changed files with 164 additions and 17 deletions.
25 changes: 23 additions & 2 deletions flowmap/formats/ugrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,11 @@ def subgrid(self, t):
# check if they exist
if table_path.exists():
logger.info('reading subgrid tables from %s', table_path)
# tables are now in array format with metadata
tables = subgrid.import_tables(str(table_path))
metadata = tables['metadata']
# get the valid range
valid_range = metadata.get('valid_range')
else:
command = 'flowmap export --format tables {} {}'.format(
self.path,
Expand Down Expand Up @@ -319,6 +323,23 @@ def subgrid(self, t):
with rasterio.open(str(interpolated_waterlevel_name)) as ds:
interpolated_waterlevel = ds.read(1, masked=True)

# Buildings are now partially filtered out; waterlevel values are interpolated into the building cells (radius 8).
# Apply the invalid-DEM mask (filtering out buildings) so these cells are excluded from the waterdepth.
invalid_mask = False
if valid_range is not None:
invalid_mask = np.logical_or(
dem['band'] < valid_range[0],
dem['band'] > valid_range[1]
)
mask = np.logical_or(
interpolated_waterlevel.mask,
invalid_mask
)

interpolated_waterlevel = np.ma.masked_array(
interpolated_waterlevel,
mask
)
waterdepth_name = self.generate_name(
self.path,
suffix='.tiff',
Expand All @@ -342,7 +363,6 @@ def subgrid(self, t):
epsg=self.src_epsg
)


def export(self, format, **kwargs):
"""export dataset"""
crs = geojson.crs.Named(
Expand Down Expand Up @@ -404,7 +424,8 @@ def export(self, format, **kwargs):
suffix='.nc',
topic=format
)
subgrid.create_export(new_name, n_cells=len(tables), n_bins=20)
# save options to the netcdf file
subgrid.create_export(new_name, n_cells=len(tables), n_bins=20, attributes=kwargs)
subgrid.export_tables(new_name, tables)
elif format == 'id_grid':
dem = read_dem(self.options['dem'])
Expand Down
2 changes: 1 addition & 1 deletion flowmap/particles.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
def make_tracer_pipeline(
polydata,
seed,
maximum_number_of_steps=200,
maximum_number_of_steps=3000,
maximum_propagation=1000
):
"""Make a tracer pipeline based on a polydata and seed.
Expand Down
45 changes: 31 additions & 14 deletions flowmap/subgrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,19 @@
NODATA = -9999


class MetaArray(np.ndarray):
    """ndarray subclass that carries a ``metadata`` dict alongside the data.

    Any extra keyword arguments given to the constructor are stored on the
    instance as the ``metadata`` attribute.  Views and slices derived from a
    ``MetaArray`` inherit the source array's metadata; arrays derived from a
    plain ``ndarray`` end up with ``metadata`` set to ``None``.
    """

    def __new__(cls, array, dtype=None, order=None, **kwargs):
        # Reinterpret the input data as this subclass and attach the
        # keyword arguments as its metadata dictionary.
        instance = np.asarray(array, dtype=dtype, order=order).view(cls)
        instance.metadata = kwargs
        return instance

    def __array_finalize__(self, source):
        # numpy calls this for views/copies/templates; ``source`` is None
        # only during explicit construction (handled in __new__ above).
        if source is None:
            return
        # Propagate metadata from the array we were derived from, if any.
        self.metadata = getattr(source, 'metadata', None)


def data_for_idx(face_idx, dem, grid, data):
"""get data for cell with face_idx face_idx"""
face = grid['face_coordinates'][face_idx]
Expand Down Expand Up @@ -122,8 +135,7 @@ def build_tables(ugrid, dem, id_grid, valid_range=None):
)

mask = np.logical_or.reduce(masks)
# TOOD: als mask using id grid here....
if dem_i.mask.any():
if dem_i.mask.any() or mask.all():
n_per_bin, bin_edges = None, None
volume_table = None
cum_volume_table = None
Expand All @@ -143,7 +155,6 @@ def build_tables(ugrid, dem, id_grid, valid_range=None):
]
record = dict(
id=id_,
# TODO: convert to 4 numbers
slice=face_slice,
face=face,
face_area=face_area,
Expand Down Expand Up @@ -177,8 +188,10 @@ def compute_waterlevels(grid, dem, tables, data):
for face_id in tqdm.tqdm(face_ids, desc='subgrid compute'):
row = {}
for key, var in tables.items():
row[key] = var[face_id]
if key == 'metadata':
continue

row[key] = var[face_id]
result = compute_waterlevel_per_cell(row, dem=dem)
results.append(result)
tables['subgrid_waterlevel'] = results
Expand Down Expand Up @@ -237,7 +250,7 @@ def interpolate_waterlevels(waterlevel_file, interpolated_waterlevel_file, dem,
return


def create_export(filename, n_cells, n_bins):
def create_export(filename, n_cells, n_bins, attributes=None):
"""create an export file for subgrid tables"""

dimensions = {
Expand Down Expand Up @@ -301,6 +314,9 @@ def create_export(filename, n_cells, n_bins):
dimensions=var['dimensions']
)
ncvar.setncattr('long_name', var['long_name'])
if attributes:
# store the options as global attributes
ds.setncatts(attributes)


def export_tables(filename, tables):
Expand All @@ -322,11 +338,17 @@ def export_tables(filename, tables):

ds.variables[var][i] = val

def import_tables(filename, arrays=True):

def import_tables(filename):
"""import tables from netcdf table dump"""
with netCDF4.Dataset(filename) as ds:
# lookup metadata
metadata = {
key: getattr(ds, key)
for key
in ds.ncattrs()
}
vars = {}
index = np.arange(ds.variables['bin_edges'].shape[0])
for var in [
'bin_edges', 'cum_volume_table',
'volume_table', 'extent', 'n_per_bin',
Expand All @@ -335,13 +357,8 @@ def import_tables(filename, arrays=True):
arr = ds.variables[var][:]
vars[var] = arr
# fast approach, use just numpy array
if arrays:
tables = vars
else:
# this is slow for large number of cells
for key, arr in vars.items():
vars[key] = list(arr)
tables = pd.DataFrame(vars, index=index)
tables = vars
tables['metadata'] = metadata
return tables


Expand Down
109 changes: 109 additions & 0 deletions notebooks/create_quiver.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"import netCDF4\n",
"import geojson\n",
"import numpy as np\n",
"import shapely.geometry"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"ds = netCDF4.Dataset('/Users/baart_f/data/rijnland/vanGovert/Leerdam/leerdamwest_map.nc')"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"\n",
"x = ds.variables['mesh2d_face_x'][:]\n",
"y = ds.variables['mesh2d_face_y'][:]\n",
"ucx = ds.variables['mesh2d_ucx'][-1]\n",
"ucy = ds.variables['mesh2d_ucy'][-1]\n",
"angles = np.rad2deg(np.arctan2(ucy, ucx))\n",
"lengths = np.sqrt(ucx ** 2 + ucy **2 )"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [],
"source": [
"crs = geojson.crs.Named(\n",
" properties={\n",
" \"name\": \"urn:ogc:def:crs:EPSG::{:d}\".format(28992)\n",
" }\n",
")\n",
"features= []\n",
"for i, (x_i, y_i, angle_i, length_i, u_i, v_i) in enumerate(zip(x, y, angles, lengths, ucx, ucy)):\n",
" point = shapely.geometry.Point(x_i, y_i)\n",
" geometry = point.__geo_interface__\n",
" geometry['crs'] = dict(crs)\n",
" feature = geojson.Feature(\n",
" id=i, \n",
" geometry=geometry, \n",
" properties={\n",
" \"angle_east_ccw\": angle_i,\n",
" \"angle_north_cw\": 90 - angle_i,\n",
" \"length\": length_i,\n",
" \"u\": u_i,\n",
" \"v\": v_i\n",
" }\n",
" )\n",
" features.append(feature)\n"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
"fc = geojson.FeatureCollection(features=features)\n",
"with open('quiver.json', 'w') as f:\n",
" geojson.dump(fc, f)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

0 comments on commit dda44f0

Please sign in to comment.