
Commit

Merge pull request #120 from ICESat2-SlideRule/voila
fix: interactive control of map in voila demo
jpswinski committed Dec 14, 2022
2 parents b1c6f75 + 71accd2 commit 6024ecf
Showing 4 changed files with 63 additions and 56 deletions.
58 changes: 32 additions & 26 deletions examples/voila_demo.ipynb
@@ -73,7 +73,7 @@
"import warnings\n",
"import time\n",
"import json\n",
"from IPython.display import clear_output\n",
"from IPython import display\n",
"# atl03 plotting imports\n",
"import numpy as np\n",
"import matplotlib.lines\n",
@@ -158,12 +158,18 @@
},
"outputs": [],
"source": [
"# create ipyleaflet map in specified projection\n",
"m = ipysliderule.leaflet(SRwidgets.projection.value)\n",
"# install click handler callback\n",
"m.add_selected_callback(SRwidgets.atl06_click_handler)\n",
"display(m.map)\n",
"display(run_output)"
"def create_map(projection):\n",
" # create ipyleaflet map in specified projection\n",
" global m\n",
" m = ipysliderule.leaflet(projection)\n",
" # install click handler callback\n",
" m.add_selected_callback(SRwidgets.atl06_click_handler)\n",
" display.display(m.map)\n",
"\n",
"# interactively change map when projection widget is changed\n",
"out = widgets.interactive_output(create_map, dict(projection=SRwidgets.projection))\n",
"display.display(out)\n",
"display.display(run_output)"
]
},
{
@@ -189,17 +195,17 @@
" layers=SRwidgets.layers.value,\n",
" rendering_rule=SRwidgets.rendering_rule\n",
" )\n",
" \n",
"\n",
"# map widgets\n",
"display(widgets.VBox([\n",
"display.display(widgets.VBox([\n",
" SRwidgets.projection,\n",
" SRwidgets.layers,\n",
" SRwidgets.raster_functions\n",
"]))\n",
"\n",
"# update button\n",
"update_button.on_click(on_update_clicked)\n",
"display(update_button)"
"display.display(update_button)"
]
},
{
@@ -317,7 +323,7 @@
"def on_show_code06_clicked(b):\n",
" global url_textbox, atl06_parms\n",
" with show_code06_output:\n",
" clear_output()\n",
" display.clear_output()\n",
" print(f'icesat2.init(\"{url_textbox.value}\")')\n",
" print('parms = ', json.dumps(atl06_parms, indent=4), sep='')\n",
" print('gdf = icesat2.atl06p(parms, asset=\"nsidc-s3\")')\n",
@@ -355,7 +361,7 @@
" description='URL:',\n",
" disabled=False\n",
")\n",
"display(url_textbox)\n",
"display.display(url_textbox)\n",
"\n",
"# points to plot drop down\n",
"points_dropdown = widgets.Dropdown(\n",
@@ -366,7 +372,7 @@
")\n",
"\n",
"# display widgets for setting SlideRule parameters\n",
"display(widgets.VBox([\n",
"display.display(widgets.VBox([\n",
" SRwidgets.surface_type,\n",
" SRwidgets.length,\n",
" SRwidgets.step,\n",
@@ -384,9 +390,9 @@
"]))\n",
"\n",
"# display buttons\n",
"display(run_button)\n",
"display(refresh_button, refresh_output)\n",
"display(show_code06_button, show_code06_output)"
"display.display(run_button)\n",
"display.display(refresh_button, refresh_output)\n",
"display.display(show_code06_button, show_code06_output)"
]
},
{
@@ -553,7 +559,7 @@
"def on_show_code03_clicked(b):\n",
" global url_textbox, atl03_parms\n",
" with show_code03_output:\n",
" clear_output()\n",
" display.clear_output()\n",
" print(f'icesat2.init(\"{url_textbox.value}\")')\n",
" print('parms = ', json.dumps(atl03_parms, indent=4), sep='')\n",
" print('gdf = icesat2.atl03sp(parms, asset=\"nsidc-s3\")')\n",
@@ -590,14 +596,14 @@
" disabled = False,\n",
")\n",
"\n",
"display(SRwidgets.rgt)\n",
"display(SRwidgets.cycle)\n",
"display(SRwidgets.ground_track)\n",
"display(SRwidgets.plot_classification)\n",
"display(elev_dropdown)\n",
"display(pc_button)\n",
"display(pc_output)\n",
"display(show_code03_button, show_code03_output)"
"display.display(SRwidgets.rgt)\n",
"display.display(SRwidgets.cycle)\n",
"display.display(SRwidgets.ground_track)\n",
"display.display(SRwidgets.plot_classification)\n",
"display.display(elev_dropdown)\n",
"display.display(pc_button)\n",
"display.display(pc_output)\n",
"display.display(show_code03_button, show_code03_output)"
]
}
],
@@ -635,7 +641,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.15"
"version": "3.10.6"
},
"toc-showtags": false
},
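For reference, a minimal sketch of the pattern the updated notebook cell introduces: ipywidgets.interactive_output re-runs a map-building callback whenever the linked projection widget changes, so the ipyleaflet map is recreated in the newly selected projection. The standalone dropdown below is a stand-in for SRwidgets.projection, and the click-handler wiring from the diff is omitted.

import ipywidgets as widgets
from IPython import display
from sliderule import ipysliderule

# stand-in for SRwidgets.projection in the demo notebook
projection = widgets.Dropdown(
    options=['Global', 'North', 'South'],
    value='Global',
    description='Projection:'
)

def create_map(projection):
    # create ipyleaflet map in the specified projection
    global m
    m = ipysliderule.leaflet(projection)
    display.display(m.map)

# interactive_output clears and re-renders its output area
# every time the linked dropdown value changes
out = widgets.interactive_output(create_map, dict(projection=projection))
display.display(projection, out)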
12 changes: 10 additions & 2 deletions sliderule/icesat2.py
@@ -191,6 +191,9 @@ def __cmr_granule_metadata(search_results):
for e in search_results['feed']['entry']:
# columns for dataframe
columns = {}
# granule title and identifiers
columns['title'] = e['title']
columns['collection_concept_id'] = e['collection_concept_id']
# time start and time end of granule
columns['time_start'] = numpy.datetime64(e['time_start'])
columns['time_end'] = numpy.datetime64(e['time_end'])
@@ -202,8 +205,13 @@
df = geopandas.pd.DataFrame(columns, index=[e['id']])
# Generate Geometry Column
if 'polygons' in e:
coords = [float(i) for i in e['polygons'][0][0].split()]
geometry = Polygon(zip(coords[1::2], coords[::2]))
polygons = []
# for each polygon
for poly in e['polygons'][0]:
coords = [float(i) for i in poly.split()]
polygons.append(Polygon(zip(coords[1::2], coords[::2])))
# generate multipolygon from list of polygons
geometry = MultiPolygon(polygons)
else:
geometry, = geopandas.points_from_xy([None], [None])
# Build GeoDataFrame (default geometry is crs=EPSG_MERCATOR)
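For reference, a self-contained sketch of what the new hunk does with the CMR 'polygons' entries: each entry is a flat "lat lon lat lon ..." string, so even-indexed values are latitudes and odd-indexed values are longitudes, and every entry now contributes one Polygon to a single MultiPolygon instead of only the first polygon being used. The coordinate strings below are invented for illustration.

from shapely.geometry import Polygon, MultiPolygon

# hypothetical CMR granule polygons: flat "lat lon lat lon ..." strings
cmr_polygons = [
    "10.0 -50.0 10.0 -49.0 11.0 -49.0 10.0 -50.0",
    "20.0 -40.0 20.0 -39.0 21.0 -39.0 20.0 -40.0",
]

polygons = []
for poly in cmr_polygons:
    coords = [float(i) for i in poly.split()]
    # pair odd-indexed longitudes with even-indexed latitudes as (x, y)
    polygons.append(Polygon(zip(coords[1::2], coords[::2])))

# single geometry covering every polygon of the granule
geometry = MultiPolygon(polygons)
print(geometry.wkt)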
35 changes: 12 additions & 23 deletions sliderule/io.py
@@ -29,6 +29,7 @@

import sys
import json
import logging
import warnings
import datetime
import geopandas
@@ -49,8 +50,8 @@ def get_attributes(**kwargs):
# set default keyword arguments
kwargs.setdefault('lon_key','longitude')
kwargs.setdefault('lat_key','latitude')
coordinates = '{lat_key} {lon_key}'.format(**kwargs)
lon_key,lat_key = (kwargs['lon_key'],kwargs['lat_key'])
coordinates = f'{lat_key} {lon_key}'
attrs = {}
# file level attributes
attrs['featureType'] = 'trajectory'
@@ -326,7 +327,6 @@ def to_json(filename, **kwargs):
kwargs.setdefault('parameters',None)
kwargs.setdefault('regions',[])
kwargs.setdefault('crs','EPSG:4326')
kwargs.setdefault('verbose',False)
# add each parameter as an attribute
SRparams = ['H_min_win', 'atl08_class', 'atl03_quality', 'ats', 'cnf',
'cnt', 'len', 'maxi', 'res', 'sigma_r_max', 'srt', 'yapc']
@@ -355,21 +355,17 @@
with open(filename, 'w') as fid:
json.dump(output, fid)
# print the filename and dictionary structure
if kwargs['verbose']:
print(filename)
print(list(output.keys()))
logging.info(filename)
logging.info(list(output.keys()))

# read request parameters and regions from JSON
def from_json(filename, **kwargs):
# set default keyword arguments
kwargs.setdefault('verbose',False)
# load the JSON file
with open(filename, 'r') as fid:
attributes = json.load(fid)
# print the filename and dictionary structure
if kwargs['verbose']:
print(filename)
print(list(attributes.keys()))
logging.info(filename)
logging.info(list(attributes.keys()))
# try to get the sliderule adjustable parameters
SRparams = ['H_min_win', 'atl08_class', 'atl03_quality', 'ats', 'cnf',
'cnt', 'len', 'maxi', 'res', 'sigma_r_max', 'srt', 'yapc']
@@ -397,7 +393,6 @@ def to_nc(gdf, filename, **kwargs):
# set default keyword arguments
kwargs.setdefault('parameters',None)
kwargs.setdefault('regions',[])
kwargs.setdefault('verbose',False)
kwargs.setdefault('crs','EPSG:4326')
kwargs.setdefault('lon_key','longitude')
kwargs.setdefault('lat_key','latitude')
@@ -473,9 +468,8 @@ def to_nc(gdf, filename, **kwargs):
setattr(fileID, 'poly{0:d}_x'.format(i), json.dumps(lon))
setattr(fileID, 'poly{0:d}_y'.format(i), json.dumps(lat))
# Output netCDF structure information
if kwargs['verbose']:
print(filename)
print(list(fileID.variables.keys()))
logging.info(filename)
logging.info(list(fileID.variables.keys()))
# Closing the netCDF file
fileID.close()
warnings.filterwarnings("default")
@@ -576,7 +570,6 @@ def to_hdf(gdf, filename, **kwargs):
kwargs.setdefault('driver','pytables')
kwargs.setdefault('parameters',None)
kwargs.setdefault('regions',[])
kwargs.setdefault('verbose',False)
kwargs.setdefault('crs','EPSG:4326')
kwargs.setdefault('lon_key','longitude')
kwargs.setdefault('lat_key','latitude')
@@ -606,7 +599,6 @@ def write_pytables(df, filename, attributes, **kwargs):
# set default keyword arguments
kwargs.setdefault('parameters',None)
kwargs.setdefault('regions',[])
kwargs.setdefault('verbose',False)
kwargs.setdefault('crs','EPSG:4326')
# write data to a pytables HDF5 file
df.to_hdf(filename, 'sliderule_segments', format="table", mode="w")
@@ -655,9 +647,8 @@ def write_pytables(df, filename, attributes, **kwargs):
setattr(fileID.root._v_attrs, 'poly{0:d}_x'.format(i), json.dumps(lon))
setattr(fileID.root._v_attrs, 'poly{0:d}_y'.format(i), json.dumps(lat))
# Output HDF5 structure information
if kwargs['verbose']:
print(filename)
print(fileID.get_storer('sliderule_segments').non_index_axes[0][1])
logging.info(filename)
logging.info(fileID.get_storer('sliderule_segments').non_index_axes[0][1])
# Closing the HDF5 file
fileID.close()

@@ -666,7 +657,6 @@ def write_h5py(df, filename, attributes, **kwargs):
# set default keyword arguments
kwargs.setdefault('parameters',None)
kwargs.setdefault('regions',[])
kwargs.setdefault('verbose',False)
kwargs.setdefault('crs','EPSG:4326')
# open HDF5 file object
fileID = h5py.File(filename, mode='w')
@@ -736,9 +726,8 @@ def write_h5py(df, filename, attributes, **kwargs):
fileID.attrs['poly{0:d}_x'.format(i)] = json.dumps(lon)
fileID.attrs['poly{0:d}_y'.format(i)] = json.dumps(lat)
# Output HDF5 structure information
if kwargs['verbose']:
print(filename)
print(list(fileID.keys()))
logging.info(filename)
logging.info(list(fileID.keys()))
# Closing the HDF5 file
fileID.close()

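The io.py changes drop the verbose keyword in favor of the standard logging module. Below is a trimmed-down, hypothetical stand-in (not the real sliderule.io.to_json) showing the new behavior: callers enable the filename and structure messages through logging instead of passing verbose=True.

import json
import logging

def to_json(filename, **kwargs):
    # hypothetical, trimmed-down stand-in for sliderule.io.to_json
    kwargs.setdefault('parameters', None)
    output = {'parameters': kwargs['parameters']}
    # write the request parameters to JSON
    with open(filename, 'w') as fid:
        json.dump(output, fid)
    # report the filename and dictionary structure
    # (previously: if kwargs['verbose']: print(...))
    logging.info(filename)
    logging.info(list(output.keys()))

# callers opt in once instead of passing verbose=True per call
logging.basicConfig(level=logging.INFO)
to_json('request.json', parameters={'len': 40.0, 'res': 20.0})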
14 changes: 9 additions & 5 deletions sliderule/ipysliderule.py
@@ -1621,6 +1621,7 @@ class leaflet:
def __init__(self, projection, **kwargs):
# set default keyword arguments
kwargs.setdefault('map',None)
kwargs.setdefault('prefer_canvas',False)
kwargs.setdefault('attribution',False)
kwargs.setdefault('zoom_control',False)
kwargs.setdefault('scale_control',False)
@@ -1632,25 +1633,28 @@ def __init__(self, projection, **kwargs):
if (projection == 'Global'):
self.map = ipyleaflet.Map(center=kwargs['center'],
zoom=9, max_zoom=15, world_copy_jump=True,
prefer_canvas=kwargs['prefer_canvas'],
attribution_control=kwargs['attribution'],
basemap=ipyleaflet.basemaps.Esri.WorldTopoMap)
self.crs = 'EPSG:3857'
elif (projection == 'North'):
self.map = ipyleaflet.Map(center=(90,0),
zoom=5, max_zoom=24,
prefer_canvas=kwargs['prefer_canvas'],
attribution_control=kwargs['attribution'],
basemap=ipyleaflet.basemaps.Esri.ArcticOceanBase,
crs=ipyleaflet.projections.EPSG5936.ESRIBasemap)
basemap=basemaps.Esri.ArcticOceanBase,
crs=projections.EPSG5936.ESRIBasemap)
# add arctic ocean reference basemap
reference = ipyleaflet.basemaps.Esri.ArcticOceanReference
reference = basemaps.Esri.ArcticOceanReference
self.map.add(ipyleaflet.basemap_to_tiles(reference))
self.crs = 'EPSG:5936'
elif (projection == 'South'):
self.map = ipyleaflet.Map(center=(-90,0),
zoom=2, max_zoom=9,
prefer_canvas=kwargs['prefer_canvas'],
attribution_control=kwargs['attribution'],
basemap=ipyleaflet.basemaps.Esri.AntarcticBasemap,
crs=ipyleaflet.projections.EPSG3031.ESRIBasemap)
basemap=basemaps.Esri.AntarcticBasemap,
crs=projections.EPSG3031.ESRIBasemap)
self.crs = 'EPSG:3031'
else:
# use a predefined ipyleaflet map
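The new prefer_canvas keyword is forwarded to ipyleaflet.Map, which tells Leaflet to draw vector overlays on a canvas renderer rather than as individual SVG elements; this usually scales better when many points are plotted. A usage sketch, assuming the sliderule package is installed:

from sliderule import ipysliderule

# default behavior is unchanged (prefer_canvas=False); opt in explicitly
m = ipysliderule.leaflet('Global', prefer_canvas=True)
m.map  # display the map in a Jupyter cell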
