Skip to content

Commit

Permalink
BUGFIX for autocorrelation function (#140)
Browse files Browse the repository at this point in the history
* track.ncReadTrackData returns true datetime objects - track.ncReadTrackData previously returned cftime.DatetimeGregorian objects, which caused newer versions of matplotlib.dates.num2date to fail. This is because we write the tracks with units of 'hours since 1900-01-01 00:00', but matplotlib.dates uses 1970-01-01 as the epoch, and works in units of days (with no way to specify units in the num2date function).
* Read netcdf-format track file from GA SST - GA's Scenario selection tool allows users to download a selected track file, but due to technical constraints this is in a different format. This change allows TCRM to read the modified format.
* Move definition statements to separate file
* Move queries to separate file
* Add script to run through all permutations of wind parameters
* NHIRS 148 fix thread (#119)
* assertDictEqual doesn't like complex values (like arrays)
* NHIRS-148: Fixed issue with FlushCache failing. (#121)
* update wind field model to get direction correct, and update documentation
* Enhancement/linear rmax (#125)
* fix double calculation of t3 - it was previously L3 / (L2 * L2) instead of L3 / L2
* only call Kepert wind field fortran code when a Holland pressure profile is used
* Working version of cartopy-based scalebar
* Pycxml compatibility (#133)
* gdal multi-threading for reprojection
* Use expected DataProcess-InputFile
* Create daily LTM MSLP file from ERA5
* Low resolution wind multipliers (#136) - Adds in a script to quickly downscale the wind multipliers and apply them to many wind fields quickly. If the wind multipliers have already been extracted this process takes minutes instead of weeks.
* Bugfix for ACF function (#139)

---------

Co-authored-by: mahmudulhasanGA <66761046+mahmudulhasanGA@users.noreply.github.com>
Co-authored-by: Kieran Ricardo <kieranricardo@hotmail.com>
  • Loading branch information
3 people committed Mar 5, 2023
1 parent a9be18f commit a05f812
Show file tree
Hide file tree
Showing 73 changed files with 1,272 additions and 307 deletions.
8 changes: 6 additions & 2 deletions .github/workflows/tcrm-pylint.yml
Expand Up @@ -11,13 +11,17 @@ jobs:

steps:
- uses: actions/checkout@v2
- name: Set up Python env
- name: Set up environment
uses: conda-incubator/setup-miniconda@v2.0.0
with:
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
activate-environment: tcrm
environment-file: tcrmenv.yml
python-version: 3.7
auto-activate-base: false
use-only-tar-bz2: true

- name: Install dependencies
run: |
Expand Down
14 changes: 10 additions & 4 deletions .github/workflows/tcrm-tests.yml
Expand Up @@ -13,20 +13,26 @@ jobs:
TCRM:
name: Test TCRM
runs-on: ubuntu-latest
strategy:
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Set up environment
uses: conda-incubator/setup-miniconda@v2.0.0
with:
python-version: ${{ matrix.python-version }}
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
activate-environment: tcrm
environment-file: tcrmenv.yml
python-version: ${{ matrix.python-version }}
auto-activate-base: false
use-only-tar-bz2: true

- name: Test with nose
- name: Test with pytest
env:
PYTHONPATH: ~/tcrm;~/tcrm/Utilities
shell: bash -l {0}
run: |
python tests/run.py
pytest -x --cov=. --cov-report xml
10 changes: 7 additions & 3 deletions DataProcess/DataProcess.py
Expand Up @@ -171,13 +171,17 @@ def processData(self, restrictToWindfieldDomain=False):
if config.has_option('DataProcess', 'InputFile'):
inputFile = config.get('DataProcess', 'InputFile')
self.logger.info(f"Input file from DataProcess: {inputFile}")
else:
inputFile = None

if config.has_option('DataProcess', 'Source'):
source = config.get('DataProcess', 'Source')
self.logger.info(f"Loading {source} dataset")
fn = config.get(source, 'Filename')
path = config.get(source, 'Path')
inputFile = pjoin(path, fn)
if inputFile is None:
# Use this as alternate source of input file (downloaded?)
fn = config.get(source, 'Filename')
path = config.get(source, 'Path')
inputFile = pjoin(path, fn)
self.logger.info(f"Input file set to {inputFile}")

# If input file has no path information, default to tcrm input folder
Expand Down
56 changes: 42 additions & 14 deletions Evaluate/interpolateTracks.py
Expand Up @@ -9,6 +9,9 @@
from Utilities.maputils import latLon2Azi
from Utilities.loadData import loadTrackFile, maxWindSpeed
from Utilities.track import Track, ncSaveTracks
from Utilities.parallel import attemptParallel
import pandas as pd


LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
Expand Down Expand Up @@ -139,7 +142,9 @@ def interpolate(track, delta, interpolation_type=None):
if interpolation_type == 'akima':
# Use the Akima interpolation method:
try:
import akima
from Utilities import akima
nLon = akima.interpolate(timestep, track.Longitude, newtime)
nLat = akima.interpolate(timestep, track.Latitude, newtime)
except ImportError:
LOG.exception(("Akima interpolation module unavailable "
" - default to scipy.interpolate"))
Expand All @@ -148,10 +153,6 @@ def interpolate(track, delta, interpolation_type=None):
nLat = splev(newtime, splrep(timestep, track.Latitude, s=0),
der=0)

else:
nLon = akima.interpolate(timestep, track.Longitude, newtime)
nLat = akima.interpolate(timestep, track.Latitude, newtime)

elif interpolation_type == 'linear':
nLon = interp1d(timestep, track.Longitude, kind='linear')(newtime)
nLat = interp1d(timestep, track.Latitude, kind='linear')(newtime)
Expand Down Expand Up @@ -299,21 +300,48 @@ def parseTracks(configFile, trackFile, source, delta, outputFile=None,
if trackFile.endswith("nc"):
from Utilities.track import ncReadTrackData
tracks = ncReadTrackData(trackFile)
elif trackFile.endswith("xml"):
from pycxml.pycxml import loadfile
dfs = loadfile(trackFile)
tracks = [bom2tcrm(df, i) for i, df in enumerate(dfs)]
else:
tracks = loadTrackFile(configFile, trackFile, source)

results = []

for track in tracks:
if len(track.data) == 1:
results.append(track)
else:
newtrack = interpolate(track, delta, interpolation_type)
results.append(newtrack)
# interpolating is memory intensive - only use a single process
MPI = attemptParallel()
if MPI.COMM_WORLD.rank == 0:

if outputFile:
# Save data to file:
ncSaveTracks(outputFile, results)
for track in tracks:
if len(track.data) == 1:
results.append(track)
else:
newtrack = interpolate(track, delta, interpolation_type)
results.append(newtrack)

if outputFile:
# Save data to file:
ncSaveTracks(outputFile, results)
MPI.COMM_WORLD.barrier()

return results


def bom2tcrm(df, trackId):
    """
    Transforms a dataframe in BoM format into a tcrm track.

    Renames/copies the BoM column names (``validtime``,
    ``translation_speed``, ``pcentre``, ``longitude``, ``latitude``,
    ``poci``, ``rmax``, ``disturbance``) onto the column names TCRM's
    :class:`Track` expects, then wraps the dataframe in a ``Track``.

    :param df: :class:`pandas.DataFrame` of a single disturbance in BoM
        format (as produced by ``pycxml.loadfile``).
    :param trackId: integer identifier to assign to the resulting track.

    :returns: :class:`Track` instance built from *df*.

    NOTE(review): the new columns are added to *df* in place, so the
    caller's dataframe is modified as a side effect — confirm callers
    (e.g. parseTracks) don't reuse the original frame.
    """
    # Parse the BoM validity time strings into proper datetime objects.
    df['Datetime'] = pd.to_datetime(df.validtime)
    df['Speed'] = df.translation_speed
    df['CentralPressure'] = df.pcentre
    df['Longitude'] = df.longitude
    df['Latitude'] = df.latitude
    df['EnvPressure'] = df.poci
    df['rMax'] = df.rmax
    # Per-row track identifier taken from the BoM disturbance column.
    df['trackId'] = df.disturbance.values

    track = Track(df)
    # presumably Track.trackId is an (id, simulation) pair elsewhere in
    # TCRM; here both slots get the same value — TODO confirm.
    track.trackId = [trackId, trackId]
    return track
2 changes: 2 additions & 0 deletions PlotInterface/AutoPlotHazard.py
Expand Up @@ -36,6 +36,8 @@
from Utilities import pathLocator
from Utilities import metutils

from database.queries import locationRecords

from PlotInterface.maps import saveHazardMap
from PlotInterface.curves import saveHazardCurve

Expand Down
10 changes: 5 additions & 5 deletions PlotInterface/curves.py
Expand Up @@ -149,7 +149,7 @@ def subplot(self, axes, subfigure):

xdata, ydata, xlabel, ylabel, title = subfigure

axes.semilogx(xdata, ydata, '-', subsx=xdata)
axes.semilogx(xdata, ydata, '-', subs=xdata)
axes.set_xlabel(xlabel)
axes.set_ylabel(ylabel)
axes.set_title(title)
Expand Down Expand Up @@ -328,7 +328,7 @@ def subplot(self, axes, subfigure):
"""
xdata, ymean, ymax, ymin, xlabel, ylabel, title = subfigure

axes.semilogx(xdata, ymean, lw=2, subsx=xdata)
axes.semilogx(xdata, ymean, lw=2, subs=xdata)
if (ymin[0] > 0) and (ymax[0] > 0):
self.addRange(axes, xdata, ymin, ymax)

Expand Down Expand Up @@ -390,7 +390,7 @@ def subplot(self, axes, subfigure):
log.debug("xvalues = {0} length".format(len(emprp)))
log.debug("xvalues = {0}".format(emprp))

axes.semilogx(xdata, ymean, lw=2, subsx=xdata,
axes.semilogx(xdata, ymean, lw=2, subs=xdata,
label = 'Fitted hazard curve ({0})'.format(fit))
axes.scatter(emprp[emprp > 1], events[emprp > 1], s=100,
color='r', label = 'Empirical ARI')
Expand Down Expand Up @@ -478,8 +478,8 @@ def subplot(self, axes, subfigure):
"""
xdata, y1, y2, y2max, y2min, xlabel, ylabel, title = subfigure

axes.semilogx(xdata, y1, color='r', lw=2, label="", subsx=xdata)
axes.semilogx(xdata, y2, color='k', lw=2, label="", subsx=xdata)
axes.semilogx(xdata, y1, color='r', lw=2, label="", subs=xdata)
axes.semilogx(xdata, y2, color='k', lw=2, label="", subs=xdata)
self.addRange(axes, xdata, y2min, y2max)
ylim = (0., np.max([100, np.ceil(y2.max()/10.)*10.]))
axes.set_ylim(ylim)
Expand Down
35 changes: 11 additions & 24 deletions PlotInterface/maps.py
Expand Up @@ -24,6 +24,8 @@
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
import seaborn as sns

from .scalebar import scale_bar

def levels(maxval, minval=0):
"""
Calculate a nice number of levels between `minval` and `maxval`
Expand Down Expand Up @@ -230,35 +232,20 @@ def maskOceans(self, mapobj, fillcolor="#66ccff"):

mapobj.add_feature(cartopy.feature.OCEAN, color=fillcolor)

def addMapScale(self, mapobj):
def addMapScale(self, axes):
"""
Add a map scale to the curent `Basemap` instance. This
automatically determines a 'nice' length forthe scale bar -
Add a map scale to the current `cartopy.mpl.geoaxes.GeoAxes` instance.
This automatically determines a 'nice' length for the scale bar -
chosen to be approximately 20% of the map width at the centre
of the map.
:param mapobj: Current `Basemap` instance to add the scale bar to.
:param ax: Current `cartopy.mpl.geoaxes.GeoAxes` instance to add the
scale bar to.
see https://stackoverflow.com/questions/32333870/
"""
return # TODO: migrate to cartopy - see https://stackoverflow.com/questions/32333870/

midlon = (mapobj.lonmax - mapobj.lonmin) / 2.
midlat = (mapobj.latmax - mapobj.latmin) / 2.

xmin = mapobj.llcrnrx
xmax = mapobj.urcrnrx
ymin = mapobj.llcrnry
ymax = mapobj.urcrnry

xloc = xmin + 0.15 * abs(xmax - xmin)
yloc = ymin + 0.1 * abs(ymax - ymin)
axes = scale_bar(axes, (0.05, 0.1))

lonloc, latloc = mapobj(xloc, yloc, inverse=True)

# Set scale length to nearest 100-km for 20% of map width
scale_length = 100*int((0.2 * (xmax - xmin) / 1000.)/100)
mapobj.drawmapscale(lonloc, latloc, midlon, midlat, scale_length,
barstyle='fancy', zorder=10)

def createMap(self, axes, xgrid, ygrid, map_kwargs):
"""
Expand Down Expand Up @@ -296,7 +283,7 @@ def subplot(self, axes, subfigure):
self.addGraticule(axes, mapobj)
self.addCoastline(mapobj)
self.fillContinents(mapobj)
self.addMapScale(mapobj)
self.addMapScale(axes)

def plot(self):
"""
Expand Down Expand Up @@ -380,7 +367,7 @@ def subplot(self, axes, subfigure):
cmap = selectColormap(lvls)
CS = mapobj.contourf(mx, my, data, levels=lvls,
extend='both', cmap=cmap)
CB = self.colorbar(CS, ticks=lvls[::2], ax=axes, extend='both')
CB = self.colorbar(CS, ticks=lvls[::2], ax=axes)
CB.set_label(cbarlab)
axes.set_title(title)
self.addGraticule(axes, mapobj)
Expand Down
2 changes: 1 addition & 1 deletion PlotInterface/plotTimeseries.py
Expand Up @@ -49,7 +49,7 @@ def loadTimeseriesData(datafile):
comments='#', delimiter=',', skip_header=1,
converters=INPUT_CNVT)
except ValueError:
logging.warn("Timeseries data file is empty - returning empty array")
logging.warning("Timeseries data file is empty - returning empty array")
return np.empty(0, dtype={
'names': INPUT_COLS,
'formats': INPUT_FMTS})
Expand Down

0 comments on commit a05f812

Please sign in to comment.