diff --git a/CHANGELOG.md b/CHANGELOG.md
index dd94a2063..13b8b347c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,6 +12,7 @@ Code freeze date: YYYY-MM-DD
 
 ### Added
 
+- `climada.hazard.tc_tracks.TCTracks.from_FAST` function [#993](https://github.com/CLIMADA-project/climada_python/pull/993)
 - Add `osm-flex` package to CLIMADA core [#981](https://github.com/CLIMADA-project/climada_python/pull/981)
 - `doc.tutorial.climada_entity_Exposures_osm.ipynb` tutorial explaining how to use `osm-flex`with CLIMADA
 - `climada.util.coordinates.bounding_box_global` function [#980](https://github.com/CLIMADA-project/climada_python/pull/980)
diff --git a/climada/hazard/tc_tracks.py b/climada/hazard/tc_tracks.py
index fce41053a..4fcd53ec5 100644
--- a/climada/hazard/tc_tracks.py
+++ b/climada/hazard/tc_tracks.py
@@ -162,6 +162,7 @@
     "SI": 1005,
     "WP": 1005,
     "SP": 1004,
+    "AU": 1004,
 }
 """Basin-specific default environmental pressure"""
 
@@ -1619,6 +1620,142 @@ def from_netcdf(cls, folder_name):
             data.append(track)
         return cls(data)
 
+    @staticmethod
+    def define_tc_category_FAST(max_sust_wind: np.ndarray, units: str = "kn") -> int:
+        """Define the category of a tropical cyclone according to the Saffir-Simpson scale.
+
+        Parameters
+        ----------
+        max_sust_wind : np.ndarray
+            Maximum sustained wind speeds along the track.
+        units : str
+            Wind speed units, "kn" or "m/s". Default: "kn"
+
+        Returns
+        -------
+        category : int
+            -1: "Tropical Depression",
+            0: "Tropical Storm",
+            1: "Hurricane Cat. 1",
+            2: "Hurricane Cat. 2",
+            3: "Hurricane Cat. 3",
+            4: "Hurricane Cat. 4",
+            5: "Hurricane Cat. 5",
+        """
+
+        max_sust_wind = max_sust_wind.astype(
+            float
+        )  # avoid casting errors if max_sust_wind is an integer array
+        # convert to knots if needed; the SAFFIR_SIM_CAT thresholds are in knots
+        max_sust_wind *= 1.943844 if units == "m/s" else 1
+
+        max_wind = np.nanmax(max_sust_wind)
+        category_test = np.full(len(SAFFIR_SIM_CAT), max_wind) < np.array(
+            SAFFIR_SIM_CAT
+        )
+        category = np.argmax(category_test) - 1
+
+        return category
+
+    @classmethod
+    def from_FAST(cls, folder_name: str):
+        """Create a new TCTracks object from NetCDF files generated by the FAST model,
+        restructuring the data into the xarray.Dataset layout used by CLIMADA and
+        estimating the central pressure and radius of maximum wind.
+
+        Model GitHub Repository: https://github.com/linjonathan/tropical_cyclone_risk
+        Model Publication: https://agupubs.onlinelibrary.wiley.com/doi/epdf/10.1029/2023MS003686
+
+        Parameters
+        ----------
+        folder_name : str
+            Folder name from where to read files.
+
+        Returns
+        -------
+        tracks : TCTracks
+            TCTracks object with tracks data from the given directory of NetCDF files.
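+
+        Example
+        -------
+        Illustrative sketch only; ``"./fast_output/"`` stands for a placeholder local
+        folder containing FAST NetCDF track files.
+
+        >>> tracks = TCTracks.from_FAST("./fast_output/")
+        >>> len(tracks.data)  # number of tracks read from the folder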
+        """
+
+        LOGGER.info("Reading %s files.", len(get_file_names(folder_name)))
+        data = []
+        for file in get_file_names(folder_name):
+            if Path(file).suffix != ".nc":
+                continue
+            with xr.open_dataset(file) as dataset:
+                for i in dataset.n_trk:
+
+                    # Select track
+                    track = dataset.sel(n_trk=i)
+                    # Truncate the track at the first NaN value
+                    lon = track.lon_trks.data
+                    last_valid_index = np.where(np.isnan(lon))[0][0]
+                    track = track.isel(time=slice(0, last_valid_index))
+                    # Select lat, lon
+                    lat = track.lat_trks.data
+                    lon = track.lon_trks.data
+                    # Convert lon from [0, 360] to [-180, 180]
+                    lon = ((lon + 180) % 360) - 180
+                    # Convert time (seconds since the storm's year/month) to datetime64
+                    reference_time = (
+                        f"{track.tc_years.item()}-{int(track.tc_month.item())}-01"
+                    )
+                    time = pd.to_datetime(
+                        track.time.data, unit="s", origin=reference_time
+                    ).astype("datetime64[s]")
+                    # Define variables: wind in knots, environmental and central pressure
+                    max_sustained_wind_knots = track.vmax_trks.data * 1.943844
+                    env_pressure = BASIN_ENV_PRESSURE[track.tc_basins.data.item()]
+                    cen_pres = _estimate_pressure(
+                        np.full(lat.shape, np.nan), lat, lon, max_sustained_wind_knots
+                    )
+
+                    data.append(
+                        xr.Dataset(
+                            {
+                                # constant time step taken from the second time value (seconds)
+                                "time_step": (
+                                    "time",
+                                    np.full(time.shape[0], track.time.data[1]),
+                                ),
+                                "max_sustained_wind": ("time", track.vmax_trks.data),
+                                "central_pressure": ("time", cen_pres),
+                                "radius_max_wind": (
+                                    "time",
+                                    estimate_rmw(np.full(lat.shape, np.nan), cen_pres),
+                                ),
+                                "environmental_pressure": (
+                                    "time",
+                                    np.full(time.shape[0], env_pressure),
+                                ),
+                                "basin": (
+                                    "time",
+                                    np.full(time.shape[0], track.tc_basins.data.item()),
+                                ),
+                            },
+                            coords={
+                                "time": ("time", time),
+                                "lat": ("time", lat),
+                                "lon": ("time", lon),
+                            },
+                            attrs={
+                                "max_sustained_wind_unit": "m/s",
+                                "central_pressure_unit": "hPa",
+                                "name": f"storm_{track.n_trk.item()}",
+                                "sid": track.n_trk.item(),
+                                "orig_event_flag": True,
+                                "data_provider": "FAST",
+                                "id_no": track.n_trk.item(),
+                                "category": TCTracks.define_tc_category_FAST(
+                                    max_sustained_wind_knots
+                                ),
+                            },
+                        )
+                    )
+
+        return cls(data)
+
     def write_hdf5(self, file_name, complevel=5):
         """Write TC tracks in NetCDF4-compliant HDF5 format.
@@ -2665,12 +2802,12 @@ def ibtracs_fit_param(explained, explanatory, year_range=(1980, 2019), order=1):
     return sm_results
 
 
-def ibtracs_track_agency(ds_sel):
+def ibtracs_track_agency(track):
     """Get preferred IBTrACS agency for each entry in the dataset.
 
     Parameters
    ----------
-    ds_sel : xarray.Dataset
+    track : xarray.Dataset
         Subselection of original IBTrACS NetCDF dataset.
 
     Returns
@@ -2678,7 +2815,7 @@
     agency_pref : list of str
         Names of IBTrACS agencies in order of preference.
     track_agency_ix : xarray.DataArray of ints
-        For each entry in `ds_sel`, the agency to use, given as an index into `agency_pref`.
+        For each entry in `track`, the agency to use, given as an index into `agency_pref`.
""" agency_pref = ["wmo"] + IBTRACS_AGENCIES agency_map = {a.encode("utf-8"): i for i, a in enumerate(agency_pref)} @@ -2687,11 +2824,11 @@ def ibtracs_track_agency(ds_sel): ) agency_map[b""] = agency_map[b"wmo"] agency_fun = lambda x: agency_map[x] - if "track_agency" not in ds_sel.data_vars.keys(): - ds_sel["track_agency"] = ds_sel["wmo_agency"].where( - ds_sel["wmo_agency"] != b"", ds_sel["usa_agency"] + if "track_agency" not in track.data_vars.keys(): + track["track_agency"] = track["wmo_agency"].where( + track["wmo_agency"] != b"", track["usa_agency"] ) - track_agency_ix = xr.apply_ufunc(agency_fun, ds_sel["track_agency"], vectorize=True) + track_agency_ix = xr.apply_ufunc(agency_fun, track["track_agency"], vectorize=True) return agency_pref, track_agency_ix diff --git a/climada/hazard/test/data/FAST_test_tracks.nc b/climada/hazard/test/data/FAST_test_tracks.nc new file mode 100644 index 000000000..dfe9d8b71 Binary files /dev/null and b/climada/hazard/test/data/FAST_test_tracks.nc differ diff --git a/climada/hazard/test/test_tc_tracks.py b/climada/hazard/test/test_tc_tracks.py index 56005b51a..8330771f3 100644 --- a/climada/hazard/test/test_tc_tracks.py +++ b/climada/hazard/test/test_tc_tracks.py @@ -44,6 +44,7 @@ TEST_TRACK_EMANUEL = DATA_DIR.joinpath("emanuel_test_tracks.mat") TEST_TRACK_EMANUEL_CORR = DATA_DIR.joinpath("temp_mpircp85cal_full.mat") TEST_TRACK_CHAZ = DATA_DIR.joinpath("chaz_test_tracks.nc") +TEST_TRACK_FAST = DATA_DIR.joinpath("FAST_test_tracks.nc") TEST_TRACK_STORM = DATA_DIR.joinpath("storm_test_tracks.txt") TEST_TRACKS_ANTIMERIDIAN = DATA_DIR.joinpath("tracks-antimeridian") TEST_TRACKS_LEGACY_HDF5 = DATA_DIR.joinpath("tctracks_hdf5_legacy.nc") @@ -631,6 +632,62 @@ def test_from_simulations_storm(self): tc_track = tc.TCTracks.from_simulations_storm(TEST_TRACK_STORM, years=[7]) self.assertEqual(len(tc_track.data), 0) + def test_define_tc_category_FAST(self): + """test that the correct category is assigned to a TC from FAST model.""" + + max_wind = np.array([20, 72, 36, 50]) # knots + category1 = tc.TCTracks.define_tc_category_FAST(max_wind) + category2 = tc.TCTracks.define_tc_category_FAST( + max_sust_wind=max_wind, units="m/s" + ) + self.assertEqual(category1, 1) + self.assertEqual(category2, 5) + + def test_from_FAST(self): + """test the correct import of netcdf files from FAST model and the conversion to a + different xr.array structure compatible with CLIMADA.""" + + tc_track = tc.TCTracks.from_FAST(TEST_TRACK_FAST) + + expected_attributes = { + "max_sustained_wind_unit": "m/s", + "central_pressure_unit": "hPa", + "name": "storm_0", + "sid": 0, + "orig_event_flag": True, + "data_provider": "FAST", + "id_no": 0, + "category": 1, + } + + self.assertIsInstance( + tc_track, tc.TCTracks, "tc_track is not an instance of TCTracks" + ) + self.assertIsInstance( + tc_track.data, list, "tc_track.data is not an instance of list" + ) + self.assertIsInstance( + tc_track.data[0], + xr.Dataset, + "tc_track.data[0] not an instance of xarray.Dataset", + ) + self.assertEqual(len(tc_track.data), 5) + self.assertEqual(tc_track.data[0].attrs, expected_attributes) + self.assertEqual(list(tc_track.data[0].coords.keys()), ["time", "lat", "lon"]) + self.assertEqual( + tc_track.data[0].time.values[0], + np.datetime64("2025-09-01T00:00:00.000000000"), + ) + self.assertEqual(tc_track.data[0].lat.values[0], 17.863591350508266) + self.assertEqual(tc_track.data[0].lon.values[0], -71.76441758319629) + self.assertEqual(len(tc_track.data[0].time), 35) + 
+        self.assertEqual(tc_track.data[0].time_step[0], 10800)
+        self.assertEqual(
+            tc_track.data[0].max_sustained_wind.values[10], 24.71636959089841
+        )
+        self.assertEqual(tc_track.data[0].environmental_pressure.data[0], 1010)
+        self.assertEqual(tc_track.data[0].basin[0], "NA")
+
     def test_to_geodataframe_points(self):
         """Conversion of TCTracks to GeoDataFrame using Points."""
         tc_track = tc.TCTracks.from_processed_ibtracs_csv(TEST_TRACK)
diff --git a/doc/tutorial/climada_hazard_TropCyclone.ipynb b/doc/tutorial/climada_hazard_TropCyclone.ipynb
index 0613fa7fd..28d80d1ac 100644
--- a/doc/tutorial/climada_hazard_TropCyclone.ipynb
+++ b/doc/tutorial/climada_hazard_TropCyclone.ipynb
@@ -1802,7 +1802,7 @@
     " \n",
     "### d) Load TC tracks from other sources\n",
     "\n",
-    "In addition to the [historical records of TCs (IBTrACS)](#Part1.a), the [probabilistic extension](#Part1.b) of these tracks, and the [ECMWF Forecast tracks](#Part1.c), CLIMADA also features functions to read in synthetic TC tracks from other sources. These include synthetic storm tracks from Kerry Emanuel's coupled statistical-dynamical model (Emanuel et al., 2006 as used in Geiger et al., 2016), synthetic storm tracks from a second coupled statistical-dynamical model (CHAZ) (as described in Lee et al., 2018), and synthetic storm tracks from a fully statistical model (STORM) Bloemendaal et al., 2020). However, these functions are partly under development and/or targeted at advanced users of CLIMADA in the context of very specific use cases. They are thus not covered in this tutorial."
+    "In addition to the [historical records of TCs (IBTrACS)](#Part1.a), the [probabilistic extension](#Part1.b) of these tracks, and the [ECMWF Forecast tracks](#Part1.c), CLIMADA also features functions to read in synthetic TC tracks from other sources. These include synthetic storm tracks from Kerry Emanuel's coupled statistical-dynamical model (Emanuel et al., 2006 as used in Geiger et al., 2016), from [FAST](https://github.com/linjonathan/tropical_cyclone_risk?tab=readme-ov-file), an open-source derivative of Kerry Emanuel's model, synthetic storm tracks from a second coupled statistical-dynamical model (CHAZ) (as described in Lee et al., 2018), and synthetic storm tracks from a fully statistical model (STORM) (Bloemendaal et al., 2020). However, these functions are partly under development and/or targeted at advanced users of CLIMADA in the context of very specific use cases. They are thus not covered in this tutorial."
    ]
   },
  {
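Usage sketch (illustrative, not part of the patch): a minimal example of how the new reader could be called once this change is merged. `"./fast_output/"` is a placeholder for a local folder of FAST NetCDF track files; the calls below rely only on what the diff introduces (`TCTracks.from_FAST`) and on the attributes it sets.

from climada.hazard import TCTracks

# Read all FAST NetCDF files found in the folder (placeholder path).
tracks = TCTracks.from_FAST("./fast_output/")

# Each track is an xarray.Dataset with CLIMADA-style variables and attributes.
print(len(tracks.data), "tracks read")
print(tracks.data[0].attrs["data_provider"])  # "FAST"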