Skip to content

Commit 5bf4024

Browse files
Merge pull request #84 from MET-OM/add_norkyst_v3
Add norkyst v3
2 parents 42625ea + 443dc7d commit 5bf4024

3 files changed

Lines changed: 99 additions & 1 deletion

File tree

.github/workflows/tests.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ jobs:
1616
strategy:
1717
matrix:
1818
os: ["ubuntu", "windows"]
19-
python-version: [ "3.11", "3.12" ]
19+
python-version: [ "3.12" ]
2020
steps:
2121
- uses: actions/checkout@v4
2222

metocean_api/ts/internal/metno/met_products.py

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,8 @@ def find_product(name: str) -> Product:
4242
return NORA3Atm3hrSub(name)
4343
case "NORKYST800":
4444
return Norkyst800(name)
45+
case "NORKYST800_V3":
46+
return Norkyst800_v3(name)
4547
case "NorkystDA_surface":
4648
return NorkystDASurface(name)
4749
case "NorkystDA_zdepth":
@@ -509,6 +511,93 @@ def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
509511
return super()._flatten_data_structure(ds, **flatten_dims)
510512

511513

514+
515+
class Norkyst800_v3(MetProduct):
    """Norkyst v3 800 m coastal ocean model, served as daily zdepth files
    on thredds.met.no (romshindcast archive)."""

    @property
    def convention(self) -> Convention:
        # Oceanographic direction convention for current components.
        return Convention.OCEANIC

    def get_default_variables(self):
        """Variables extracted when the caller does not request a specific set."""
        return ["salinity", "temperature", "u_eastward", "v_northward", "zeta"]

    def get_dates(self, start_date, end_date):
        """Return one timestamp per day; the archive stores one file per day."""
        return pd.date_range(start=start_date, end=end_date, freq="D")

    def _get_url_info(self, date):
        """Return the OPeNDAP URL of the daily file covering ``date``.

        Args:
            date: a pandas Timestamp (one element of ``get_dates``' output).

        Raises:
            ValueError: if ``date`` is outside the archive's valid range.
        """
        if pd.Timestamp("2012-01-05 00:00:00") <= date < pd.Timestamp("2026-01-01 00:00:00"):
            return (
                "https://thredds.met.no/thredds/dodsC/romshindcast/norkyst_v3/zdepth/"
                + date.strftime("%Y") + "/" + date.strftime("%m")
                + "/norkyst800-" + date.strftime("%Y%m%d") + ".nc"
            )
        raise ValueError(f"Unhandled date {str(date)} for product {self.name}. Data only valid from 2012-01-05 onwards.")

    def _get_near_coord(self, url: str, lon: float, lat: float):
        """Find the grid indices closest to (lon, lat).

        Returns:
            A ``{"X": x, "Y": y}`` selection dict plus the longitude and
            latitude of that nearest grid point.
        """
        with xr.open_dataset(url) as ds:
            x, y = aux_funcs.find_nearest_cart_coord(ds.lon, ds.lat, lon, lat)
            lon_near = ds.lon.sel(Y=y, X=x).values[0][0]
            lat_near = ds.lat.sel(Y=y, X=x).values[0][0]
            return {"X": x, "Y": y}, lon_near, lat_near

    def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
        """
        Extract a time series of the nearest grid point (lon, lat) from
        Norkyst800_v3.
        """
        if ts.variable == [] or ts.variable is None:
            ts.variable = self.get_default_variables()
        ts.variable.append("lon")  # keep info of regular lon
        ts.variable.append("lat")  # keep info of regular lat
        dates = self.get_dates(start_date=ts.start_time, end_date=ts.end_time)
        tempfiles = aux_funcs.get_tempfiles(ts.product, ts.lon, ts.lat, dates)
        selection = None
        lon_near = None
        lat_near = None

        # Extract the requested point from each daily file into a temp file.
        any_successful_download = False
        failures = []
        for i, date in enumerate(dates):
            url = self._get_url_info(date)
            print(f"Downloading {url}")

            # The horizontal grid is identical in every daily file, so the
            # nearest-point lookup only needs to happen once, not per date.
            if selection is None:
                selection, lon_near, lat_near = self._get_near_coord(url, ts.lon, ts.lat)

            if use_cache and os.path.exists(tempfiles[i]):
                print(f"Found cached file {tempfiles[i]}. Using this instead")
            else:
                try:
                    dataset = xr.open_dataset(url)
                except Exception as e:
                    print(f"Could not open {url} due to {e}")
                    failures.append(i)
                    # Give up early when nothing has worked after several
                    # attempts; the server is most likely down.
                    if (i > 10) and not any_successful_download:
                        raise ValueError("Unable to access any files. This typically means the server is down.")
                    continue
                # Reduce to the wanted variables and coordinates; close the
                # remote handle once the subset is written to disk.
                with dataset:
                    subset = dataset[ts.variable].sel(selection).squeeze(drop=True)
                    subset.to_netcdf(tempfiles[i])
                any_successful_download = True

        ts.lat_data = lat_near
        ts.lon_data = lon_near

        # Drop temp files for dates that could not be downloaded.
        tempfiles = [tempfiles[i] for i in range(len(dates)) if i not in failures]
        return self._combine_temporary_files(ts, save_csv, save_nc, use_cache, tempfiles, lon_near, lat_near, height=ts.height)

    def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
        # Sea-surface elevation is a 2-D field; if it carries a depth
        # dimension, keep only the surface value before flattening.
        if "zeta" in ds.variables:
            # Just use the surface value
            if "depth" in ds["zeta"].dims:
                ds["zeta"] = ds.zeta.sel(depth=0)

        return super()._flatten_data_structure(ds, **flatten_dims)
600+
512601
class NorkystDASurface(MetProduct):
513602

514603
@property

tests/test_extract_data.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,15 @@ def test_norkyst_800():
165165
assert df_ts.data.shape == (48, 65)
166166
__compare_loaded_data(df_ts)
167167

168+
def test_norkyst_800_v3():
    """Extract one day of Norkyst v3 data at a point off the Norwegian coast."""
    series = ts.TimeSeries(
        lon=3.73,
        lat=64.60,
        start_time='2020-09-14',
        end_time='2020-09-14',
        product='NORKYST800_V3',
    )
    # Import data from thredds.met.no
    series.import_data(save_csv=SAVE_CSV, save_nc=SAVE_NC, use_cache=USE_CACHE)
    print(series.lat_data, series.lon_data, series.data.shape)
    nearest_point = (series.lat_data, series.lon_data)
    assert nearest_point == (64.5983217588851, 3.7289053730237156)
    assert series.data.shape == (24, 101)
    __compare_loaded_data(series)
176+
168177
def test_norkyst_da_zdepth():
169178
# We want to collect a subset
170179
depth = [0.0, 500.0, 2500.00]

0 commit comments

Comments
 (0)