Commit 454e56e

add argo

1 parent d2a5e64

1 file changed (+104 -75)

notebooks/wp5/steric_sea_level_ora5.ipynb

Lines changed: 104 additions & 75 deletions
@@ -204,7 +204,7 @@
 "    for dim in da.dims:\n",
 "        if dim == \"time\":\n",
 "            chunks.append(1)\n",
-"        elif dim in [\"x\", \"y\"]:\n",
+"        elif dim in [\"x\", \"y\", \"latitude\", \"longitude\"]:\n",
 "            chunks.append(200)\n",
 "        elif dim == \"deptht\":\n",
 "            chunks.append(15)\n",
@@ -222,12 +222,7 @@
 "    return ssl\n",
 "\n",
 "\n",
-"def compute_ssl(\n",
-"    collection_id, request, prefix, lon_slice, lat_slice, **download_kwargs\n",
-"):\n",
-"    ds = download.download_and_transform(\n",
-"        collection_id, request, transform_func=compute_gsw_ds, **download_kwargs\n",
-"    )\n",
+"def compute_ssl_timeseries_from_ds(ds, prefix, lon_slice, lat_slice):\n",
 "    ds = utils.regionalise(ds, lon_slice=lon_slice, lat_slice=lat_slice)\n",
 "    if prefix == \"\":\n",
 "        rho = ds[\"rho\"]\n",
@@ -237,7 +232,8 @@
 "        rho = gsw.rho(ds[\"SA\"], ds[\"CT\"].mean(\"time\"), ds[\"p\"])\n",
 "    else:\n",
 "        raise NotImplementedError(f\"{prefix=}\")\n",
-"    return compute_ssl_from_rho(rho, prefix=prefix).rename(f\"{prefix}ssl\")\n",
+"    ssl = compute_ssl_from_rho(rho, prefix=prefix).rename(f\"{prefix}ssl\")\n",
+"    return diagnostics.spatial_weighted_mean(ssl)\n",
 "\n",
 "\n",
 "@cacholote.cacheable\n",
@@ -249,15 +245,74 @@
 "    lat_slice,\n",
 "    **download_kwargs,\n",
 "):\n",
-"    ssl = compute_ssl(\n",
-"        collection_id,\n",
-"        request,\n",
-"        prefix,\n",
-"        lon_slice=lon_slice,\n",
-"        lat_slice=lat_slice,\n",
-"        **download_kwargs,\n",
+"    ds = download.download_and_transform(\n",
+"        collection_id, request, transform_func=compute_gsw_ds, **download_kwargs\n",
 "    )\n",
-"    return diagnostics.spatial_weighted_mean(ssl)"
+"    return compute_ssl_timeseries_from_ds(\n",
+"        ds, prefix, lon_slice=lon_slice, lat_slice=lat_slice\n",
+"    )\n",
+"\n",
+"\n",
+"def preprocess(ds):\n",
+"    # Naming\n",
+"    ds = ds.rename({var: var.lower() for var in ds.variables})\n",
+"    # Time\n",
+"    ds[\"time\"].attrs[\"calendar\"] = \"360_day\"\n",
+"    ds = xr.decode_cf(ds)\n",
+"    ds[\"time\"].attrs = {}\n",
+"    ds[\"time\"].encoding = {}\n",
+"    # Depth\n",
+"    ds[\"depth\"] = -gsw.z_from_p(ds[\"pressure\"], ds[\"latitude\"]).mean(\n",
+"        \"latitude\", keep_attrs=True\n",
+"    )\n",
+"    ds[\"depth\"].attrs.update({\"positive\": \"down\", \"long_name\": \"Depth from pressure\"})\n",
+"    return ds.swap_dims(pressure=\"depth\")\n",
+"\n",
+"\n",
+"@cacholote.cacheable\n",
+"def get_argo(year, month):\n",
+"    # Get climatology\n",
+"    filenames = []\n",
+"    for var in [\"Temperature\", \"Salinity\"]:\n",
+"        url = f\"https://sio-argo.ucsd.edu/RG/RG_ArgoClim_{var}_2019.nc.gz\"\n",
+"        filename = pooch.retrieve(\n",
+"            url=url, known_hash=None, processor=pooch.Decompress()\n",
+"        )\n",
+"        filenames.append(filename)\n",
+"    ds = xr.open_mfdataset(filenames, preprocess=preprocess, decode_times=False)\n",
+"    ds_clima = ds.drop_dims(\"time\")\n",
+"\n",
+"    # Get anomalies\n",
+"    ds = ds.sel(time=slice(f\"{year}-{month:02d}\", f\"{year}-{month:02d}\"))\n",
+"    if not ds.sizes[\"time\"]:\n",
+"        url = f\"https://sio-argo.ucsd.edu/RG/RG_ArgoClim_{year}{month:02d}_2019.nc.gz\"\n",
+"        filename = pooch.retrieve(\n",
+"            url=url, known_hash=None, processor=pooch.Decompress()\n",
+"        )\n",
+"        ds = xr.open_mfdataset(filename, preprocess=preprocess, decode_times=False)\n",
+"        ds = ds.sel(time=slice(f\"{year}-{month:02d}\", f\"{year}-{month:02d}\"))\n",
+"\n",
+"    # Compute values\n",
+"    dataarrays = []\n",
+"    for var in [\"salinity\", \"temperature\"]:\n",
+"        da = ds_clima[f\"argo_{var}_mean\"] + ds[f\"argo_{var}_anomaly\"]\n",
+"        dataarrays.append(da.rename(var))\n",
+"    ds = xr.merge(dataarrays)\n",
+"\n",
+"    # Compute gsw dataset\n",
+"    ds = ds.rename(depth=\"deptht\", temperature=\"votemper\", salinity=\"vosaline\")\n",
+"    ds = compute_gsw_ds(ds)\n",
+"    return ds\n",
+"\n",
+"\n",
+"@cacholote.cacheable\n",
+"def compute_ssl_timeseries_argo(prefix, lon_slice, lat_slice, year_start, year_stop):\n",
+"    datasets = []\n",
+"    for year in range(year_start, year_stop + 1):\n",
+"        for month in range(1, 13):\n",
+"            datasets.append(get_argo(year, month))\n",
+"    ds = xr.concat(datasets, \"time\")\n",
+"    return compute_ssl_timeseries_from_ds(ds, prefix, lon_slice, lat_slice)"
 ]
 },
 {
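Note: `get_argo` fetches the Roemmich-Gilson Argo climatology once (temperature and salinity means plus anomalies through 2019) and falls back to the per-month extension files for later dates, while `preprocess` decodes the 360_day time axis and derives a depth coordinate from pressure. The sign flip on `gsw.z_from_p` is there because TEOS-10 returns height, which is negative below the sea surface. A tiny self-contained check of that conversion (standalone arrays, values approximate):

import gsw
import numpy as np

pressure = np.array([0.0, 10.0, 100.0, 1000.0])  # sea pressure in dbar
depth = -gsw.z_from_p(pressure, 30.0)  # metres, positive down, at 30°N
# depth grows slightly slower than pressure: roughly 990 m at 1000 dbar.
print(depth)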
@@ -315,7 +370,7 @@
 "id": "11",
 "metadata": {},
 "source": [
-"## Quick and dirty plot"
+"## Download and transform ARGO"
 ]
 },
 {
@@ -325,17 +380,34 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"da = ds_reanalysis.to_dataarray()\n",
-"da.plot(hue=\"variable\")\n",
-"plt.grid()"
+"dataarrays = []\n",
+"for prefix in [\"\", \"thermo\", \"halo\"]:\n",
+"    name = \"_\".join(([prefix] if prefix else []) + [\"ssl\"])\n",
+"    print(f\"{name = }\")\n",
+"    da = compute_ssl_timeseries_argo(\n",
+"        prefix=prefix,\n",
+"        lon_slice=lon_slice,\n",
+"        lat_slice=lat_slice,\n",
+"        year_start=year_start,\n",
+"        year_stop=year_stop,\n",
+"    )\n",
+"    dataarrays.append(da.rename(name))\n",
+"ds_argo = xr.merge(dataarrays)\n",
+"# Align\n",
+"ds_argo[\"time\"] = ds_argo[\"time\"].convert_calendar(\n",
+"    \"proleptic_gregorian\", align_on=\"date\"\n",
+")\n",
+"if (ds_argo[\"time\"] == ds_reanalysis[\"time\"]).all():\n",
+"    ds_argo[\"time\"] = ds_reanalysis[\"time\"]\n",
+"del dataarrays"
 ]
 },
 {
 "cell_type": "markdown",
 "id": "13",
 "metadata": {},
 "source": [
-"## Download and transform ARGO"
+"## Quick and dirty plot"
 ]
 },
 {
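Note: the new cell builds the Argo time series per steric component, then converts the 360_day timestamps to a standard calendar so they can be compared with the reanalysis axis; `align_on="date"` matches timestamps by year/month/day (dates missing in the target calendar are dropped, which month starts avoid). A minimal sketch of that conversion on synthetic data:

import xarray as xr

# A small 360_day time axis (cftime), as decoded from the RG Argo files.
da = xr.DataArray(
    [0, 1, 2],
    coords={
        "time": xr.cftime_range(
            "2019-01-01", periods=3, freq="MS", calendar="360_day"
        )
    },
    dims="time",
)
da = da.convert_calendar("proleptic_gregorian", align_on="date")
print(da["time"].dtype)  # datetime64[ns] once converted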
@@ -345,60 +417,17 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"def preprocess(ds):\n",
-"    # Naming\n",
-"    ds = ds.rename({var: var.lower() for var in ds.variables})\n",
-"    # Time\n",
-"    ds[\"time\"].attrs[\"calendar\"] = \"360_day\"\n",
-"    ds = xr.decode_cf(ds)\n",
-"    # Depth\n",
-"    ds[\"depth\"] = -gsw.z_from_p(ds[\"pressure\"], ds[\"latitude\"]).mean(\n",
-"        \"latitude\", keep_attrs=True\n",
-"    )\n",
-"    ds[\"depth\"].attrs.update({\"positive\": \"down\", \"long_name\": \"Depth from pressure\"})\n",
-"    return ds.swap_dims(pressure=\"depth\")\n",
-"\n",
-"\n",
-"# First dataset\n",
-"filenames = []\n",
-"for var in [\"Temperature\", \"Salinity\"]:\n",
-"    url = f\"https://sio-argo.ucsd.edu/RG/RG_ArgoClim_{var}_2019.nc.gz\"\n",
-"    filename = pooch.retrieve(url=url, known_hash=None, processor=pooch.Decompress())\n",
-"    filenames.append(filename)\n",
-"with xr.set_options(use_new_combine_kwarg_defaults=True):\n",
-"    ds_argo_1 = xr.open_mfdataset(filenames, preprocess=preprocess, decode_times=False)\n",
-"\n",
-"# Second dataset\n",
-"filenames = []\n",
-"for year in range(2019, year_stop + 1):\n",
-"    for month in range(1, 13):\n",
-"        url = f\"https://sio-argo.ucsd.edu/RG/RG_ArgoClim_{year}{month:02d}_2019.nc.gz\"\n",
-"        filename = pooch.retrieve(\n",
-"            url=url, known_hash=None, processor=pooch.Decompress()\n",
-"        )\n",
-"        filenames.append(filename)\n",
-"with xr.set_options(use_new_combine_kwarg_defaults=True):\n",
-"    ds_argo_2 = xr.open_mfdataset(filename, preprocess=preprocess, decode_times=False)\n",
-"\n",
-"# Combine\n",
-"dataarrays = []\n",
-"for var in [\"salinity\", \"temperature\"]:\n",
-"    da = ds_argo_1[f\"argo_{var}_mean\"]\n",
-"    units = da.units\n",
-"    da = xr.combine_by_coords(\n",
-"        [\n",
-"            da + ds_argo_1[f\"argo_{var}_anomaly\"],\n",
-"            da + ds_argo_2[f\"argo_{var}_anomaly\"],\n",
-"        ]\n",
-"    )\n",
-"    da.attrs[\"units\"] = units\n",
-"    dataarrays.append(da.rename(var))\n",
-"with xr.set_options(use_new_combine_kwarg_defaults=True):\n",
-"    ds_argo = xr.merge(dataarrays)\n",
-"\n",
-"# Selection\n",
-"ds_argo = ds_argo.sel(time=slice(str(year_start), str(year_stop)))\n",
-"ds_argo = utils.regionalise(ds_argo, lon_slice=lon_slice, lat_slice=lat_slice)"
+"ds = xr.concat(\n",
+"    [\n",
+"        ds_reanalysis.expand_dims(product=[\"ORAS5\"]),\n",
+"        ds_argo.expand_dims(product=[\"ARGO\"]),\n",
+"    ],\n",
+"    \"product\",\n",
+")\n",
+"da = ds.to_dataarray()\n",
+"facet = da.plot(hue=\"variable\", col=\"product\")\n",
+"for ax in facet.axs.flatten():\n",
+"    ax.grid()"
 ]
 },
 {