
Commit 5048f49

update requirements
1 parent 35e9821 commit 5048f49

File tree

3 files changed: +60 -122 lines changed

requirements/environment.yml

Lines changed: 4 additions & 4 deletions
@@ -24,10 +24,10 @@ dependencies:
   - aws-crt-cpp=0.28.3
   - aws-sdk-cpp=1.11.407
   - azure-core-cpp=1.13.0
-  - azure-identity-cpp=1.8.0
-  - azure-storage-blobs-cpp=12.12.0
-  - azure-storage-common-cpp=12.7.0
-  - azure-storage-files-datalake-cpp=12.11.0
+  - azure-identity-cpp=1.9.0
+  - azure-storage-blobs-cpp=12.13.0
+  - azure-storage-common-cpp=12.8.0
+  - azure-storage-files-datalake-cpp=12.12.0
   - black=24.8.0
   - blosc=1.21.6
   - bokeh=3.5.2

tests/util/test_model_output_parser.py

Lines changed: 29 additions & 11 deletions
@@ -49,16 +49,37 @@ def test_get_ds_success(mock_ds_grib):
 
 
 @pytest.mark.parametrize(
-    "to_xarray_return_value",
+    "to_xarray_return_value, expected_result",
     [
-        (KeyError(), "valid_xarray"),
-        (KeyError(), KeyError(), "valid_xarray"),
-        (KeyError(), KeyError(), KeyError(), "valid_xarray"),
-        (KeyError(), KeyError(), KeyError(), KeyError(), "valid_xarray"),
-        (KeyError(), KeyError(), KeyError(), KeyError(), KeyError(), "valid_xarray"),
+        ((KeyError(), "valid_stepType_xarray"), ["valid_stepType_xarray"]),
+        (
+            (KeyError(), KeyError(), "valid_numberOfPoints_xarray"),
+            ["valid_numberOfPoints_xarray"],
+        ),
+        (
+            (KeyError(), KeyError(), KeyError(), "valid_stepUnits_xarray"),
+            ["valid_stepUnits_xarray"],
+        ),
+        (
+            (KeyError(), KeyError(), KeyError(), KeyError(), "valid_dataType_xarray"),
+            ["valid_dataType_xarray"],
+        ),
+        (
+            (
+                KeyError(),
+                KeyError(),
+                KeyError(),
+                KeyError(),
+                KeyError(),
+                "valid_gridType_xarray",
+            ),
+            ["valid_gridType_xarray"],
+        ),
     ],
 )
-def test_get_ds_recursive_selection(mock_ds_grib, to_xarray_return_value):
+def test_get_ds_recursive_selection(
+    mock_ds_grib, to_xarray_return_value, expected_result
+):
     """
     Test case where get_ds recursively selects the dataset by metadata fields.
     """
@@ -73,16 +94,13 @@ def test_get_ds_recursive_selection(mock_ds_grib, to_xarray_return_value):
     assert mock_ds_grib.sel.call_count >= len(to_xarray_return_value)
 
     # The result should contain the mocked xarray dataset
-    assert result == ["valid_xarray"]
+    assert result == expected_result
 
 
 def test_get_ds_keyerror_handling(caplog, mock_ds_grib):
     """
     Test case where get_ds fails to retrieve data and handles multiple KeyErrors.
     """
-    # # Create a mock GRIB object
-    # mock_ds_grib = MagicMock()
-
     pid = 1
     lev = "surface"
 
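For context, each parametrized `to_xarray_return_value` above is a side-effect chain: exception instances are raised in order, and the final string is returned as the "dataset". A minimal sketch of how such a mock can be wired with `unittest.mock` (the real `mock_ds_grib` fixture is not shown in this diff, so this wiring is an assumption):

from unittest.mock import MagicMock

# Hypothetical wiring: .sel(...) always returns the same child mock,
# whose .to_xarray() consumes the side-effect chain one call at a time.
mock_ds_grib = MagicMock()
chain = (KeyError(), KeyError(), "valid_numberOfPoints_xarray")
mock_ds_grib.sel.return_value.to_xarray.side_effect = chain

# The first two calls raise KeyError; the third returns the string.
for _ in range(2):
    try:
        mock_ds_grib.sel(paramId=1, typeOfLevel="surface").to_xarray()
    except KeyError:
        pass
result = mock_ds_grib.sel(paramId=1, typeOfLevel="surface").to_xarray()
assert result == "valid_numberOfPoints_xarray"
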
util/model_output_parser.py

Lines changed: 27 additions & 107 deletions
@@ -117,11 +117,10 @@ def get_ds(ds_grib, pid, lev):
     hierarchical metadata.
 
     This function attempts to extract data from the GRIB file by selecting
-    fields that match the given `paramId` and `typeOfLevel`.
-    If the initial selection fails due to missing or mismatched metadata, the
-    function will recursively explore other metadata fields such as `stepType`,
-    `numberOfPoints`, `stepUnits`, `dataType`, and `gridType` to find matching
-    datasets.
+    fields that match the given `paramId` and `typeOfLevel`. If the initial
+    selection fails due to missing or mismatched metadata, the function
+    will explore other metadata fields such as `stepType`, `numberOfPoints`,
+    `stepUnits`, `dataType`, and `gridType` to find matching datasets.
 
     Parameters:
     -----------
@@ -137,112 +136,33 @@ def get_ds(ds_grib, pid, lev):
     ds_list : list
         A list of xarray datasets that match the specified parameter and level,
         with additional filtering based on hierarchical metadata fields.
-
-    Notes:
-    ------
-    - The function handles `KeyError` exceptions by recursively selecting data
-      with additional metadata fields (e.g., stepType, numberOfPoints, etc.).
-    - If no matching datasets are found, the function prints an error message.
     """
     ds_list = []
-    try:
-        ds = ds_grib.sel(paramId=pid, typeOfLevel=lev).to_xarray()
-        ds_list.append(ds)
-    except KeyError:
-        step_type = np.unique(
-            ds_grib.sel(paramId=pid, typeOfLevel=lev).metadata("stepType")
-        ).tolist()
-        for steps in step_type:
+    selectors = {"paramId": pid, "typeOfLevel": lev}
+    metadata_keys = ["stepType", "numberOfPoints", "stepUnits", "dataType", "gridType"]
+
+    def recursive_select(selects, depth=0):
+        try:
+            ds = ds_grib.sel(**selects).to_xarray()
+            ds_list.append(ds)
+        except KeyError:
+            if depth == len(metadata_keys):  # No more metadata keys to try
+                return
+            key = metadata_keys[depth]
             try:
-                ds = ds_grib.sel(
-                    paramId=pid, typeOfLevel=lev, stepType=steps
-                ).to_xarray()
-                ds_list.append(ds)
+                values = np.unique(ds_grib.sel(**selects).metadata(key)).tolist()
+                for value in values:
+                    selects[key] = value
+                    recursive_select(selects, depth + 1)  # Recurse to next level
             except KeyError:
-                num_points = np.unique(
-                    ds_grib.sel(paramId=pid, typeOfLevel=lev, stepType=steps).metadata(
-                        "numberOfPoints"
-                    )
-                ).tolist()
-                for points in num_points:
-                    try:
-                        ds = ds_grib.sel(
-                            paramId=pid,
-                            typeOfLevel=lev,
-                            stepType=steps,
-                            numberOfPoints=points,
-                        ).to_xarray()
-                        ds_list.append(ds)
-                    except KeyError:
-                        units = np.unique(
-                            ds_grib.sel(
-                                paramId=pid,
-                                typeOfLevel=lev,
-                                stepType=steps,
-                                numberOfPoints=points,
-                            ).metadata("stepUnits")
-                        ).tolist()
-                        for unit in units:
-                            try:
-                                ds = ds_grib.sel(
-                                    paramId=pid,
-                                    typeOfLevel=lev,
-                                    stepType=steps,
-                                    numberOfPoints=points,
-                                    stepUnits=unit,
-                                ).to_xarray()
-                                ds_list.append(ds)
-                            except KeyError:
-                                data_type = np.unique(
-                                    ds_grib.sel(
-                                        paramId=pid,
-                                        typeOfLevel=lev,
-                                        stepType=steps,
-                                        numberOfPoints=points,
-                                        stepUnits=unit,
-                                    ).metadata("dataType")
-                                ).tolist()
-                                for dtype in data_type:
-                                    try:
-                                        ds = ds_grib.sel(
-                                            paramId=pid,
-                                            typeOfLevel=lev,
-                                            stepType=steps,
-                                            numberOfPoints=points,
-                                            stepUnits=unit,
-                                            dataType=dtype,
-                                        ).to_xarray()
-                                        ds_list.append(ds)
-                                    except KeyError:
-                                        grid_type = np.unique(
-                                            ds_grib.sel(
-                                                paramId=pid,
-                                                typeOfLevel=lev,
-                                                stepType=steps,
-                                                numberOfPoints=points,
-                                                stepUnits=unit,
-                                                dataType=dtype,
-                                            ).metadata("gridType")
-                                        ).tolist()
-                                        for gtype in grid_type:
-                                            try:
-                                                ds = ds_grib.sel(
-                                                    paramId=pid,
-                                                    typeOfLevel=lev,
-                                                    stepType=steps,
-                                                    numberOfPoints=points,
-                                                    stepUnits=unit,
-                                                    dataType=dtype,
-                                                    gridType=gtype,
-                                                ).to_xarray()
-                                                ds_list.append(ds)
-                                            except KeyError:
-                                                logger.warning(
-                                                    "GRIB file of level %s and "
-                                                    "paramId %s cannot be read.",
-                                                    lev,
-                                                    pid,
-                                                )
+                pass
+
+    # Try initial selection
+    recursive_select(selectors)
+
+    if not ds_list:
+        logger.warning("GRIB file of level %s and paramId %s cannot be read.", lev, pid)
 
     return ds_list
 
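To see the refactored control flow end to end, here is a self-contained sketch pairing the new `get_ds` logic with a stub GRIB handle (the stub's `sel`/`metadata`/`to_xarray` behavior is invented purely for illustration; only the `get_ds` body mirrors the diff above):

import logging

import numpy as np

logging.basicConfig()
logger = logging.getLogger(__name__)


class StubGrib:
    """Illustrative stand-in: selection succeeds only once stepType is
    specified, mimicking a GRIB file with ambiguous stepType metadata."""

    def sel(self, **selects):
        self._selects = dict(selects)
        return self

    def metadata(self, key):
        if key == "stepType":
            return ["instant", "accum"]
        raise KeyError(key)

    def to_xarray(self):
        if "stepType" not in self._selects:
            raise KeyError("ambiguous stepType")
        return f"dataset({sorted(self._selects.items())})"


def get_ds(ds_grib, pid, lev):
    ds_list = []
    selectors = {"paramId": pid, "typeOfLevel": lev}
    metadata_keys = ["stepType", "numberOfPoints", "stepUnits", "dataType", "gridType"]

    def recursive_select(selects, depth=0):
        try:
            ds_list.append(ds_grib.sel(**selects).to_xarray())
        except KeyError:
            if depth == len(metadata_keys):  # No more metadata keys to try
                return
            key = metadata_keys[depth]
            try:
                values = np.unique(ds_grib.sel(**selects).metadata(key)).tolist()
                for value in values:
                    selects[key] = value
                    recursive_select(selects, depth + 1)
            except KeyError:
                pass

    recursive_select(selectors)
    if not ds_list:
        logger.warning("GRIB file of level %s and paramId %s cannot be read.", lev, pid)
    return ds_list


print(get_ds(StubGrib(), pid=167, lev="surface"))
# Prints two entries, one per stepType value found in the metadata.

Note that a single `selects` dict is shared and mutated across sibling iterations and recursion levels; each iteration overwrites the key at its own depth before recursing, which is the same design as the committed code.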
