Skip to content

Commit 75090d4

Browse files
committed
merge with dev
2 parents 2cc9e3e + ab4f5eb commit 75090d4

File tree

12 files changed

+274
-204
lines changed

12 files changed

+274
-204
lines changed

.github/workflows/pyrad_tests_base.yml

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name: Test pyrad base
22
env:
33
MINIMAL_DEPENDENCIES: Cython numpy cartopy
4-
TEST_DEPENDENCIES: pytest imageio pygrib
4+
TEST_DEPENDENCIES: pytest imageio pygrib
55

66
on:
77
# Triggers the workflow on push or pull request events but only for the master branch
@@ -45,7 +45,7 @@ jobs:
4545
- name: Clone pyart
4646
uses: actions/checkout@master
4747
with:
48-
repository: ARM-DOE/pyart
48+
repository: ARM-DOE/pyart
4949
path: pyart
5050
- name: Clone pyrad-tests
5151
uses: actions/checkout@master
@@ -67,6 +67,10 @@ jobs:
6767
python -c "from pyrad.flow import main"
6868
- name: Run tests
6969
env:
70-
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
70+
S3_SECRET_READ: ${{ secrets.S3_SECRET_READ }}
71+
S3_KEY_READ: ${{ secrets.S3_KEY_READ }}
72+
S3_SECRET_WRITE: ${{ secrets.S3_SECRET_WRITE }}
73+
S3_KEY_WRITE: ${{ secrets.S3_KEY_WRITE }}
74+
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
7175
working-directory: ${{github.workspace}}/pyrad-tests/
7276
run: pytest "run_tests.py::test_base"

.github/workflows/pyrad_tests_base_dev.yml

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ jobs:
4242
- name: Clone pyart
4343
uses: actions/checkout@master
4444
with:
45-
repository: ARM-DOE/pyart
45+
repository: ARM-DOE/pyart
4646
path: pyart
4747
- name: Clone pyrad-tests
4848
uses: actions/checkout@master
@@ -64,10 +64,13 @@ jobs:
6464
python -c "from pyrad.flow import main"
6565
- name: Run tests
6666
env:
67-
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
67+
S3_SECRET_READ: ${{ secrets.S3_SECRET_READ }}
68+
S3_KEY_READ: ${{ secrets.S3_KEY_READ }}
69+
S3_SECRET_WRITE: ${{ secrets.S3_SECRET_WRITE }}
70+
S3_KEY_WRITE: ${{ secrets.S3_KEY_WRITE }}
71+
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
6872
working-directory: ${{github.workspace}}/pyrad-tests/
6973
run: pytest "run_tests.py::test_base"
7074
- name: tmate session if tests fail
7175
if: ${{ github.event_name == 'workflow_dispatch' && failure() }}
7276
uses: mxschmitt/action-tmate@v3
73-

.github/workflows/pyrad_tests_mch_dev.yml

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ on:
66
branches: [ dev ]
77
pull_request:
88
branches: [ dev ]
9-
9+
1010
jobs:
1111
unit_tests:
1212
name: Unit Tests (${{ matrix.python-version }}, ${{ matrix.os }})
@@ -65,12 +65,11 @@ jobs:
6565
# Insert tmate session here
6666
- name: Run tests
6767
env:
68-
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
69-
PSRLIB_PATH: ${{github.workspace}}/src/libDX50/lib/
70-
PYART_CONFIG: ${{github.workspace}}/config/pyart/mch_config.py
68+
PYRAD_TESTS_PATH: ${{github.workspace}}/pyrad-tests/
69+
PSRLIB_PATH: ${{github.workspace}}/src/libDX50/lib/
70+
PYART_CONFIG: ${{github.workspace}}/config/pyart/mch_config.py
7171
working-directory: ${{github.workspace}}/pyrad-tests//
7272
run: pytest "run_tests.py::test_mch"
7373
- name: tmate session if tests fail
7474
if: ${{ github.event_name == 'workflow_dispatch' && failure() }}
7575
uses: mxschmitt/action-tmate@v3
76-

.pre-commit-config.yaml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
repos:
2+
- repo: https://github.com/pre-commit/pre-commit-hooks
3+
rev: v5.0.0 # Use the latest stable version
4+
hooks:
5+
- id: trailing-whitespace
6+
- id: end-of-file-fixer
7+
- id: check-yaml
8+
9+
- repo: https://github.com/charliermarsh/ruff-pre-commit
10+
rev: v0.9.3 # Use the latest stable version of ruff-pre-commit
11+
hooks:
12+
- id: ruff
13+
args: ["--fix"]
14+
files: "src/pyrad_proc/pyrad" # Automatically fix issues when possible
15+
16+
- repo: https://github.com/psf/black
17+
rev: 23.1.0
18+
hooks:
19+
- id: black

ci/environment-ci.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ dependencies:
1414
- cvxopt
1515
- wradlib
1616
- xarray<2024.10.0
17+
- boto3
1718
- xradar
1819
- metpy
1920
- pytest-cov

doc/source/overview/main.rst

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,21 +16,24 @@ locationConfigFile STRING File name (with full path) of the location config
1616
productConfigFile STRING File name (with full path) of the product configuration file. Described in Section 4.
1717
lastStateFile STRING File name (with full path) of the file containing the time of the last processed scan. Used in particular for real-time processing.
1818
imgformat STRING/ File format(s) of the images. The following formats are supported: eps, png, and jpg. If ``saveimg`` is set to 0, this field is not used.
19-
STRARR
19+
STRARR
2020
saveimgbasepath STRING Base directory for the images to save. The directory structure looks as follows:
2121
``<saveimgbasepath>/<name>/<YYYY-MM-DD>/<datasetname>/<prodname>/<outputname>``
2222
If ``saveimg`` is set to 0, this field is not used.
23-
s3copypath STRING OPTIONAL. Path to an S3 bucket. If provided all generated products will be written there as well using the same data structure. The format must be https://bucket_name.endpoint.domain, for example https://tests.fr-par-1.linodeobjects.com/. The S3 copy procedure will only work if the
24-
environment variables AWS_KEY and AWS_SECRET are defined in the pyrad scope. AWS_KEY contains the S3 bucket AWS key and AWS_SECRET the associated secret.
23+
s3EndpointRead STRING OPTIONAL. Url to an S3 endpoint containing input radar data. The format must be https://endpoint.domain (e.g. https://eu-central-1.linodeobjects.com/), the https:// is not mandatory.
24+
s3BucketRead STRING OPTIONAL. Name of an S3 bucket containing input radar data. It has to be used together with ``s3EndpointRead`` and ``s3PathRead`` to be able to read input radar data from a bucket. The procedure will only work by setting the environment variables S3_KEY_READ and S3_SECRET_READ.
25+
s3PathRead STRING OPTIONAL. Path where to find input radar data in an S3 bucket. The data will be retrieved at url https://s3BucketRead.s3EndpointRead/s3PathRead/*
26+
rm_s3_file INT OPTIONAL. If set, input radar data files downloaded from an S3 bucket will be removed after reading.
27+
s3EndpointWrite STRING OPTIONAL. Url to an S3 endpoint where to store output data. The format must be https://endpoint.domain (e.g. https://eu-central-1.linodeobjects.com/), the https:// is not mandatory.
28+
s3BucketWrite STRING OPTIONAL. Name of an S3 bucket where to store output data. It has to be used together with ``s3EndpointWrite`` and ``s3PathWrite`` to be able to save output data to a bucket. The procedure will only work by setting the environment variables S3_KEY_WRITE and S3_SECRET_WRITE.
29+
s3PathWrite STRING OPTIONAL. Path where to save output radar data in an S3 bucket. The data will be saved at url https://s3BucketWrite.s3EndpointWrite/s3PathWrite/filename
2530
loadbasepath STRING OPTIONAL. Base path of saved data. By default, this field is set to ``saveimgbasepath``.
2631
loadname STRING OPTIONAL. Name of the saved data processing. Used for saved volume loading. By default, this field is set to ``name``.
27-
gecsxbasepath STRING OPTIONAL. Base path of saved visibility fields generated by the GECSX tool
28-
gecsxname STRING OPTIONAL. Name of the dataset generated by GEXSX in which the visibility fields are contained. Used for GECSX volume loading.
32+
gecsxbasepath STRING OPTIONAL. Base path of saved visibility fields generated by the GECSX tool
33+
gecsxname STRING OPTIONAL. Name of the dataset generated by GEXSX in which the visibility fields are contained. Used for GECSX volume loading.
2934
dempath STRING OPTIONAL. Base directory of the Digital Elevation Model (DEM) files. Basically to load the radar visibility (Optional).
3035
smnpath STRING OPTIONAL. Base directory of the SwissMetNet stations data. Used in the comparison between radar data and rain gauges (Optional).
3136
disdropath STRING OPTIONAL. Base directory of the disdrometer data. Used in the comparison between radar data and disdrometers (Optional).
3237
solarfluxpath STRING OPTIONAL. Base directory of the solar flux data. Used to plot the calibration bias based on sun monitoring (Optional).
3338
iconpath STRING OPTIONAL. Base directory of the ICON (NWP model) data files.
3439
==================== ======= =======================================================================================
35-
36-

src/pyart

src/pyrad_proc/pyrad/flow/flow_aux.py

Lines changed: 52 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1000,16 +1000,24 @@ def _generate_prod(dataset, cfg, prdname, prdfunc, dsname, voltime, runinfo=None
10001000
filenames = prdfunc(dataset, prdcfg)
10011001
if isinstance(filenames, str): # convert to list if needed
10021002
filenames = [filenames]
1003-
if "s3copypath" in prdcfg and filenames is not None: # copy to S3
1004-
s3AccessPolicy = (
1005-
prdcfg["s3AccessPolicy"] if "s3AccessPolicy" in prdcfg else None
1006-
)
1003+
if (
1004+
"s3BucketWrite" in prdcfg
1005+
and "s3EndpointWrite" in prdcfg
1006+
and filenames is not None
1007+
): # copy to S3
1008+
s3AccessPolicy = prdcfg.get("s3AccessPolicy", None)
1009+
s3path = prdcfg.get("s3PathWrite", None)
10071010
for fname in filenames:
10081011
if (
10091012
prdcfg["basepath"] in fname
10101013
): # only products saved to standard basepath
10111014
write_to_s3(
1012-
fname, prdcfg["basepath"], prdcfg["s3copypath"], s3AccessPolicy
1015+
fname,
1016+
prdcfg["basepath"],
1017+
prdcfg["s3EndpointWrite"],
1018+
prdcfg["s3BucketWrite"],
1019+
s3path,
1020+
s3AccessPolicy,
10131021
)
10141022
return False
10151023
except Exception as inst:
@@ -1221,6 +1229,9 @@ def _create_cfg_dict(cfgfile):
12211229
if "MFScale" not in cfg:
12221230
cfg.update({"MFScale": 0})
12231231

1232+
if not cfg["datapath"]: # empty datapath in case of s3 reading
1233+
cfg["datapath"] = ["" for rad in range(cfg["NumRadars"])]
1234+
12241235
# parameters necessary to read correctly MF grid binary files
12251236
if "BinFileParams" not in cfg:
12261237
bin_file_params = {
@@ -1333,35 +1344,35 @@ def _create_datacfg_dict(cfg):
13331344
datacfg.update({"DataTypeIDInFiles": cfg["DataTypeIDInFiles"]})
13341345

13351346
# s3 buckets
1336-
if "bucket" in cfg:
1347+
if "s3BucketRead" in cfg:
13371348
try:
1338-
datacfg["s3_key"] = os.environ["S3_IN_KEY"]
1339-
datacfg["s3_secret_key"] = os.environ["S3_IN_SECRET"]
1349+
datacfg["s3KeyRead"] = os.environ["S3_KEY_READ"]
1350+
datacfg["s3SecretRead"] = os.environ["S3_SECRET_READ"]
13401351
except KeyError:
13411352
warn(
1342-
"Define environment variables S3_IN_KEY and S3_IN_SECRET"
1353+
"Define environment variables S3_KEY_READ and S3_SECRET_READ"
13431354
" to get input data from S3 buckets."
13441355
)
13451356

1346-
if "s3path" in cfg:
1347-
datacfg.update({"s3path": cfg["s3path"]})
1357+
if "s3PathRead" in cfg:
1358+
datacfg.update({"s3PathRead": cfg["s3PathRead"]})
13481359
else:
1349-
warn("Unable to read data from s3 bucket. Define s3path")
1350-
if "s3_url" in cfg:
1351-
datacfg.update({"s3_url": cfg["s3_url"]})
1360+
warn("Unable to read data from s3 bucket. Define s3PathRead")
1361+
if "s3EndpointRead" in cfg:
1362+
datacfg.update({"s3EndpointRead": cfg["s3EndpointRead"]})
13521363
else:
1353-
warn("Unable to read data from s3 bucket. Define s3_url")
1364+
warn("Unable to read data from s3 bucket. Define s3EndpointRead")
13541365

13551366
if "rm_s3_file" in cfg:
13561367
datacfg.update({"rm_s3_file": cfg["rm_s3_file"]})
13571368

13581369
if (
1359-
"s3path" in datacfg
1360-
and "s3_url" in datacfg
1361-
and "s3_key" in datacfg
1362-
and "s3_secret_key" in datacfg
1370+
"s3PathRead" in datacfg
1371+
and "s3EndpointRead" in datacfg
1372+
and "s3KeyRead" in datacfg
1373+
and "s3SecretRead" in datacfg
13631374
):
1364-
datacfg.update({"bucket": cfg["bucket"]})
1375+
datacfg.update({"s3BucketRead": cfg["s3BucketRead"]})
13651376

13661377
# Modify size of radar or radar spectra object
13671378
datacfg.update({"elmin": cfg.get("elmin", None)})
@@ -1582,11 +1593,14 @@ def _create_prdcfg_dict(cfg, dataset, product, voltime, runinfo=None):
15821593
prdcfg.update({"imgformat": cfg["imgformat"]})
15831594
prdcfg.update({"RadarName": cfg["RadarName"]})
15841595

1585-
if "s3copypath" in cfg:
1586-
prdcfg.update({"s3copypath": cfg["s3copypath"]})
1596+
if "s3EndpointWrite" in cfg:
1597+
prdcfg.update({"s3EndpointWrite": cfg["s3EndpointWrite"]})
1598+
if "s3BucketWrite" in cfg:
1599+
prdcfg.update({"s3BucketWrite": cfg["s3BucketWrite"]})
1600+
if "s3PathWrite" in cfg:
1601+
prdcfg.update({"s3PathWrite": cfg["s3PathWrite"]})
15871602
if "s3AccessPolicy" in cfg:
15881603
prdcfg.update({"s3AccessPolicy": cfg["s3AccessPolicy"]})
1589-
15901604
if "RadarBeamwidth" in cfg:
15911605
prdcfg.update({"RadarBeamwidth": cfg["RadarBeamwidth"]})
15921606
if "ppiImageConfig" in cfg:
@@ -1641,9 +1655,13 @@ def _get_datatype_list(cfg, radarnr="RADAR001"):
16411655
if "datatype" not in cfg[dataset]:
16421656
continue
16431657
if isinstance(cfg[dataset]["datatype"], str):
1644-
(radarnr_descr, datagroup, datatype_aux, dataset_save, product_save) = (
1645-
get_datatype_fields(cfg[dataset]["datatype"])
1646-
)
1658+
(
1659+
radarnr_descr,
1660+
datagroup,
1661+
datatype_aux,
1662+
dataset_save,
1663+
product_save,
1664+
) = get_datatype_fields(cfg[dataset]["datatype"])
16471665
if datagroup != "PROC" and radarnr_descr == radarnr:
16481666
if (dataset_save is None) and (product_save is None):
16491667
datatypesdescr.add(
@@ -1667,9 +1685,13 @@ def _get_datatype_list(cfg, radarnr="RADAR001"):
16671685
)
16681686
else:
16691687
for datatype in cfg[dataset]["datatype"]:
1670-
(radarnr_descr, datagroup, datatype_aux, dataset_save, product_save) = (
1671-
get_datatype_fields(datatype)
1672-
)
1688+
(
1689+
radarnr_descr,
1690+
datagroup,
1691+
datatype_aux,
1692+
dataset_save,
1693+
product_save,
1694+
) = get_datatype_fields(datatype)
16731695
if datagroup != "PROC" and radarnr_descr == radarnr:
16741696
if dataset_save is None and product_save is None:
16751697
datatypesdescr.add(
@@ -1799,7 +1821,7 @@ def _get_masterfile_list(datatypesdescr, starttimes, endtimes, datacfg, scan_lis
17991821
)
18001822
return [], None, None
18011823

1802-
if "bucket" in datacfg:
1824+
if "s3BucketRead" in datacfg:
18031825
masterfilelist = get_file_list_s3(
18041826
masterdatatypedescr, starttimes, endtimes, datacfg, scan=masterscan
18051827
)

0 commit comments

Comments
 (0)