Skip to content

Commit c63a806

Browse files
committed
ENH: modified pyrad.io.read_trt_data to be able to digest the new TRT JSON format
1 parent 73177a2 commit c63a806

File tree

3 files changed

+23
-290
lines changed

3 files changed

+23
-290
lines changed

doc/source/overview/mappings/pyrad_to_pyart.txt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
pyrad_name,field_name_pyart
1+
pyrad_name,pyart_name
22
dBZ,reflectivity
33
dBZ_flag,reflectivity_flag
44
dBZ_MF,reflectivity
@@ -54,7 +54,6 @@ TXh,transmitted_signal_power_h
5454
TXv,transmitted_signal_power_v
5555
SNRh,signal_to_noise_ratio_hh
5656
SNRv,signal_to_noise_ratio_vv
57-
SNR,signal_to_noise_ratio
5857
CCORh,clutter_correction_ratio_hh
5958
CCORv,clutter_correction_ratio_vv
6059
dBm_sun_hit,sun_hit_power_h
@@ -139,6 +138,8 @@ terrain_aspect,terrain_aspect
139138
elevation_angle,elevation_angle
140139
min_vis_elevation,min_vis_elevation
141140
min_vis_altitude,min_vis_altitude
141+
min_vis_height_above_ground,min_vis_height_above_ground
142+
min_rad_vis_height_above_ground,min_rad_vis_height_above_ground
142143
incident_angle,incident_angle
143144
sigma_0,sigma_0
144145
effective_area,effective_area
@@ -310,7 +311,7 @@ EZC15,echo_top_15dBz
310311
EZC20,echo_top_20dBz
311312
EZC45,echo_top_45dBz
312313
EZC50,echo_top_50dBz
313-
dEZC15,echo_top_15dBZ
314+
dEZC15,echo_top_15dBz
314315
dEZC20,echo_top_20dBz
315316
dEZC45,echo_top_45dBz
316317
dEZC50,echo_top_50dBz
@@ -436,7 +437,6 @@ ISDRco,ISDRco
436437
ISDRcx,ISDRcx
437438
SNRcx,SNRcx
438439
SNRCorFaCo,SNRCorFaCo
439-
SNRCorFaCo,SNRCorFaCo
440440
avgdBZ,avg_reflectivity
441441
NdBZ,npoints_reflectivity
442442
quant05dBZ,quant05_reflectivity

src/pyart

src/pyrad_proc/pyrad/io/read_data_sensor.py

Lines changed: 18 additions & 285 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@
3838
import os
3939
import gzip
4040
import glob
41+
import json
4142
import datetime
4243
import csv
4344
from warnings import warn
@@ -918,7 +919,7 @@ def read_trt_cell_lightning(fname):
918919

919920
def read_trt_data(fname):
920921
"""
921-
Reads the TRT data contained in a text file. The file has the following
922+
Reads the TRT data contained in a trt.json file. The file has the following
922923
fields:
923924
traj_ID
924925
yyyymmddHHMM
@@ -971,294 +972,26 @@ def read_trt_data(fname):
971972
972973
Returns
973974
-------
974-
A tupple containing the read values. None otherwise
975+
A dict containing the read values in a list for every key. None is
976+
returned if the trt file could not be read.
975977
976978
"""
977979
try:
978-
with open(fname, "r", newline="") as csvfile:
979-
# first count the lines
980-
reader = csv.DictReader(
981-
(
982-
row
983-
for row in csvfile
984-
if (
985-
not row.startswith("#")
986-
and not row.startswith("@")
987-
and not row.startswith(" ")
988-
and row
989-
)
990-
),
991-
fieldnames=[
992-
"traj_ID",
993-
"yyyymmddHHMM",
994-
"lon",
995-
"lat",
996-
"ell_L",
997-
"ell_S",
998-
"ell_or",
999-
"area",
1000-
"vel_x",
1001-
"vel_y",
1002-
"det",
1003-
"RANKr",
1004-
"CG-",
1005-
"CG+",
1006-
"CG",
1007-
"%CG+",
1008-
"ET45",
1009-
"ET45m",
1010-
"ET15",
1011-
"ET15m",
1012-
"VIL",
1013-
"maxH",
1014-
"maxHm",
1015-
"POH",
1016-
"RANK",
1017-
"Dvel_x",
1018-
"Dvel_y",
1019-
],
1020-
restkey="cell_contour_lon-lat",
1021-
delimiter=";",
1022-
)
1023-
nrows = sum(1 for row in reader)
1024-
1025-
if nrows == 0:
1026-
warn("No data in file " + fname)
1027-
return (
1028-
None,
1029-
None,
1030-
None,
1031-
None,
1032-
None,
1033-
None,
1034-
None,
1035-
None,
1036-
None,
1037-
None,
1038-
None,
1039-
None,
1040-
None,
1041-
None,
1042-
None,
1043-
None,
1044-
None,
1045-
None,
1046-
None,
1047-
None,
1048-
None,
1049-
None,
1050-
None,
1051-
None,
1052-
None,
1053-
None,
1054-
None,
1055-
None,
1056-
)
1057-
1058-
traj_ID = np.empty(nrows, dtype=int)
1059-
yyyymmddHHMM = np.empty(nrows, dtype=datetime.datetime)
1060-
lon = np.empty(nrows, dtype=float)
1061-
lat = np.empty(nrows, dtype=float)
1062-
ell_L = np.empty(nrows, dtype=float)
1063-
ell_S = np.empty(nrows, dtype=float)
1064-
ell_or = np.empty(nrows, dtype=float)
1065-
area = np.empty(nrows, dtype=float)
1066-
vel_x = np.ma.empty(nrows, dtype=float)
1067-
vel_y = np.ma.empty(nrows, dtype=float)
1068-
det = np.ma.empty(nrows, dtype=float)
1069-
RANKr = np.empty(nrows, dtype=int)
1070-
CG_n = np.empty(nrows, dtype=int)
1071-
CG_p = np.empty(nrows, dtype=int)
1072-
CG = np.empty(nrows, dtype=int)
1073-
CG_percent_p = np.ma.empty(nrows, dtype=float)
1074-
ET45 = np.ma.empty(nrows, dtype=float)
1075-
ET45m = np.ma.empty(nrows, dtype=float)
1076-
ET15 = np.ma.empty(nrows, dtype=float)
1077-
ET15m = np.ma.empty(nrows, dtype=float)
1078-
VIL = np.ma.empty(nrows, dtype=float)
1079-
maxH = np.ma.empty(nrows, dtype=float)
1080-
maxHm = np.ma.empty(nrows, dtype=float)
1081-
POH = np.ma.empty(nrows, dtype=float)
1082-
RANK = np.ma.empty(nrows, dtype=float)
1083-
Dvel_x = np.ma.empty(nrows, dtype=float)
1084-
Dvel_y = np.ma.empty(nrows, dtype=float)
1085-
1086-
# now read the data
1087-
csvfile.seek(0)
1088-
reader = csv.DictReader(
1089-
(
1090-
row
1091-
for row in csvfile
1092-
if (
1093-
not row.startswith("#")
1094-
and not row.startswith("@")
1095-
and not row.startswith(" ")
1096-
and row
1097-
)
1098-
),
1099-
fieldnames=[
1100-
"traj_ID",
1101-
"yyyymmddHHMM",
1102-
"lon",
1103-
"lat",
1104-
"ell_L",
1105-
"ell_S",
1106-
"ell_or",
1107-
"area",
1108-
"vel_x",
1109-
"vel_y",
1110-
"det",
1111-
"RANKr",
1112-
"CG-",
1113-
"CG+",
1114-
"CG",
1115-
"%CG+",
1116-
"ET45",
1117-
"ET45m",
1118-
"ET15",
1119-
"ET15m",
1120-
"VIL",
1121-
"maxH",
1122-
"maxHm",
1123-
"POH",
1124-
"RANK",
1125-
"Dvel_x",
1126-
"Dvel_y",
1127-
],
1128-
restkey="cell_contour_lon-lat",
1129-
delimiter=";",
1130-
)
1131-
cell_contour = []
1132-
for i, row in enumerate(reader):
1133-
traj_ID[i] = int(row["traj_ID"])
1134-
yyyymmddHHMM[i] = datetime.datetime.strptime(
1135-
row["yyyymmddHHMM"].strip(), "%Y%m%d%H%M"
1136-
)
1137-
lon[i] = float(row["lon"].strip())
1138-
lat[i] = float(row["lat"].strip())
1139-
ell_L[i] = float(row["ell_L"].strip())
1140-
ell_S[i] = float(row["ell_S"].strip())
1141-
ell_or[i] = float(row["ell_or"].strip())
1142-
area[i] = float(row["area"].strip())
1143-
vel_x[i] = float(row["vel_x"].strip())
1144-
vel_y[i] = float(row["vel_y"].strip())
1145-
det[i] = float(row["det"].strip())
1146-
RANKr[i] = int(row["RANKr"].strip())
1147-
CG_n[i] = int(row["CG-"].strip())
1148-
CG_p[i] = int(row["CG+"].strip())
1149-
CG[i] = int(row["CG"].strip())
1150-
CG_percent_p[i] = float(row["%CG+"].strip())
1151-
ET45[i] = float(row["ET45"].strip())
1152-
ET45m[i] = float(row["ET45m"].strip())
1153-
ET15[i] = float(row["ET15"].strip())
1154-
ET15m[i] = float(row["ET15m"].strip())
1155-
VIL[i] = float(row["VIL"].strip())
1156-
maxH[i] = float(row["maxH"].strip())
1157-
maxHm[i] = float(row["maxHm"].strip())
1158-
POH[i] = float(row["POH"].strip())
1159-
RANK[i] = float(row["RANK"].strip())
1160-
Dvel_x[i] = float(row["Dvel_x"].strip())
1161-
Dvel_y[i] = float(row["Dvel_y"].strip())
1162-
1163-
cell_contour_list_aux = row["cell_contour_lon-lat"]
1164-
nele = len(cell_contour_list_aux) - 1
1165-
cell_contour_list = []
1166-
for j in range(nele):
1167-
cell_contour_list.append(float(cell_contour_list_aux[j].strip()))
1168-
cell_contour_dict = {
1169-
"lon": cell_contour_list[0::2],
1170-
"lat": cell_contour_list[1::2],
1171-
}
1172-
cell_contour.append(cell_contour_dict)
1173-
1174-
csvfile.close()
1175-
1176-
lon = np.ma.masked_invalid(lon)
1177-
lat = np.ma.masked_invalid(lat)
1178-
ell_L = np.ma.masked_invalid(ell_L)
1179-
ell_S = np.ma.masked_invalid(ell_S)
1180-
ell_or = np.ma.masked_invalid(ell_or)
1181-
area = np.ma.masked_invalid(area)
1182-
vel_x = np.ma.masked_invalid(vel_x)
1183-
vel_y = np.ma.masked_invalid(vel_y)
1184-
det = np.ma.masked_invalid(det)
1185-
CG_percent_p = np.ma.masked_invalid(CG_percent_p)
1186-
ET45 = np.ma.masked_invalid(ET45)
1187-
ET45m = np.ma.masked_invalid(ET45m)
1188-
ET15 = np.ma.masked_invalid(ET15)
1189-
ET15m = np.ma.masked_invalid(ET15m)
1190-
VIL = np.ma.masked_invalid(VIL)
1191-
maxH = np.ma.masked_invalid(maxH)
1192-
maxHm = np.ma.masked_invalid(maxHm)
1193-
POH = np.ma.masked_invalid(POH)
1194-
RANK = np.ma.masked_invalid(RANK)
1195-
Dvel_x = np.ma.masked_invalid(Dvel_x)
1196-
Dvel_y = np.ma.masked_invalid(Dvel_y)
1197-
1198-
return (
1199-
traj_ID,
1200-
yyyymmddHHMM,
1201-
lon,
1202-
lat,
1203-
ell_L,
1204-
ell_S,
1205-
ell_or,
1206-
area,
1207-
vel_x,
1208-
vel_y,
1209-
det,
1210-
RANKr,
1211-
CG_n,
1212-
CG_p,
1213-
CG,
1214-
CG_percent_p,
1215-
ET45,
1216-
ET45m,
1217-
ET15,
1218-
ET15m,
1219-
VIL,
1220-
maxH,
1221-
maxHm,
1222-
POH,
1223-
RANK,
1224-
Dvel_x,
1225-
Dvel_y,
1226-
cell_contour,
1227-
)
980+
with open(fname) as fh:
981+
data = json.load(fh)
982+
dict_trt = dict()
983+
for i, feat in enumerate(data["features"]):
984+
all_trt_keys = feat["properties"].keys()
985+
if i == 0:
986+
dict_trt = {key: [] for key in all_trt_keys}
987+
for key in all_trt_keys:
988+
dict_trt[key].append(feat["properties"][key])
989+
return dict_trt
1228990

1229991
except EnvironmentError as ee:
1230992
warn(str(ee))
1231993
warn("Unable to read file " + fname)
1232-
return (
1233-
None,
1234-
None,
1235-
None,
1236-
None,
1237-
None,
1238-
None,
1239-
None,
1240-
None,
1241-
None,
1242-
None,
1243-
None,
1244-
None,
1245-
None,
1246-
None,
1247-
None,
1248-
None,
1249-
None,
1250-
None,
1251-
None,
1252-
None,
1253-
None,
1254-
None,
1255-
None,
1256-
None,
1257-
None,
1258-
None,
1259-
None,
1260-
None,
1261-
)
994+
return None
1262995

1263996

1264997
def read_trt_traj_data(fname):
@@ -2239,13 +1972,13 @@ def get_sensor_data(date, datatype, cfg):
22391972
warn(
22401973
"Could not find any raingauge file with names {datafile1} or {datafile2}"
22411974
)
2242-
if datafile.endswith('.gz'):
2243-
with gzip.open(datafile,'rt') as f:
1975+
if datafile.endswith(".gz"):
1976+
with gzip.open(datafile, "rt") as f:
22441977
num_columns = len(next(f).strip().split(","))
22451978
else:
22461979
with open(datafile) as f:
22471980
num_columns = len(next(f).strip().split(","))
2248-
1981+
22491982
if num_columns == 3:
22501983
_, sensordate, sensorvalue = read_smn2(datafile)
22511984
else:

0 commit comments

Comments
 (0)