Skip to content

Commit 8bafb8b

Browse files
Merge pull request #299 from zhiminzhang0830/upper
Change the names of constant parameters to upper case
2 parents 286085a + 8d652bf commit 8bafb8b

File tree

14 files changed

+266
-266
lines changed

14 files changed

+266
-266
lines changed

examples/cylinder/2d_unsteady/cylinder2d_unsteady_Re100.py

Lines changed: 34 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -23,11 +23,11 @@
2323
# set random seed for reproducibility
2424
ppsci.utils.misc.set_random_seed(42)
2525
# set output directory
26-
output_dir = (
26+
OUTPUT_DIR = (
2727
"./output_cylinder2d_unsteady" if not args.output_dir else args.output_dir
2828
)
2929
# initialize logger
30-
logger.init_logger("ppsci", f"{output_dir}/train.log", "info")
30+
logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info")
3131

3232
# set model
3333
model = ppsci.arch.MLP(
@@ -37,19 +37,19 @@
3737
equation = {"NavierStokes": ppsci.equation.NavierStokes(0.02, 1.0, 2, True)}
3838

3939
# set timestamps
40-
time_start, time_end = 1, 50
41-
num_timestamps = 50
42-
train_num_timestamps = 30
40+
TIME_START, TIME_END = 1, 50
41+
NUM_TIMESTAMPS = 50
42+
TRAIN_NUM_TIMESTAMPS = 30
4343

4444
train_timestamps = np.linspace(
45-
time_start, time_end, num_timestamps, endpoint=True
45+
TIME_START, TIME_END, NUM_TIMESTAMPS, endpoint=True
4646
).astype("float32")
47-
train_timestamps = np.random.choice(train_timestamps, train_num_timestamps)
47+
train_timestamps = np.random.choice(train_timestamps, TRAIN_NUM_TIMESTAMPS)
4848
train_timestamps.sort()
49-
t0 = np.array([time_start], dtype="float32")
49+
t0 = np.array([TIME_START], dtype="float32")
5050

5151
val_timestamps = np.linspace(
52-
time_start, time_end, num_timestamps, endpoint=True
52+
TIME_START, TIME_END, NUM_TIMESTAMPS, endpoint=True
5353
).astype("float32")
5454

5555
logger.info(f"train_timestamps: {train_timestamps.tolist()}")
@@ -59,8 +59,8 @@
5959
geom = {
6060
"time_rect": ppsci.geometry.TimeXGeometry(
6161
ppsci.geometry.TimeDomain(
62-
time_start,
63-
time_end,
62+
TIME_START,
63+
TIME_END,
6464
timestamps=np.concatenate((t0, train_timestamps), axis=0),
6565
),
6666
ppsci.geometry.PointCloud(
@@ -77,13 +77,13 @@
7777
}
7878

7979
# set dataloader config
80-
iters_per_epoch = 1
80+
ITERS_PER_EPOCH = 1
8181

8282
# pde/bc/sup constraint use t1~tn, initial constraint use t0
83-
npoint_pde, ntime_pde = 9420, len(train_timestamps)
84-
npoint_inlet_cylinder = 161
85-
npoint_outlet = 81
86-
alias_dict = {"x": "Points:0", "y": "Points:1", "u": "U:0", "v": "U:1"}
83+
NPOINT_PDE, ntime_pde = 9420, len(train_timestamps)
84+
NPOINT_INLET_CYLINDER = 161
85+
NPOINT_OUTLET = 81
86+
ALIAS_DICT = {"x": "Points:0", "y": "Points:1", "u": "U:0", "v": "U:1"}
8787

8888
# set constraint
8989
pde_constraint = ppsci.constraint.InteriorConstraint(
@@ -92,8 +92,8 @@
9292
geom["time_rect"],
9393
{
9494
"dataset": "IterableNamedArrayDataset",
95-
"batch_size": npoint_pde * ntime_pde,
96-
"iters_per_epoch": iters_per_epoch,
95+
"batch_size": NPOINT_PDE * ntime_pde,
96+
"iters_per_epoch": ITERS_PER_EPOCH,
9797
},
9898
ppsci.loss.MSELoss("mean"),
9999
name="EQ",
@@ -105,7 +105,7 @@
105105
"file_path": "./datasets/domain_inlet_cylinder.csv",
106106
"input_keys": ["x", "y"],
107107
"label_keys": ["u", "v"],
108-
"alias_dict": alias_dict,
108+
"alias_dict": ALIAS_DICT,
109109
"weight_dict": {"u": 10, "v": 10},
110110
"timestamps": train_timestamps,
111111
},
@@ -120,7 +120,7 @@
120120
"file_path": "./datasets/domain_outlet.csv",
121121
"input_keys": ["x", "y"],
122122
"label_keys": ["p"],
123-
"alias_dict": alias_dict,
123+
"alias_dict": ALIAS_DICT,
124124
"timestamps": train_timestamps,
125125
},
126126
},
@@ -134,7 +134,7 @@
134134
"file_path": "./datasets/initial/ic0.1.csv",
135135
"input_keys": ["x", "y"],
136136
"label_keys": ["u", "v", "p"],
137-
"alias_dict": alias_dict,
137+
"alias_dict": ALIAS_DICT,
138138
"weight_dict": {"u": 10, "v": 10, "p": 10},
139139
"timestamps": t0,
140140
},
@@ -149,7 +149,7 @@
149149
"file_path": "./datasets/probe/probe1_50.csv",
150150
"input_keys": ["t", "x", "y"],
151151
"label_keys": ["u", "v"],
152-
"alias_dict": alias_dict,
152+
"alias_dict": ALIAS_DICT,
153153
"weight_dict": {"u": 10, "v": 10},
154154
"timestamps": train_timestamps,
155155
},
@@ -167,14 +167,14 @@
167167
}
168168

169169
# set training hyper-parameters
170-
epochs = 40000 if not args.epochs else args.epochs
171-
eval_freq = 400
170+
EPOCHS = 40000 if not args.epochs else args.epochs
171+
EVAL_FREQ = 400
172172

173173
# set optimizer
174174
optimizer = ppsci.optimizer.Adam(0.001)((model,))
175175

176176
# set validator
177-
npoints_eval = (npoint_pde + npoint_inlet_cylinder + npoint_outlet) * num_timestamps
177+
npoints_eval = (NPOINT_PDE + NPOINT_INLET_CYLINDER + NPOINT_OUTLET) * NUM_TIMESTAMPS
178178
residual_validator = ppsci.validate.GeometryValidator(
179179
equation["NavierStokes"].equations,
180180
{"continuity": 0, "momentum_x": 0, "momentum_y": 0},
@@ -193,13 +193,13 @@
193193

194194
# set visualizer(optional)
195195
vis_points = geom["time_rect_eval"].sample_interior(
196-
(npoint_pde + npoint_inlet_cylinder + npoint_outlet) * num_timestamps
196+
(NPOINT_PDE + NPOINT_INLET_CYLINDER + NPOINT_OUTLET) * NUM_TIMESTAMPS
197197
)
198198
visualizer = {
199199
"visulzie_u": ppsci.visualize.VisualizerVtu(
200200
vis_points,
201201
{"u": lambda d: d["u"], "v": lambda d: d["v"], "p": lambda d: d["p"]},
202-
num_timestamps=num_timestamps,
202+
num_timestamps=NUM_TIMESTAMPS,
203203
prefix="result_u",
204204
)
205205
}
@@ -208,13 +208,13 @@
208208
solver = ppsci.solver.Solver(
209209
model,
210210
constraint,
211-
output_dir,
211+
OUTPUT_DIR,
212212
optimizer,
213213
None,
214-
epochs,
215-
iters_per_epoch,
214+
EPOCHS,
215+
ITERS_PER_EPOCH,
216216
eval_during_train=True,
217-
eval_freq=eval_freq,
217+
eval_freq=EVAL_FREQ,
218218
equation=equation,
219219
geom=geom,
220220
validator=validator,
@@ -228,16 +228,16 @@
228228
solver.visualize()
229229

230230
# directly evaluate model from pretrained_model_path(optional)
231-
logger.init_logger("ppsci", f"{output_dir}/eval.log", "info")
231+
logger.init_logger("ppsci", f"{OUTPUT_DIR}/eval.log", "info")
232232
solver = ppsci.solver.Solver(
233233
model,
234234
constraint,
235-
output_dir,
235+
OUTPUT_DIR,
236236
equation=equation,
237237
geom=geom,
238238
validator=validator,
239239
visualizer=visualizer,
240-
pretrained_model_path=f"{output_dir}/checkpoints/latest",
240+
pretrained_model_path=f"{OUTPUT_DIR}/checkpoints/latest",
241241
)
242242
solver.eval()
243243
# visualize prediction from pretrained_model_path(optional)

examples/cylinder/2d_unsteady/transformer_physx/train_enn_v2.py

Lines changed: 24 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -48,29 +48,29 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
4848
if __name__ == "__main__":
4949
ppsci.utils.set_random_seed(42)
5050

51-
epochs = 300
52-
train_block_size = 4
53-
valid_block_size = 32
51+
EPOCHS = 300
52+
TRAIN_BLOCK_SIZE = 4
53+
VALID_BLOCK_SIZE = 32
5454

5555
input_keys = ("states", "visc")
5656
output_keys = ("pred_states", "recover_states")
57-
weights = (10.0 * (train_block_size - 1), 10.0 * train_block_size)
57+
weights = (10.0 * (TRAIN_BLOCK_SIZE - 1), 10.0 * TRAIN_BLOCK_SIZE)
5858
regularization_key = "k_matrix"
5959

60-
output_dir = "./output/cylinder_enn"
61-
train_file_path = "./datasets/cylinder_training.hdf5"
62-
valid_file_path = "./datasets/cylinder_valid.hdf5"
60+
OUTPUT_DIR = "./output/cylinder_enn"
61+
TRAIN_FILE_PATH = "./datasets/cylinder_training.hdf5"
62+
VALID_FILE_PATH = "./datasets/cylinder_valid.hdf5"
6363
# initialize logger
64-
logger.init_logger("ppsci", f"{output_dir}/train.log", "info")
64+
logger.init_logger("ppsci", f"{OUTPUT_DIR}/train.log", "info")
6565

6666
# maunally build constraint(s)
6767
train_dataloader_cfg = {
6868
"dataset": {
6969
"name": "CylinderDataset",
70-
"file_path": train_file_path,
70+
"file_path": TRAIN_FILE_PATH,
7171
"input_keys": input_keys,
7272
"label_keys": output_keys,
73-
"block_size": train_block_size,
73+
"block_size": TRAIN_BLOCK_SIZE,
7474
"stride": 16,
7575
"weight_dict": {key: value for key, value in zip(output_keys, weights)},
7676
},
@@ -86,15 +86,15 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
8686
sup_constraint = ppsci.constraint.SupervisedConstraint(
8787
train_dataloader_cfg,
8888
ppsci.loss.MSELossWithL2Decay(
89-
regularization_dict={regularization_key: 1.0e-2 * (train_block_size - 1)}
89+
regularization_dict={regularization_key: 1.0e-2 * (TRAIN_BLOCK_SIZE - 1)}
9090
),
9191
{key: lambda out, k=key: out[k] for key in output_keys + (regularization_key,)},
9292
name="Sup",
9393
)
9494
constraint = {sup_constraint.name: sup_constraint}
9595

9696
# set iters_per_epoch by dataloader length
97-
iters_per_epoch = len(sup_constraint.data_loader)
97+
ITERS_PER_EPOCH = len(sup_constraint.data_loader)
9898

9999
# manually init model
100100
data_mean, data_std = get_mean_std(
@@ -107,11 +107,11 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
107107
# init optimizer and lr scheduler
108108
clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=0.1)
109109
lr_scheduler = ppsci.optimizer.lr_scheduler.ExponentialDecay(
110-
epochs,
111-
iters_per_epoch,
110+
EPOCHS,
111+
ITERS_PER_EPOCH,
112112
0.001,
113113
gamma=0.995,
114-
decay_steps=iters_per_epoch,
114+
decay_steps=ITERS_PER_EPOCH,
115115
by_epoch=True,
116116
)()
117117
optimizer = ppsci.optimizer.Adam(
@@ -121,14 +121,14 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
121121
)([model])
122122

123123
# maunally build validator
124-
weights = (10.0 * (valid_block_size - 1), 10.0 * valid_block_size)
124+
weights = (10.0 * (VALID_BLOCK_SIZE - 1), 10.0 * VALID_BLOCK_SIZE)
125125
eval_dataloader_cfg = {
126126
"dataset": {
127127
"name": "CylinderDataset",
128-
"file_path": valid_file_path,
128+
"file_path": VALID_FILE_PATH,
129129
"input_keys": input_keys,
130130
"label_keys": output_keys,
131-
"block_size": valid_block_size,
131+
"block_size": VALID_BLOCK_SIZE,
132132
"stride": 32,
133133
"weight_dict": {key: value for key, value in zip(output_keys, weights)},
134134
},
@@ -153,11 +153,11 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
153153
solver = ppsci.solver.Solver(
154154
model,
155155
constraint,
156-
output_dir,
156+
OUTPUT_DIR,
157157
optimizer,
158158
lr_scheduler,
159-
epochs,
160-
iters_per_epoch,
159+
EPOCHS,
160+
ITERS_PER_EPOCH,
161161
eval_during_train=True,
162162
eval_freq=50,
163163
validator=validator,
@@ -168,11 +168,11 @@ def get_mean_std(data: np.ndarray, visc: np.ndarray):
168168
solver.eval()
169169

170170
# directly evaluate pretrained model(optional)
171-
logger.init_logger("ppsci", f"{output_dir}/eval.log", "info")
171+
logger.init_logger("ppsci", f"{OUTPUT_DIR}/eval.log", "info")
172172
solver = ppsci.solver.Solver(
173173
model,
174-
output_dir=output_dir,
174+
output_dir=OUTPUT_DIR,
175175
validator=validator,
176-
pretrained_model_path=f"{output_dir}/checkpoints/latest",
176+
pretrained_model_path=f"{OUTPUT_DIR}/checkpoints/latest",
177177
)
178178
solver.eval()

0 commit comments

Comments (0)