|
| 1 | +# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. |
| 2 | + |
| 3 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | +# you may not use this file except in compliance with the License. |
| 5 | +# You may obtain a copy of the License at |
| 6 | + |
| 7 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | + |
| 9 | +# Unless required by applicable law or agreed to in writing, software |
| 10 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 | +# See the License for the specific language governing permissions and |
| 13 | +# limitations under the License. |
| 14 | + |
| 15 | +import numpy as np |
| 16 | + |
| 17 | +import ppsci |
| 18 | +from ppsci.utils import config |
| 19 | +from ppsci.utils import logger |
| 20 | + |
if __name__ == "__main__":
    args = config.parse_args()
    # set random seed for reproducibility
    ppsci.utils.misc.set_random_seed(42)
    # set output directory (CLI flag overrides the default)
    output_dir = "./output_darcy2d_coslr" if not args.output_dir else args.output_dir
    # initialize logger
    logger.init_logger("ppsci", f"{output_dir}/train.log", "info")

    # set model: MLP mapping (x, y) -> p, 5 hidden layers of 20 neurons, tanh
    model = ppsci.arch.MLP(("x", "y"), ("p",), 5, 20, "tanh", False, False)

    # set equation: 2-D Poisson equation
    equation = {"Poisson": ppsci.equation.Poisson(2)}

    # set geometry: unit square [0, 1] x [0, 1]
    geom = {"rect": ppsci.geometry.Rectangle((0.0, 0.0), (1.0, 1.0))}

    def p_ref(_in):
        """Analytical reference pressure p(x, y) = sin(2*pi*x) * cos(2*pi*y).

        Used both as the Dirichlet boundary value on all four edges and as
        the basis of the interior Poisson source term below.
        """
        return np.sin(2.0 * np.pi * _in["x"]) * np.cos(2.0 * np.pi * _in["y"])

    def poisson_source(_in):
        """Right-hand side of the Poisson equation for the reference solution.

        -laplacian(p_ref) = (4*pi^2 + 4*pi^2) * sin(2*pi*x) * cos(2*pi*y)
                          = 8*pi^2 * p_ref.
        """
        return 8.0 * np.pi**2 * p_ref(_in)

    # set dataloader config
    ITERS_PER_EPOCH = 1
    train_dataloader_cfg = {
        "dataset": "IterableNamedArrayDataset",
        "iters_per_epoch": ITERS_PER_EPOCH,
    }

    # numbers of sample points: interior grid and the four boundary edges
    NPOINT_PDE = 99**2
    NPOINT_TOP = 101
    NPOINT_BOTTOM = 101
    NPOINT_LEFT = 99
    NPOINT_RIGHT = 99

    # interior (PDE residual) constraint
    pde_constraint = ppsci.constraint.InteriorConstraint(
        equation["Poisson"].equations,
        {"poisson": poisson_source},
        geom["rect"],
        {**train_dataloader_cfg, "batch_size": NPOINT_PDE},
        ppsci.loss.MSELoss("sum"),
        evenly=True,
        name="EQ",
    )

    # the four Dirichlet boundary constraints are identical except for their
    # name, the number of points and the geometric criterion picking the edge
    bc_specs = (
        ("BC_top", NPOINT_TOP, lambda x, y: np.isclose(y, 1.0)),
        ("BC_bottom", NPOINT_BOTTOM, lambda x, y: np.isclose(y, 0.0)),
        ("BC_left", NPOINT_LEFT, lambda x, y: np.isclose(x, 0.0)),
        ("BC_right", NPOINT_RIGHT, lambda x, y: np.isclose(x, 1.0)),
    )
    # wrap constraints together
    constraint = {pde_constraint.name: pde_constraint}
    for bc_name, bc_npoint, bc_criteria in bc_specs:
        bc = ppsci.constraint.BoundaryConstraint(
            {"p": lambda out: out["p"]},
            {"p": p_ref},
            geom["rect"],
            {**train_dataloader_cfg, "batch_size": bc_npoint},
            ppsci.loss.MSELoss("sum"),
            criteria=bc_criteria,
            name=bc_name,
        )
        constraint[bc.name] = bc

    # set training hyper-parameters (CLI flag overrides the default epochs)
    epochs = 10000 if not args.epochs else args.epochs
    lr_scheduler = ppsci.optimizer.lr_scheduler.Cosine(
        epochs,
        ITERS_PER_EPOCH,
        0.001,
        warmup_epoch=int(0.05 * epochs),  # 5% of training used for warm-up
    )()

    # set optimizer
    optimizer = ppsci.optimizer.Adam(lr_scheduler)((model,))

    # set validator: PDE residual on the same interior grid
    NPOINT_EVAL = NPOINT_PDE
    residual_validator = ppsci.validate.GeometryValidator(
        equation["Poisson"].equations,
        {"poisson": poisson_source},
        geom["rect"],
        {
            "dataset": "NamedArrayDataset",
            "total_size": NPOINT_EVAL,
            "batch_size": 8192,
            "sampler": {"name": "BatchSampler"},
        },
        ppsci.loss.MSELoss("sum"),
        evenly=True,
        metric={"MSE": ppsci.metric.MSE()},
        name="Residual",
    )
    validator = {residual_validator.name: residual_validator}

    # set visualizer(optional)
    NPOINT_BC = NPOINT_TOP + NPOINT_BOTTOM + NPOINT_LEFT + NPOINT_RIGHT
    vis_interior_points = geom["rect"].sample_interior(NPOINT_PDE, evenly=True)
    vis_boundary_points = geom["rect"].sample_boundary(NPOINT_BC, evenly=True)

    # manually collate input data for visualization: interior + boundary
    vis_points = {}
    for key in vis_interior_points:
        vis_points[key] = np.concatenate(
            (vis_interior_points[key], vis_boundary_points[key])
        )

    visualizer = {
        "visualize_u_v": ppsci.visualize.VisualizerVtu(
            vis_points,
            {"p": lambda d: d["p"]},
            prefix="result_u_v",
        )
    }

    # initialize solver
    solver = ppsci.solver.Solver(
        model,
        constraint,
        output_dir,
        optimizer,
        lr_scheduler,
        epochs,
        ITERS_PER_EPOCH,
        eval_during_train=True,
        eval_freq=200,
        equation=equation,
        geom=geom,
        validator=validator,
        visualizer=visualizer,
    )
    # train model
    solver.train()
    # evaluate after finished training
    solver.eval()
    # visualize prediction after finished training
    solver.visualize()

    # directly evaluate pretrained model(optional)
    solver = ppsci.solver.Solver(
        model,
        constraint,
        output_dir,
        equation=equation,
        geom=geom,
        validator=validator,
        visualizer=visualizer,
        pretrained_model_path=f"{output_dir}/checkpoints/latest",
    )
    solver.eval()
    # visualize prediction for pretrained model(optional)
    solver.visualize()
0 commit comments