
Commit 334b930

remove args in super(...).__init__(...) for python3 style
1 parent 6e963c7 commit 334b930
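
For context, the change is purely syntactic: in Python 3, super() called with no arguments resolves to the same bound proxy as the explicit two-argument form, so every call site touched below behaves identically before and after the commit. A minimal standalone sketch of the before/after pattern (the class names and file path are illustrative, not taken from ppsci):

class Base:
    def __init__(self, file_path):
        self.file_path = file_path


class OldStyle(Base):
    def __init__(self, file_path):
        # Python 2-compatible form: class and instance passed explicitly.
        super(OldStyle, self).__init__(file_path)


class NewStyle(Base):
    def __init__(self, file_path):
        # Python 3 zero-argument form: the interpreter supplies the enclosing
        # class (via the implicit __class__ cell) and the instance.
        super().__init__(file_path)


assert OldStyle("lorenz.hdf5").file_path == NewStyle("lorenz.hdf5").file_path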

File tree

2 files changed: +13, -15 lines


ppsci/data/dataset/trphysx_dataset.py

Lines changed: 3 additions & 3 deletions
@@ -63,7 +63,7 @@ def __init__(
         weight_dict: Optional[Dict[str, float]] = None,
         embedding_model: Optional[base.Arch] = None,
     ):
-        super(LorenzDataset, self).__init__()
+        super().__init__()
         self.file_path = file_path
         self.input_keys = input_keys
         self.label_keys = label_keys
@@ -160,7 +160,7 @@ def __init__(
         weight_dict: Optional[Dict[str, float]] = None,
         embedding_model: Optional[base.Arch] = None,
     ):
-        super(RosslerDataset, self).__init__(
+        super().__init__(
             file_path,
             input_keys,
             label_keys,
@@ -209,7 +209,7 @@ def __init__(
         embedding_model: Optional[base.Arch] = None,
         embedding_batch_size: int = 64,
     ):
-        super(CylinderDataset, self).__init__()
+        super().__init__()
         self.file_path = file_path
         self.input_keys = input_keys
         self.label_keys = label_keys

ppsci/optimizer/lr_scheduler.py

Lines changed: 10 additions & 12 deletions
@@ -58,7 +58,7 @@ def __init__(
         verbose: bool = False,
     ) -> None:
         """Initialize and record the necessary parameters"""
-        super(LRBase, self).__init__()
+        super().__init__()
         if warmup_epoch >= epochs:
             msg = (
                 "When using warm up, the value of 'Global.epochs' should be greater "
@@ -124,7 +124,7 @@ class Constant(lr.LRScheduler):
     def __init__(self, learning_rate: float, last_epoch: int = -1):
         self.learning_rate = learning_rate
         self.last_epoch = last_epoch
-        super(Constant, self).__init__()
+        super().__init__()

     def get_lr(self) -> float:
         """always return the same learning rate"""
@@ -163,7 +163,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(Linear, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
@@ -230,7 +230,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(ExponentialDecay, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
@@ -291,7 +291,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(Cosine, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
@@ -357,7 +357,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(Step, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
@@ -418,7 +418,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(Piecewise, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             values[0],
@@ -479,7 +479,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(MultiStepDecay, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
@@ -538,9 +538,7 @@ def __init__(
         self.T_mult = T_mult
         self.eta_min = eta_min
         self.T_cur = last_epoch
-        super(CosineAnnealingWarmRestarts, self).__init__(
-            learning_rate, last_epoch, verbose
-        )
+        super().__init__(learning_rate, last_epoch, verbose)

     def get_lr(self):
         return (
@@ -616,7 +614,7 @@ def __init__(
         last_epoch: int = -1,
         by_epoch: bool = False,
     ):
-        super(CosineWarmRestarts, self).__init__(
+        super().__init__(
             epochs,
             iters_per_epoch,
             learning_rate,
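
Most of the hunks above forward the same positional arguments (epochs, iters_per_epoch, learning_rate, ...) to a shared base initializer; the zero-argument form forwards them unchanged. A small sketch of that argument-forwarding shape, using hypothetical class names rather than the real ppsci schedulers:

class SchedulerBase:
    def __init__(self, epochs, iters_per_epoch, learning_rate):
        self.epochs = epochs
        self.iters_per_epoch = iters_per_epoch
        self.learning_rate = learning_rate


class LinearLike(SchedulerBase):
    def __init__(self, epochs, iters_per_epoch, learning_rate, end_lr=0.0):
        # Same call as before the commit, minus the explicit class/instance:
        # super(LinearLike, self).__init__(epochs, iters_per_epoch, learning_rate)
        super().__init__(epochs, iters_per_epoch, learning_rate)
        self.end_lr = end_lr


sched = LinearLike(epochs=10, iters_per_epoch=100, learning_rate=1e-3)
print(sched.learning_rate)  # 0.001

One caveat of the zero-argument form: it relies on the compiler-provided __class__ cell, so it only works inside methods defined in a class body. Every call site touched in this commit satisfies that.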

0 commit comments
