1 parent 062b956 commit 05d3555
synaptic_reconstruction/training/supervised_training.py
@@ -255,7 +255,7 @@ def supervised_training(
         pass
     # If we have an ignore label the loss and metric have to be modified
     # so that the ignore mask is not used in the gradient calculation.
-    if ignore_label is not None:
+    elif ignore_label is not None:
         loss = torch_em.loss.LossWrapper(
             loss=torch_em.loss.DiceLoss(),
             transform=torch_em.loss.wrapper.MaskIgnoreLabel(
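For context, the snippet below is a minimal sketch of how this ignore-label branch is typically wired up with torch_em; it is not code from this commit. It assumes MaskIgnoreLabel accepts an ignore_label keyword and uses a hypothetical semantic_targets flag to stand in for whatever condition the preceding if branch in supervised_training checks. Switching the branch from if to elif means the ignore-label wrapping is skipped when that earlier branch has already configured the loss.

import torch_em


def build_loss(ignore_label=None, semantic_targets=False):
    # Sketch only: `semantic_targets` is a placeholder for the condition
    # handled by the preceding branch in the real supervised_training.
    loss = metric = None
    if semantic_targets:
        pass
    elif ignore_label is not None:
        # Mask out the ignore label so it does not enter the gradient
        # calculation; the same wrapped loss can serve as the metric.
        loss = torch_em.loss.LossWrapper(
            loss=torch_em.loss.DiceLoss(),
            transform=torch_em.loss.wrapper.MaskIgnoreLabel(ignore_label=ignore_label),
        )
        metric = loss
    return loss, metric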