Commit 1cfd7cf9 authored by Julian Treiber

removed illegal parameter 'from_logits' in dice_loss

parent 0f841c73
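
The diffs below drop `from_logits` from the `DiceLoss` construction in every generated trainer, since the project's `DiceLoss` implementation does not accept that keyword. As a minimal illustrative sketch (not part of this commit; `build_dice_loss` is a hypothetical helper, while `DiceLoss`, `loss_params`, `loss_axis`, `sparseLabel`, and `batch_axis` are the names already used in the trainers), the same effect could also be reached defensively by filtering keyword arguments against the constructor's signature:

```python
import inspect

def build_dice_loss(DiceLoss, loss_params, loss_axis, sparseLabel, batch_axis):
    # Candidate keyword arguments, mirroring what the generated trainers collect.
    kwargs = {
        'axis': loss_axis,
        'weight': loss_params['dice_weight'] if 'dice_weight' in loss_params else None,
        'sparse_label': sparseLabel,
        'batch_axis': batch_axis,
        # Kept only to demonstrate the filtering: it is dropped below whenever
        # the DiceLoss constructor does not declare a 'from_logits' parameter.
        'from_logits': loss_params['from_logits'] if 'from_logits' in loss_params else False,
    }
    # Keep only the keywords that DiceLoss.__init__ actually declares.
    accepted = inspect.signature(DiceLoss.__init__).parameters
    kwargs = {name: value for name, value in kwargs.items() if name in accepted}
    return DiceLoss(**kwargs)
```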
@@ -296,9 +296,8 @@ class ${tc.fileNameWithoutEnding}:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -295,7 +295,6 @@ class CNNSupervisedTrainer_Alexnet:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
             loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
@@ -295,9 +295,8 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_Invariant:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_MultipleStreams:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_RNNencdec:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_RNNsearch:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_RNNtest:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_ResNeXt50:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_Show_attend_tell:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -288,9 +288,8 @@ class CNNSupervisedTrainer_ThreeInputCNN_M14:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -295,9 +295,8 @@ class CNNSupervisedTrainer_VGG16:
         elif loss == 'cross_entropy':
             loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'dice_loss':
-            fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             dice_weight = loss_params['dice_weight'] if 'dice_weight' in loss_params else None
-            loss_function = DiceLoss(axis=loss_axis, from_logits=fromLogits, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
+            loss_function = DiceLoss(axis=loss_axis, weight=dice_weight, sparse_label=sparseLabel, batch_axis=batch_axis)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':