Commit 01425f2d authored by Julian Treiber

updated tests

parent 536b80fa
@@ -253,16 +253,17 @@ class CNNSupervisedTrainer_Alexnet:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
......
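The new `axis` argument selects which dimension of the prediction array the softmax and cross-entropy reduction run over; the default `-1` covers both plain classifiers with shape (batch, classes) and sequence decoders with shape (batch, time, vocab). A minimal sketch of the effect, not part of the commit, assuming only that MXNet is installed (array names and shapes are illustrative):

import mxnet as mx

# Sequence-shaped predictions: (batch, time, vocab). With axis=-1 the loss
# applies softmax over the vocabulary scores at every time step and then
# averages over all non-batch axes, yielding one value per batch element.
pred = mx.nd.random.uniform(shape=(4, 30, 100))
label = mx.nd.random.randint(0, 100, shape=(4, 30)).astype('float32')

loss_fn = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=-1, sparse_label=True, from_logits=False)
print(loss_fn(pred, label).shape)  # (4,)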
@@ -253,16 +253,17 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_Invariant:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -425,7 +426,7 @@ class CNNSupervisedTrainer_Invariant:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(3)]
                 data_0_ = batch.data[0].as_in_context(mx_context)
......
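For reference, the evaluation pattern these test-loop hunks touch: a metric is created by name via `mx.metric.create`, and each batch's labels and data are copied onto the target device with `as_in_context` before the update. A hedged, self-contained sketch of that pattern (the values are made up, not taken from the tests):

import mxnet as mx

mx_context = mx.cpu()
metric = mx.metric.create('acc')  # same factory the generated trainers use

# Two predictions, both correct, so accuracy comes out as 1.0.
preds = mx.nd.array([[0.9, 0.1], [0.2, 0.8]], ctx=mx_context)
labels = mx.nd.array([0, 1], ctx=mx_context)
metric.update(labels=[labels], preds=[preds])
print(metric.get())  # ('accuracy', 1.0)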
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_MultipleStreams:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -417,7 +418,7 @@ class CNNSupervisedTrainer_MultipleStreams:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(2)]
                 data_0_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_RNNencdec:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -481,7 +482,7 @@ class CNNSupervisedTrainer_RNNencdec:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(30)]
                 source_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_RNNsearch:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -479,7 +480,7 @@ class CNNSupervisedTrainer_RNNsearch:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(30)]
                 source_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_RNNtest:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -458,7 +459,7 @@ class CNNSupervisedTrainer_RNNtest:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(5)]
                 source_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_ResNeXt50:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -407,7 +408,7 @@ class CNNSupervisedTrainer_ResNeXt50:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(1)]
                 data_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_Show_attend_tell:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -473,7 +474,7 @@ class CNNSupervisedTrainer_Show_attend_tell:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(25)]
                 images_ = batch.data[0].as_in_context(mx_context)
......
@@ -246,16 +246,17 @@ class CNNSupervisedTrainer_ThreeInputCNN_M14:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
@@ -411,7 +412,7 @@ class CNNSupervisedTrainer_ThreeInputCNN_M14:
         test_iter.reset()
         metric = mx.metric.create(eval_metric, **eval_metric_params)
         for batch_i, batch in enumerate(test_iter):
-            if True:
+            if True:
                 labels = [batch.label[i].as_in_context(mx_context) for i in range(1)]
                 data_0_ = batch.data[0].as_in_context(mx_context)
......
@@ -253,16 +253,17 @@ class CNNSupervisedTrainer_VGG16:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
+        lossAxis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
......
@@ -30,6 +30,7 @@ if __name__ == "__main__":
         loss='softmax_cross_entropy',
         loss_params={
             'sparse_label': True,
+            'loss_axis': -1,
             'from_logits': False},
         optimizer='rmsprop',
         optimizer_params={
......
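Note that the keys in this `loss_params` dict must match the lookups in the trainers above: the trainer reads `loss_params['loss_axis']`, so a key spelled any other way would silently fall back to the default of -1. A small sketch of the same lookup pattern written with the more idiomatic `dict.get`, behaviorally equivalent to the `x if k in d else default` chains in the hunks:

loss_params = {'sparse_label': True, 'loss_axis': -1, 'from_logits': False}

# Equivalent to: loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
sparseLabel = loss_params.get('sparse_label', True)
lossAxis = loss_params.get('loss_axis', -1)
fromLogits = loss_params.get('from_logits', False)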
@@ -5,6 +5,7 @@ configuration FullConfig{
    load_checkpoint : true
    eval_metric : mse
    loss: softmax_cross_entropy{
+        loss_axis: -1
        sparse_label: true
        from_logits: false
    }
......
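Based on the hunks above, each entry of the `softmax_cross_entropy` block in this training configuration appears to map one-to-one onto a keyword argument of the generated Gluon loss. A hedged sketch of the resulting call; the mapping is inferred from this diff, not from the generator's documentation:

import mxnet as mx

loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(
    axis=-1,             # loss_axis: -1
    sparse_label=True,   # sparse_label: true
    from_logits=False)   # from_logits: false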