diff --git a/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl b/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
index 7ea337ee8b203999d4d5856fea7e1f9e480ce479..01b3ab7dbaa7e58b68128dadc7a6d9b67ef21728 100644
--- a/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
+++ b/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
@@ -254,17 +254,17 @@ class ${tc.fileNameWithoutEnding}:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py b/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
index 117f8e0a96257fbbc7f664acbd26e728c204e64e..5d38cbb0b27163467596847f969c89aea1a61d61 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
@@ -253,17 +253,17 @@ class CNNSupervisedTrainer_Alexnet:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py b/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
index fe2e326a38b861aa09beb702ba00b295c0d4c570..4e52f0d31a47ca8453f0c37a13e5c283f6ce3681 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
@@ -253,17 +253,17 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_Invariant.py b/src/test/resources/target_code/CNNSupervisedTrainer_Invariant.py
index 25f8683ab6750218df4577a682b67edd81bc2127..a31eac6b37df005c713e5620f8a13dbda468acc7 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_Invariant.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_Invariant.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_Invariant:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_MultipleStreams.py b/src/test/resources/target_code/CNNSupervisedTrainer_MultipleStreams.py
index 7824640a7beab1936a5bb272fcab007862cba097..f8db6d2dfd7d2403aa0923ed157282455562aadb 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_MultipleStreams.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_MultipleStreams.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_MultipleStreams:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_RNNencdec.py b/src/test/resources/target_code/CNNSupervisedTrainer_RNNencdec.py
index 76148479661f78543cd7bb8c2ea8e7bc44b4bf5c..963fb7bd94a1e880fb807c5ea45fdf4e2580c8e9 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_RNNencdec.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_RNNencdec.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_RNNencdec:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_RNNsearch.py b/src/test/resources/target_code/CNNSupervisedTrainer_RNNsearch.py
index 226b8bb30204f58b8b5fb3e7811cbc2bf0f43e02..fd12e18f6052fe40823ea1c0e8784110f103e9ea 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_RNNsearch.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_RNNsearch.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_RNNsearch:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_RNNtest.py b/src/test/resources/target_code/CNNSupervisedTrainer_RNNtest.py
index b8347fc39c81e0d47cc3b552c2884c65b793bcbf..f7d34328c12925afe98eee62ae0e94ed38a95752 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_RNNtest.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_RNNtest.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_RNNtest:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_ResNeXt50.py b/src/test/resources/target_code/CNNSupervisedTrainer_ResNeXt50.py
index 8a68613ff27a6272ae1699d4131e275e61b55066..b9f60cea5041c744cf73ed614e11ec38a2c8208f 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_ResNeXt50.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_ResNeXt50.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_ResNeXt50:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_Show_attend_tell.py b/src/test/resources/target_code/CNNSupervisedTrainer_Show_attend_tell.py
index 6df5bf3719605aebe3e09e92a710cc2022721de8..c428ed0c1cf9de80004c4562574673ed8f822fd2 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_Show_attend_tell.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_Show_attend_tell.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_Show_attend_tell:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_ThreeInputCNN_M14.py b/src/test/resources/target_code/CNNSupervisedTrainer_ThreeInputCNN_M14.py
index 9d45556cc14565ed2d2321aa0f75526aeb6649b4..d4c05848f279cedddf450623fdad13faccce7663 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_ThreeInputCNN_M14.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_ThreeInputCNN_M14.py
@@ -246,17 +246,17 @@ class CNNSupervisedTrainer_ThreeInputCNN_M14:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py b/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
index 92e61e38fa01b3cad0f2b4eb527ffbed072a86ab..9df9611a1ad66077e355dd5dfab981fb122e42da 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
@@ -253,17 +253,17 @@ class CNNSupervisedTrainer_VGG16:
         margin = loss_params['margin'] if 'margin' in loss_params else 1.0
         sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
         ignore_indices = [loss_params['ignore_indices']] if 'ignore_indices' in loss_params else []
-        lossAxis = loss_params['lossAxis'] if 'lossAxis' in loss_params else -1
+        loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
         if loss == 'softmax_cross_entropy':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
-            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=lossAxis, from_logits=fromLogits, sparse_label=sparseLabel)
+            loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'softmax_cross_entropy_ignore_indices':
             fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False
             loss_function = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=ignore_indices, from_logits=fromLogits, sparse_label=sparseLabel)
         elif loss == 'sigmoid_binary_cross_entropy':
             loss_function = mx.gluon.loss.SigmoidBinaryCrossEntropyLoss()
         elif loss == 'cross_entropy':
-            loss_function = CrossEntropyLoss(axis=lossAxis, sparse_label=sparseLabel)
+            loss_function = CrossEntropyLoss(axis=loss_axis, sparse_label=sparseLabel)
         elif loss == 'l2':
             loss_function = mx.gluon.loss.L2Loss()
         elif loss == 'l1':
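A minimal sketch of how the renamed key is consumed after this patch. The loss_params values below are illustrative assumptions, not taken from any test resource; the lookup and the Gluon constructor call mirror the generated trainer code above, and mx.gluon.loss.SoftmaxCrossEntropyLoss(axis=..., from_logits=..., sparse_label=...) is the standard MXNet Gluon API.

    import mxnet as mx

    # Hypothetical configuration passed to the generated trainer's train() method.
    loss_params = {'loss_axis': 1, 'sparse_label': True, 'from_logits': False}

    # Same fallback defaults as in the template: axis -1, sparse labels, raw scores.
    loss_axis = loss_params['loss_axis'] if 'loss_axis' in loss_params else -1
    sparseLabel = loss_params['sparse_label'] if 'sparse_label' in loss_params else True
    fromLogits = loss_params['from_logits'] if 'from_logits' in loss_params else False

    # The constructor the generated trainers call for loss == 'softmax_cross_entropy'.
    loss_function = mx.gluon.loss.SoftmaxCrossEntropyLoss(
        axis=loss_axis, from_logits=fromLogits, sparse_label=sparseLabel)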