monticore / EmbeddedMontiArc / generators / CNNArch2Gluon
Commit c6eb036e, authored Nov 26, 2019 by Christian Fuß
removed unnecessary code
parent 0cc51c81
Pipeline #211296 failed with stages in 20 seconds
Showing 4 changed files with 0 additions and 4 deletions
src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl  +0 -1
src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py  +0 -1
src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py  +0 -1
src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py  +0 -1
src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
...
@@ -53,7 +53,6 @@ class SoftmaxCrossEntropyLossIgnoreIndices(gluon.loss.Loss):
         else:
             label = _reshape_like(F, label, pred)
             loss = -(pred * label).sum(axis=self._axis, keepdims=True)
-        #loss = _apply_weighting(F, loss, self._weight, sample_weight)
         # ignore some indices for loss, e.g. <pad> tokens in NLP applications
         for i in self._ignore_indices:
             loss = loss * mx.nd.logical_not(mx.nd.equal(mx.nd.argmax(pred, axis=1), mx.nd.ones_like(mx.nd.argmax(pred, axis=1))*i))
...
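For context, the surviving masking line above multiplies the per-sample loss by a 0/1 mask that is 0 wherever the predicted class (argmax of pred) equals one of the ignore indices. Below is a minimal, self-contained sketch of that masking on toy data; it is not part of the commit, and the ignore index, shapes, and values are invented for illustration (the loss is simplified to a 1-D array).

# Minimal sketch, not from the repository: shows how the mask zeroes the loss
# for samples whose predicted class is an ignored index (e.g. a <pad> token id).
# The ignore index 0 and all numeric values below are made up.
import mxnet as mx

ignore_indices = [0]                          # hypothetical <pad> class id
pred = mx.nd.array([[2.0, 0.1, 0.1],          # argmax = 0 -> ignored
                    [0.1, 3.0, 0.2]])         # argmax = 1 -> kept
loss = mx.nd.array([0.7, 0.4])                # per-sample losses (simplified to 1-D)

for i in ignore_indices:
    predicted = mx.nd.argmax(pred, axis=1)    # predicted class ids: [0., 1.]
    mask = mx.nd.logical_not(
        mx.nd.equal(predicted, mx.nd.ones_like(predicted) * i))
    loss = loss * mask                        # -> [0.0, 0.4]

print(loss.asnumpy())

Note that, as in the diffed line, the mask is derived from the prediction (argmax of pred) rather than from the label.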
src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
...
@@ -52,7 +52,6 @@ class SoftmaxCrossEntropyLossIgnoreIndices(gluon.loss.Loss):
         else:
             label = _reshape_like(F, label, pred)
             loss = -(pred * label).sum(axis=self._axis, keepdims=True)
-        #loss = _apply_weighting(F, loss, self._weight, sample_weight)
         # ignore some indices for loss, e.g. <pad> tokens in NLP applications
         for i in self._ignore_indices:
             loss = loss * mx.nd.logical_not(mx.nd.equal(mx.nd.argmax(pred, axis=1), mx.nd.ones_like(mx.nd.argmax(pred, axis=1))*i))
...
src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
...
@@ -52,7 +52,6 @@ class SoftmaxCrossEntropyLossIgnoreIndices(gluon.loss.Loss):
         else:
             label = _reshape_like(F, label, pred)
             loss = -(pred * label).sum(axis=self._axis, keepdims=True)
-        #loss = _apply_weighting(F, loss, self._weight, sample_weight)
         # ignore some indices for loss, e.g. <pad> tokens in NLP applications
         for i in self._ignore_indices:
             loss = loss * mx.nd.logical_not(mx.nd.equal(mx.nd.argmax(pred, axis=1), mx.nd.ones_like(mx.nd.argmax(pred, axis=1))*i))
...
src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
...
@@ -52,7 +52,6 @@ class SoftmaxCrossEntropyLossIgnoreIndices(gluon.loss.Loss):
         else:
             label = _reshape_like(F, label, pred)
             loss = -(pred * label).sum(axis=self._axis, keepdims=True)
-        #loss = _apply_weighting(F, loss, self._weight, sample_weight)
         # ignore some indices for loss, e.g. <pad> tokens in NLP applications
         for i in self._ignore_indices:
             loss = loss * mx.nd.logical_not(mx.nd.equal(mx.nd.argmax(pred, axis=1), mx.nd.ones_like(mx.nd.argmax(pred, axis=1))*i))
...