monticore / EmbeddedMontiArc / generators / CNNArch2Gluon

Commit d6185b6e, authored Oct 30, 2019 by Sebastian Nickels
Merge commit, parents 039f6e64 and d04e1188
Pipeline #200183 failed with stages in 40 seconds
Changes: 8 · Pipelines: 1
src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonLayerSupportChecker.java
...
...
@@ -33,6 +33,10 @@ public class CNNArch2GluonLayerSupportChecker extends LayerSupportChecker {
        supportedLayerList.add(AllPredefinedLayers.EXPAND_DIMS_NAME);
        supportedLayerList.add(AllPredefinedLayers.SQUEEZE_NAME);
        supportedLayerList.add(AllPredefinedLayers.SWAPAXES_NAME);
        supportedLayerList.add(AllPredefinedLayers.BROADCAST_MULTIPLY_NAME);
        supportedLayerList.add(AllPredefinedLayers.REDUCE_SUM_NAME);
        supportedLayerList.add(AllPredefinedLayers.BROADCAST_ADD_NAME);
        supportedLayerList.add(AllPredefinedLayers.RESHAPE_NAME);
    }
}
src/main/resources/templates/gluon/CNNNet.ftl
...
...
@@ -40,6 +40,16 @@ class NoNormalization(gluon.HybridBlock):
        return x

class Reshape(gluon.HybridBlock):
    def __init__(self, shape, **kwargs):
        super(Reshape, self).__init__(**kwargs)
        with self.name_scope():
            self.shape = shape

    def hybrid_forward(self, F, x):
        return F.reshape(data=x, shape=self.shape)

class CustomRNN(gluon.HybridBlock):
    def __init__(self, hidden_size, num_layers, bidirectional, **kwargs):
        super(CustomRNN, self).__init__(**kwargs)
...
...
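The Reshape block added above carries no parameters, so it can be exercised standalone. A minimal sketch, assuming the Reshape class from this hunk is in scope (the input and target shape below are illustrative, not generator output):

# Sketch only: standalone use of the Reshape HybridBlock defined above.
import mxnet as mx

block = Reshape(shape=(2, 6))          # parameter-free block, no initialization needed
x = mx.nd.arange(12).reshape((3, 4))   # illustrative input
print(block(x).shape)                  # (2, 6)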
src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
...
...
@@ -34,6 +34,30 @@ class LogCoshLoss(gluon.loss.Loss):
        loss = gluon.loss._apply_weighting(F, loss, self._weight, sample_weight)
        return F.mean(loss, axis=self._batch_axis, exclude=True)

class SoftmaxCrossEntropyLossIgnoreIndices(gluon.loss.Loss):
    def __init__(self, axis=-1, ignore_indices=[], sparse_label=True, from_logits=False, weight=None, batch_axis=0, **kwargs):
        super(SoftmaxCrossEntropyLossIgnoreIndices, self).__init__(weight, batch_axis, **kwargs)
        self._axis = axis
        self._ignore_indices = ignore_indices
        self._sparse_label = sparse_label
        self._from_logits = from_logits

    def hybrid_forward(self, F, pred, label, sample_weight=None):
        log_softmax = F.log_softmax
        pick = F.pick
        if not self._from_logits:
            pred = log_softmax(pred, self._axis)
        if self._sparse_label:
            loss = -pick(pred, label, axis=self._axis, keepdims=True)
        else:
            label = _reshape_like(F, label, pred)
            loss = -(pred * label).sum(axis=self._axis, keepdims=True)
        #loss = _apply_weighting(F, loss, self._weight, sample_weight)
        # ignore some indices for loss, e.g. <pad> tokens in NLP applications
        for i in self._ignore_indices:
            loss = loss * mx.nd.logical_not(mx.nd.equal(mx.nd.argmax(pred, axis=1), mx.nd.ones_like(mx.nd.argmax(pred, axis=1))*i))
        return loss.mean(axis=self._batch_axis, exclude=True)

@mx.metric.register
class BLEU(mx.metric.EvalMetric):
    N = 4
...
...
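The new SoftmaxCrossEntropyLossIgnoreIndices multiplies the per-sample loss by a mask that is zero wherever the predicted class (argmax of pred) equals one of the ignored indices, e.g. a <pad> token id. A minimal usage sketch, assuming the class from the hunk above is in scope (the pad index and shapes are illustrative assumptions):

# Sketch only: assumed usage of SoftmaxCrossEntropyLossIgnoreIndices from above.
import mxnet as mx

pad_idx = 0                                  # assumed <pad> token id
loss_fn = SoftmaxCrossEntropyLossIgnoreIndices(ignore_indices=[pad_idx])

pred = mx.nd.random.uniform(shape=(4, 10))   # 4 samples, 10 classes (unnormalized scores)
label = mx.nd.array([3, 0, 7, 2])            # sparse labels
loss = loss_fn(pred, label)                  # mean over all non-batch axes
print(loss.shape)                            # (4,): one value per batch element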
@@ -144,6 +168,8 @@ class BLEU(mx.metric.EvalMetric):
        return new_list

class ${tc.fileNameWithoutEnding}:
    def applyBeamSearch(input, length, width, maxLength, currProb, netIndex, bestOutput):
        bestProb = 0.0
...
...
@@ -336,12 +362,43 @@ class ${tc.fileNameWithoutEnding}:
            predictions = []
            for output_name in outputs:
-               if mx.nd.shape_array(output_name).size > 1:
+               if mx.nd.shape_array(mx.nd.squeeze(output_name)).size > 1:
                    predictions.append(mx.nd.argmax(output_name, axis=1))
                #ArgMax already applied
                else:
                    predictions.append(output_name)

            '''
            #Compute BLEU and NIST Score if data folder contains a dictionary -> NLP dataset
            if(os.path.isfile('src/test/resources/training_data/Show_attend_tell/dict.pkl')):
                with open('src/test/resources/training_data/Show_attend_tell/dict.pkl', 'rb') as f:
                    dict = pickle.load(f)

                import nltk.translate.bleu_score
                import nltk.translate.nist_score

                prediction = []
                for index in range(batch_size):
                    sentence = ''
                    for entry in predictions:
                        sentence += dict[int(entry[index].asscalar())] + ' '
                    prediction.append(sentence)

                for index in range(batch_size):
                    sentence = ''
                    for batchEntry in batch.label:
                        sentence += dict[int(batchEntry[index].asscalar())] + ' '

                    print("############################")
                    print("label1: ", sentence)
                    print("prediction1: ", prediction[index])

                    BLEUscore = nltk.translate.bleu_score.sentence_bleu([sentence], prediction[index])
                    NISTscore = nltk.translate.nist_score.sentence_nist([sentence], prediction[index])
                    print("BLEU: ", BLEUscore)
                    print("NIST: ", NISTscore)
                    print("############################")
            '''

            metric.update(preds=predictions, labels=labels)

        train_metric_score = metric.get()[1]
...
...
@@ -366,7 +423,7 @@ class ${tc.fileNameWithoutEnding}:
            predictions = []
            for output_name in outputs:
-               if mx.nd.shape_array(output_name).size > 1:
+               if mx.nd.shape_array(mx.nd.squeeze(output_name)).size > 1:
                    predictions.append(mx.nd.argmax(output_name, axis=1))
                #ArgMax already applied
                else:
...
...
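Both hunks replace the size check on the raw output shape with one on the squeezed output, so outputs that carry only a trailing singleton dimension (already argmaxed class indices) are no longer argmaxed a second time. A minimal sketch of the distinction, with illustrative shapes:

# Sketch: effect of squeezing before the shape_array size check (shapes are assumptions).
import mxnet as mx

scores = mx.nd.zeros((4, 10))   # per-class scores: argmax still needed
indices = mx.nd.zeros((4, 1))   # already class indices, with a singleton axis

print(mx.nd.shape_array(indices).size > 1)                 # True  -> old check would argmax again
print(mx.nd.shape_array(mx.nd.squeeze(indices)).size > 1)  # False -> new check keeps the indices
print(mx.nd.shape_array(mx.nd.squeeze(scores)).size > 1)   # True  -> score matrices still argmaxed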
src/main/resources/templates/gluon/elements/BroadcastAdd.ftl
new file (mode 100644)
<#if mode == "FORWARD_FUNCTION">
${element.name} = F.broadcast_add(${tc.join(element.inputs, ",")})
<#elseif mode == "PYTHON_INLINE">
self.${element.name} = mx.nd.broadcast_add(${tc.join(element.inputs, ",")})
</#if>
\ No newline at end of file
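For illustration only, with a hypothetical element named broadcastadd3_ and inputs fc2_ and embedding1_ (names are made up, not generator output), the two branches would render roughly as:

# FORWARD_FUNCTION mode (illustrative rendering):
broadcastadd3_ = F.broadcast_add(fc2_,embedding1_)
# PYTHON_INLINE mode (illustrative rendering):
self.broadcastadd3_ = mx.nd.broadcast_add(fc2_,embedding1_)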
src/main/resources/templates/gluon/elements/BroadcastMultiply.ftl
new file (mode 100644)
<#if mode == "FORWARD_FUNCTION">
${element.name} = F.broadcast_mul(${tc.join(element.inputs, ", ")})
</#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/Multiply.ftl
deleted file (mode 100644)
<#if mode == "FORWARD_FUNCTION">
${element.name} = ${tc.join(element.inputs, " * ")}
<#elseif mode == "PYTHON_INLINE">
${element.name} = ${tc.join(element.inputs, " * ")}
<#elseif mode == "CPP_INLINE">
vector<float> ${element.name}(${element.inputs[0]}.size());
for (size_t i = 0; i != ${element.name}.size(); ++i) {
${element.name}[i] = ${tc.join(element.inputs, " * ", "", "[i]")};
}
</#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/Reshape.ftl
new file (mode 100644)
<#assign input = element.inputs[0]>
<#if mode == "ARCHITECTURE_DEFINITION">
self.${element.name} = Reshape(shape=(${tc.join(element.shape, ",")}))
<#include "OutputShape.ftl">
<#elseif mode == "FORWARD_FUNCTION">
${element.name} = self.${element.name}(${input})
<#elseif mode == "PYTHON_INLINE">
self.${element.name} = Reshape(shape=${shape})
</#if>
\ No newline at end of file
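For illustration only, with a hypothetical element reshape1_ whose input is fc1_ and whose shape attribute resolves to 64,8,8 (all names and values assumed), the ARCHITECTURE_DEFINITION and FORWARD_FUNCTION branches would render roughly as:

# ARCHITECTURE_DEFINITION mode (illustrative rendering, uses the Reshape block from CNNNet.ftl):
self.reshape1_ = Reshape(shape=(64,8,8))
# FORWARD_FUNCTION mode (illustrative rendering):
reshape1_ = self.reshape1_(fc1_)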
src/main/resources/templates/gluon/elements/Softmax.ftl
<#-- This template is not used if the following architecture element is an output. See Output.ftl -->
<#assign axis = element.axis?c>
<#assign input = element.inputs[0]>
<#if mode == "FORWARD_FUNCTION">
- ${element.name} = F.softmax(${input})
+ ${element.name} = F.softmax(${input}, axis=${axis})
</#if>
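As a hedged example of the effect: a hypothetical softmax1_ element with input fc3_ and axis -1 (assumed values) would now render with the axis made explicit:

# FORWARD_FUNCTION mode (illustrative rendering):
softmax1_ = F.softmax(fc3_, axis=-1)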