From a0fc79fc098e8d6b16c075f858232d91792ea1a9 Mon Sep 17 00:00:00 2001
From: Sebastian Nickels <sn1c@protonmail.ch>
Date: Wed, 3 Jul 2019 02:08:18 +0200
Subject: [PATCH] Changed OneHot layer, added support for constants, and
 implemented invariants

---
 ...NArch2GluonArchitectureSupportChecker.java |  8 ++++
 .../CNNArch2GluonTemplateController.java      | 23 +++++++++-
 .../gluongenerator/NetDefinitionMode.java     |  8 +++-
 .../templates/gluon/CNNSupervisedTrainer.ftl  | 43 ++++++++++++++-----
 .../templates/gluon/elements/Add.ftl          |  7 +++
 .../templates/gluon/elements/BatchNorm.ftl    |  3 +-
 .../templates/gluon/elements/Concatenate.ftl  |  3 +-
 .../templates/gluon/elements/Const.ftl        |  7 +++
 .../templates/gluon/elements/Convolution.ftl  |  3 +-
 .../templates/gluon/elements/Dropout.ftl      |  3 +-
 .../templates/gluon/elements/Flatten.ftl      |  3 +-
 .../gluon/elements/FullyConnected.ftl         |  3 +-
 .../templates/gluon/elements/Get.ftl          |  4 ++
 .../gluon/elements/GlobalPooling.ftl          |  3 +-
 .../templates/gluon/elements/Input.ftl        |  7 ++-
 .../templates/gluon/elements/OneHot.ftl       | 10 +++--
 .../templates/gluon/elements/Output.ftl       |  7 ++-
 .../templates/gluon/elements/Pooling.ftl      |  3 +-
 .../templates/gluon/elements/Relu.ftl         |  3 +-
 .../templates/gluon/elements/Sigmoid.ftl      |  3 +-
 .../templates/gluon/elements/Softmax.ftl      |  3 +-
 .../templates/gluon/elements/Split.ftl        |  3 +-
 .../templates/gluon/elements/Tanh.ftl         |  3 +-
 .../resources/templates/gluon/execute.ftl     |  2 +
 .../gluongenerator/GenerationTest.java        |  8 ++++
 .../CNNSupervisedTrainer_Alexnet.py           | 10 +++--
 ...upervisedTrainer_CifarClassifierNetwork.py | 10 +++--
 .../target_code/CNNSupervisedTrainer_VGG16.py | 10 +++--
 src/test/resources/valid_tests/Invariant.cnna | 17 ++++++++
 .../valid_tests/MultipleStreams.cnna          |  2 +-
 src/test/resources/valid_tests/data_paths.txt |  3 +-
 31 files changed, 169 insertions(+), 56 deletions(-)
 create mode 100644 src/main/resources/templates/gluon/elements/Const.ftl
 create mode 100644 src/test/resources/valid_tests/Invariant.cnna

diff --git a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonArchitectureSupportChecker.java b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonArchitectureSupportChecker.java
index 80cb310b..72a92d39 100644
--- a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonArchitectureSupportChecker.java
+++ b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonArchitectureSupportChecker.java
@@ -7,16 +7,24 @@ public class CNNArch2GluonArchitectureSupportChecker extends ArchitectureSupport
 
     public CNNArch2GluonArchitectureSupportChecker() {}
 
+    @Override
     protected boolean checkMultipleStreams(ArchitectureSymbol architecture) {
         return true;
     }
 
+    @Override
     protected boolean checkMultipleInputs(ArchitectureSymbol architecture) {
         return true;
     }
 
+    @Override
     protected boolean checkMultipleOutputs(ArchitectureSymbol architecture) {
         return true;
     }
 
+    @Override
+    protected boolean checkConstants(ArchitectureSymbol architecture) {
+        return true;
+    }
+
 }
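
The three existing overrides and the new checkConstants all flip capability flags inherited from ArchitectureSupportChecker. A minimal Python sketch of the pattern (the base-class defaults shown here are assumed, not taken from the source):

    class ArchitectureSupportChecker:
        # Assumed base behavior: features are unsupported until a backend opts in.
        def check_constants(self, architecture):
            return False

    class CNNArch2GluonArchitectureSupportChecker(ArchitectureSupportChecker):
        def check_constants(self, architecture):
            # The Gluon backend now accepts constant stream sources such as
            # the "1 -> OneHot(size=4)" stream in Invariant.cnna below.
            return True
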
diff --git a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java
index eeb67f23..d35b4f9a 100644
--- a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java
+++ b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java
@@ -65,13 +65,27 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         setCurrentElement(previousElement);
     }
 
+    public void include(ConstantSymbol constant, Writer writer, NetDefinitionMode netDefinitionMode) {
+        ArchitectureElementData previousElement = getCurrentElement();
+        setCurrentElement(constant);
+
+        if (constant.isAtomic()) {
+            include(TEMPLATE_ELEMENTS_DIR_PATH, "Const", writer, netDefinitionMode);
+        }
+        else {
+            include(constant.getResolvedThis().get(), writer, netDefinitionMode);
+        }
+
+        setCurrentElement(previousElement);
+    }
+
     public void include(LayerSymbol layer, Writer writer, NetDefinitionMode netDefinitionMode){
         ArchitectureElementData previousElement = getCurrentElement();
         setCurrentElement(layer);
 
         if (layer.isAtomic()){
             ArchitectureElementSymbol nextElement = layer.getOutputElement().get();
-            if (!isSoftmaxOutput(nextElement) && !isLogisticRegressionOutput(nextElement) && !isOneHotOutput(nextElement)){
+            if (!isSoftmaxOutput(nextElement) && !isLogisticRegressionOutput(nextElement)){
                 String templateName = layer.getDeclaration().getName();
                 include(TEMPLATE_ELEMENTS_DIR_PATH, templateName, writer, netDefinitionMode);
             }
@@ -101,6 +115,9 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         else if (architectureElement instanceof LayerSymbol){
             include((LayerSymbol) architectureElement, writer, netDefinitionMode);
         }
+        else if (architectureElement instanceof ConstantSymbol) {
+            include((ConstantSymbol) architectureElement, writer, netDefinitionMode);
+        }
         else {
             include((IOSymbol) architectureElement, writer, netDefinitionMode);
         }
@@ -117,6 +134,10 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         include(architectureElement, getWriter(), netDefinitionMode);
     }
 
+    public String ioNameToCpp(String ioName) {
+        return ioName.replaceAll("_([0-9]+)_", "[$1]");
+    }
+
     public List<String> getStreamInputNames(SerialCompositeElementSymbol stream) {
         List<String> names = new ArrayList<>();
 
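
The new ioNameToCpp helper converts the flattened array-port names used on the Python side into C++ subscripts for the CPP_INLINE templates. A small Python sketch of the same rewrite (port names hypothetical):

    import re

    def io_name_to_cpp(io_name):
        # Mirrors the Java replaceAll("_([0-9]+)_", "[$1]"):
        # each "_<digits>_" infix becomes an index access.
        return re.sub(r"_([0-9]+)_", r"[\1]", io_name)

    assert io_name_to_cpp("data_0_") == "data[0]"
    assert io_name_to_cpp("pred_2_") == "pred[2]"
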
diff --git a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/NetDefinitionMode.java b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/NetDefinitionMode.java
index f0916a68..22dabdff 100644
--- a/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/NetDefinitionMode.java
+++ b/src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/NetDefinitionMode.java
@@ -5,7 +5,9 @@ package de.monticore.lang.monticar.cnnarch.gluongenerator;
  */
 public enum NetDefinitionMode {
     ARCHITECTURE_DEFINITION,
-    FORWARD_FUNCTION;
+    FORWARD_FUNCTION,
+    PYTHON_INLINE,
+    CPP_INLINE;
 
     public static NetDefinitionMode fromString(final String netDefinitionMode) {
         switch(netDefinitionMode) {
@@ -13,6 +15,10 @@ public enum NetDefinitionMode {
                 return ARCHITECTURE_DEFINITION;
             case "FORWARD_FUNCTION":
                 return FORWARD_FUNCTION;
+            case "PYTHON_INLINE":
+                return PYTHON_INLINE;
+            case "CPP_INLINE":
+                return CPP_INLINE;
             default:
                 throw new IllegalArgumentException("Unknown Net Definition Mode");
         }
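
The two added modes tag template output that is spliced verbatim into the Python trainer (PYTHON_INLINE) or the C++ execute method (CPP_INLINE) rather than into a Gluon network definition. A rough Python equivalent of the extended enum and its lookup, for illustration:

    from enum import Enum

    class NetDefinitionMode(Enum):
        ARCHITECTURE_DEFINITION = "ARCHITECTURE_DEFINITION"
        FORWARD_FUNCTION = "FORWARD_FUNCTION"
        PYTHON_INLINE = "PYTHON_INLINE"
        CPP_INLINE = "CPP_INLINE"

        @staticmethod
        def from_string(mode):
            try:
                return NetDefinitionMode(mode)
            except ValueError:
                raise ValueError("Unknown Net Definition Mode")
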
diff --git a/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl b/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
index c83d1938..9aeba0aa 100644
--- a/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
+++ b/src/main/resources/templates/gluon/CNNSupervisedTrainer.ftl
@@ -100,11 +100,19 @@ class ${tc.fileNameWithoutEnding}:
 <#if stream.isNetwork()>
                     ${tc.join(tc.getStreamOutputNames(stream), ", ", "", "_output")} = self._networks[${stream?index}](${tc.join(tc.getStreamInputNames(stream), ", ", "", "_data")})
 <#else>
-                    # TODO: Implement non network streams
+${tc.include(stream, "PYTHON_INLINE")}
+</#if>
+</#list>
+
+                    loss = \
+<#list tc.architecture.streams as stream>
+<#if stream.isNetwork()>
+<#list tc.getStreamOutputNames(stream) as output_name>
+                        loss_functions['${output_name}'](${output_name}_output, ${output_name}_label)<#sep> + \
+</#list><#sep> + \
 </#if>
 </#list>
 
-                    loss = <#list tc.architectureOutputs as output_name>loss_functions['${output_name}'](${output_name}_output, ${output_name}_label)<#sep> + </#list>
 
                 loss.backward()
 
@@ -134,19 +142,26 @@ class ${tc.fileNameWithoutEnding}:
                 </#list>
 
                 labels = [
-                    <#list tc.architectureOutputs as output_name>batch.label[${output_name?index}].as_in_context(mx_context)<#sep>, </#list>
+<#list tc.architectureOutputs as output_name>
+                    batch.label[${output_name?index}].as_in_context(mx_context)<#sep>,
+</#list>
+
                 ]
 
+                if True: # Fix indentation
 <#list tc.architecture.streams as stream>
 <#if stream.isNetwork()>
-                ${tc.join(tc.getStreamOutputNames(stream), ", ", "", "_output")} = self._networks[${stream?index}](${tc.join(tc.getStreamInputNames(stream), ", ", "", "_data")})
+                    ${tc.join(tc.getStreamOutputNames(stream), ", ", "", "_output")} = self._networks[${stream?index}](${tc.join(tc.getStreamInputNames(stream), ", ", "", "_data")})
 <#else>
-                # TODO: Implement non network streams
+${tc.include(stream, "PYTHON_INLINE")}
 </#if>
 </#list>
 
                 predictions = [
-                    <#list tc.architectureOutputs as output_name>mx.nd.argmax(${output_name}_output, axis=1)<#sep>, </#list>
+<#list tc.architectureOutputs as output_name>
+                    mx.nd.argmax(${output_name}_output, axis=1)<#sep>,
+</#list>
+
                 ]
 
                 metric.update(preds=predictions, labels=labels)
@@ -160,18 +175,26 @@ class ${tc.fileNameWithoutEnding}:
                 </#list>
 
                 labels = [
-                    <#list tc.architectureOutputs as output_name>batch.label[${output_name?index}].as_in_context(mx_context)<#sep>, </#list>
+<#list tc.architectureOutputs as output_name>
+                    batch.label[${output_name?index}].as_in_context(mx_context)<#sep>,
+</#list>
+
                 ]
 
+                if True: # Fix indentation
 <#list tc.architecture.streams as stream>
 <#if stream.isNetwork()>
-                ${tc.join(tc.getStreamOutputNames(stream), ", ", "", "_output")} = self._networks[${stream?index}](${tc.join(tc.getStreamInputNames(stream), ", ", "", "_data")})
+                    ${tc.join(tc.getStreamOutputNames(stream), ", ", "", "_output")} = self._networks[${stream?index}](${tc.join(tc.getStreamInputNames(stream), ", ", "", "_data")})
 <#else>
-                # TODO: Implement non network streams
+${tc.include(stream, "PYTHON_INLINE")}
 </#if>
 </#list>
+
                 predictions = [
-                    <#list tc.architectureOutputs as output_name>mx.nd.argmax(${output_name}_output, axis=1)<#sep>, </#list>
+<#list tc.architectureOutputs as output_name>
+                    mx.nd.argmax(${output_name}_output, axis=1)<#sep>,
+</#list>
+
                 ]
 
                 metric.update(preds=predictions, labels=labels)
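
The restructured template folds every per-output loss into one sum, joined with "+" by the FreeMarker separators. For a hypothetical two-output stream, this is roughly the shape of the Python the trainer template now emits (names illustrative; MXNet assumed available):

    import mxnet as mx
    from mxnet import gluon, nd

    loss_functions = {'pred0': gluon.loss.SoftmaxCrossEntropyLoss(),
                      'pred1': gluon.loss.SoftmaxCrossEntropyLoss()}

    pred0_output = nd.random.uniform(shape=(8, 4))
    pred1_output = nd.random.uniform(shape=(8, 4))
    pred0_label = nd.zeros((8,))
    pred1_label = nd.zeros((8,))

    # One loss term per output, summed into a single per-sample loss.
    loss = \
        loss_functions['pred0'](pred0_output, pred0_label) + \
        loss_functions['pred1'](pred1_output, pred1_label)
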
diff --git a/src/main/resources/templates/gluon/elements/Add.ftl b/src/main/resources/templates/gluon/elements/Add.ftl
index c2b1b443..abdcd90c 100644
--- a/src/main/resources/templates/gluon/elements/Add.ftl
+++ b/src/main/resources/templates/gluon/elements/Add.ftl
@@ -1,3 +1,10 @@
 <#if mode == "FORWARD_FUNCTION">
         ${element.name} = ${tc.join(element.inputs, " + ")}
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name} = ${tc.join(element.inputs, " + ")}
+<#elseif mode == "CPP_INLINE">
+    vector<float> ${element.name}(${element.inputs[0]}.size());
+    for (size_t i = 0; i != ${element.name}.size(); ++i) {
+        ${element.name}[i] = ${tc.join(element.inputs, " + ", "", "[i]")};
+    }
 </#if>
\ No newline at end of file
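
The new CPP_INLINE branch of Add emits an elementwise loop over vector<float> operands. The same semantics in a short Python sketch:

    def add_elementwise(*inputs):
        # Equivalent of the generated C++ loop:
        # out[i] = inputs[0][i] + inputs[1][i] + ...
        return [sum(vec[i] for vec in inputs) for i in range(len(inputs[0]))]

    assert add_elementwise([1.0, 2.0], [3.0, 4.0]) == [4.0, 6.0]
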
diff --git a/src/main/resources/templates/gluon/elements/BatchNorm.ftl b/src/main/resources/templates/gluon/elements/BatchNorm.ftl
index 890322b7..ecc1c611 100644
--- a/src/main/resources/templates/gluon/elements/BatchNorm.ftl
+++ b/src/main/resources/templates/gluon/elements/BatchNorm.ftl
@@ -3,7 +3,6 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.BatchNorm()
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Concatenate.ftl b/src/main/resources/templates/gluon/elements/Concatenate.ftl
index b477a1e9..79eda52e 100644
--- a/src/main/resources/templates/gluon/elements/Concatenate.ftl
+++ b/src/main/resources/templates/gluon/elements/Concatenate.ftl
@@ -1,7 +1,6 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = Concatenate(dim=1)
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${tc.join(element.inputs, ", ")})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Const.ftl b/src/main/resources/templates/gluon/elements/Const.ftl
new file mode 100644
index 00000000..f4c073c6
--- /dev/null
+++ b/src/main/resources/templates/gluon/elements/Const.ftl
@@ -0,0 +1,7 @@
+<#if mode == "FORWARD_FUNCTION">
+        ${element.name} = gluon.Const('${element.name}', ${element.constValue})
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name} = nd.array(${element.constValue})
+<#elseif mode == "CPP_INLINE">
+    vector<float> ${element.name}{${element.constValue}};
+</#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Convolution.ftl b/src/main/resources/templates/gluon/elements/Convolution.ftl
index 11701268..e0098b28 100644
--- a/src/main/resources/templates/gluon/elements/Convolution.ftl
+++ b/src/main/resources/templates/gluon/elements/Convolution.ftl
@@ -8,8 +8,7 @@
                 strides=(${tc.join(element.stride, ",")}),
                 use_bias=${element.noBias?string("False", "True")})
 <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
 <#if element.padding??>
         ${element.name}padding = self.${element.name}padding(${input})
 <#assign input = element.name + "padding">
diff --git a/src/main/resources/templates/gluon/elements/Dropout.ftl b/src/main/resources/templates/gluon/elements/Dropout.ftl
index 1b35794f..78ce2b50 100644
--- a/src/main/resources/templates/gluon/elements/Dropout.ftl
+++ b/src/main/resources/templates/gluon/elements/Dropout.ftl
@@ -2,7 +2,6 @@
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Dropout(rate=${rate})
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Flatten.ftl b/src/main/resources/templates/gluon/elements/Flatten.ftl
index bcafbec5..11860f97 100644
--- a/src/main/resources/templates/gluon/elements/Flatten.ftl
+++ b/src/main/resources/templates/gluon/elements/Flatten.ftl
@@ -2,7 +2,6 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Flatten()
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/FullyConnected.ftl b/src/main/resources/templates/gluon/elements/FullyConnected.ftl
index 79004595..a44ba855 100644
--- a/src/main/resources/templates/gluon/elements/FullyConnected.ftl
+++ b/src/main/resources/templates/gluon/elements/FullyConnected.ftl
@@ -8,8 +8,7 @@
 </#if>
             self.${element.name} = gluon.nn.Dense(units=${units}, use_bias=${use_bias})
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
 <#if flatten>
         ${element.name}flatten_ = self.${element.name}flatten(${input})
         <#assign input = element.name + "flatten_">
diff --git a/src/main/resources/templates/gluon/elements/Get.ftl b/src/main/resources/templates/gluon/elements/Get.ftl
index 36a2e648..0123a1d1 100644
--- a/src/main/resources/templates/gluon/elements/Get.ftl
+++ b/src/main/resources/templates/gluon/elements/Get.ftl
@@ -1,3 +1,7 @@
 <#if mode == "FORWARD_FUNCTION">
         ${element.name} = ${element.inputs[element.index]}
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name} = ${element.inputs[element.index]}
+<#elseif mode == "CPP_INLINE">
+    vector<float> ${element.name} = ${element.inputs[element.index]};
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/GlobalPooling.ftl b/src/main/resources/templates/gluon/elements/GlobalPooling.ftl
index 3004b10d..442ca3f1 100644
--- a/src/main/resources/templates/gluon/elements/GlobalPooling.ftl
+++ b/src/main/resources/templates/gluon/elements/GlobalPooling.ftl
@@ -8,7 +8,6 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Global${poolFunctionType}Pool2D()
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Input.ftl b/src/main/resources/templates/gluon/elements/Input.ftl
index 067390de..642a8251 100644
--- a/src/main/resources/templates/gluon/elements/Input.ftl
+++ b/src/main/resources/templates/gluon/elements/Input.ftl
@@ -6,7 +6,10 @@
             else:
                 self.input_normalization_${element.name} = NoNormalization()
 
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.input_normalization_${element.name}(${element.name})
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name} = ${element.name}_data
+<#elseif mode == "CPP_INLINE">
+    vector<float> ${element.name} = CNNTranslator::translate(${tc.ioNameToCpp(element.name)});
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/OneHot.ftl b/src/main/resources/templates/gluon/elements/OneHot.ftl
index 6e75def6..369bbeaa 100644
--- a/src/main/resources/templates/gluon/elements/OneHot.ftl
+++ b/src/main/resources/templates/gluon/elements/OneHot.ftl
@@ -3,7 +3,11 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
         self.${element.name} = OneHot(size=${size})
         <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
-</#if>
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name} = nd.one_hot(indices=${input}, depth=${size})
+<#elseif mode == "CPP_INLINE">
+    vector<float> ${element.name}(${size}, 0);
+    ${element.name}[${input}[0]] = 1;
+</#if>
\ No newline at end of file
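
OneHot is no longer intercepted as a pure output transformation (see the removed isOneHotOutput check above); it now renders in all three targets. The PYTHON_INLINE and CPP_INLINE variants compute the same vector, which the following sketch checks (MXNet assumed available):

    import mxnet.ndarray as nd

    size = 4
    index = nd.array([1])

    # PYTHON_INLINE variant: nd.one_hot over the index array.
    py_inline = nd.one_hot(indices=index, depth=size)

    # CPP_INLINE variant, transcribed to Python: zero vector with a single 1.
    cpp_inline = [0.0] * size
    cpp_inline[int(index[0].asscalar())] = 1.0

    assert py_inline[0].asnumpy().tolist() == cpp_inline  # [0.0, 1.0, 0.0, 0.0]
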
diff --git a/src/main/resources/templates/gluon/elements/Output.ftl b/src/main/resources/templates/gluon/elements/Output.ftl
index bf282c0c..3e6c3f0c 100644
--- a/src/main/resources/templates/gluon/elements/Output.ftl
+++ b/src/main/resources/templates/gluon/elements/Output.ftl
@@ -9,7 +9,10 @@
     <#elseif element.oneHotOutput>
         self.last_layers['${element.name}'] = 'softmax'
     </#if>
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         outputs.append(${input})
+<#elseif mode == "PYTHON_INLINE">
+                    ${element.name}_output = ${input}
+<#elseif mode == "CPP_INLINE">
+    CNN_${element.name} = ${input};
 </#if>
diff --git a/src/main/resources/templates/gluon/elements/Pooling.ftl b/src/main/resources/templates/gluon/elements/Pooling.ftl
index 1d1cc9f3..b59bf700 100644
--- a/src/main/resources/templates/gluon/elements/Pooling.ftl
+++ b/src/main/resources/templates/gluon/elements/Pooling.ftl
@@ -15,8 +15,7 @@
                 pool_size=${poolSize},
                 strides=${strides})
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
 <#if element.padding??>
         ${element.name}padding = self.${element.name}padding(${input})
         <#assign input = element.name + "padding">
diff --git a/src/main/resources/templates/gluon/elements/Relu.ftl b/src/main/resources/templates/gluon/elements/Relu.ftl
index e71f42dd..c85b9885 100644
--- a/src/main/resources/templates/gluon/elements/Relu.ftl
+++ b/src/main/resources/templates/gluon/elements/Relu.ftl
@@ -1,7 +1,6 @@
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Activation(activation='relu')
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Sigmoid.ftl b/src/main/resources/templates/gluon/elements/Sigmoid.ftl
index d8878609..e947d23f 100644
--- a/src/main/resources/templates/gluon/elements/Sigmoid.ftl
+++ b/src/main/resources/templates/gluon/elements/Sigmoid.ftl
@@ -1,7 +1,6 @@
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Activation(activation='sigmoid')
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Softmax.ftl b/src/main/resources/templates/gluon/elements/Softmax.ftl
index 4e5f7ed2..d1971610 100644
--- a/src/main/resources/templates/gluon/elements/Softmax.ftl
+++ b/src/main/resources/templates/gluon/elements/Softmax.ftl
@@ -2,7 +2,6 @@
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = Softmax()
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
diff --git a/src/main/resources/templates/gluon/elements/Split.ftl b/src/main/resources/templates/gluon/elements/Split.ftl
index 84ba4d16..2cb2c69b 100644
--- a/src/main/resources/templates/gluon/elements/Split.ftl
+++ b/src/main/resources/templates/gluon/elements/Split.ftl
@@ -3,7 +3,6 @@
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = Split(num_outputs=${num_outputs}, axis=1)
             <#include "OutputShape.ftl">
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/elements/Tanh.ftl b/src/main/resources/templates/gluon/elements/Tanh.ftl
index b6bd3b88..1cccb58f 100644
--- a/src/main/resources/templates/gluon/elements/Tanh.ftl
+++ b/src/main/resources/templates/gluon/elements/Tanh.ftl
@@ -1,7 +1,6 @@
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
             self.${element.name} = gluon.nn.Activation(activation='tanh')
-</#if>
-<#if mode == "FORWARD_FUNCTION">
+<#elseif mode == "FORWARD_FUNCTION">
         ${element.name} = self.${element.name}(${input})
 </#if>
\ No newline at end of file
diff --git a/src/main/resources/templates/gluon/execute.ftl b/src/main/resources/templates/gluon/execute.ftl
index fd8bd696..dfde17a8 100644
--- a/src/main/resources/templates/gluon/execute.ftl
+++ b/src/main/resources/templates/gluon/execute.ftl
@@ -7,6 +7,8 @@
     _predictor_${stream?index}_.predict(<#list stream.getFirstAtomicElements() as input>CNNTranslator::translate(${input.name}<#if input.arrayAccess.isPresent()>[${input.arrayAccess.get().intValue.get()?c}]</#if>),
                 </#list><#list stream.getLastAtomicElements() as output>CNN_${tc.getName(output)}<#sep>,
                 </#list>);
+<#else>
+${tc.include(stream, "CPP_INLINE")}
 </#if>
 </#list>
 
diff --git a/src/test/java/de/monticore/lang/monticar/cnnarch/gluongenerator/GenerationTest.java b/src/test/java/de/monticore/lang/monticar/cnnarch/gluongenerator/GenerationTest.java
index 1263005a..309191eb 100644
--- a/src/test/java/de/monticore/lang/monticar/cnnarch/gluongenerator/GenerationTest.java
+++ b/src/test/java/de/monticore/lang/monticar/cnnarch/gluongenerator/GenerationTest.java
@@ -127,6 +127,14 @@ public class GenerationTest extends AbstractSymtabTest {
         assertTrue(Log.getFindings().isEmpty());
     }
 
+    @Test
+    public void testInvariant() throws IOException, TemplateException {
+        Log.getFindings().clear();
+        String[] args = {"-m", "src/test/resources/valid_tests", "-r", "Invariant"};
+        CNNArch2GluonCli.main(args);
+        assertTrue(Log.getFindings().isEmpty());
+    }
+
     @Test
     public void testResNeXtGeneration() throws IOException, TemplateException {
         Log.getFindings().clear();
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py b/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
index 3a121968..ad7d801f 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_Alexnet.py
@@ -94,7 +94,8 @@ class CNNSupervisedTrainer_Alexnet:
                 with autograd.record():
                     predictions_output = self._networks[0](data_data)
 
-                    loss = loss_functions['predictions'](predictions_output, predictions_label)
+                    loss = \
+                        loss_functions['predictions'](predictions_output, predictions_label)
 
                 loss.backward()
 
@@ -125,7 +126,8 @@ class CNNSupervisedTrainer_Alexnet:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                predictions_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    predictions_output = self._networks[0](data_data)
 
                 predictions = [
                     mx.nd.argmax(predictions_output, axis=1)
@@ -143,7 +145,9 @@ class CNNSupervisedTrainer_Alexnet:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                predictions_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    predictions_output = self._networks[0](data_data)
+
                 predictions = [
                     mx.nd.argmax(predictions_output, axis=1)
                 ]
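
The expected trainer files document the new "if True: # Fix indentation" guard: the PYTHON_INLINE templates emit their lines at a fixed depth, so the network call is wrapped in a constant-true block to land on the same level. A self-contained illustration of the resulting shape (names hypothetical):

    def evaluate(networks, data_data):
        # The guard adds one indentation level so that network streams line
        # up with template-inlined streams, which are emitted at fixed depth.
        if True:  # Fix indentation
            predictions_output = networks[0](data_data)
        return predictions_output

    print(evaluate([lambda x: 2 * x], 21))  # prints 42
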
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py b/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
index bde71a26..9deaee25 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_CifarClassifierNetwork.py
@@ -94,7 +94,8 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
                 with autograd.record():
                     softmax_output = self._networks[0](data_data)
 
-                    loss = loss_functions['softmax'](softmax_output, softmax_label)
+                    loss = \
+                        loss_functions['softmax'](softmax_output, softmax_label)
 
                 loss.backward()
 
@@ -125,7 +126,8 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                softmax_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    softmax_output = self._networks[0](data_data)
 
                 predictions = [
                     mx.nd.argmax(softmax_output, axis=1)
@@ -143,7 +145,9 @@ class CNNSupervisedTrainer_CifarClassifierNetwork:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                softmax_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    softmax_output = self._networks[0](data_data)
+
                 predictions = [
                     mx.nd.argmax(softmax_output, axis=1)
                 ]
diff --git a/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py b/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
index 6c27eeb2..0a79fd9f 100644
--- a/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
+++ b/src/test/resources/target_code/CNNSupervisedTrainer_VGG16.py
@@ -94,7 +94,8 @@ class CNNSupervisedTrainer_VGG16:
                 with autograd.record():
                     predictions_output = self._networks[0](data_data)
 
-                    loss = loss_functions['predictions'](predictions_output, predictions_label)
+                    loss = \
+                        loss_functions['predictions'](predictions_output, predictions_label)
 
                 loss.backward()
 
@@ -125,7 +126,8 @@ class CNNSupervisedTrainer_VGG16:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                predictions_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    predictions_output = self._networks[0](data_data)
 
                 predictions = [
                     mx.nd.argmax(predictions_output, axis=1)
@@ -143,7 +145,9 @@ class CNNSupervisedTrainer_VGG16:
                     batch.label[0].as_in_context(mx_context)
                 ]
 
-                predictions_output = self._networks[0](data_data)
+                if True: # Fix indentation
+                    predictions_output = self._networks[0](data_data)
+
                 predictions = [
                     mx.nd.argmax(predictions_output, axis=1)
                 ]
diff --git a/src/test/resources/valid_tests/Invariant.cnna b/src/test/resources/valid_tests/Invariant.cnna
new file mode 100644
index 00000000..1bc65442
--- /dev/null
+++ b/src/test/resources/valid_tests/Invariant.cnna
@@ -0,0 +1,17 @@
+architecture Invariant{
+    def input Z(0:3)^{1} data[2]
+    def output Q(0:1)^{4} pred[3]
+
+    data[0] ->
+    FullyConnected(units=4) ->
+    Softmax() ->
+    pred[0];
+
+    data[1] ->
+    OneHot(size=4) ->
+    pred[1];
+
+    1 ->
+    OneHot(size=4) ->
+    pred[2];
+}
\ No newline at end of file
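
Streams 2 and 3 of this test architecture contain no trainable layers, so the generator inlines them instead of building networks. A hypothetical Python rendering of stream 2, data[1] -> OneHot(size=4) -> pred[1], composed from the Input, OneHot, and Output PYTHON_INLINE templates above (element names are made up; MXNet assumed available):

    import mxnet.ndarray as nd

    data_1_data = nd.array([2])                      # batch value for data[1]

    data_1_ = data_1_data                            # Input.ftl
    onehot2_ = nd.one_hot(indices=data_1_, depth=4)  # OneHot.ftl
    pred_1_output = onehot2_                         # Output.ftl

    # Stream 3 starts from the constant 1 (Const.ftl emits nd.array(<value>);
    # the value is wrapped in a list here so nd.array accepts it).
    const3_ = nd.array([1])
    pred_2_output = nd.one_hot(indices=const3_, depth=4)

    print(pred_1_output)  # one-hot of index 2: [[0. 0. 1. 0.]]
    print(pred_2_output)  # one-hot of index 1: [[0. 1. 0. 0.]]
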
diff --git a/src/test/resources/valid_tests/MultipleStreams.cnna b/src/test/resources/valid_tests/MultipleStreams.cnna
index e0c5010a..209b9fbd 100644
--- a/src/test/resources/valid_tests/MultipleStreams.cnna
+++ b/src/test/resources/valid_tests/MultipleStreams.cnna
@@ -11,4 +11,4 @@ architecture MultipleStreams{
     FullyConnected(units=4, no_bias=true) ->
     Softmax() ->
     pred[1];
-}
\ No newline at end of file
+}
diff --git a/src/test/resources/valid_tests/data_paths.txt b/src/test/resources/valid_tests/data_paths.txt
index a4c42bba..e4f16cd2 100644
--- a/src/test/resources/valid_tests/data_paths.txt
+++ b/src/test/resources/valid_tests/data_paths.txt
@@ -3,4 +3,5 @@ CifarClassifierNetwork data/CifarClassifierNetwork
 ThreeInputCNN_M14 data/ThreeInputCNN_M14
 Alexnet data/Alexnet
 ResNeXt50 data/ResNeXt50
-MultipleStreams data/MultipleStreams
\ No newline at end of file
+MultipleStreams data/MultipleStreams
+Invariant data/Invariant
\ No newline at end of file
-- 
GitLab