Commit d6913a43 authored by Sebastian N.'s avatar Sebastian N.
Browse files

Fixed a bug which occurred when an empty network (a network which contains...

Fixed a bug that occurred when an empty network (a network containing only an assignment) was used
parent 77943e14
Pipeline #204978 failed with stages
in 35 seconds
...@@ -2,6 +2,6 @@ ...@@ -2,6 +2,6 @@
<#if element.inputs?size gte 1> <#if element.inputs?size gte 1>
<#assign input = element.inputs[0]> <#assign input = element.inputs[0]>
<#if mode == "FORWARD_FUNCTION"> <#if mode == "FORWARD_FUNCTION">
${element.name} = ${input} ${element.name} = F.identity(${input})
</#if> </#if>
</#if> </#if>
...@@ -268,7 +268,7 @@ class Net_0(gluon.HybridBlock): ...@@ -268,7 +268,7 @@ class Net_0(gluon.HybridBlock):
dropout7_ = self.dropout7_(relu7_) dropout7_ = self.dropout7_(relu7_)
fc8_ = self.fc8_(dropout7_) fc8_ = self.fc8_(dropout7_)
softmax8_ = F.softmax(fc8_, axis=-1) softmax8_ = F.softmax(fc8_, axis=-1)
predictions_ = softmax8_ predictions_ = F.identity(softmax8_)
return predictions_ return predictions_
...@@ -471,7 +471,7 @@ class Net_0(gluon.HybridBlock): ...@@ -471,7 +471,7 @@ class Net_0(gluon.HybridBlock):
dropout31_ = self.dropout31_(fc31_) dropout31_ = self.dropout31_(fc31_)
fc32_ = self.fc32_(dropout31_) fc32_ = self.fc32_(dropout31_)
softmax32_ = F.softmax(fc32_, axis=-1) softmax32_ = F.softmax(fc32_, axis=-1)
softmax_ = softmax32_ softmax_ = F.identity(softmax32_)
return softmax_ return softmax_
...@@ -296,7 +296,7 @@ class Net_0(gluon.HybridBlock): ...@@ -296,7 +296,7 @@ class Net_0(gluon.HybridBlock):
dropout15_ = self.dropout15_(relu15_) dropout15_ = self.dropout15_(relu15_)
fc15_ = self.fc15_(dropout15_) fc15_ = self.fc15_(dropout15_)
softmax15_ = F.softmax(fc15_, axis=-1) softmax15_ = F.softmax(fc15_, axis=-1)
predictions_ = softmax15_ predictions_ = F.identity(softmax15_)
return predictions_ return predictions_
...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock): ...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock):
fc4_ = self.fc4_(add4_) fc4_ = self.fc4_(add4_)
relu4_ = self.relu4_(fc4_) relu4_ = self.relu4_(fc4_)
fc5_ = self.fc5_(relu4_) fc5_ = self.fc5_(relu4_)
qvalues_ = fc5_ qvalues_ = F.identity(fc5_)
return qvalues_ return qvalues_
...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock): ...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock):
fc4_ = self.fc4_(add4_) fc4_ = self.fc4_(add4_)
relu4_ = self.relu4_(fc4_) relu4_ = self.relu4_(fc4_)
fc5_ = self.fc5_(relu4_) fc5_ = self.fc5_(relu4_)
qvalues_ = fc5_ qvalues_ = F.identity(fc5_)
return qvalues_ return qvalues_
...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock): ...@@ -135,7 +135,7 @@ class Net_0(gluon.HybridBlock):
fc4_ = self.fc4_(add4_) fc4_ = self.fc4_(add4_)
relu4_ = self.relu4_(fc4_) relu4_ = self.relu4_(fc4_)
fc5_ = self.fc5_(relu4_) fc5_ = self.fc5_(relu4_)
qvalues_ = fc5_ qvalues_ = F.identity(fc5_)
return qvalues_ return qvalues_
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment