monticore / EmbeddedMontiArc / generators / CNNArch2Gluon · Commits

Commit 039f6e64 · authored Oct 30, 2019 by Sebastian Nickels

    Cleaned up layers, simplified dimensions

parent 95a67ac4 · 43 changed files
src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonLayerSupportChecker.java

@@ -28,6 +28,11 @@ public class CNNArch2GluonLayerSupportChecker extends LayerSupportChecker {
         supportedLayerList.add(AllPredefinedLayers.GRU_NAME);
         supportedLayerList.add(AllPredefinedLayers.EMBEDDING_NAME);
         supportedLayerList.add(AllPredefinedLayers.ARG_MAX_NAME);
+        supportedLayerList.add(AllPredefinedLayers.REPEAT_NAME);
+        supportedLayerList.add(AllPredefinedLayers.DOT_NAME);
+        supportedLayerList.add(AllPredefinedLayers.EXPAND_DIMS_NAME);
+        supportedLayerList.add(AllPredefinedLayers.SQUEEZE_NAME);
+        supportedLayerList.add(AllPredefinedLayers.SWAPAXES_NAME);
     }
 }
src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java

@@ -26,6 +26,7 @@ import de.monticore.lang.monticar.cnnarch.generator.CNNArchTemplateController;
 import de.monticore.lang.monticar.cnnarch._symboltable.*;
 import de.monticore.lang.monticar.cnnarch.generator.TemplateConfiguration;
 import de.monticore.lang.monticar.cnnarch.predefined.AllPredefinedLayers;
 import de.se_rwth.commons.logging.Log;

 import java.io.Writer;
 import java.util.*;

@@ -61,7 +62,9 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
                 }
             }
             else if (element.getType() == VariableSymbol.Type.LAYER) {
-                include(TEMPLATE_ELEMENTS_DIR_PATH, element.getLayerVariableDeclaration().getLayer().getName(), writer, netDefinitionMode);
+                if (element.getMember() != VariableSymbol.Member.OUTPUT) {
+                    include(TEMPLATE_ELEMENTS_DIR_PATH, element.getLayerVariableDeclaration().getLayer().getName(), writer, netDefinitionMode);
+                }
             }
         }
         else {

@@ -137,25 +140,13 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         include(architectureElement, getWriter(), netDefinitionMode);
     }

-    public Set<String> getStreamInputNames(SerialCompositeElementSymbol stream, boolean addStateIndex) {
-        if (addStateIndex) {
-            Set<String> names = getStreamInputs(stream, addStateIndex).keySet();
-            Set<String> newNames = new LinkedHashSet<>();
-
-            for (String name : names) {
-                // if LSTM state, transform name into list of hidden state and cell state
-                if (name.endsWith("_state_")) {
-                    name = "[" + name + "[0], " + name + "[1]]";
-                }
-                newNames.add(name);
-            }
-
-            return newNames;
-        }
-
-        return getStreamInputs(stream, addStateIndex).keySet();
+    public Set<String> getStreamInputNames(SerialCompositeElementSymbol stream) {
+        return getStreamInputs(stream).keySet();
     }

     // used for unroll
-    public List<String> getStreamInputNames(SerialCompositeElementSymbol stream, SerialCompositeElementSymbol currentStream, boolean addStateIndex) {
-        List<String> inputNames = new LinkedList<>(getStreamInputNames(stream, addStateIndex));
+    public List<String> getStreamInputNames(SerialCompositeElementSymbol stream, SerialCompositeElementSymbol currentStream) {
+        List<String> inputNames = new LinkedList<>(getStreamInputNames(stream));

         Map<String, String> pairs = getUnrollPairs(stream, currentStream);

         for (int i = 0; i != inputNames.size(); ++i) {

@@ -167,28 +158,8 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         return inputNames;
     }

-    public Collection<List<String>> getStreamInputDimensions(SerialCompositeElementSymbol stream, boolean useStateDim) {
-        if (useStateDim) {
-            return getStreamInputs(stream, false).values();
-        }
-        else {
-            Set<String> names = getStreamInputs(stream, true).keySet();
-            List<List<String>> dims = new ArrayList<List<String>>(getStreamInputs(stream, false).values());
-            List<List<String>> result = new ArrayList<List<String>>();
-
-            int index = 0;
-
-            for (String name : names) {
-                if (name.endsWith("_state_") || name.endsWith("_state_[0]")) {
-                    ArrayList dim = new ArrayList<String>();
-                    dim.add("-1");
-                    dim.add(name.replace("_state_", "_output_.begin_state(batch_size=1, ctx=context)"));
-                    result.add(dim);
-                }
-                else {
-                    result.add(dims.get(index));
-                }
-                index++;
-            }
-
-            return result;
-        }
+    public Collection<List<String>> getStreamInputDimensions(SerialCompositeElementSymbol stream) {
+        return getStreamInputs(stream).values();
     }

     public Set<String> getStreamOutputNames(SerialCompositeElementSymbol stream) {

@@ -200,7 +171,7 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
             }
         }

-        outputNames.addAll(getStreamLayerVariableMembers(stream, "1", true, false, false).keySet());
+        outputNames.addAll(getStreamLayerVariableMembers(stream, true).keySet());

         return outputNames;
     }

@@ -220,25 +191,11 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
     }

     // Used to initialize all layer variable members which are passed through the networks
-    public Map<String, List<List<String>>> getLayerVariableMembers(String batchSize, boolean includeStates) {
-        Map<String, List<List<String>>> members = new LinkedHashMap<>();
-
-        int index = 0;
+    public Map<String, List<String>> getLayerVariableMembers() {
+        Map<String, List<String>> members = new LinkedHashMap<>();

         for (SerialCompositeElementSymbol stream : getArchitecture().getStreams()) {
-            List<List<String>> value = new ArrayList<>();
-            Map<String, List<String>> member = getStreamLayerVariableMembers(stream, batchSize, true, includeStates, false);
-
-            for (List<String> entry : member.values()) {
-                value.add(entry);
-                ArrayList<String> streamIndex = new ArrayList<String>();
-                streamIndex.add(Integer.toString(index));
-                value.add(streamIndex);
-            }
-
-            for (String name : member.keySet()) {
-                if (!members.containsKey(name)) {
-                    members.put(name, value);
-                }
-            }
-
-            index++;
+            members.putAll(getStreamLayerVariableMembers(stream, true));
         }

         return members;

@@ -272,7 +229,7 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         return pairs;
     }

-    private Map<String, List<String>> getStreamInputs(SerialCompositeElementSymbol stream, boolean addStateIndex) {
+    private Map<String, List<String>> getStreamInputs(SerialCompositeElementSymbol stream) {
         Map<String, List<String>> inputs = new LinkedHashMap<>();

         for (ArchitectureElementSymbol element : stream.getFirstAtomicElements()) {

@@ -284,19 +241,16 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
                     dimensions.add(intDimension.toString());
                 }

-                // Add batch size dimension
-                dimensions.add(0, "1");
-
                 inputs.put(getName(element), dimensions);
             }
         }

-        inputs.putAll(getStreamLayerVariableMembers(stream, "1", false, false, addStateIndex));
+        inputs.putAll(getStreamLayerVariableMembers(stream, false));

         return inputs;
     }

-    private Map<String, List<String>> getStreamLayerVariableMembers(SerialCompositeElementSymbol stream, String batchSize, boolean includeOutput, boolean includeStates, boolean addStateIndex) {
+    private Map<String, List<String>> getStreamLayerVariableMembers(SerialCompositeElementSymbol stream, boolean includeOutput) {
         Map<String, List<String>> members = new LinkedHashMap<>();

         List<ArchitectureElementSymbol> elements = stream.getSpannedScope().resolveLocally(ArchitectureElementSymbol.KIND);

@@ -304,19 +258,20 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
             if (element instanceof VariableSymbol) {
                 VariableSymbol variable = (VariableSymbol) element;

-                if (variable.getType() == VariableSymbol.Type.LAYER && (variable.getMember() == VariableSymbol.Member.NONE || includeStates)) {
+                if (variable.getType() == VariableSymbol.Type.LAYER && variable.getMember() == VariableSymbol.Member.NONE) {
                     LayerVariableDeclarationSymbol layerVariableDeclaration = variable.getLayerVariableDeclaration();

                     if (layerVariableDeclaration.getLayer().getDeclaration().isPredefined()) {
                         PredefinedLayerDeclaration predefinedLayerDeclaration =
                                 (PredefinedLayerDeclaration) layerVariableDeclaration.getLayer().getDeclaration();

                         if (predefinedLayerDeclaration.isValidMember(VariableSymbol.Member.STATE)) {
-                            String name;
-
-                            if (addStateIndex && predefinedLayerDeclaration.getName().equals(AllPredefinedLayers.GRU_NAME)) {
-                                name = variable.getName() + "_state_[0]";
-                            }
-                            else {
-                                name = variable.getName() + "_state_";
-                            }
+                            int arrayLength = predefinedLayerDeclaration.getArrayLength(VariableSymbol.Member.STATE);
+
+                            for (int i = 0; i < arrayLength; ++i) {
+                                String name = variable.getName() + "_state_";
+
+                                if (arrayLength > 1) {
+                                    name += i + "_";
+                                }

                                 List<Integer> intDimensions = predefinedLayerDeclaration.computeOutputTypes(

@@ -331,17 +286,19 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
                                     dimensions.add(intDimension.toString());
                                 }

-                                // Add batch size dimension at index 1, since RNN states in Gluon
-                                // have the format (layers, batch_size, units)
-                                dimensions.add(1, batchSize);
-
                                 members.put(name, dimensions);
                             }

                             if (includeOutput) {
                                 if (predefinedLayerDeclaration.isValidMember(VariableSymbol.Member.OUTPUT)) {
                                     arrayLength = predefinedLayerDeclaration.getArrayLength(VariableSymbol.Member.OUTPUT);

                                     for (int i = 0; i < arrayLength; ++i) {
                                         String name = variable.getName() + "_output_";

                                         if (arrayLength > 1) {
                                             name += i + "_";
                                         }

                                         List<Integer> intDimensions = predefinedLayerDeclaration.computeOutputTypes(
                                                 layerVariableDeclaration.getLayer().getInputTypes(),
                                                 layerVariableDeclaration.getLayer(),

@@ -354,9 +311,6 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
                                             dimensions.add(intDimension.toString());
                                         }

-                                        // Add batch size dimension at index 0, since we use NTC format for RNN output in Gluon
-                                        dimensions.add(0, batchSize);
-
                                         members.put(name, dimensions);
                                     }
                                 }

@@ -367,6 +321,15 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
         return members;
     }

+    // cuts
+    public List<String> cutDimensions(List<String> dimensions) {
+        while (dimensions.size() > 1 && dimensions.get(dimensions.size() - 1).equals("1")) {
+            dimensions.remove(dimensions.size() - 1);
+        }
+
+        return dimensions;
+    }
+
     public int getBeamSearchWidth(UnrollInstructionSymbol unroll) {
         return unroll.getIntValue(AllPredefinedLayers.WIDTH_NAME).get();
     }
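The new cutDimensions helper trims trailing dimensions of size 1 (always keeping at least one entry) before the generated code builds its dummy warm-up tensors. A minimal Python sketch of the same trimming logic, for illustration only (the commit implements it in Java, above):

    def cut_dimensions(dimensions):
        # Drop trailing "1" entries, but never empty the list.
        while len(dimensions) > 1 and dimensions[-1] == "1":
            dimensions.pop()
        return dimensions

    print(cut_dimensions(["64", "1", "1"]))  # ['64']
    print(cut_dimensions(["1"]))             # ['1']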
src/main/resources/templates/gluon/CNNCreator.ftl

@@ -57,8 +57,7 @@ class ${tc.fileNameWithoutEnding}:
             self.networks[${networkInstruction?index}] = Net_${networkInstruction?index}(data_mean=data_mean, data_std=data_std)
             self.networks[${networkInstruction?index}].collect_params().initialize(self.weight_initializer, ctx=context)
             self.networks[${networkInstruction?index}].hybridize()
-            self.networks[${networkInstruction?index}](<#list tc.getStreamInputDimensions(networkInstruction.body, false) as dimensions><#if dimensions[0] == "-1">self.networks[${networkInstruction?index}].${dimensions[1]}<#else>mx.nd.zeros((${tc.join(dimensions, ",")},), ctx=context)</#if><#sep>, </#list>)
+            self.networks[${networkInstruction?index}](<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>mx.nd.zeros((1, ${tc.join(tc.cutDimensions(dimensions), ",")},), ctx=context)<#sep>, </#list>)
 </#if>
 </#list>
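With cutDimensions in place, the creator warms each network up with one zero batch per input instead of the old state-aware special cases. A sketch of what the rewritten line expands to for a hypothetical network with a single (3, 32, 32) image input (the Dense stand-in replaces the generated Net_0 class):

    import mxnet as mx
    from mxnet import gluon

    context = mx.cpu()

    # Stand-in for a generated Net_0; the real class is emitted by CNNNet.ftl.
    net = gluon.nn.HybridSequential()
    net.add(gluon.nn.Dense(10))
    net.collect_params().initialize(mx.init.Normal(), ctx=context)
    net.hybridize()

    # cutDimensions leaves (3, 32, 32) untouched; the template prepends the
    # batch dimension, so the warm-up call becomes:
    net(mx.nd.zeros((1, 3, 32, 32,), ctx=context))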
src/main/resources/templates/gluon/CNNNet.ftl

@@ -2,88 +2,6 @@ import mxnet as mx
 import numpy as np
 from mxnet import gluon

-class OneHot(gluon.HybridBlock):
-    def __init__(self, size, **kwargs):
-        super(OneHot, self).__init__(**kwargs)
-        with self.name_scope():
-            self.size = size
-
-    def hybrid_forward(self, F, x):
-        return F.one_hot(indices=F.argmax(data=x, axis=1), depth=self.size)
-
-class Softmax(gluon.HybridBlock):
-    def __init__(self, **kwargs):
-        super(Softmax, self).__init__(**kwargs)
-
-    def hybrid_forward(self, F, x):
-        return F.softmax(x)
-
-class Split(gluon.HybridBlock):
-    def __init__(self, num_outputs, axis=1, **kwargs):
-        super(Split, self).__init__(**kwargs)
-        with self.name_scope():
-            self.axis = axis
-            self.num_outputs = num_outputs
-
-    def hybrid_forward(self, F, x):
-        return F.split(data=x, axis=self.axis, num_outputs=self.num_outputs)
-
-class Concatenate(gluon.HybridBlock):
-    def __init__(self, dim=1, **kwargs):
-        super(Concatenate, self).__init__(**kwargs)
-        with self.name_scope():
-            self.dim = dim
-
-    def hybrid_forward(self, F, *x):
-        return F.concat(*x, dim=self.dim)
-
-class Repeat(gluon.HybridBlock):
-    def __init__(self, repeats, axis=1, **kwargs):
-        super(Repeat, self).__init__(**kwargs)
-        with self.name_scope():
-            self.axis = axis
-            self.repeats = repeats
-
-    def hybrid_forward(self, F, x):
-        return F.repeat(data=x, axis=self.axis, repeats=self.repeats)
-
-class Dot(gluon.HybridBlock):
-    def __init__(self, **kwargs):
-        super(Dot, self).__init__(**kwargs)
-
-    def hybrid_forward(self, F, *x):
-        return F.batch_dot(*x)
-
-class ExpandDims(gluon.HybridBlock):
-    def __init__(self, dim=1, **kwargs):
-        super(ExpandDims, self).__init__(**kwargs)
-        with self.name_scope():
-            self.dim = dim
-
-    def hybrid_forward(self, F, x):
-        return F.expand_dims(data=x, axis=self.dim)
-
-class SwapAxes(gluon.HybridBlock):
-    def __init__(self, dim1, dim2, **kwargs):
-        super(SwapAxes, self).__init__(**kwargs)
-        with self.name_scope():
-            self.dim1 = dim1
-            self.dim2 = dim2
-
-    def hybrid_forward(self, F, x):
-        return F.swapaxes(data=x, dim1=self.dim1, dim2=self.dim2)
-
-class ReduceSum(gluon.HybridBlock):
-    def __init__(self, axis=1, **kwargs):
-        super(ReduceSum, self).__init__(**kwargs)
-        with self.name_scope():
-            self.axis = axis
-
-    def hybrid_forward(self, F, x):
-        return F.sum(data=x, axis=self.axis)
-
 class ZScoreNormalization(gluon.HybridBlock):
     def __init__(self, data_mean, data_std, **kwargs):
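Most of the removed wrappers were one-line shims around mxnet operators, so the element templates further down (Concatenate.ftl, Dot.ftl, ExpandDims.ftl, OneHot.ftl) now emit the F.* call directly in the forward function instead of instantiating a block in the architecture definition. A minimal sketch of the new pattern, with hypothetical names and shapes:

    import mxnet as mx
    from mxnet import gluon

    class Sketch(gluon.HybridBlock):
        # No self.concat_ = Concatenate(dim=1) etc. in __init__ anymore;
        # the generated forward function calls the operators directly.
        def hybrid_forward(self, F, a, b):
            concat_ = F.concat(a, b, dim=1)             # was the Concatenate block
            expanded_ = F.expand_dims(concat_, axis=1)  # was the ExpandDims block
            return expanded_

    out = Sketch()(mx.nd.zeros((1, 2)), mx.nd.zeros((1, 2)))
    print(out.shape)  # (1, 1, 4)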
@@ -122,6 +40,42 @@ class NoNormalization(gluon.HybridBlock):
         return x

+class CustomRNN(gluon.HybridBlock):
+    def __init__(self, hidden_size, num_layers, bidirectional, **kwargs):
+        super(CustomRNN, self).__init__(**kwargs)
+        with self.name_scope():
+            self.rnn = gluon.rnn.RNN(hidden_size=hidden_size, num_layers=num_layers,
+                                     bidirectional=bidirectional, activation='tanh', layout='NTC')
+
+    def hybrid_forward(self, F, data, state0):
+        output, [state0] = self.rnn(data, [F.swapaxes(state0, 0, 1)])
+        return output, F.swapaxes(state0, 0, 1)
+
+class CustomLSTM(gluon.HybridBlock):
+    def __init__(self, hidden_size, num_layers, bidirectional, **kwargs):
+        super(CustomLSTM, self).__init__(**kwargs)
+        with self.name_scope():
+            self.lstm = gluon.rnn.LSTM(hidden_size=hidden_size, num_layers=num_layers,
+                                       bidirectional=bidirectional, layout='NTC')
+
+    def hybrid_forward(self, F, data, state0, state1):
+        output, [state0, state1] = self.lstm(data, [F.swapaxes(state0, 0, 1), F.swapaxes(state1, 0, 1)])
+        return output, F.swapaxes(state0, 0, 1), F.swapaxes(state1, 0, 1)
+
+class CustomGRU(gluon.HybridBlock):
+    def __init__(self, hidden_size, num_layers, bidirectional, **kwargs):
+        super(CustomGRU, self).__init__(**kwargs)
+        with self.name_scope():
+            self.gru = gluon.rnn.GRU(hidden_size=hidden_size, num_layers=num_layers,
+                                     bidirectional=bidirectional, layout='NTC')
+
+    def hybrid_forward(self, F, data, state0):
+        output, [state0] = self.gru(data, [F.swapaxes(state0, 0, 1)])
+        return output, F.swapaxes(state0, 0, 1)
+
 <#list tc.architecture.networkInstructions as networkInstruction>
 <#if networkInstruction.body.isTrainable()>
 class Net_${networkInstruction?index}(gluon.HybridBlock):
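The Custom* wrappers exist because the gluon.rnn layers take and return recurrent state in (num_layers, batch_size, hidden) layout even when the data layout is 'NTC', while the generated networks pass states around batch-first; the wrappers swap axes 0 and 1 on the way in and out. A quick sketch, assuming the CustomRNN class above is in scope:

    import mxnet as mx

    rnn = CustomRNN(hidden_size=8, num_layers=1, bidirectional=False)
    rnn.collect_params().initialize()

    data = mx.nd.zeros((4, 5, 3))   # (batch, time, channels), i.e. 'NTC'
    state = mx.nd.zeros((4, 1, 8))  # batch-first; swapped to (1, 4, 8) internally

    output, new_state = rnn(data, state)
    print(output.shape)     # (4, 5, 8)
    print(new_state.shape)  # (4, 1, 8), batch-first again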
@@ -131,7 +85,7 @@ class Net_${networkInstruction?index}(gluon.HybridBlock):
         with self.name_scope():
${tc.include(networkInstruction.body, "ARCHITECTURE_DEFINITION")}

-    def hybrid_forward(self, F, ${tc.join(tc.getStreamInputNames(networkInstruction.body, false), ", ")}):
+    def hybrid_forward(self, F, ${tc.join(tc.getStreamInputNames(networkInstruction.body), ", ")}):
${tc.include(networkInstruction.body, "FORWARD_FUNCTION")}
         return ${tc.join(tc.getStreamOutputNames(networkInstruction.body), ", ")}
src/main/resources/templates/gluon/CNNPredictor.ftl

@@ -16,13 +16,13 @@ public:
     const std::string json_file = "model/${tc.componentName}/model_${networkInstruction?index}_newest-symbol.json";
     const std::string param_file = "model/${tc.componentName}/model_${networkInstruction?index}_newest-0000.params";
-    const std::vector<std::string> input_keys = {<#if tc.getStreamInputNames(networkInstruction.body, false)?size == 1>"data"<#else><#list tc.getStreamInputNames(networkInstruction.body, false) as variable>"data${variable?index}"<#sep>,</#list></#if>};
+    const std::vector<std::string> input_keys = {<#if tc.getStreamInputNames(networkInstruction.body)?size == 1>"data"<#else><#list tc.getStreamInputNames(networkInstruction.body) as variable>"data${variable?index}"<#sep>,</#list></#if>};
-    const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.getStreamInputDimensions(networkInstruction.body, true) as dimensions>{${tc.join(dimensions, ", ")}}<#sep>,</#list>};
+    const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>{${tc.join(dimensions, ", ")}}<#sep>,</#list>};
     const bool use_gpu = false;

     PredictorHandle handle;

@@ -35,9 +35,9 @@ public:
         if (handle) MXPredFree(handle);
     }

-    void predict(${tc.join(tc.getStreamInputNames(networkInstruction.body, false), ", ", "const std::vector<float> &in_", "")},
+    void predict(${tc.join(tc.getStreamInputNames(networkInstruction.body), ", ", "const std::vector<float> &in_", "")},
                  ${tc.join(tc.getStreamOutputNames(networkInstruction.body), ", ", "std::vector<float> &out_", "")}){
-        <#list tc.getStreamInputNames(networkInstruction.body, false) as variable>
+        <#list tc.getStreamInputNames(networkInstruction.body) as variable>
         MXPredSetInput(handle, input_keys[${variable?index}].c_str(), in_${variable}.data(), static_cast<mx_uint>(in_${variable}.size()));
         </#list>
src/main/resources/templates/gluon/CNNTrainer.ftl

@@ -37,7 +37,7 @@ if __name__ == "__main__":
         normalize=${config.normalize?string("True", "False")},
 </#if>
 <#if (config.evalMetric)??>
-        eval_metric='${config.evalMetric.metric}',
+        eval_metric='${config.evalMetric.name}',
         eval_metric_params={
 <#if (config.evalMetric.exclude)??>
             'exclude': [<#list config.evalMetric.exclude as value>${value}<#sep>, </#list>],
src/main/resources/templates/gluon/elements/Concatenate.ftl

-<#assign dim = element.dim?c>
-<#if mode == "ARCHITECTURE_DEFINITION">
-self.${element.name} = Concatenate(dim=${dim})
-<#include "OutputShape.ftl">
-<#elseif mode == "FORWARD_FUNCTION">
-${element.name} = self.${element.name}(${tc.join(element.inputs, ", ")})
+<#assign axis = (element.axis + 1)?c>
+<#if mode == "FORWARD_FUNCTION">
+${element.name} = F.concat(${tc.join(element.inputs, ", ")}, dim=${axis})
 </#if>
\ No newline at end of file
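Note the (element.axis + 1)?c: axes in the architecture language are counted without the batch dimension, and the generated code now operates on batched tensors throughout, so every user-visible axis presumably shifts by one. A small illustration with mxnet directly:

    import mxnet as mx

    a = mx.nd.zeros((1, 2))
    b = mx.nd.zeros((1, 2))
    # architecture-level axis 0 becomes dim=1 here, since dim 0 is the batch
    print(mx.nd.concat(a, b, dim=1).shape)  # (1, 4)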
src/main/resources/templates/gluon/elements/Dot.ftl

-<#if mode == "ARCHITECTURE_DEFINITION">
-self.${element.name} = Dot()
-<#include "OutputShape.ftl">
-<#elseif mode == "FORWARD_FUNCTION">
-${element.name} = self.${element.name}(${tc.join(element.inputs, ", ")})
+<#if mode == "FORWARD_FUNCTION">
+${element.name} = F.batch_dot(${tc.join(element.inputs, ", ")})
 </#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/ExpandDims.ftl

-<#assign dim = element.dim?c>
-<#if mode == "ARCHITECTURE_DEFINITION">
-self.${element.name} = ExpandDims(dim=${dim})
-<#include "OutputShape.ftl">
-<#elseif mode == "FORWARD_FUNCTION">
-${element.name} = self.${element.name}(${element.inputs[0]})
+<#assign axis = (element.axis + 1)?c>
+<#if mode == "FORWARD_FUNCTION">
+${element.name} = F.expand_dims(${element.inputs[0]}, axis=${axis})
 </#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/FullyConnected.ftl

-<#if element.member == "NONE">
 <#assign input = element.inputs[0]>
 <#assign units = element.units?c>
 <#assign use_bias = element.noBias?string("False","True")>

@@ -8,16 +7,4 @@
 <#include "OutputShape.ftl">
 <#elseif mode == "FORWARD_FUNCTION">
 ${element.name} = self.${element.name}(${input})
 </#if>
-<#elseif element.member == "STATE">
-<#if element.inputs?size gte 1>
-<#assign input = element.inputs[0]>
-<#if mode == "FORWARD_FUNCTION">
-${element.name} = ${input}
-<#elseif mode == "PYTHON_INLINE">
-${element.name} = ${input}
-<#elseif mode == "CPP_INLINE">
-${element.name} = ${input}
-</#if>
-</#if>
-</#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/GRU.ftl

 <#if element.member == "NONE">
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
-            self.${element.name} = gluon.rnn.GRU(hidden_size=${element.units?c},
+            self.${element.name} = CustomGRU(hidden_size=${element.units?c},
                 num_layers=${element.layers?c},
-                bidirectional=${element.bidirectional?string("True", "False")}, layout='NTC')
+                bidirectional=${element.bidirectional?string("True", "False")})
 <#include "OutputShape.ftl">
 <#elseif mode == "FORWARD_FUNCTION">
 <#if element.isVariable()>
src/main/resources/templates/gluon/elements/Get.ftl

 <#if mode == "FORWARD_FUNCTION">
-${element.name} = [${element.inputs[element.index]}]
+${element.name} = ${element.inputs[element.index]}
 <#elseif mode == "PYTHON_INLINE">
-${element.name} = [${element.inputs[element.index]}]
+${element.name} = ${element.inputs[element.index]}
 <#elseif mode == "CPP_INLINE">
 vector<float> ${element.name} = ${element.inputs[element.index]};
 </#if>
\ No newline at end of file
src/main/resources/templates/gluon/elements/LSTM.ftl

 <#if element.member == "NONE">
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
-            self.${element.name} = gluon.rnn.LSTM(hidden_size=${element.units?c},
+            self.${element.name} = CustomLSTM(hidden_size=${element.units?c},
                 num_layers=${element.layers?c},
-                bidirectional=${element.bidirectional?string("True", "False")}, layout='NTC')
+                bidirectional=${element.bidirectional?string("True", "False")})
 <#include "OutputShape.ftl">
 <#elseif mode == "FORWARD_FUNCTION">
 <#if element.isVariable()>
-${element.name}, ${element.element.name}_state_ = self.${element.name}(${input}, ${element.element.name}_state_)
+${element.name}, ${element.element.name}_state_0_, ${element.element.name}_state_1_ = self.${element.name}(${input}, ${element.element.name}_state_0_, ${element.element.name}_state_1_)
 <#else>
 ${element.name} = self.${element.name}(${input})
 </#if>
src/main/resources/templates/gluon/elements/OneHot.ftl

 <#assign input = element.inputs[0]>
 <#assign size = element.size?c>
-<#if mode == "ARCHITECTURE_DEFINITION">
-self.${element.name} = OneHot(size=${size})
-<#include "OutputShape.ftl">
-<#elseif mode == "FORWARD_FUNCTION">
-${element.name} = self.${element.name}(${input})
+<#if mode == "FORWARD_FUNCTION">
+${element.name} = F.one_hot(indices=${input}, depth=${size})
 <#elseif mode == "PYTHON_INLINE">
 ${element.name} = nd.one_hot(indices=${input}, depth=${size})
 <#elseif mode == "CPP_INLINE">
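one_hot expands integer class indices into one-hot vectors, which is all the removed OneHot block did (minus its built-in argmax). A quick check of the emitted call with small illustrative inputs:

    import mxnet as mx

    indices = mx.nd.array([1, 0, 2])
    print(mx.nd.one_hot(indices, depth=3))
    # [[0. 1. 0.]
    #  [1. 0. 0.]
    #  [0. 0. 1.]]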
src/main/resources/templates/gluon/elements/RNN.ftl

 <#if element.member == "NONE">
 <#assign input = element.inputs[0]>
 <#if mode == "ARCHITECTURE_DEFINITION">
-            self.${element.name} = gluon.rnn.RNN(hidden_size=${element.units?c},
+            self.${element.name} = CustomRNN(hidden_size=${element.units?c},
                 num_layers=${element.layers?c},
-                bidirectional=${element.bidirectional?string("True", "False")}, activation='tanh', layout='NTC')
+                bidirectional=${element.bidirectional?string("True", "False")})
 <#include "OutputShape.ftl">
 <#elseif mode == "FORWARD_FUNCTION">
 <#if element.isVariable()>
src/main/resources/templates/gluon/elements/ReduceSum.ftl

 <#assign axis = element.axis?c>
 <#if mode == "ARCHITECTURE_DEFINITION">
 self.${element.name} = ReduceSum(axis=${axis})