Commit b7f790a0 authored by Sebastian Nickels's avatar Sebastian Nickels
Browse files

Updated so that it works with the semicolon change to prepare for multiple streams

parent 736a2256
Pipeline #141233 failed with stages
in 6 minutes and 19 seconds
......@@ -72,7 +72,7 @@ class Net(gluon.HybridBlock):
def __init__(self, data_mean=None, data_std=None, **kwargs):
super(Net, self).__init__(**kwargs)
with self.name_scope():
${tc.include(tc.architecture.body, "ARCHITECTURE_DEFINITION")}
${tc.include(tc.architecture.streams[0], "ARCHITECTURE_DEFINITION")}
def hybrid_forward(self, F, x):
${tc.include(tc.architecture.body, "FORWARD_FUNCTION")}
\ No newline at end of file
${tc.include(tc.architecture.streams[0], "FORWARD_FUNCTION")}
\ No newline at end of file
......@@ -128,6 +128,16 @@ public class GenerationTest extends AbstractSymtabTest {
assertTrue(Log.getFindings().size() == 3);
}
/* TODO: Uncomment when multiple streams are implemented
@Test
public void testMultipleStreams() throws IOException, TemplateException {
Log.getFindings().clear();
String[] args = {"-m", "src/test/resources/valid_tests", "-r", "MultipleStreams"};
CNNArch2GluonCli.main(args);
assertTrue(Log.getFindings().size() == 3);
}
*/
@Test
public void testFullCfgGeneration() throws IOException, TemplateException {
Log.getFindings().clear();
......
......@@ -39,5 +39,5 @@ architecture Alexnet(img_height=224, img_width=224, img_channels=3, classes=10){
fc(->=2) ->
FullyConnected(units=10) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -40,5 +40,5 @@ architecture ResNeXt50(img_height=224, img_width=224, img_channels=3, classes=10
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -33,5 +33,5 @@ architecture ResNet152(img_height=224, img_width=224, img_channels=3, classes=10
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -31,5 +31,5 @@ architecture ResNet34(img_height=224, img_width=224, img_channels=3, classes=100
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
......@@ -25,5 +25,5 @@ architecture SequentialAlexnet(img_height=224, img_width=224, img_channels=3, cl
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
......@@ -28,5 +28,5 @@ architecture ThreeInputCNN_M14(img_height=200, img_width=300, img_channels=3, cl
Relu() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -27,5 +27,5 @@ architecture VGG16(img_height=224, img_width=224, img_channels=3, classes=1000){
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `skip` hard-codes channels=75 instead of its `channels` parameter,
// and `resLayer`/`FullyConnected` pass `->=` as an argument — presumably the
// intentional constraint violations; confirm the expected finding count in the test.
architecture ArgumentConstraintTest1(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=75, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes, ->=true) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `skip` uses stride=(stride,-stride) — a negative stride component is
// presumably the intentional constraint violation; confirm expected finding count.
architecture ArgumentConstraintTest2(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=96, stride=(stride,-stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `GlobalPooling(..., ?=1)` passes a non-boolean to the conditional
// argument — presumably the intentional violation; confirm expected finding count.
architecture ArgumentConstraintTest3(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=64, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="valid") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg", ?=1) ->
FullyConnected(units=classes) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `Pooling(..., padding=1)` passes an integer where a padding-mode
// string is expected — presumably the intentional violation; confirm finding count.
architecture ArgumentConstraintTest4(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=96, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding=1) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `skip` uses kernel=(1,-1) — a negative kernel dimension is
// presumably the intentional violation; confirm expected finding count.
architecture ArgumentConstraintTest5(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,-1), channels=96, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="same") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Test fixture for CNNArch argument-constraint checking.
// NOTE(review): `skip` passes channels=false — a boolean where an integer is
// expected is presumably the intentional violation; confirm expected finding count.
architecture ArgumentConstraintTest6(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=false, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="valid") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions; // terminator required since the stream-semicolon grammar change
}
// Invalid-test fixture: `FullyConnected(units=64, units=32)` repeats the `units`
// argument — the duplication is the deliberate defect this fixture exists to trigger.
architecture DuplicatedArgument(){
def input Q(-oo:+oo)^{10} in1
def output Q(0:1)^{2} out1
in1 ->
FullyConnected(units=64, units=32) ->
Tanh() ->
FullyConnected(units=2) ->
Softmax() ->
out1; // terminator required since the stream-semicolon grammar change
}
\ No newline at end of file
// Invalid-test fixture: input `in1` is declared twice — the duplicated I/O name
// is the deliberate defect this fixture exists to trigger.
architecture DuplicatedIONames(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def fc(){
FullyConnected(units=64) ->
Tanh()
}
in1 ->
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
out1; // terminator required since the stream-semicolon grammar change
}
\ No newline at end of file
// Invalid-test fixture: the parameter `inputs` appears twice in the signature and
// the layer `fc` is defined twice — both duplications are the deliberate defects
// this fixture exists to trigger.
architecture DuplicatedNames(inputs=10, inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def fc(){
FullyConnected(units=64) ->
Tanh()
}
def fc(){
FullyConnected(units=64) ->
Tanh()
}
in1 ->
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
out1; // terminator required since the stream-semicolon grammar change
}
\ No newline at end of file
// Invalid-test fixture: the I/O names `data_` and `predictions_` end in an
// underscore — presumably the illegal-name defect this fixture exists to trigger.
architecture IllegalIOName{
def input Q(-oo:+oo)^{10} data_
def output Q(0:1)^{2} predictions_
data_ ->
FullyConnected(units=64, no_bias=true) ->
Tanh() ->
FullyConnected(units=2, no_bias=true) ->
Softmax() ->
predictions_; // terminator required since the stream-semicolon grammar change
}
\ No newline at end of file
// Invalid-test fixture: the parameter `Tg` and the layer `Fc` start with an
// uppercase letter — presumably the illegal-name defects this fixture exists to
// trigger (note `Fc` is declared but never used in the stream).
architecture IllegalName(inputs=10, classes=2, Tg = 1){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def Fc(){
FullyConnected(units=10)
}
in1 ->
FullyConnected(units=64) ->
Tanh() ->
FullyConnected(units=classes) ->
Softmax() ->
out1; // terminator required since the stream-semicolon grammar change
}
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment