Commit cf4423f2 authored by Sebastian Nickels's avatar Sebastian Nickels

Updated to support the changes to CNNArchLang

parent 6bf25ee8
Pipeline #139781 failed with stages
in 22 seconds
......@@ -65,10 +65,20 @@ public class CNNArch2MxNet extends CNNArchGenerator {
}
private boolean supportCheck(ArchitectureSymbol architecture){
List<CompositeElementSymbol> streams = architecture.getStreams();
// This generator only supports one stream
if (streams.size() != 1)
{
return false;
}
LayerSupportChecker layerChecker = new LayerSupportChecker();
for (ArchitectureElementSymbol element : ((CompositeElementSymbol)architecture.getBody()).getElements()){
if(!isSupportedLayer(element, layerChecker)) {
return false;
for (CompositeElementSymbol stream : streams) {
for (ArchitectureElementSymbol element : stream.getElements()) {
if (!isSupportedLayer(element, layerChecker)) {
return false;
}
}
}
return true;
......
......@@ -33,7 +33,10 @@ public class LayerNameCreator {
private Map<String, ArchitectureElementSymbol> nameToElement = new HashMap<>();
public LayerNameCreator(ArchitectureSymbol architecture) {
name(architecture.getBody(), 1, new ArrayList<>());
int stage = 1;
for (CompositeElementSymbol stream : architecture.getStreams()) {
stage = name(stream, stage, new ArrayList<>());
}
}
public ArchitectureElementSymbol getArchitectureElement(String name){
......
......@@ -172,7 +172,7 @@ class ${tc.fileNameWithoutEnding}:
def construct(self, context, data_mean=None, data_std=None):
${tc.include(tc.architecture.body)}
${tc.include(tc.architecture.streams[0])}
self.module = mx.mod.Module(symbol=mx.symbol.Group([${tc.join(tc.architectureOutputs, ",")}]),
data_names=self._input_names_,
label_names=self._output_names_,
......
......@@ -107,6 +107,16 @@ public class GenerationTest extends AbstractSymtabTest{
assertTrue(Log.getFindings().isEmpty());
}
/* TODO: Change quitGeneration() call and maybe add Exception?
@Test
public void testMultipleStreams() throws IOException, TemplateException {
Log.getFindings().clear();
String[] args = {"-m", "src/test/resources/invalid_tests", "-r", "MultipleStreams"};
CNNArch2MxNetCli.main(args);
//assertTrue(Log.getFindings().isEmpty());
}
*/
@Test
public void testMultipleOutputs() throws IOException, TemplateException {
Log.getFindings().clear();
......
......@@ -55,7 +55,7 @@ public class SymtabTest extends AbstractSymtabTest {
CNNArchCompilationUnitSymbol.KIND).orElse(null);
assertNotNull(a);
a.resolve();
a.getArchitecture().getBody().getOutputTypes();
a.getArchitecture().getStreams().get(0).getOutputTypes();
}
@Ignore
......@@ -67,7 +67,7 @@ public class SymtabTest extends AbstractSymtabTest {
CNNArchCompilationUnitSymbol.KIND).orElse(null);
assertNotNull(a);
a.resolve();
a.getArchitecture().getBody().getOutputTypes();
a.getArchitecture().getStreams().get(0).getOutputTypes();
}
@Ignore
......@@ -79,7 +79,7 @@ public class SymtabTest extends AbstractSymtabTest {
CNNArchCompilationUnitSymbol.KIND).orElse(null);
assertNotNull(a);
a.resolve();
a.getArchitecture().getBody().getOutputTypes();
a.getArchitecture().getStreams().get(0).getOutputTypes();
}
}
......@@ -39,5 +39,5 @@ architecture Alexnet(img_height=224, img_width=224, img_channels=3, classes=10){
fc(->=2) ->
FullyConnected(units=10) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -40,5 +40,5 @@ architecture ResNeXt50(img_height=224, img_width=224, img_channels=3, classes=10
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -33,5 +33,5 @@ architecture ResNet152(img_height=224, img_width=224, img_channels=3, classes=10
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -31,5 +31,5 @@ architecture ResNet34(img_height=224, img_width=224, img_channels=3, classes=100
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
......@@ -25,5 +25,5 @@ architecture SequentialAlexnet(img_height=224, img_width=224, img_channels=3, cl
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
......@@ -28,5 +28,5 @@ architecture ThreeInputCNN_M14(img_height=200, img_width=300, img_channels=3, cl
Relu() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
......@@ -27,5 +27,5 @@ architecture VGG16(img_height=224, img_width=224, img_channels=3, classes=1000){
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
predictions;
}
\ No newline at end of file
// Negative-test fixture: argument-constraint violation.
// NOTE(review): skip() hard-codes channels=75 while the parallel conv branch produces
// `channels` feature maps, so the Add() merge receives mismatched shapes; FullyConnected
// is also given a boolean repetition value (->=true). Presumably one of these is the
// constraint under test — confirm against the corresponding CoCo check.
architecture ArgumentConstraintTest1(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=75, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes, ->=true) ->
Softmax() ->
predictions
}
// Negative-test fixture: argument-constraint violation.
// skip() passes a negative stride component — stride=(stride,-stride) — which should be
// rejected by the stride range constraint.
architecture ArgumentConstraintTest2(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
// invalid: negative second stride component
Convolution(kernel=(1,1), channels=96, stride=(stride,-stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: argument-constraint violation.
// GlobalPooling is given an integer for the conditional flag (?=1) where a boolean is
// expected.
architecture ArgumentConstraintTest3(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=64, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="valid") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
// invalid: ?= expects a boolean, not an integer
GlobalPooling(pool_type="avg", ?=1) ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: argument-constraint violation.
// Pooling is given an integer padding (padding=1) where the other fixtures use a string
// value ("valid"/"same") — presumably a type-constraint violation; confirm against the
// padding argument's CoCo check.
architecture ArgumentConstraintTest4(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=96, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
// invalid: integer padding value
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding=1) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: argument-constraint violation.
// skip() passes a negative kernel dimension — kernel=(1,-1) — which should be rejected
// by the kernel range constraint.
architecture ArgumentConstraintTest5(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
// invalid: negative kernel component
Convolution(kernel=(1,-1), channels=96, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="same") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: argument-constraint violation.
// skip() passes a boolean for the channel count — channels=false — where an integer is
// expected.
architecture ArgumentConstraintTest6(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
// invalid: boolean channel count
Convolution(kernel=(1,1), channels=false, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), padding="valid") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: the first FullyConnected layer passes the `units` argument
// twice (units=64, units=32), which should be rejected as a duplicated argument.
architecture DuplicatedArgument(){
def input Q(-oo:+oo)^{10} in1
def output Q(0:1)^{2} out1
in1 ->
FullyConnected(units=64, units=32) ->
Tanh() ->
FullyConnected(units=2) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: two inputs are declared with the same name `in1`, which should
// be rejected as a duplicated I/O name.
architecture DuplicatedIONames(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def fc(){
FullyConnected(units=64) ->
Tanh()
}
in1 ->
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: duplicated names — the architecture parameter `inputs` is
// declared twice in the signature, and the layer method `fc` is defined twice.
architecture DuplicatedNames(inputs=10, inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def fc(){
FullyConnected(units=64) ->
Tanh()
}
def fc(){
FullyConnected(units=64) ->
Tanh()
}
in1 ->
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: I/O names ending in an underscore (`data_`, `predictions_`) —
// presumably rejected by the I/O naming convention check; confirm against the CoCo.
architecture IllegalIOName{
def input Q(-oo:+oo)^{10} data_
def output Q(0:1)^{2} predictions_
data_ ->
FullyConnected(units=64, no_bias=true) ->
Tanh() ->
FullyConnected(units=2, no_bias=true) ->
Softmax() ->
predictions_
}
\ No newline at end of file
// Negative-test fixture: illegal identifiers — the parameter `Tg` and the method `Fc`
// start with an upper-case letter, presumably violating the lower-case naming rule for
// parameters and user-defined methods; confirm against the naming CoCo.
architecture IllegalName(inputs=10, classes=2, Tg = 1){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
def Fc(){
FullyConnected(units=10)
}
in1 ->
FullyConnected(units=64) ->
Tanh() ->
FullyConnected(units=classes) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: out-of-bounds array access — `image` is declared with size 3
// (valid indices 0..2) but the parallel block reads image[3].
architecture InvalidArrayAccessValue(img_height=200, img_width=300, img_channels=3, classes=3){
def input Z(0:255)^{img_channels, img_height, img_width} image[3]
def output Q(0:1)^{classes} predictions
def conv(kernel, channels){
Convolution(kernel=kernel, channels=channels) ->
Relu()
}
def inputGroup(index){
[index] ->
conv(kernel=(3,3), channels=32, ->=3) ->
Pooling(pool_type="max", kernel=(2,2), stride=(2,2))
}
// invalid: image[3] is out of bounds for image[3]-sized array (indices 0..2)
(image[0] | image[1] | image[2] | image[3]) ->
inputGroup(index=[0|..|2]) ->
Concatenate() ->
conv(kernel=(3,3), channels=64) ->
Pooling(pool_type="max", kernel=(2,2), stride=(2,2)) ->
FullyConnected(units=32) ->
Relu() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: invalid I/O shapes — the input is rank-2 ({10, 2}) and the
// output rank-4 ({10, 2, 2, 2}), which should be rejected by the I/O shape check.
architecture InvalidIOShape1(){
def input Q(-oo:+oo)^{10, 2} in1
def output Q(0:1)^{10, 2, 2, 2} out1
in1 ->
FullyConnected(units=64) ->
Tanh() ->
FullyConnected(units=10) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: invalid I/O shapes — a non-integer input dimension (10.5) and
// a negative output dimension (-10).
architecture InvalidIOShape2(){
def input Q(-oo:+oo)^{10.5} in1
def output Q(0:1)^{-10} out1
in1 ->
FullyConnected(units=64) ->
Tanh() ->
FullyConnected(units=10) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: `in1` and `out1` are declared as arrays of size 2 but are used
// in the stream without an index — presumably the shape/array-usage violation under
// test; confirm against the corresponding CoCo.
architecture InvalidInputShape(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1[2]
def output Q(0:1)^{classes} out1[2]
in1 ->
FullyConnected(units=64) ->
Tanh() ->
FullyConnected(units=classes) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: illegal recursion — conv() calls resLayer(), and resLayer()
// calls conv(), forming a mutually recursive method cycle that can never be expanded.
architecture InvalidRecursion(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
// invalid: creates the conv -> resLayer -> conv cycle
resLayer(channels = 8) ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=channels, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
resLayer(channels=128, ->=3) ->
resLayer(channels=256, stride=2) ->
resLayer(channels=256, ->=5) ->
resLayer(channels=512, stride=2) ->
resLayer(channels=512, ->=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
// Negative-test fixture: missing required arguments — skip()'s Convolution omits
// `kernel`, and the main stream's conv() call omits `channels`; both lack defaults.
architecture MissingArgument(img_height=224, img_width=224, img_channels=3, classes=1000){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
// invalid: Convolution requires `kernel`
Convolution(channels=96, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, stride=stride, act=false)
|
skip(channels=channels, stride=stride, ?=(stride!=1))
) ->
Add() ->
Relu()
}
image ->
// invalid: conv requires `channels` (no default)
conv(kernel=7, stride=2) ->
Pooling(pool_type="max") ->
resLayer(channels=64, ->=3) ->
resLayer(channels=128, stride=2) ->
GlobalPooling(pool_type="avg") ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
\ No newline at end of file
// Negative-test fixture: unconnected I/O — `in1` and `out1` are arrays of size 2 but
// only index 0 of each is used, leaving in1[1]/out1[1] unconnected.
architecture MissingIO2(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1[2]
def output Q(0:1)^{classes} out1[2]
in1[0] ->
FullyConnected(units=64, no_bias=true) ->
Tanh() ->
FullyConnected(units=classes, no_bias=true) ->
Softmax() ->
out1[0]
}
\ No newline at end of file
// Negative-test fixture: missing `->` operators — Tanh(), FullyConnected(...), and
// Softmax() are not connected, which should fail parsing/checking.
architecture MissingLayerOperator(){
def input Q(-oo:+oo)^{10} in1
def output Q(0:1)^{2} out1
in1 ->
FullyConnected(units=64, no_bias=true) ->
Tanh()
FullyConnected(units=2, no_bias=true)
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: missing merge — nested parallel groups produce four parallel
// data streams that are never merged (e.g. with Concatenate) before the single-input
// continuation; presumably Add() here cannot legally resolve the nesting — confirm
// against the parallel-stream CoCo.
architecture MissingMerge(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
in1 ->
(
(
FullyConnected(units=16)
|
FullyConnected(units=16)
)
|
(
FullyConnected(units=16)
|
FullyConnected(units=16)
)
) ->
Add() ->
Tanh() ->
FullyConnected(units=classes) ->
Softmax() ->
out1
}
\ No newline at end of file
// Negative-test fixture: parallelization operators `|` are used in the main stream
// without the enclosing parallel brackets `( ... )`, which should fail parsing/checking.
architecture MissingParallelBrackets(img_height=224, img_width=224, img_channels=3, classes=10){
def input Z(0:255)^{img_channels, img_height, img_width} image
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, hasPool=true, convStride=(1,1)){
Convolution(kernel=kernel, channels=channels, stride=convStride) ->
Relu() ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2), ?=hasPool)
}
def fc(){
FullyConnected(units=4096) ->
Relu() ->
Dropout()
}
image ->
conv(kernel=(11,11), channels=96, convStride=(4,4)) ->
Lrn(nsize=5, alpha=0.0001, beta=0.75) ->
// invalid: `|` branches below are not wrapped in ( ... )
SplitData(index=0, n=2) ->
conv(kernel=(5,5), channels=128) ->
Lrn(nsize=5, alpha=0.0001, beta=0.75)
|
SplitData(index=1, n=2) ->
conv(kernel=(5,5), channels=128) ->
Lrn(nsize=5, alpha=0.0001, beta=0.75)
->
conv(kernel=(3,3), channels=384 ,hasPool=false) ->
SplitData(index=0, n=2) ->
conv(kernel=(3,3), channels=192, hasPool=false) ->
conv(kernel=(3,3), channels=128)
|
SplitData(index=1, n=2) ->
conv(kernel=(3,3), channels=192, hasPool=false) ->
conv(kernel=(3,3), channels=128)
->
fc() ->
fc() ->
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
\ No newline at end of file
// Test fixture: an architecture with two independent streams (each terminated by `;`),
// mapping data[i] -> pred[i]. NOTE(review): placed under the invalid tests presumably
// because the MxNet generator supports only a single stream, not because the
// architecture itself is ill-formed — confirm against the generator's support check.
architecture MultipleStreams{
def input Q(-oo:+oo)^{10} data[2]
def output Q(0:1)^{4} pred[2]
data[0] ->
FullyConnected(units=4, no_bias=true) ->
Softmax() ->
pred[0];
data[1] ->
FullyConnected(units=4, no_bias=true) ->
Softmax() ->
pred[1];
}
\ No newline at end of file
architecture NotIOArray(inputs=10, classes=2){
def input Q(-oo:+oo)^{inputs} in1
def output Q(0:1)^{classes} out1
in1[1] ->
FullyConnected(units=64, no_bias=true) ->
Tanh() ->
FullyConnected(units=classes, no_bias=true) ->
Softmax() ->