Commit dc781bce authored by Thomas Michael Timmermanns, committed by Thomas Michael Timmermanns

Fixed Range and added comments to grammar.

parent 18547df4
@@ -2,6 +2,21 @@ package de.monticore.lang.monticar;
grammar CNNArch extends de.monticore.lang.math.Math {
token NEWLINETOKEN =
('\r' '\n' |
'\r' |
'\n' ):;
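/*
   NEWLINETOKEN makes line breaks visible to the parser; the productions
   below use NEWLINETOKEN* wherever a line break is allowed.
*/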
/* =================================*/
/* ========== PRODUCTIONS ==========*/
/* =================================*/
/* ========== Declarations =========*/
/**
The complete file.
Use nonterminal Architecture for embedding in another language (e.g. EmbeddedMontiArc)
*/
symbol scope CNNArchCompilationUnit = "architecture"
name:Name&
( "(" (ArchitectureParameter || ",")* ")" )? "{"
@@ -9,53 +24,76 @@ grammar CNNArch extends de.monticore.lang.math.Math {
Architecture
"}";
Architecture = NEWLINETOKEN* methodDeclaration:MethodDeclaration* NEWLINETOKEN*
body:ArchBody NEWLINETOKEN*;
interface ArchitectureElement;
interface Variable;
ast Variable = method String getName(){};
MethodDeclaration = NEWLINETOKEN* "def"
Name& "("
parameters:(MethodParameter || ",")* ")" "{" NEWLINETOKEN*
body:ArchBody NEWLINETOKEN* "}";
IODeclaration = NEWLINETOKEN* "def"
(in:"input" | out:"output")
type:ArchType
Name&
(ArrayDeclaration)?;
(in:"input" | out:"output")
type:ArchType
Name&
(ArrayDeclaration)?;
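/*
   Example from the networks below:
     def output Q(0:1)^{classes} predictions
   Inputs are declared analogously (the shape order here is illustrative):
     def input Q(0:1)^{img_channels, img_height, img_width} image
*/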
/* ============== Type =============*/
/**
Similar to EmbeddedMontiArc port types.
ArchType and Shape are not used if the Architecture is integrated into EmbeddedMontiArc
*/
ArchType = ElementType "^" Shape;
Shape = "{" dimensions:(ArchSimpleExpression || ",")* "}";
ArchitectureParameter implements Variable = NEWLINETOKEN* Name& ("=" default:ArchSimpleExpression)? NEWLINETOKEN*;
MethodDeclaration = NEWLINETOKEN* "def"
Name& "("
parameters:(MethodParameter || ",")* ")" "{" NEWLINETOKEN*
body:ArchBody NEWLINETOKEN* "}";
/* ========= Architecture =========*/
MethodParameter implements Variable = NEWLINETOKEN* Name& ("=" default:ArchSimpleExpression)? NEWLINETOKEN*;
/**
Defines the architecture of the neural network.
This NT is used for integration in EmbeddedMontiArc.
@attribute methodDeclaration*
A list of new methods/layers which can be used in the architecture.
@attribute body
The architecture of the neural network.
*/
Architecture = NEWLINETOKEN* methodDeclaration:MethodDeclaration* NEWLINETOKEN*
body:ArchBody NEWLINETOKEN*;
scope ArchBody = elements:(ArchitectureElement || "->")*;
interface ArchitectureElement;
IOLayer implements ArchitectureElement = NEWLINETOKEN* Name& ("[" index:ArchSimpleExpression "]")?;
MethodLayer implements ArchitectureElement = NEWLINETOKEN* Name& "(" arguments:(ArchArgument || ",")* ")";
ParallelLayer implements ArchitectureElement = NEWLINETOKEN* "(" NEWLINETOKEN* groups:ArchBody NEWLINETOKEN* "|" NEWLINETOKEN* groups:(ArchBody || "|")+ NEWLINETOKEN* ")";
ArrayAccessLayer implements ArchitectureElement = NEWLINETOKEN* "[" index:ArchSimpleExpression "]";
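/*
   Illustrative examples of the four element kinds: an IOLayer names a
   declared input or output (image), a MethodLayer applies a layer or
   method (conv(kernel=3, channels=64)), a ParallelLayer forks the stream
   ((conv(kernel=3) | conv(kernel=5))), and an ArrayAccessLayer selects
   one element of an array ([0]).
*/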
/* ====== Variables/Arguments ======*/
interface Variable;
ArchitectureParameter implements Variable = NEWLINETOKEN* Name& ("=" default:ArchSimpleExpression)? NEWLINETOKEN*;
MethodParameter implements Variable = NEWLINETOKEN* Name& ("=" default:ArchSimpleExpression)? NEWLINETOKEN*;
interface ArchArgument;
ast ArchArgument = method String getName(){}
method ASTArchExpression getRhs(){};
ArchParameterArgument implements ArchArgument = NEWLINETOKEN* Name "=" rhs:ArchExpression NEWLINETOKEN*;
ArchSpecialArgument implements ArchArgument = NEWLINETOKEN* (serial:"->" | parallel:"|" | conditional:"?") "="
rhs:ArchExpression NEWLINETOKEN*;
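/*
   The three special arguments appear in the networks below:
     conv(..., -> = 2)         repeat the layer twice in sequence
     (resGroup(...) | = 32)    replicate the group 32 times in parallel
     Relu(? = act)             include the layer only if act is true
*/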
ast ArchSpecialArgument = method public String getName(){return "";};
ParallelLayer implements ArchitectureElement = NEWLINETOKEN* "(" NEWLINETOKEN* groups:ArchBody NEWLINETOKEN* "|" NEWLINETOKEN* groups:(ArchBody || "|")+ NEWLINETOKEN* ")";
rhs:ArchExpression NEWLINETOKEN*;
ArrayAccessLayer implements ArchitectureElement = NEWLINETOKEN* "[" index:ArchSimpleExpression "]";
/* ======= Value Expressions =======*/
/**
Expression used for method arguments.
*/
ArchExpression = (expression:ArchSimpleExpression | sequence:ArchValueSequence);
interface ArchValueSequence;
@@ -69,8 +107,9 @@ grammar CNNArch extends de.monticore.lang.math.Math {
".."
(serial2:"->" | parallel2:"|")
end:ArchSimpleExpression "]";
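/*
   Illustrative reading: [2 -> .. -> 5] expands to the serial sequence
   2,3,4,5 and [2 | .. | 5] to the same values in parallel. With the range
   fix below, a descending range such as [5 -> .. -> 2] expands to an
   empty sequence.
*/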
/**
Expression for variable values.
*/
ArchSimpleExpression = (arithmeticExpression:MathArithmeticExpression
| booleanExpression:MathBooleanExpression
| tupleExpression:TupleExpression
@@ -78,8 +117,13 @@ grammar CNNArch extends de.monticore.lang.math.Math {
TupleExpression = "(" expressions:MathExpression "," expressions:(MathExpression || ",")* ")";
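/* Example: (2,2) in stride=(2,2) is a TupleExpression. */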
token NEWLINETOKEN =
('\r' '\n' |
'\r' |
'\n' ):;
/* =================================*/
/* ============ ASTRULES ===========*/
/* =================================*/
ast Variable = method String getName(){};
ast ArchSpecialArgument = method public String getName(){return "";}; // Override is necessary: special arguments have no Name of their own.
ast ArchArgument = method String getName(){}
method ASTArchExpression getRhs(){};
}
\ No newline at end of file
@@ -120,12 +120,7 @@ public class ArchRangeExpressionSymbol extends ArchAbstractSequenceExpression {
int start = startSymbol.getIntValue().get();
int end = endSymbol.getIntValue().get();
List<Integer> range;
if (start <= end){
range = IntStream.rangeClosed(start, end).boxed().collect(Collectors.toList());
}
else {
range = IntStream.rangeClosed(-start, -end).map(e -> -e).boxed().collect(Collectors.toList());
}
range = IntStream.rangeClosed(start, end).boxed().collect(Collectors.toList());
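// Note: IntStream.rangeClosed(start, end) yields an empty stream when
// start > end, so descending ranges now expand to an empty list; the
// removed branch above previously produced them via negation.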
List<List<ArchSimpleExpressionSymbol>> elementList = new ArrayList<>();
if (isParallel()){
......
@@ -24,12 +24,15 @@ import de.monticore.lang.monticar.cnnarch.generator.CNNArchGenerator;
import de.monticore.lang.monticar.cnnarch.generator.CNNArchGeneratorCli;
import de.se_rwth.commons.logging.Log;
import freemarker.template.TemplateException;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import static de.monticore.lang.monticar.cnnarch.ParserTest.ENABLE_FAIL_QUICK;
public class GenerationTest {
private void generate(String qualifiedName) throws IOException, TemplateException {
@@ -38,6 +41,13 @@ public class GenerationTest {
gen.generate(modelPath, qualifiedName);
}
@Before
public void setUp() {
// ensure an empty log
Log.getFindings().clear();
Log.enableFailQuick(ENABLE_FAIL_QUICK);
}
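// Resetting the log and the fail-quick mode in @Before isolates each
// test; ENABLE_FAIL_QUICK is the same flag used by ParserTest.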
@Test
public void testAlexnetGeneration() throws IOException, TemplateException {
......
@@ -3,18 +3,14 @@ architecture ResNeXt50(img_height=224, img_width=224, img_channels=3, classes=10
def output Q(0:1)^{classes} predictions
def conv(kernel, channels, stride=1, act=true){
Convolution(kernel=kernel, channels=channels, stride=(stride,stride)) ->
Convolution(kernel=(kernel,kernel), channels=channels, stride=(stride,stride)) ->
BatchNorm() ->
Relu(?=act)
}
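/* kernel is now a scalar (e.g. kernel=3) and is expanded to the square
   tuple (kernel,kernel) inside conv, so callers no longer write (3,3). */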
def resGroup(innerChannels, outChannels, stride=1){
conv(kernel=(1,1), channels=innerChannels) ->
conv(kernel=(3,3), channels=innerChannels, stride=stride) ->
conv(kernel=(1,1), channels=outChannels, act=false)
}
def skip(outChannels, stride){
Convolution(kernel=(1,1), channels=outChannels, stride=(stride,stride)) ->
BatchNorm()
conv(kernel=1, channels=innerChannels) ->
conv(kernel=3, channels=innerChannels, stride=stride) ->
conv(kernel=1, channels=outChannels, act=false)
}
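/* The dedicated skip() method is removed; resLayer below inlines an
   equivalent 1x1 conv with act=false, guarded by ? = addSkipConv. */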
def resLayer(innerChannels, outChannels, stride=1, addSkipConv=false){
(
@@ -24,14 +24,14 @@ architecture ResNeXt50(img_height=224, img_width=224, img_channels=3, classes=10
| = 32) ->
Add()
|
skip(outChannels=outChannels, stride=stride, ? = addSkipConv)
conv(kernel=1, channels=outChannels, stride=stride, act=false, ? = addSkipConv)
) ->
Add() ->
Relu()
}
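/* | = 32 replicates the resGroup branch 32 times in parallel (the ResNeXt
   cardinality); the inner Add() sums the group outputs and the outer one
   adds the skip path. */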
data ->
conv(kernel=(7,7), channels=64, stride=2) ->
conv(kernel=7, channels=64, stride=2) ->
Pooling(pool_type="max", kernel=(3,3), stride=(2,2)) ->
resLayer(innerChannels=4, outChannels=256, addSkipConv=true) ->
resLayer(innerChannels=4, outChannels=256, -> = 2) ->
......
@@ -7,17 +7,13 @@ architecture ResNet152(img_height=224, img_width=224, img_channels=3, classes=10
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=channels, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1, addSkipConv=false){
(
conv(kernel=1, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels) ->
conv(kernel=1, channels=4*channels, act=false)
|
skip(channels=4*channels, stride=stride, ? = addSkipConv)
conv(kernel=1, channels=4*channels, stride=stride, act=false, ? = addSkipConv)
) ->
Add() ->
Relu()
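/* Bottleneck pattern: 1x1 reduce, 3x3, then 1x1 expand to 4*channels;
   the conditional projection conv matches that expanded width. */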
......
@@ -7,16 +7,12 @@ architecture ResNet34(img_height=224, img_width=224, img_channels=3, classes=100
BatchNorm() ->
Relu(?=act)
}
def skip(channels, stride){
Convolution(kernel=(1,1), channels=channels, stride=(stride,stride)) ->
BatchNorm()
}
def resLayer(channels, stride=1){
(
conv(kernel=3, channels=channels, stride=stride) ->
conv(kernel=3, channels=channels, act=false)
|
skip(channels=channels, stride=stride, ? = (stride != 1))
conv(kernel=1, channels=channels, stride=stride, act=false, ? = (stride != 1))
) ->
Add() ->
Relu()
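/* Here the condition is computed: the 1x1 projection is inserted only
   when stride != 1, i.e. when the spatial size changes. */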
......