
Commit 4a5e9d22 authored by Sebastian N.

Updated

parent 3e8f4868
@@ -16,7 +16,7 @@
<!-- .. SE-Libraries .................................................. -->
<emadl.version>0.2.10-SNAPSHOT</emadl.version>
<CNNTrain.version>0.3.6-SNAPSHOT</CNNTrain.version>
<CNNTrain.version>0.3.8-SNAPSHOT</CNNTrain.version>
<cnnarch-generator.version>0.0.4-SNAPSHOT</cnnarch-generator.version>
<cnnarch-mxnet-generator.version>0.2.17-SNAPSHOT</cnnarch-mxnet-generator.version>
<cnnarch-caffe2-generator.version>0.2.13-SNAPSHOT</cnnarch-caffe2-generator.version>
......
@@ -159,6 +159,7 @@ public class GenerationTest extends AbstractSymtabTest {
"CNNTrainer_mnist_mnistClassifier_net.py"));
}
@Ignore
@Test
public void testMnistClassifierForGluon() throws IOException, TemplateException {
Log.getFindings().clear();
......
@@ -64,6 +64,19 @@ public class IntegrationGluonTest extends IntegrationTest {
assertTrue(Log.getFindings().isEmpty());
}
@Ignore
@Test
public void testRNNsearch() {
Log.getFindings().clear();
deleteHashFile(Paths.get("./target/generated-sources-emadl/rnnsearch/Network.training_hash"));
String[] args = {"-m", "src/test/resources/models", "-r", "rnnsearch.Network", "-b", "GLUON"};
EMADLGeneratorCli.main(args);
assertTrue(Log.getFindings().isEmpty());
}
private void deleteHashFile(Path hashFile) {
try {
Files.delete(hashFile);
......
@@ -14,4 +14,5 @@ MultipleStreams src/test/resources/training_data/MultipleStreams
Invariant src/test/resources/training_data/Invariant
RNNtest src/test/resources/training_data/RNNtest
instanceTestUnroll.RNNencdec src/test/resources/training_data/newstest
rnnencdec.Network src/test/resources/training_data/newstest
\ No newline at end of file
rnnencdec.Network src/test/resources/training_data/newstest
rnnsearch.Network src/test/resources/training_data/newstest
\ No newline at end of file
package rnnsearch;
component Main{
ports in Z(0:49999)^{30} source,
out Z(0:49999)^{1} target[30];
instance Network net;
connect source -> net.source;
connect net.target -> target;
}
\ No newline at end of file
configuration Network{
num_epoch: 10
batch_size: 64
context: cpu
eval_metric: bleu{
exclude:[0, 2, 3]
}
optimizer: adadelta{
learning_rate: 0.0001
learning_rate_decay: 0.99
step_size: 100
learning_rate_minimum: 0.000000000001
epsilon: 0.000001
rho: 0.95
}
}
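Note: the adadelta block above only declares CNNTrain hyperparameters; how the Gluon backend consumes them is not shown in this diff. Purely for orientation, one plausible mapping onto plain MXNet objects might look as follows (the wiring, variable names, and the choice of FactorScheduler are assumptions, not the generated code):

import mxnet as mx

# Assumed mapping (illustrative only):
#   step_size / learning_rate_decay / learning_rate_minimum -> FactorScheduler
#   rho / epsilon / learning_rate                             -> AdaDelta itself
schedule = mx.lr_scheduler.FactorScheduler(step=100, factor=0.99, stop_factor_lr=1e-12)
optimizer = mx.optimizer.AdaDelta(rho=0.95, epsilon=1e-6,
                                  learning_rate=1e-4, lr_scheduler=schedule)

# Dummy network, just to show how such an optimizer instance would be attached
net = mx.gluon.nn.Dense(10)
net.initialize()
trainer = mx.gluon.Trainer(net.collect_params(), optimizer)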
package rnnsearch;
component Network{
ports in Z(0:49999)^{30} source,
out Z(0:49999)^{1} target[30];
implementation CNN{
layer GRU(units=1000, bidirectional=true) encoder;
layer FullyConnected(units=1000, flatten=false) fc;
source -> Embedding(output_dim=620) -> encoder -> fc;
1 -> target[0];
layer GRU(units=1000) decoder;
encoder.state -> Split(n=2) -> [1] -> decoder.state;
timed<t> GreedySearch(max_length=30) {
(
(
(
decoder.state ->
Repeat(n=30, axis=0)
|
fc.output
) ->
Concatenate(axis=1) ->
FullyConnected(units=1000, flatten=false) ->
Tanh() ->
FullyConnected(units=30) ->
Softmax() ->
ExpandDims(axis=0)
|
fc.output
) ->
Dot()
|
target[t-1] ->
Embedding(output_dim=620)
) ->
Concatenate(axis=1) ->
decoder ->
FullyConnected(units=50000) ->
Softmax() ->
ArgMax() ->
target[t]
};
}
}
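For readers unfamiliar with the layer chain inside GreedySearch: it repeats the previous decoder state across the 30 source positions, concatenates it with the encoder annotations coming out of fc, scores each position with a FullyConnected -> Tanh -> FullyConnected -> Softmax sub-network, and combines the weights with the annotations via Dot, i.e. Bahdanau-style attention. A minimal NumPy sketch of that step, assuming the layers behave as their usual definitions suggest (all names, shapes, and the helper function are illustrative, not taken from the generated code):

import numpy as np

def attention_context(decoder_state, annotations, w1, b1, w2, b2):
    # decoder_state: (1000,)    previous decoder hidden state
    # annotations:   (30, 1000) encoder outputs after the fc layer
    repeated = np.tile(decoder_state, (30, 1))                  # Repeat(n=30, axis=0)
    combined = np.concatenate([repeated, annotations], axis=1)  # Concatenate(axis=1) -> (30, 2000)
    hidden = np.tanh(combined @ w1 + b1)                        # FullyConnected(1000) -> Tanh
    scores = hidden.reshape(-1) @ w2 + b2                       # FullyConnected(units=30) on the flattened input
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()                                    # Softmax over the 30 source positions
    return weights[None, :] @ annotations                       # ExpandDims(axis=0) + Dot -> (1, 1000) context

rng = np.random.default_rng(0)
context = attention_context(rng.standard_normal(1000),
                            rng.standard_normal((30, 1000)),
                            rng.standard_normal((2000, 1000)), rng.standard_normal(1000),
                            rng.standard_normal((30 * 1000, 30)), rng.standard_normal(30))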
@@ -40,6 +40,7 @@ class CNNSupervisedTrainer_mnist_mnistClassifier_net:
def train(self, batch_size=64,
num_epoch=10,
eval_metric='acc',
eval_metric_params={},
loss ='softmax_cross_entropy',
loss_params={},
optimizer='adam',
......
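The hunk above only extends the train() signature with eval_metric_params; the diff does not show how the dict is consumed. One plausible use, sketched here as an assumption rather than the generated code, is forwarding it to the metric constructor, which is also where a metric such as the bleu{exclude: [0, 2, 3]} setting from the configuration would need extra handling:

import mxnet as mx

# Built-in metrics can take extra constructor arguments from eval_metric_params
# (illustrative only, not the generated trainer code):
eval_metric = 'acc'
eval_metric_params = {}
metric = mx.metric.create(eval_metric, **eval_metric_params)

# A non-built-in metric such as 'bleu' would first have to be registered,
# e.g. with the @mx.metric.register decorator on a custom EvalMetric subclass,
# before create() could resolve it by name.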