Commit 3d7accef authored by Carlos Alfredo Yeverino Rodriguez's avatar Carlos Alfredo Yeverino Rodriguez
Browse files

Merge branch 'master' into layer-templates-caffe2

# Conflicts:
#	src/main/resources/templates/caffe2/CNNCreator.ftl
#	src/main/resources/templates/caffe2/elements/Pooling.ftl
parents e9adcb57 1b9c5e48
......@@ -15,8 +15,8 @@
<properties>
<!-- .. SE-Libraries .................................................. -->
<CNNArch.version>0.2.2-SNAPSHOT</CNNArch.version>
<CNNTrain.version>0.2.2-SNAPSHOT</CNNTrain.version>
<CNNArch.version>0.2.3-SNAPSHOT</CNNArch.version>
<CNNTrain.version>0.2.4-SNAPSHOT</CNNTrain.version>
<!-- .. Libraries .................................................. -->
<guava.version>18.0</guava.version>
......
......@@ -16,6 +16,9 @@ if __name__ == "__main__":
<#if (config.batchSize)??>
batch_size = ${config.batchSize},
</#if>
<#if (config.numEpoch)??>
num_epoch = ${config.numEpoch},
</#if>
<#if (config.loadCheckpoint)??>
load_checkpoint = ${config.loadCheckpoint?string("True","False")},
</#if>
......@@ -25,6 +28,9 @@ if __name__ == "__main__":
<#if (config.normalize)??>
normalize = ${config.normalize?string("True","False")},
</#if>
<#if (config.evalMetric)??>
eval_metric = '${config.evalMetric}',
</#if>
<#if (config.configuration.optimizer)??>
optimizer = '${config.optimizerName}',
optimizer_params = {
......
${element.name} = mx.symbol.Pooling(data=${element.inputs[0]},
global_pool=True,
kernel=(1,1),
pool_type=${element.poolType},
pool_type="${element.poolType}",
name="${element.name}")
<#include "OutputShape.ftl">
\ No newline at end of file
......@@ -156,4 +156,107 @@ public class GenerationTest extends AbstractSymtabTest{
Arrays.asList(
"CNNTrainer_main.py"));
}
@Test
public void testFullCfgGeneration() throws IOException, TemplateException {
    // Resolves the two fully-specified CNNTrain configurations ("FullConfig",
    // "FullConfig2"), generates the combined trainer script, and compares the
    // result against the checked-in expected file.
    Log.getFindings().clear();
    List<ConfigurationSymbol> configurations = new ArrayList<>();
    List<String> instanceName = Arrays.asList("main_net1", "main_net2");
    final ModelPath mp = new ModelPath(Paths.get("src/test/resources/valid_tests"));
    GlobalScope scope = new GlobalScope(mp, new CNNTrainLanguage());
    CNNTrainCompilationUnitSymbol compilationUnit = scope.<CNNTrainCompilationUnitSymbol>
            resolve("FullConfig", CNNTrainCompilationUnitSymbol.KIND).get();
    CNNTrainCocos.checkAll(compilationUnit);
    configurations.add(compilationUnit.getConfiguration());
    compilationUnit = scope.<CNNTrainCompilationUnitSymbol>
            resolve("FullConfig2", CNNTrainCompilationUnitSymbol.KIND).get();
    CNNTrainCocos.checkAll(compilationUnit);
    configurations.add(compilationUnit.getConfiguration());
    CNNArch2Caffe2 generator = new CNNArch2Caffe2();
    Map<String, String> trainerMap = generator.generateTrainer(configurations, instanceName, "mainFull");
    // Iterate over entries (avoids a second map lookup) and use
    // try-with-resources so the writer is closed even if write() throws;
    // the original leaked the FileWriter on failure.
    for (Map.Entry<String, String> entry : trainerMap.entrySet()) {
        try (FileWriter writer = new FileWriter(generator.getGenerationTargetPath() + entry.getKey())) {
            writer.write(entry.getValue());
        }
    }
    assertTrue(Log.getFindings().isEmpty());
    checkFilesAreEqual(
            Paths.get("./target/generated-sources-cnnarch"),
            Paths.get("./src/test/resources/target_code"),
            Arrays.asList(
                    "CNNTrainer_mainFull.py"));
}
@Test
public void testSimpleCfgGeneration() throws IOException, TemplateException {
    // Resolves the two minimal CNNTrain configurations ("SimpleConfig1",
    // "SimpleConfig2"), generates the combined trainer script, and compares
    // the result against the checked-in expected file.
    Log.getFindings().clear();
    List<ConfigurationSymbol> configurations = new ArrayList<>();
    List<String> instanceName = Arrays.asList("main_net1", "main_net2");
    final ModelPath mp = new ModelPath(Paths.get("src/test/resources/valid_tests"));
    GlobalScope scope = new GlobalScope(mp, new CNNTrainLanguage());
    CNNTrainCompilationUnitSymbol compilationUnit = scope.<CNNTrainCompilationUnitSymbol>
            resolve("SimpleConfig1", CNNTrainCompilationUnitSymbol.KIND).get();
    CNNTrainCocos.checkAll(compilationUnit);
    configurations.add(compilationUnit.getConfiguration());
    compilationUnit = scope.<CNNTrainCompilationUnitSymbol>
            resolve("SimpleConfig2", CNNTrainCompilationUnitSymbol.KIND).get();
    CNNTrainCocos.checkAll(compilationUnit);
    configurations.add(compilationUnit.getConfiguration());
    CNNArch2Caffe2 generator = new CNNArch2Caffe2();
    Map<String, String> trainerMap = generator.generateTrainer(configurations, instanceName, "mainSimple");
    // Iterate over entries (avoids a second map lookup) and use
    // try-with-resources so the writer is closed even if write() throws;
    // the original leaked the FileWriter on failure.
    for (Map.Entry<String, String> entry : trainerMap.entrySet()) {
        try (FileWriter writer = new FileWriter(generator.getGenerationTargetPath() + entry.getKey())) {
            writer.write(entry.getValue());
        }
    }
    assertTrue(Log.getFindings().isEmpty());
    checkFilesAreEqual(
            Paths.get("./target/generated-sources-cnnarch"),
            Paths.get("./src/test/resources/target_code"),
            Arrays.asList(
                    "CNNTrainer_mainSimple.py"));
}
@Test
public void testEmptyCfgGeneration() throws IOException, TemplateException {
    // Resolves the empty CNNTrain configuration ("EmptyConfig") — no
    // parameters set, so the generated train() call carries no arguments —
    // and compares the generated trainer against the expected file.
    Log.getFindings().clear();
    List<ConfigurationSymbol> configurations = new ArrayList<>();
    List<String> instanceName = Arrays.asList("main_net1");
    final ModelPath mp = new ModelPath(Paths.get("src/test/resources/valid_tests"));
    GlobalScope scope = new GlobalScope(mp, new CNNTrainLanguage());
    CNNTrainCompilationUnitSymbol compilationUnit = scope.<CNNTrainCompilationUnitSymbol>
            resolve("EmptyConfig", CNNTrainCompilationUnitSymbol.KIND).get();
    CNNTrainCocos.checkAll(compilationUnit);
    configurations.add(compilationUnit.getConfiguration());
    CNNArch2Caffe2 generator = new CNNArch2Caffe2();
    Map<String, String> trainerMap = generator.generateTrainer(configurations, instanceName, "mainEmpty");
    // Iterate over entries (avoids a second map lookup) and use
    // try-with-resources so the writer is closed even if write() throws;
    // the original leaked the FileWriter on failure.
    for (Map.Entry<String, String> entry : trainerMap.entrySet()) {
        try (FileWriter writer = new FileWriter(generator.getGenerationTargetPath() + entry.getKey())) {
            writer.write(entry.getValue());
        }
    }
    assertTrue(Log.getFindings().isEmpty());
    checkFilesAreEqual(
            Paths.get("./target/generated-sources-cnnarch"),
            Paths.get("./src/test/resources/target_code"),
            Arrays.asList(
                    "CNNTrainer_mainEmpty.py"));
}
}
......@@ -28,4 +28,4 @@ architecture VGG16(img_height=224, img_width=224, img_channels=3, classes=1000){
FullyConnected(units=classes) ->
Softmax() ->
predictions
}
}
\ No newline at end of file
......@@ -109,8 +109,9 @@ class CNNCreator_Alexnet:
sys.exit(1)
def train(self, batch_size,
def train(self, batch_size=64,
num_epoch=10,
eval_metric='acc',
optimizer='adam',
optimizer_params=(('learning_rate', 0.001),),
load_checkpoint=True,
......@@ -162,6 +163,7 @@ class CNNCreator_Alexnet:
self.module.fit(
train_data=train_iter,
eval_metric=eval_metric,
eval_data=test_iter,
optimizer=optimizer,
optimizer_params=optimizer_params,
......
......@@ -109,8 +109,9 @@ class CNNCreator_CifarClassifierNetwork:
sys.exit(1)
def train(self, batch_size,
def train(self, batch_size=64,
num_epoch=10,
eval_metric='acc',
optimizer='adam',
optimizer_params=(('learning_rate', 0.001),),
load_checkpoint=True,
......@@ -162,6 +163,7 @@ class CNNCreator_CifarClassifierNetwork:
self.module.fit(
train_data=train_iter,
eval_metric=eval_metric,
eval_data=test_iter,
optimizer=optimizer,
optimizer_params=optimizer_params,
......
......@@ -109,8 +109,9 @@ class CNNCreator_VGG16:
sys.exit(1)
def train(self, batch_size,
def train(self, batch_size=64,
num_epoch=10,
eval_metric='acc',
optimizer='adam',
optimizer_params=(('learning_rate', 0.001),),
load_checkpoint=True,
......@@ -162,6 +163,7 @@ class CNNCreator_VGG16:
self.module.fit(
train_data=train_iter,
eval_metric=eval_metric,
eval_data=test_iter,
optimizer=optimizer,
optimizer_params=optimizer_params,
......
......@@ -12,6 +12,7 @@ if __name__ == "__main__":
main_net1 = CNNCreator_main_net1.CNNCreator_main_net1()
main_net1.train(
batch_size = 64,
num_epoch = 10,
load_checkpoint = False,
context = 'gpu',
normalize = True,
......@@ -25,6 +26,7 @@ if __name__ == "__main__":
main_net2 = CNNCreator_main_net2.CNNCreator_main_net2()
main_net2.train(
batch_size = 32,
num_epoch = 10,
load_checkpoint = False,
context = 'gpu',
normalize = True,
......
# Expected generated trainer script — presumably the fixture for the
# "EmptyConfig" test case: train() is called with no arguments so the
# generated defaults apply. TODO confirm this file is compared byte-wise
# by checkFilesAreEqual; do not edit its code content.
# NOTE(review): indentation appears stripped by the paste; the body of the
# __main__ guard should be indented in the real file.
import logging
import mxnet as mx
import CNNCreator_main_net1
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
# NOTE(review): delay="true" passes a string where logging.FileHandler
# expects a bool; any non-empty string is truthy so delayed opening still
# happens, but the type is wrong — confirm against the generator template.
handler = logging.FileHandler("train.log","w", encoding=None, delay="true")
logger.addHandler(handler)
main_net1 = CNNCreator_main_net1.CNNCreator_main_net1()
main_net1.train(
)
# Expected generated trainer script — presumably the fixture for the
# "FullConfig"/"FullConfig2" test case: every training parameter is set
# explicitly, including the full optimizer_params dicts for rmsprop and
# adam. TODO confirm this file is compared byte-wise by checkFilesAreEqual;
# do not edit its code content.
# NOTE(review): indentation appears stripped by the paste; the bodies of
# the __main__ guard and the train() calls should be indented in the real
# file.
import logging
import mxnet as mx
import CNNCreator_main_net1
import CNNCreator_main_net2
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
# NOTE(review): delay="true" passes a string where a bool is expected;
# truthy, so delayed opening still happens — confirm against the template.
handler = logging.FileHandler("train.log","w", encoding=None, delay="true")
logger.addHandler(handler)
# First network: rmsprop optimizer with the full parameter set from
# FullConfig (note 0.000001 rendered as 1.0E-6 by the generator).
main_net1 = CNNCreator_main_net1.CNNCreator_main_net1()
main_net1.train(
batch_size = 100,
num_epoch = 5,
load_checkpoint = True,
context = 'gpu',
normalize = True,
eval_metric = 'mse',
optimizer = 'rmsprop',
optimizer_params = {
'weight_decay': 0.01,
'centered': True,
'gamma2': 0.9,
'gamma1': 0.9,
'clip_weights': 10.0,
'learning_rate_decay': 0.9,
'epsilon': 1.0E-6,
'rescale_grad': 1.1,
'clip_gradient': 10.0,
'learning_rate_minimum': 1.0E-5,
'learning_rate_policy': 'step',
'learning_rate': 0.001,
'step_size': 1000 }
)
# Second network: adam optimizer from FullConfig2 (the DSL's
# top_k_accuracy metric is emitted as 'topKAccuracy').
main_net2 = CNNCreator_main_net2.CNNCreator_main_net2()
main_net2.train(
batch_size = 100,
num_epoch = 10,
load_checkpoint = False,
context = 'gpu',
normalize = False,
eval_metric = 'topKAccuracy',
optimizer = 'adam',
optimizer_params = {
'epsilon': 1.0E-6,
'weight_decay': 0.01,
'rescale_grad': 1.1,
'beta1': 0.9,
'clip_gradient': 10.0,
'beta2': 0.9,
'learning_rate_minimum': 0.001,
'learning_rate_policy': 'exp',
'learning_rate': 0.001,
'learning_rate_decay': 0.9,
'step_size': 1000 }
)
# Expected generated trainer script — presumably the fixture for the
# "SimpleConfig1"/"SimpleConfig2" test case: only batch size, epochs, and
# the optimizer with its learning rate are set; everything else falls back
# to the train() defaults. TODO confirm this file is compared byte-wise by
# checkFilesAreEqual; do not edit its code content.
# NOTE(review): indentation appears stripped by the paste; the bodies of
# the __main__ guard and the train() calls should be indented in the real
# file.
import logging
import mxnet as mx
import CNNCreator_main_net1
import CNNCreator_main_net2
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
# NOTE(review): delay="true" passes a string where a bool is expected;
# truthy, so delayed opening still happens — confirm against the template.
handler = logging.FileHandler("train.log","w", encoding=None, delay="true")
logger.addHandler(handler)
main_net1 = CNNCreator_main_net1.CNNCreator_main_net1()
main_net1.train(
batch_size = 100,
num_epoch = 50,
optimizer = 'adam',
optimizer_params = {
'learning_rate': 0.001 }
)
main_net2 = CNNCreator_main_net2.CNNCreator_main_net2()
main_net2.train(
batch_size = 100,
num_epoch = 5,
optimizer = 'sgd',
optimizer_params = {
'learning_rate': 0.1 }
)
configuration FullConfig{
num_epoch : 5
batch_size : 100
load_checkpoint : true
eval_metric : mse
context : gpu
normalize : true
optimizer : rmsprop{
learning_rate : 0.001
learning_rate_minimum : 0.00001
weight_decay : 0.01
learning_rate_decay : 0.9
learning_rate_policy : step
step_size : 1000
rescale_grad : 1.1
clip_gradient : 10
gamma1 : 0.9
gamma2 : 0.9
epsilon : 0.000001
centered : true
clip_weights : 10
}
}
configuration FullConfig2{
num_epoch : 10
batch_size : 100
load_checkpoint : false
context : gpu
eval_metric : top_k_accuracy
normalize : false
optimizer : adam{
learning_rate : 0.001
learning_rate_minimum : 0.001
weight_decay : 0.01
learning_rate_decay : 0.9
learning_rate_policy : exp
step_size : 1000
rescale_grad : 1.1
clip_gradient : 10
beta1 : 0.9
beta2 : 0.9
epsilon : 0.000001
}
}
configuration SimpleConfig1{
num_epoch : 50
batch_size : 100
optimizer : adam{
learning_rate : 0.001
}
}
configuration SimpleConfig2{
num_epoch:5
batch_size:100
optimizer:sgd{
learning_rate:0.1
}
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment