Commit 86464417 authored by Carlos Alfredo Yeverino Rodriguez's avatar Carlos Alfredo Yeverino Rodriguez
Browse files

Modified CNNTrainer.ftl: improved way to enter the optimizer parameters in...

Modified CNNTrainer.ftl: improved way to enter the optimizer parameters in template since the support checker for training parameters is already implemented.
Corrected order of training parameters in target files based on the modification.
parent a7081ea2
Pipeline #79179 passed with stages
in 3 minutes and 15 seconds
......@@ -24,51 +24,33 @@ if __name__ == "__main__":
<#if (config.batchSize)??>
batch_size=${config.batchSize},
</#if>
<#if (config.loadCheckpoint)??>
<#--load_checkpoint=${config.loadCheckpoint?string("True","False")}, -->
</#if>
<#if (config.context)??>
context='${config.context}',
</#if>
<#if (config.normalize)??>
<#--normalize=${config.normalize?string("True","False")}, -->
</#if>
<#if (config.evalMetric)??>
eval_metric='${config.evalMetric}',
</#if>
<#if (config.configuration.optimizer)??>
opt_type='${config.optimizerName}',
<#--The following gets the optimizer parameter values separately instead of getting them as a list to avoid unsupported parameters in Caffe2-->
<#if (config.optimizerParams["learning_rate"])??>
base_learning_rate=${config.optimizerParams["learning_rate"]},
</#if>
<#if (config.optimizerParams["weight_decay"])??>
weight_decay=${config.optimizerParams["weight_decay"]},
</#if>
<#if (config.optimizerParams["learning_rate_policy"])??>
policy=${config.optimizerParams["learning_rate_policy"]},
</#if>
<#if (config.optimizerParams["step_size"])??>
stepsize=${config.optimizerParams["step_size"]},
</#if>
<#if (config.optimizerParams["epsilon"])??>
epsilon=${config.optimizerParams["epsilon"]},
</#if>
<#if (config.optimizerParams["beta1"])??>
beta1=${config.optimizerParams["beta1"]},
</#if>
<#if (config.optimizerParams["beta2"])??>
beta2=${config.optimizerParams["beta2"]},
</#if>
<#if (config.optimizerParams["gamma1"])??>
gamma=${config.optimizerParams["gamma1"]},
</#if>
<#if (config.optimizerParams["momentum"])??>
momentum=${config.optimizerParams["momentum"]},
</#if>
<#list config.optimizerParams?keys as param>
<#--Adapt parameter names since parameter names in Caffe2 are different than in CNNTrainLang-->
<#assign paramName = param>
<#if param == "learning_rate">
<#assign paramName = "base_learning_rate">
<#elseif param == "learning_rate_policy">
<#assign paramName = "policy">
<#elseif param == "step_size">
<#assign paramName = "stepsize">
<#elseif param == "gamma1">
<#assign paramName = "gamma">
</#if>
${paramName}=${config.optimizerParams[param]}<#sep>,
</#list>
</#if>
)
</#list>
<#--Below code can be removed. It is only a specific example to verify that deploy_net works-->
print '\n********************************************'
print("Loading Deploy model")
......
......@@ -20,12 +20,13 @@ if __name__ == "__main__":
context='gpu',
eval_metric='mse',
opt_type='rmsprop',
base_learning_rate=0.001,
weight_decay=0.01,
policy='step',
stepsize=1000,
epsilon=1.0E-6,
weight_decay=0.01,
gamma=0.9,
policy='step',
base_learning_rate=0.001,
learning_rate_decay=0.9,
stepsize=1000
)
print '\n********************************************'
......
......@@ -18,7 +18,7 @@ if __name__ == "__main__":
num_epoch=50,
batch_size=100,
opt_type='adam',
base_learning_rate=0.001,
base_learning_rate=0.001
)
print '\n********************************************'
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment