<#-- 
  CNNTrainer.ftl — generates a Caffe2 training script.
  For each entry in `configurations`, imports the matching CNNCreator module,
  instantiates its creator class, and calls .train() with whichever optional
  settings (num_epoch, batch_size, context, eval_metric, loss, optimizer
  parameters) are present in the configuration.
-->
from caffe2.python import workspace, core, model_helper, brew, optimizer
from caffe2.python.predictor import mobile_exporter
from caffe2.proto import caffe2_pb2

import numpy as np
import logging
<#list configurations as config>
import CNNCreator_${config.instanceName}
</#list>

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger()
    <#-- delay=True defers opening train.log until the first log record is emitted -->
    handler = logging.FileHandler("train.log", "w", encoding=None, delay=True)
    logger.addHandler(handler)

<#list configurations as config>
    ${config.instanceName} = CNNCreator_${config.instanceName}.CNNCreator_${config.instanceName}()
    ${config.instanceName}.train(
<#if (config.numEpoch)??>
        num_epoch=${config.numEpoch},
</#if>
<#if (config.batchSize)??>
        batch_size=${config.batchSize},
</#if>
<#if (config.context)??>
        context='${config.context}',
</#if>
<#if (config.evalMetric)??>
        eval_metric='${config.evalMetric}',
</#if>
<#if (config.loss)??>
        loss='${config.loss}',
</#if>
<#if (config.configuration.optimizer)??>
        opt_type='${config.optimizerName}',
<#list config.optimizerParams?keys as param>
    <#--To adapt parameter names since parameter names in Caffe2 are different than in CNNTrainLang-->
    <#assign paramName = param>
    <#if param == "learning_rate">
        <#assign paramName = "base_learning_rate">
    <#elseif param == "learning_rate_policy">
        <#assign paramName = "policy">
    <#elseif param == "step_size">
        <#assign paramName = "stepsize">
    <#elseif param == "gamma1">
        <#-- NOTE(review): no-op mapping (gamma1 -> gamma1), kept so the CNNTrainLang
             parameter list is documented here in full — confirm before removing -->
        <#assign paramName = "gamma1">
    <#elseif param == "learning_rate_decay">
        <#assign paramName = "gamma">
    </#if>
        ${paramName}=${config.optimizerParams[param]}<#sep>,
</#list>
</#if>
    )
</#list>