Commit 996a2fab authored by Evgeny Kusmenko

Merge branch 'develop' into 'master'

Updated to work with new CNNArchLang version

See merge request !28
parents c561d307 cf0abc56
Pipeline #171110 passed with stages in 4 minutes and 52 seconds
@@ -8,16 +8,16 @@
<groupId>de.monticore.lang.monticar</groupId>
<artifactId>cnnarch-mxnet-generator</artifactId>
<version>0.2.16-SNAPSHOT</version>
<version>0.2.17-SNAPSHOT</version>
<!-- == PROJECT DEPENDENCIES ============================================= -->
<properties>
<!-- .. SE-Libraries .................................................. -->
<CNNArch.version>0.3.1-SNAPSHOT</CNNArch.version>
<CNNTrain.version>0.3.4-SNAPSHOT</CNNTrain.version>
<CNNArch2X.version>0.0.2-SNAPSHOT</CNNArch2X.version>
<CNNArch.version>0.3.2-SNAPSHOT</CNNArch.version>
<CNNTrain.version>0.3.6-SNAPSHOT</CNNTrain.version>
<CNNArch2X.version>0.0.3-SNAPSHOT</CNNArch2X.version>
<embedded-montiarc-math-opt-generator>0.1.4</embedded-montiarc-math-opt-generator>
<!-- .. Libraries .................................................. -->
......
@@ -27,8 +27,10 @@ import de.monticore.lang.monticar.cnnarch.generator.DataPathConfigParser;
import de.monticore.lang.monticar.cnnarch.generator.LayerSupportChecker;
import de.monticore.lang.monticar.cnnarch.generator.Target;
import de.monticore.lang.monticar.cnnarch._symboltable.ArchitectureSymbol;
import de.monticore.lang.monticar.generator.FileContent;
import de.monticore.lang.monticar.generator.cmake.CMakeConfig;
import de.monticore.lang.monticar.generator.cmake.CMakeFindModule;
import de.monticore.symboltable.Scope;
import de.se_rwth.commons.logging.Log;
import java.io.IOException;
import java.util.HashMap;
@@ -61,4 +63,20 @@ public class CNNArch2MxNet extends CNNArchGenerator {
return fileContentMap;
}
public Map<String, String> generateCMakeContent(String rootModelName) {
// model name should start with a lower case letter. If it is a component, replace dot . by _
rootModelName = rootModelName.replace('.', '_').replace('[', '_').replace(']', '_');
rootModelName = rootModelName.substring(0, 1).toLowerCase() + rootModelName.substring(1);
CMakeConfig cMakeConfig = new CMakeConfig(rootModelName);
cMakeConfig.addModuleDependency(new CMakeFindModule("Armadillo", true));
cMakeConfig.addCMakeCommand("set(LIBS ${LIBS} mxnet)");
Map<String,String> fileContentMap = new HashMap<>();
for (FileContent fileContent : cMakeConfig.generateCMakeFiles()){
fileContentMap.put(fileContent.getFileName(), fileContent.getFileContent());
}
return fileContentMap;
}
}
@@ -12,18 +12,18 @@ public class CNNArch2MxNetTemplateController extends CNNArchTemplateController {
super(architecture, new MxNetTemplateConfiguration());
}
public void include(IOSymbol ioElement, Writer writer){
public void include(VariableSymbol element, Writer writer){
ArchitectureElementData previousElement = getCurrentElement();
setCurrentElement(ioElement);
setCurrentElement(element);
if (ioElement.isAtomic()){
if (ioElement.isInput()){
if (element.isAtomic()){
if (element.isInput()){
include(TEMPLATE_ELEMENTS_DIR_PATH, "Input", writer);
} else {
include(TEMPLATE_ELEMENTS_DIR_PATH, "Output", writer);
}
} else {
include(ioElement.getResolvedThis().get(), writer);
include(element.getResolvedThis().get(), writer);
}
setCurrentElement(previousElement);
@@ -61,7 +61,7 @@ public class CNNArch2MxNetTemplateController extends CNNArchTemplateController {
} else if (architectureElement instanceof LayerSymbol) {
include((LayerSymbol) architectureElement, writer);
} else {
include((IOSymbol) architectureElement, writer);
include((VariableSymbol) architectureElement, writer);
}
}
......
@@ -22,8 +22,11 @@ class ${tc.fileNameWithoutEnding}:
_model_dir_ = "model/${tc.componentName}/"
_model_prefix_ = "model"
_input_names_ = [${tc.join(tc.architectureInputs, ",", "'", "'")}]
_input_shapes_ = [<#list tc.architecture.inputs as input>(${tc.join(input.definition.type.dimensions, ",")})</#list>]
_input_shapes_ = [<#list tc.architecture.inputs as input>(${tc.join(input.ioDeclaration.type.dimensions, ",")})</#list>]
_output_names_ = [${tc.join(tc.architectureOutputs, ",", "'", "_label'")}]
_input_data_names_ = [<#list tc.architectureInputs as inputName>'${inputName?keep_before_last("_")}'<#sep>, </#list>]
_output_data_names_ = [${tc.join(tc.architectureOutputs, ",", "'", "label'")}]
def load(self, context):
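
Note on the hunk above: the new CNNArchLang version suffixes every architecture input and output name with an underscore ('data_', 'predictions_'), while the datasets inside the HDF5 training files keep their old names, so the template derives the new _input_data_names_ from _input_names_ with FreeMarker's keep_before_last("_"). A minimal Python sketch of that relationship, using a hypothetical keep_before_last helper and the single-input Alexnet values shown further down (illustrative only, not generator code):

# Python stand-in for the FreeMarker built-in used in the template above.
def keep_before_last(name, sep="_"):
    return name.rsplit(sep, 1)[0]

input_names = ['data_']                    # MXNet symbol / iterator names
input_data_names = [keep_before_last(n) for n in input_names]

output_names = ['predictions__label']      # label placeholder name, see the Module sketch below
output_data_names = ['predictions_label']  # dataset key inside train.h5 / test.h5

assert input_data_names == ['data']
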
@@ -62,18 +65,18 @@ class ${tc.fileNameWithoutEnding}:
def load_data(self, batch_size):
train_h5, test_h5 = self.load_h5_files()
data_mean = train_h5[self._input_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_names_[0]][:].std(axis=0) + 1e-5
data_mean = train_h5[self._input_data_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_data_names_[0]][:].std(axis=0) + 1e-5
train_iter = mx.io.NDArrayIter(train_h5[self._input_names_[0]],
train_h5[self._output_names_[0]],
train_iter = mx.io.NDArrayIter(train_h5[self._input_data_names_[0]],
train_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
test_iter = None
if test_h5 != None:
test_iter = mx.io.NDArrayIter(test_h5[self._input_names_[0]],
test_h5[self._output_names_[0]],
test_iter = mx.io.NDArrayIter(test_h5[self._input_data_names_[0]],
test_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
@@ -86,16 +89,16 @@ class ${tc.fileNameWithoutEnding}:
test_path = self._data_dir_ + "test.h5"
if os.path.isfile(train_path):
train_h5 = h5py.File(train_path, 'r')
if not (self._input_names_[0] in train_h5 and self._output_names_[0] in train_h5):
if not (self._input_data_names_[0] in train_h5 and self._output_data_names_[0] in train_h5):
logging.error("The HDF5 file '" + os.path.abspath(train_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
test_iter = None
if os.path.isfile(test_path):
test_h5 = h5py.File(test_path, 'r')
if not (self._input_names_[0] in test_h5 and self._output_names_[0] in test_h5):
if not (self._input_data_names_[0] in test_h5 and self._output_data_names_[0] in test_h5):
logging.error("The HDF5 file '" + os.path.abspath(test_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
else:
logging.warning("Couldn't load test set. File '" + os.path.abspath(test_path) + "' does not exist.")
......
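
The load_data and load_h5_files hunks above now read the HDF5 files through _input_data_names_ / _output_data_names_ while still wiring the iterator to the underscore-suffixed _input_names_ / _output_names_. A minimal sketch of a compatible train.h5 and the resulting iterator call, assuming the Alexnet keys and shapes used in the examples below (h5py and mxnet installed; hypothetical toy data, not generated code):

import h5py
import mxnet as mx
import numpy as np

# Hypothetical toy dataset with the keys the generated loader expects.
with h5py.File("data/Alexnet/train.h5", "w") as f:
    f.create_dataset("data", data=np.zeros((8, 3, 224, 224), dtype="float32"))
    f.create_dataset("predictions_label", data=np.zeros((8,), dtype="float32"))

train_h5 = h5py.File("data/Alexnet/train.h5", "r")
train_iter = mx.io.NDArrayIter(train_h5["data"],                 # _input_data_names_[0]
                               train_h5["predictions_label"],    # _output_data_names_[0]
                               batch_size=4,
                               data_name="data_",                # _input_names_[0]
                               label_name="predictions__label")  # _output_names_[0]
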
@@ -13,9 +13,14 @@ class ${tc.fileNameWithoutEnding}_0{
public:
const std::string json_file = "model/${tc.componentName}/model_newest-symbol.json";
const std::string param_file = "model/${tc.componentName}/model_newest-0000.params";
//const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {${tc.join(tc.architectureInputs, ",", "\"", "\"")}};
const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.architecture.inputs as input>{1,${tc.join(input.definition.type.dimensions, ",")}}<#if input?has_next>,</#if></#list>};
const std::vector<std::string> input_keys = {
<#if tc.architectureInputs?size == 1>
"data"
<#else>
<#list tc.architectureInputs as inputName>"data${inputName?index}"<#sep>, </#list>
</#if>
};
const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.architecture.inputs as input>{1,${tc.join(input.ioDeclaration.type.dimensions, ",")}}<#if input?has_next>,</#if></#list>};
const bool use_gpu = false;
PredictorHandle handle;
@@ -31,8 +36,7 @@ public:
void predict(${tc.join(tc.architectureInputs, ", ", "const std::vector<float> &", "")},
${tc.join(tc.architectureOutputs, ", ", "std::vector<float> &", "")}){
<#list tc.architectureInputs as inputName>
MXPredSetInput(handle, "data", ${inputName}.data(), ${inputName}.size());
//MXPredSetInput(handle, "${inputName}", ${inputName}.data(), ${inputName}.size());
MXPredSetInput(handle, input_keys[${inputName?index}].c_str(), ${inputName}.data(), ${inputName}.size());
</#list>
MXPredForward(handle);
......
<#list tc.architecture.outputs as output>
<#assign shape = output.definition.type.dimensions>
<#assign shape = output.ioDeclaration.type.dimensions>
vector<float> CNN_${tc.getName(output)}(<#list shape as dim>${dim?c}<#if dim?has_next>*</#if></#list>);
</#list>
@@ -8,7 +8,7 @@
</#if></#list>);
<#list tc.architecture.outputs as output>
<#assign shape = output.definition.type.dimensions>
<#assign shape = output.ioDeclaration.type.dimensions>
<#if shape?size == 1>
${output.name}<#if output.arrayAccess.isPresent()>[${output.arrayAccess.get().intValue.get()?c}]</#if> = CNNTranslator::translateToCol(CNN_${tc.getName(output)}, std::vector<size_t> {${shape[0]?c}});
</#if>
......
@@ -21,9 +21,12 @@ class CNNCreator_Alexnet:
_data_dir_ = "data/Alexnet/"
_model_dir_ = "model/Alexnet/"
_model_prefix_ = "model"
_input_names_ = ['data']
_input_names_ = ['data_']
_input_shapes_ = [(3,224,224)]
_output_names_ = ['predictions_label']
_output_names_ = ['predictions__label']
_input_data_names_ = ['data']
_output_data_names_ = ['predictions_label']
def load(self, context):
@@ -62,18 +65,18 @@ class CNNCreator_Alexnet:
def load_data(self, batch_size):
train_h5, test_h5 = self.load_h5_files()
data_mean = train_h5[self._input_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_names_[0]][:].std(axis=0) + 1e-5
data_mean = train_h5[self._input_data_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_data_names_[0]][:].std(axis=0) + 1e-5
train_iter = mx.io.NDArrayIter(train_h5[self._input_names_[0]],
train_h5[self._output_names_[0]],
train_iter = mx.io.NDArrayIter(train_h5[self._input_data_names_[0]],
train_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
test_iter = None
if test_h5 != None:
test_iter = mx.io.NDArrayIter(test_h5[self._input_names_[0]],
test_h5[self._output_names_[0]],
test_iter = mx.io.NDArrayIter(test_h5[self._input_data_names_[0]],
test_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
@@ -86,16 +89,16 @@ class CNNCreator_Alexnet:
test_path = self._data_dir_ + "test.h5"
if os.path.isfile(train_path):
train_h5 = h5py.File(train_path, 'r')
if not (self._input_names_[0] in train_h5 and self._output_names_[0] in train_h5):
if not (self._input_data_names_[0] in train_h5 and self._output_data_names_[0] in train_h5):
logging.error("The HDF5 file '" + os.path.abspath(train_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
test_iter = None
if os.path.isfile(test_path):
test_h5 = h5py.File(test_path, 'r')
if not (self._input_names_[0] in test_h5 and self._output_names_[0] in test_h5):
if not (self._input_data_names_[0] in test_h5 and self._output_data_names_[0] in test_h5):
logging.error("The HDF5 file '" + os.path.abspath(test_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
else:
logging.warning("Couldn't load test set. File '" + os.path.abspath(test_path) + "' does not exist.")
@@ -254,9 +257,9 @@ class CNNCreator_Alexnet:
def construct(self, context, data_mean=None, data_std=None):
data = mx.sym.var("data",
data_ = mx.sym.var("data_",
shape=(0,3,224,224))
# data, output shape: {[3,224,224]}
# data_, output shape: {[3,224,224]}
if not data_mean is None:
assert(not data_std is None)
@@ -264,9 +267,9 @@ class CNNCreator_Alexnet:
_data_mean_ = mx.sym.BlockGrad(_data_mean_)
_data_std_ = mx.sym.Variable("_data_std_", shape=(3,224,224), init=MyConstant(value=data_mean.tolist()))
_data_std_ = mx.sym.BlockGrad(_data_std_)
data = mx.symbol.broadcast_sub(data, _data_mean_)
data = mx.symbol.broadcast_div(data, _data_std_)
conv1_ = mx.symbol.pad(data=data,
data_ = mx.symbol.broadcast_sub(data_, _data_mean_)
data_ = mx.symbol.broadcast_div(data_, _data_std_)
conv1_ = mx.symbol.pad(data=data_,
mode='constant',
pad_width=(0,0,0,0,2,1,2,1),
constant_value=0)
@@ -503,10 +506,10 @@ class CNNCreator_Alexnet:
softmax8_ = mx.symbol.softmax(data=fc8_,
axis=1,
name="softmax8_")
predictions = mx.symbol.SoftmaxOutput(data=softmax8_,
name="predictions")
predictions_ = mx.symbol.SoftmaxOutput(data=softmax8_,
name="predictions_")
self.module = mx.mod.Module(symbol=mx.symbol.Group([predictions]),
self.module = mx.mod.Module(symbol=mx.symbol.Group([predictions_]),
data_names=self._input_names_,
label_names=self._output_names_,
context=context)
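
Why the double underscore in 'predictions__label': MXNet's SoftmaxOutput creates an implicit label argument named "<symbol name>_label", so renaming the output symbol to "predictions_" shifts the label to "predictions__label", which is exactly what the updated _output_names_ passes to the Module as label_names. A short sketch of that behaviour (illustrative, MXNet 1.x module API assumed, not generated code):

import mxnet as mx

data_ = mx.sym.var("data_", shape=(0, 3, 224, 224))
predictions_ = mx.symbol.SoftmaxOutput(data=data_, name="predictions_")

# The implicit label argument follows the "<name>_label" convention.
print(predictions_.list_arguments())   # ['data_', 'predictions__label']

module = mx.mod.Module(symbol=mx.symbol.Group([predictions_]),
                       data_names=['data_'],                # _input_names_
                       label_names=['predictions__label'],  # _output_names_
                       context=mx.cpu())
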
@@ -21,9 +21,12 @@ class CNNCreator_CifarClassifierNetwork:
_data_dir_ = "data/CifarClassifierNetwork/"
_model_dir_ = "model/CifarClassifierNetwork/"
_model_prefix_ = "model"
_input_names_ = ['data']
_input_names_ = ['data_']
_input_shapes_ = [(3,32,32)]
_output_names_ = ['softmax_label']
_output_names_ = ['softmax__label']
_input_data_names_ = ['data']
_output_data_names_ = ['softmax_label']
def load(self, context):
@@ -62,18 +65,18 @@ class CNNCreator_CifarClassifierNetwork:
def load_data(self, batch_size):
train_h5, test_h5 = self.load_h5_files()
data_mean = train_h5[self._input_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_names_[0]][:].std(axis=0) + 1e-5
data_mean = train_h5[self._input_data_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_data_names_[0]][:].std(axis=0) + 1e-5
train_iter = mx.io.NDArrayIter(train_h5[self._input_names_[0]],
train_h5[self._output_names_[0]],
train_iter = mx.io.NDArrayIter(train_h5[self._input_data_names_[0]],
train_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
test_iter = None
if test_h5 != None:
test_iter = mx.io.NDArrayIter(test_h5[self._input_names_[0]],
test_h5[self._output_names_[0]],
test_iter = mx.io.NDArrayIter(test_h5[self._input_data_names_[0]],
test_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
@@ -86,16 +89,16 @@ class CNNCreator_CifarClassifierNetwork:
test_path = self._data_dir_ + "test.h5"
if os.path.isfile(train_path):
train_h5 = h5py.File(train_path, 'r')
if not (self._input_names_[0] in train_h5 and self._output_names_[0] in train_h5):
if not (self._input_data_names_[0] in train_h5 and self._output_data_names_[0] in train_h5):
logging.error("The HDF5 file '" + os.path.abspath(train_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
test_iter = None
if os.path.isfile(test_path):
test_h5 = h5py.File(test_path, 'r')
if not (self._input_names_[0] in test_h5 and self._output_names_[0] in test_h5):
if not (self._input_data_names_[0] in test_h5 and self._output_data_names_[0] in test_h5):
logging.error("The HDF5 file '" + os.path.abspath(test_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
else:
logging.warning("Couldn't load test set. File '" + os.path.abspath(test_path) + "' does not exist.")
@@ -254,9 +257,9 @@ class CNNCreator_CifarClassifierNetwork:
def construct(self, context, data_mean=None, data_std=None):
data = mx.sym.var("data",
data_ = mx.sym.var("data_",
shape=(0,3,32,32))
# data, output shape: {[3,32,32]}
# data_, output shape: {[3,32,32]}
if not data_mean is None:
assert(not data_std is None)
@@ -264,9 +267,9 @@ class CNNCreator_CifarClassifierNetwork:
_data_mean_ = mx.sym.BlockGrad(_data_mean_)
_data_std_ = mx.sym.Variable("_data_std_", shape=(3,32,32), init=MyConstant(value=data_mean.tolist()))
_data_std_ = mx.sym.BlockGrad(_data_std_)
data = mx.symbol.broadcast_sub(data, _data_mean_)
data = mx.symbol.broadcast_div(data, _data_std_)
conv2_1_ = mx.symbol.pad(data=data,
data_ = mx.symbol.broadcast_sub(data_, _data_mean_)
data_ = mx.symbol.broadcast_div(data_, _data_std_)
conv2_1_ = mx.symbol.pad(data=data_,
mode='constant',
pad_width=(0,0,0,0,1,1,1,1),
constant_value=0)
@@ -300,7 +303,7 @@ class CNNCreator_CifarClassifierNetwork:
batchnorm3_1_ = mx.symbol.BatchNorm(data=conv3_1_,
fix_gamma=True,
name="batchnorm3_1_")
conv2_2_ = mx.symbol.Convolution(data=data,
conv2_2_ = mx.symbol.Convolution(data=data_,
kernel=(1,1),
stride=(1,1),
num_filter=8,
@@ -741,10 +744,10 @@ class CNNCreator_CifarClassifierNetwork:
softmax32_ = mx.symbol.softmax(data=fc32_,
axis=1,
name="softmax32_")
softmax = mx.symbol.SoftmaxOutput(data=softmax32_,
name="softmax")
softmax_ = mx.symbol.SoftmaxOutput(data=softmax32_,
name="softmax_")
self.module = mx.mod.Module(symbol=mx.symbol.Group([softmax]),
self.module = mx.mod.Module(symbol=mx.symbol.Group([softmax_]),
data_names=self._input_names_,
label_names=self._output_names_,
context=context)
@@ -21,9 +21,12 @@ class CNNCreator_VGG16:
_data_dir_ = "data/VGG16/"
_model_dir_ = "model/VGG16/"
_model_prefix_ = "model"
_input_names_ = ['data']
_input_names_ = ['data_']
_input_shapes_ = [(3,224,224)]
_output_names_ = ['predictions_label']
_output_names_ = ['predictions__label']
_input_data_names_ = ['data']
_output_data_names_ = ['predictions_label']
def load(self, context):
@@ -62,18 +65,18 @@ class CNNCreator_VGG16:
def load_data(self, batch_size):
train_h5, test_h5 = self.load_h5_files()
data_mean = train_h5[self._input_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_names_[0]][:].std(axis=0) + 1e-5
data_mean = train_h5[self._input_data_names_[0]][:].mean(axis=0)
data_std = train_h5[self._input_data_names_[0]][:].std(axis=0) + 1e-5
train_iter = mx.io.NDArrayIter(train_h5[self._input_names_[0]],
train_h5[self._output_names_[0]],
train_iter = mx.io.NDArrayIter(train_h5[self._input_data_names_[0]],
train_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
test_iter = None
if test_h5 != None:
test_iter = mx.io.NDArrayIter(test_h5[self._input_names_[0]],
test_h5[self._output_names_[0]],
test_iter = mx.io.NDArrayIter(test_h5[self._input_data_names_[0]],
test_h5[self._output_data_names_[0]],
batch_size=batch_size,
data_name=self._input_names_[0],
label_name=self._output_names_[0])
@@ -86,16 +89,16 @@ class CNNCreator_VGG16:
test_path = self._data_dir_ + "test.h5"
if os.path.isfile(train_path):
train_h5 = h5py.File(train_path, 'r')
if not (self._input_names_[0] in train_h5 and self._output_names_[0] in train_h5):
if not (self._input_data_names_[0] in train_h5 and self._output_data_names_[0] in train_h5):
logging.error("The HDF5 file '" + os.path.abspath(train_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
test_iter = None
if os.path.isfile(test_path):
test_h5 = h5py.File(test_path, 'r')
if not (self._input_names_[0] in test_h5 and self._output_names_[0] in test_h5):
if not (self._input_data_names_[0] in test_h5 and self._output_data_names_[0] in test_h5):
logging.error("The HDF5 file '" + os.path.abspath(test_path) + "' has to contain the datasets: "
+ "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
+ "'" + self._input_data_names_[0] + "', '" + self._output_data_names_[0] + "'")
sys.exit(1)
else:
logging.warning("Couldn't load test set. File '" + os.path.abspath(test_path) + "' does not exist.")
@@ -254,9 +257,9 @@ class CNNCreator_VGG16:
def construct(self, context, data_mean=None, data_std=None):
data = mx.sym.var("data",
data_ = mx.sym.var("data_",
shape=(0,3,224,224))
# data, output shape: {[3,224,224]}
# data_, output shape: {[3,224,224]}
if not data_mean is None:
assert(not data_std is None)
@@ -264,9 +267,9 @@ class CNNCreator_VGG16:
_data_mean_ = mx.sym.BlockGrad(_data_mean_)
_data_std_ = mx.sym.Variable("_data_std_", shape=(3,224,224), init=MyConstant(value=data_mean.tolist()))
_data_std_ = mx.sym.BlockGrad(_data_std_)
data = mx.symbol.broadcast_sub(data, _data_mean_)
data = mx.symbol.broadcast_div(data, _data_std_)
conv1_ = mx.symbol.pad(data=data,
data_ = mx.symbol.broadcast_sub(data_, _data_mean_)
data_ = mx.symbol.broadcast_div(data_, _data_std_)
conv1_ = mx.symbol.pad(data=data_,
mode='constant',
pad_width=(0,0,0,0,1,1,1,1),
constant_value=0)
@@ -539,10 +542,10 @@ class CNNCreator_VGG16:
softmax15_ = mx.symbol.softmax(data=fc15_,
axis=1,
name="softmax15_")
predictions = mx.symbol.SoftmaxOutput(data=softmax15_,
name="predictions")
predictions_ = mx.symbol.SoftmaxOutput(data=softmax15_,
name="predictions_")
self.module = mx.mod.Module(symbol=mx.symbol.Group([predictions]),
self.module = mx.mod.Module(symbol=mx.symbol.Group([predictions_]),
data_names=self._input_names_,
label_names=self._output_names_,
context=context)
@@ -13,8 +13,9 @@ class CNNPredictor_Alexnet_0{
public:
const std::string json_file = "model/Alexnet/model_newest-symbol.json";
const std::string param_file = "model/Alexnet/model_newest-0000.params";
//const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {
"data"
};
const std::vector<std::vector<mx_uint>> input_shapes = {{1,3,224,224}};
const bool use_gpu = false;
@@ -28,10 +29,9 @@ public:
if(handle) MXPredFree(handle);
}
void predict(const std::vector<float> &data,
std::vector<float> &predictions){
MXPredSetInput(handle, "data", data.data(), data.size());
//MXPredSetInput(handle, "data", data.data(), data.size());
void predict(const std::vector<float> &data_,
std::vector<float> &predictions_){
MXPredSetInput(handle, input_keys[0].c_str(), data_.data(), data_.size());
MXPredForward(handle);
@@ -44,8 +44,8 @@ public:
MXPredGetOutputShape(handle, output_index, &shape, &shape_len);
size = 1;
for (mx_uint i = 0; i < shape_len; ++i) size *= shape[i];
assert(size == predictions.size());
MXPredGetOutput(handle, 0, &(predictions[0]), predictions.size());
assert(size == predictions_.size());
MXPredGetOutput(handle, 0, &(predictions_[0]), predictions_.size());
}
@@ -106,4 +106,4 @@ public:
}
};
#endif // CNNPREDICTOR_ALEXNET
\ No newline at end of file
#endif // CNNPREDICTOR_ALEXNET
@@ -13,8 +13,9 @@ class CNNPredictor_CifarClassifierNetwork_0{
public:
const std::string json_file = "model/CifarClassifierNetwork/model_newest-symbol.json";
const std::string param_file = "model/CifarClassifierNetwork/model_newest-0000.params";
//const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {
"data"
};
const std::vector<std::vector<mx_uint>> input_shapes = {{1,3,32,32}};
const bool use_gpu = false;
@@ -28,10 +29,9 @@ public:
if(handle) MXPredFree(handle);
}
void predict(const std::vector<float> &data,
std::vector<float> &softmax){
MXPredSetInput(handle, "data", data.data(), data.size());
//MXPredSetInput(handle, "data", data.data(), data.size());
void predict(const std::vector<float> &data_,
std::vector<float> &softmax_){
MXPredSetInput(handle, input_keys[0].c_str(), data_.data(), data_.size());
MXPredForward(handle);
@@ -44,8 +44,8 @@ public:
MXPredGetOutputShape(handle, output_index, &shape, &shape_len);
size = 1;
for (mx_uint i = 0; i < shape_len; ++i) size *= shape[i];
assert(size == softmax.size());
MXPredGetOutput(handle, 0, &(softmax[0]), softmax.size());
assert(size == softmax_.size());
MXPredGetOutput(handle, 0, &(softmax_[0]), softmax_.size());
}
@@ -106,4 +106,4 @@ public:
}
};
#endif // CNNPREDICTOR_CIFARCLASSIFIERNETWORK
\ No newline at end of file
#endif // CNNPREDICTOR_CIFARCLASSIFIERNETWORK
@@ -13,8 +13,9 @@ class CNNPredictor_VGG16_0{
public:
const std::string json_file = "model/VGG16/model_newest-symbol.json";
const std::string param_file = "model/VGG16/model_newest-0000.params";
//const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {"data"};
const std::vector<std::string> input_keys = {
"data"
};
const std::vector<std::vector<mx_uint>> input_shapes = {{1,3,224,224}};
const bool use_gpu = false;
@@ -28,10 +29,9 @@ public:
if(handle) MXPredFree(handle);
}
void predict(const std::vector<float> &data,
std::vector<float> &predictions){
MXPredSetInput(handle, "data", data.data(), data.size());
//MXPredSetInput(handle, "data", data.data(), data.size());
void predict(const std::vector<float> &data_,
std::vector<float> &predictions_){
MXPredSetInput(handle, input_keys[0].c_str(), data_.data(), data_.size());
MXPredForward(handle);
@@ -44,8 +44,8 @@ public:
MXPredGetOutputShape(handle, output_index, &shape, &shape_len);
size = 1;
for (mx_uint i = 0; i < shape_len; ++i) size *= shape[i];
assert(size == predictions.size());
MXPredGetOutput(handle, 0, &(predictions[0]), predictions.size());
assert(size == predictions_.size());
MXPredGetOutput(handle, 0, &(predictions_[0]), predictions_.size());
}
@@ -106,4 +106,4 @@ public:
}
};
#endif // CNNPREDICTOR_VGG16
\ No newline at end of file
#endif // CNNPREDICTOR_VGG16
vector<float> CNN_predictions(10);
vector<float> CNN_predictions_(10);
_predictor_0_.predict(CNNTranslator::translate(data),
CNN_predictions);
CNN_predictions_);
predictions = CNNTranslator::translateToCol(CNN_predictions, std::vector<size_t> {10});
\ No newline at end of file
predictions = CNNTranslator::translateToCol(CNN_predictions_, std::vector<size_t> {10});
vector<float> CNN_softmax(10);
vector<float> CNN_softmax_(10);
_predictor_0_.predict(CNNTranslator::translate(data),
CNN_softmax);
CNN_softmax_);
softmax = CNNTranslator::translateToCol(CNN_softmax, std::vector<size_t> {10});
\ No newline at end of file
softmax = CNNTranslator::translateToCol(CNN_softmax_, std::vector<size_t> {10});
vector<float> CNN_predictions(1000);
vector<float> CNN_predictions_(1000);
_predictor_0_.predict(CNNTranslator::translate(data),
CNN_predictions);
CNN_predictions_);
predictions = CNNTranslator::translateToCol(CNN_predictions, std::vector<size_t> {1000});
\ No newline at end of file
predictions = CNNTranslator::translateToCol(CNN_predictions_, std::vector<size_t> {1000});