Project: monticore / EmbeddedMontiArc / generators / CNNArch2Gluon
Commit 6ff9b987, authored Dec 29, 2019 by Julian Dierkes
added GAN options and fixed tests
Parent: 5955c149
Showing 29 changed files with 23014 additions and 30 deletions.
src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java  +20 -0
src/main/resources/templates/gluon/CNNCreator.ftl  +1 -1
src/main/resources/templates/gluon/CNNGanTrainer.ftl  +11 -11
src/main/resources/templates/gluon/CNNNet.ftl  +2 -2
src/main/resources/templates/gluon/CNNPredictor.ftl  +1 -1
src/test/resources/target_code/CNNDataLoader_Alexnet.py  +142 -0
src/test/resources/target_code/CNNDataLoader_CifarClassifierNetwork.py  +142 -0
src/test/resources/target_code/CNNDataLoader_MultipleStreams.py  +251 -0
src/test/resources/target_code/CNNDataLoader_RNNtest.py  +251 -0
src/test/resources/target_code/CNNDataLoader_ResNeXt50.py  +251 -0
src/test/resources/target_code/CNNDataLoader_ThreeInputCNN_M14.py  +251 -0
src/test/resources/target_code/CNNDataLoader_VGG16.py  +142 -0
src/test/resources/target_code/CNNNet_Alexnet.py  +30 -6
src/test/resources/target_code/CNNNet_CifarClassifierNetwork.py  +14 -0
src/test/resources/target_code/CNNNet_MultipleStreams.py  +157 -0
src/test/resources/target_code/CNNNet_RNNtest.py  +139 -0
src/test/resources/target_code/CNNNet_ResNeXt50.py  +20230 -0
src/test/resources/target_code/CNNNet_ThreeInputCNN_M14.py  +293 -0
src/test/resources/target_code/CNNNet_VGG16.py  +14 -0
src/test/resources/target_code/CNNPredictor_Alexnet.h  +1 -3
src/test/resources/target_code/CNNPredictor_CifarClassifierNetwork.h  +1 -3
src/test/resources/target_code/CNNPredictor_MultipleStreams.h  +201 -0
src/test/resources/target_code/CNNPredictor_RNNtest.h  +201 -0
src/test/resources/target_code/CNNPredictor_ResNeXt50.h  +107 -0
src/test/resources/target_code/CNNPredictor_ThreeInputCNN_M14.h  +109 -0
src/test/resources/target_code/CNNPredictor_VGG16.h  +1 -3
src/test/resources/target_code/ddpg/reinforcement_learning/CNNNet_CriticNetwork.py  +17 -0
src/test/resources/target_code/ros-ddpg/reinforcement_learning/CNNNet_RosCriticNetwork.py  +17 -0
src/test/resources/target_code/td3/reinforcement_learning/CNNNet_CriticNetwork.py  +17 -0
src/main/java/de/monticore/lang/monticar/cnnarch/gluongenerator/CNNArch2GluonTemplateController.java
...
...
@@ -117,6 +117,16 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
        return getStreamInputs(stream, outputAsArray).keySet();
    }

    public ArrayList<String> getStreamInputVariableNames(SerialCompositeElementSymbol stream, boolean outputAsArray) {
        ArrayList<String> inputVariableNames = new ArrayList<String>();
        for (ArchitectureElementSymbol element : stream.getFirstAtomicElements()) {
            if (element.isInput()) {
                inputVariableNames.add(getName(element));
            }
        }
        return inputVariableNames;
    }

    public List<String> get(Map<String, List<String>> map, String name) {
        return map.get(name);
    }
...
...
@@ -142,6 +152,16 @@ public class CNNArch2GluonTemplateController extends CNNArchTemplateController {
        return getStreamOutputs(stream, false).values();
    }

    public ArrayList<String> getStreamOutputVariableNames(SerialCompositeElementSymbol stream, boolean outputAsArray) {
        ArrayList<String> outputVariableNames = new ArrayList<String>();
        for (ArchitectureElementSymbol element : stream.getLastAtomicElements()) {
            if (element.isOutput()) {
                outputVariableNames.add(getName(element));
            }
        }
        return outputVariableNames;
    }

    public Collection<List<String>> getStreamInputInformation(SerialCompositeElementSymbol stream) {
        Map<String, List<String>> dimensions = getStreamInputs(stream, false);
        Map<String, List<String>> domains = getStreamInputDomains(stream);
...
...
src/main/resources/templates/gluon/CNNCreator.ftl
...
...
@@ -55,7 +55,7 @@ class ${tc.fileNameWithoutEnding}:
            self.networks[${networkInstruction?index}] = Net_${networkInstruction?index}(data_mean=data_mean, data_std=data_std)
            self.networks[${networkInstruction?index}].collect_params().initialize(self.weight_initializer, ctx=context)
            self.networks[${networkInstruction?index}].hybridize()
-            self.networks[${networkInstruction?index}](<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>mx.nd.zeros((1, ${tc.join(dimensions, ",")},), ctx=context)<#sep>, </#list>)
+            self.networks[${networkInstruction?index}](<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>mx.nd.zeros((1, ${tc.join(tc.cutDimensions(dimensions), ",")},), ctx=context)<#sep>, </#list>)
</#list>

        if not os.path.exists(self._model_dir_):
...
...
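For orientation, the generated code produced by the changed template line performs one forward pass on an all-zeros batch so that Gluon's deferred initialization can infer all parameter shapes before the network is used; the dimensions are now passed through tc.cutDimensions before being joined. Below is a minimal Python sketch of that generated pattern. The Net_0 class body and the (3, 224, 224) input shape are illustrative assumptions, not taken from this commit.

    import mxnet as mx
    from mxnet.gluon import nn

    # Stand-in for a generated Net_<index> class from CNNNet.ftl (assumption, not real generator output).
    class Net_0(nn.HybridBlock):
        def __init__(self, data_mean=None, data_std=None, **kwargs):
            super(Net_0, self).__init__(**kwargs)
            with self.name_scope():
                self.fc = nn.Dense(10)

        def hybrid_forward(self, F, x):
            return self.fc(x)

    context = mx.cpu()
    net = Net_0(data_mean=None, data_std=None)
    net.collect_params().initialize(mx.init.Normal(), ctx=context)
    net.hybridize()
    # Dummy forward pass: batch size 1 plus the (cut) input dimensions, mirroring the template output.
    net(mx.nd.zeros((1, 3, 224, 224), ctx=context))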
src/main/resources/templates/gluon/CNNGanTrainer.ftl
...
...
@@ -15,21 +15,21 @@ def visualize(img_arr):
     plt.axis('off')

 def getDataIter(ctx, batch_size=64, Z=100):
-    img_number = 70
+    img_number = 70000
     mnist_train = mx.gluon.data.vision.datasets.MNIST(train=True)
     mnist_test = mx.gluon.data.vision.datasets.MNIST(train=False)

-    X = np.zeros((img_number, 28, 28))
-    for i in range(img_number/2):
-        X[i] = mnist_train[i][0].asnumpy()[:,:,0]
-    for i in range(img_number/2):
-        X[img_number/2+i] = mnist_test[i][0].asnumpy()[:,:,0]

     #X = np.zeros((img_number, 28, 28))
-    #for i, (data, label) in enumerate(mnist_train):
+    #for i in range(img_number/2):
     #    X[i] = mnist_train[i][0].asnumpy()[:,:,0]
-    #for i, (data, label) in enumerate(mnist_test):
-    #    X[len(mnist_train)+i] = data.asnumpy()[:,:,0]
+    #for i in range(img_number/2):
+    #    X[img_number/2+i] = mnist_test[i][0].asnumpy()[:,:,0]

+    X = np.zeros((img_number, 28, 28))
+    for i, (data, label) in enumerate(mnist_train):
+        X[i] = mnist_train[i][0].asnumpy()[:,:,0]
+    for i, (data, label) in enumerate(mnist_test):
+        X[len(mnist_train)+i] = data.asnumpy()[:,:,0]

     np.random.seed(1)
     p = np.random.permutation(X.shape[0])
...
...
@@ -244,7 +244,7 @@ class ${tc.fileNameWithoutEnding}:
                    # ugly start
                    #if batch_i % 200 == 0:
                    #    fake_data[0].asnumpy()
-                    if batch_i % 500 == 0:
+                    if batch_i % 750 == 0:
                        #gen_net.save_parameters(self.parameter_path_gen() + '-' + str(num_epoch + begin_epoch).zfill(4) + '.params')
                        #gen_net.export(self.parameter_path_gen() + '_newest', epoch=0)
                        #dis_net.save_parameters(self.parameter_path_dis() + '-' + str(num_epoch + begin_epoch).zfill(4) + '.params')
...
...
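As a standalone illustration of the rewritten getDataIter block above (a sketch only; the real code lives in the generated GAN trainer and is filled in by the template): the full MNIST training and test sets are now merged into one (70000, 28, 28) array by enumerating both splits, then shuffled with a fixed seed.

    import numpy as np
    import mxnet as mx

    mnist_train = mx.gluon.data.vision.datasets.MNIST(train=True)   # 60000 images, downloaded on first use
    mnist_test = mx.gluon.data.vision.datasets.MNIST(train=False)   # 10000 images

    X = np.zeros((len(mnist_train) + len(mnist_test), 28, 28))
    for i, (data, label) in enumerate(mnist_train):
        X[i] = data.asnumpy()[:, :, 0]
    for i, (data, label) in enumerate(mnist_test):
        X[len(mnist_train) + i] = data.asnumpy()[:, :, 0]

    np.random.seed(1)                         # fixed permutation, as in the template
    X = X[np.random.permutation(X.shape[0])]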
src/main/resources/templates/gluon/CNNNet.ftl
...
...
@@ -110,7 +110,7 @@ ${tc.include(networkInstruction.body, "FORWARD_FUNCTION")}
<#list tc.architecture.streams as stream>
<#assign dimensions = (tc.getStreamInputs(stream, false))>
<#assign domains = (tc.getStreamInputDomains(stream))>
-<#list tc.getStreamInputNames(stream, false) as name>
+<#list tc.getStreamInputVariableNames(stream, false) as name>
        input_dimensions = (${tc.join(dimensions[name], ",")})
        input_domains = (${tc.join(domains[name], ",")})
        inputs["${name}"] = input_domains + (input_dimensions,)
...
...
@@ -123,7 +123,7 @@ ${tc.include(networkInstruction.body, "FORWARD_FUNCTION")}
<#list tc.architecture.streams as stream>
<#assign dimensions = (tc.getStreamOutputs(stream, false))>
<#assign domains = (tc.getStreamOutputDomains(stream))>
-<#list tc.getStreamOutputNames(stream, false) as name>
+<#list tc.getStreamOutputVariableNames(stream, false) as name>
        output_dimensions = (${tc.join(dimensions[name], ",")})
        output_domains = (${tc.join(domains[name], ",")})
        outputs["${name}"] = output_domains + (output_dimensions,)
...
...
src/main/resources/templates/gluon/CNNPredictor.ftl
...
...
@@ -22,7 +22,7 @@ public:
        <#list tc.getStreamInputNames(networkInstruction.body, true) as variable>"data${variable?index}"<#sep>, </#list>
</#if>
    };
-    const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>{${tc.join(dimensions, ", ")}}<#sep>, </#list>};
+    const std::vector<std::vector<mx_uint>> input_shapes = {<#list tc.getStreamInputDimensions(networkInstruction.body) as dimensions>{1, ${tc.join(tc.cutDimensions(dimensions), ", ")}}<#sep>, </#list>};
    const bool use_gpu = false;

    PredictorHandle handle;
...
...
src/test/resources/target_code/CNNDataLoader_Alexnet.py
...
...
@@ -3,6 +3,9 @@ import h5py
import mxnet as mx
import logging
import sys
import numpy as np
import cv2
import importlib
from mxnet import nd

class CNNDataLoader_Alexnet:
...
...
@@ -65,6 +68,144 @@ class CNNDataLoader_Alexnet:
        return train_iter, train_test_iter, test_iter, data_mean, data_std, train_images, test_images

    def load_data(self, batch_size, img_size):
        train_h5, test_h5 = self.load_h5_files()

        width = img_size[0]
        height = img_size[1]

        comb_data = {}
        data_mean = {}
        data_std = {}

        for input_name in self._input_names_:
            train_data = train_h5[input_name][:]
            test_data = test_h5[input_name][:]

            train_shape = train_data.shape
            test_shape = test_data.shape

            comb_data[input_name] = mx.nd.zeros((train_shape[0]+test_shape[0], train_shape[1], width, height))
            for i, img in enumerate(train_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))
            for i, img in enumerate(test_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i+train_shape[0]] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))

            data_mean[input_name + '_'] = nd.array(comb_data[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(comb_data[input_name][:].asnumpy().std(axis=0) + 1e-5)

        comb_label = {}
        for output_name in self._output_names_:
            train_labels = train_h5[output_name][:]
            test_labels = test_h5[output_name][:]
            comb_label[output_name] = np.append(train_labels, test_labels, axis=0)

        train_iter = mx.io.NDArrayIter(data=comb_data, label=comb_label, batch_size=batch_size)

        test_iter = None

        return train_iter, test_iter, data_mean, data_std

    def load_preprocessed_data(self, batch_size, preproc_lib):
        train_h5, test_h5 = self.load_h5_files()
        wrapper = importlib.import_module(preproc_lib)
        instance = getattr(wrapper, preproc_lib)()
        instance.init()
        lib_head, _sep, tail = preproc_lib.rpartition('_')
        inp = getattr(wrapper, lib_head + "_input")()

        train_data = {}
        train_label = {}
        data_mean = {}
        data_std = {}

        shape_output = self.preprocess_data(instance, inp, 0, train_h5)
        train_len = len(train_h5[self._input_names_[0]])

        for input_name in self._input_names_:
            if type(getattr(shape_output, input_name + "_out")) == np.ndarray:
                cur_shape = (train_len,) + getattr(shape_output, input_name + "_out").shape
            else:
                cur_shape = (train_len, 1)
            train_data[input_name] = mx.nd.zeros(cur_shape)
        for output_name in self._output_names_:
            if type(getattr(shape_output, output_name + "_out")) == nd.array:
                cur_shape = (train_len,) + getattr(shape_output, output_name + "_out").shape
            else:
                cur_shape = (train_len, 1)
            train_label[output_name] = mx.nd.zeros(cur_shape)

        for i in range(train_len):
            output = self.preprocess_data(instance, inp, i, train_h5)
            for input_name in self._input_names_:
                train_data[input_name][i] = getattr(output, input_name + "_out")
            for output_name in self._output_names_:
                train_label[output_name][i] = getattr(shape_output, output_name + "_out")

        for input_name in self._input_names_:
            data_mean[input_name + '_'] = nd.array(train_data[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(train_data[input_name][:].asnumpy().std(axis=0) + 1e-5)

        train_iter = mx.io.NDArrayIter(data=train_data, label=train_label, batch_size=batch_size)

        test_data = {}
        test_label = {}

        shape_output = self.preprocess_data(instance, inp, 0, test_h5)
        test_len = len(test_h5[self._input_names_[0]])

        for input_name in self._input_names_:
            if type(getattr(shape_output, input_name + "_out")) == np.ndarray:
                cur_shape = (test_len,) + getattr(shape_output, input_name + "_out").shape
            else:
                cur_shape = (test_len, 1)
            test_data[input_name] = mx.nd.zeros(cur_shape)
        for output_name in self._output_names_:
            if type(getattr(shape_output, output_name + "_out")) == nd.array:
                cur_shape = (test_len,) + getattr(shape_output, output_name + "_out").shape
            else:
                cur_shape = (test_len, 1)
            test_label[output_name] = mx.nd.zeros(cur_shape)

        for i in range(test_len):
            output = self.preprocess_data(instance, inp, i, test_h5)
            for input_name in self._input_names_:
                test_data[input_name][i] = getattr(output, input_name + "_out")
            for output_name in self._output_names_:
                test_label[output_name][i] = getattr(shape_output, output_name + "_out")

        test_iter = mx.io.NDArrayIter(data=test_data, label=test_label, batch_size=batch_size)

        return train_iter, test_iter, data_mean, data_std

    def preprocess_data(self, instance_wrapper, input_wrapper, index, data_h5):
        for input_name in self._input_names_:
            data = data_h5[input_name][0]
            attr = getattr(input_wrapper, input_name)
            if (type(data)) == np.ndarray:
                data = np.asfortranarray(data).astype(attr.dtype)
            else:
                data = type(attr)(data)
            setattr(input_wrapper, input_name, data)
        for output_name in self._output_names_:
            data = data_h5[output_name][0]
            attr = getattr(input_wrapper, output_name)
            if (type(data)) == np.ndarray:
                data = np.asfortranarray(data).astype(attr.dtype)
            else:
                data = type(attr)(data)
            setattr(input_wrapper, output_name, data)
        return instance_wrapper.execute(input_wrapper)

    def load_h5_files(self):
        train_h5 = None
        test_h5 = None
...
...
@@ -73,6 +214,7 @@ class CNNDataLoader_Alexnet:
        if os.path.isfile(train_path):
            train_h5 = h5py.File(train_path, 'r')
            print(train_path)

            for input_name in self._input_names_:
                if not input_name in train_h5:
...
...
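The generated loader above gains a second load_data(self, batch_size, img_size) variant that resizes every train and test image to the requested size, stacks both splits into one array, computes per-pixel mean and standard deviation, and wraps the result in an mx.io.NDArrayIter. A self-contained sketch of that resize-and-stack step on synthetic data (the array shapes, key name, and channel handling here are illustrative assumptions, not the generated code):

    import numpy as np
    import cv2
    import mxnet as mx

    # Synthetic stand-ins for the train/test HDF5 datasets: NCHW uint8 images.
    train_data = np.random.randint(0, 255, (8, 3, 32, 32)).astype(np.uint8)
    test_data = np.random.randint(0, 255, (4, 3, 32, 32)).astype(np.uint8)
    width, height = 64, 64

    combined = mx.nd.zeros((train_data.shape[0] + test_data.shape[0], train_data.shape[1], width, height))
    for i, img in enumerate(np.concatenate([train_data, test_data])):
        img = np.ascontiguousarray(img.transpose(1, 2, 0))   # CHW -> HWC for cv2
        resized = cv2.resize(img, (width, height))            # resize in HWC layout
        combined[i] = resized.transpose(2, 0, 1)               # back to CHW

    data_mean = combined.mean(axis=0)
    data_std = mx.nd.array(combined.asnumpy().std(axis=0) + 1e-5)
    train_iter = mx.io.NDArrayIter(data={'data': combined}, batch_size=4)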
src/test/resources/target_code/CNNDataLoader_CifarClassifierNetwork.py
...
...
@@ -3,6 +3,9 @@ import h5py
import mxnet as mx
import logging
import sys
import numpy as np
import cv2
import importlib
from mxnet import nd

class CNNDataLoader_CifarClassifierNetwork:
...
...
@@ -65,6 +68,144 @@ class CNNDataLoader_CifarClassifierNetwork:
        return train_iter, train_test_iter, test_iter, data_mean, data_std, train_images, test_images

    def load_data(self, batch_size, img_size):
        train_h5, test_h5 = self.load_h5_files()

        width = img_size[0]
        height = img_size[1]

        comb_data = {}
        data_mean = {}
        data_std = {}

        for input_name in self._input_names_:
            train_data = train_h5[input_name][:]
            test_data = test_h5[input_name][:]

            train_shape = train_data.shape
            test_shape = test_data.shape

            comb_data[input_name] = mx.nd.zeros((train_shape[0]+test_shape[0], train_shape[1], width, height))
            for i, img in enumerate(train_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))
            for i, img in enumerate(test_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i+train_shape[0]] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))

            data_mean[input_name + '_'] = nd.array(comb_data[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(comb_data[input_name][:].asnumpy().std(axis=0) + 1e-5)

        comb_label = {}
        for output_name in self._output_names_:
            train_labels = train_h5[output_name][:]
            test_labels = test_h5[output_name][:]
            comb_label[output_name] = np.append(train_labels, test_labels, axis=0)

        train_iter = mx.io.NDArrayIter(data=comb_data, label=comb_label, batch_size=batch_size)

        test_iter = None

        return train_iter, test_iter, data_mean, data_std

    def load_preprocessed_data(self, batch_size, preproc_lib):
        train_h5, test_h5 = self.load_h5_files()
        wrapper = importlib.import_module(preproc_lib)
        instance = getattr(wrapper, preproc_lib)()
        instance.init()
        lib_head, _sep, tail = preproc_lib.rpartition('_')
        inp = getattr(wrapper, lib_head + "_input")()

        train_data = {}
        train_label = {}
        data_mean = {}
        data_std = {}

        shape_output = self.preprocess_data(instance, inp, 0, train_h5)
        train_len = len(train_h5[self._input_names_[0]])

        for input_name in self._input_names_:
            if type(getattr(shape_output, input_name + "_out")) == np.ndarray:
                cur_shape = (train_len,) + getattr(shape_output, input_name + "_out").shape
            else:
                cur_shape = (train_len, 1)
            train_data[input_name] = mx.nd.zeros(cur_shape)
        for output_name in self._output_names_:
            if type(getattr(shape_output, output_name + "_out")) == nd.array:
                cur_shape = (train_len,) + getattr(shape_output, output_name + "_out").shape
            else:
                cur_shape = (train_len, 1)
            train_label[output_name] = mx.nd.zeros(cur_shape)

        for i in range(train_len):
            output = self.preprocess_data(instance, inp, i, train_h5)
            for input_name in self._input_names_:
                train_data[input_name][i] = getattr(output, input_name + "_out")
            for output_name in self._output_names_:
                train_label[output_name][i] = getattr(shape_output, output_name + "_out")

        for input_name in self._input_names_:
            data_mean[input_name + '_'] = nd.array(train_data[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(train_data[input_name][:].asnumpy().std(axis=0) + 1e-5)

        train_iter = mx.io.NDArrayIter(data=train_data, label=train_label, batch_size=batch_size)

        test_data = {}
        test_label = {}

        shape_output = self.preprocess_data(instance, inp, 0, test_h5)
        test_len = len(test_h5[self._input_names_[0]])

        for input_name in self._input_names_:
            if type(getattr(shape_output, input_name + "_out")) == np.ndarray:
                cur_shape = (test_len,) + getattr(shape_output, input_name + "_out").shape
            else:
                cur_shape = (test_len, 1)
            test_data[input_name] = mx.nd.zeros(cur_shape)
        for output_name in self._output_names_:
            if type(getattr(shape_output, output_name + "_out")) == nd.array:
                cur_shape = (test_len,) + getattr(shape_output, output_name + "_out").shape
            else:
                cur_shape = (test_len, 1)
            test_label[output_name] = mx.nd.zeros(cur_shape)

        for i in range(test_len):
            output = self.preprocess_data(instance, inp, i, test_h5)
            for input_name in self._input_names_:
                test_data[input_name][i] = getattr(output, input_name + "_out")
            for output_name in self._output_names_:
                test_label[output_name][i] = getattr(shape_output, output_name + "_out")

        test_iter = mx.io.NDArrayIter(data=test_data, label=test_label, batch_size=batch_size)

        return train_iter, test_iter, data_mean, data_std

    def preprocess_data(self, instance_wrapper, input_wrapper, index, data_h5):
        for input_name in self._input_names_:
            data = data_h5[input_name][0]
            attr = getattr(input_wrapper, input_name)
            if (type(data)) == np.ndarray:
                data = np.asfortranarray(data).astype(attr.dtype)
            else:
                data = type(attr)(data)
            setattr(input_wrapper, input_name, data)
        for output_name in self._output_names_:
            data = data_h5[output_name][0]
            attr = getattr(input_wrapper, output_name)
            if (type(data)) == np.ndarray:
                data = np.asfortranarray(data).astype(attr.dtype)
            else:
                data = type(attr)(data)
            setattr(input_wrapper, output_name, data)
        return instance_wrapper.execute(input_wrapper)

    def load_h5_files(self):
        train_h5 = None
        test_h5 = None
...
...
@@ -73,6 +214,7 @@ class CNNDataLoader_CifarClassifierNetwork:
        if os.path.isfile(train_path):
            train_h5 = h5py.File(train_path, 'r')
            print(train_path)

            for input_name in self._input_names_:
                if not input_name in train_h5:
...
...
src/test/resources/target_code/CNNDataLoader_MultipleStreams.py
0 → 100644
import os
import h5py
import mxnet as mx
import logging
import sys
import numpy as np
import cv2
import importlib
from mxnet import nd

class CNNDataLoader_MultipleStreams:
    _input_names_ = ['data_0', 'data_1']
    _output_names_ = ['pred_0_label', 'pred_1_label']

    def __init__(self):
        self._data_dir = "data/MultipleStreams/"

    def load_data(self, train_batch_size, test_batch_size):
        train_h5, test_h5 = self.load_h5_files()

        train_data = {}
        data_mean = {}
        data_std = {}
        train_images = {}

        for input_name in self._input_names_:
            train_data[input_name] = train_h5[input_name]
            data_mean[input_name + '_'] = nd.array(train_h5[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(train_h5[input_name][:].std(axis=0) + 1e-5)

            if 'images' in train_h5:
                train_images = train_h5['images']

        train_label = {}
        index = 0
        for output_name in self._output_names_:
            train_label[index] = train_h5[output_name]
            index += 1

        train_iter = mx.io.NDArrayIter(data=train_data, label=train_label, batch_size=train_batch_size)

        train_test_iter = mx.io.NDArrayIter(data=train_data, label=train_label, batch_size=test_batch_size)

        test_iter = None

        if test_h5 != None:
            test_data = {}
            test_images = {}
            for input_name in self._input_names_:
                test_data[input_name] = test_h5[input_name]

                if 'images' in test_h5:
                    test_images = test_h5['images']

            test_label = {}
            index = 0
            for output_name in self._output_names_:
                test_label[index] = test_h5[output_name]
                index += 1

            test_iter = mx.io.NDArrayIter(data=test_data, label=test_label, batch_size=test_batch_size)

        return train_iter, train_test_iter, test_iter, data_mean, data_std, train_images, test_images

    def load_data(self, batch_size, img_size):
        train_h5, test_h5 = self.load_h5_files()

        width = img_size[0]
        height = img_size[1]

        comb_data = {}
        data_mean = {}
        data_std = {}

        for input_name in self._input_names_:
            train_data = train_h5[input_name][:]
            test_data = test_h5[input_name][:]

            train_shape = train_data.shape
            test_shape = test_data.shape

            comb_data[input_name] = mx.nd.zeros((train_shape[0]+test_shape[0], train_shape[1], width, height))
            for i, img in enumerate(train_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))
            for i, img in enumerate(test_data):
                img = img.transpose(1,2,0)
                comb_data[input_name][i+train_shape[0]] = cv2.resize(img, (width, height)).reshape((train_shape[1], width, height))

            data_mean[input_name + '_'] = nd.array(comb_data[input_name][:].mean(axis=0))
            data_std[input_name + '_'] = nd.array(comb_data[input_name][:].asnumpy().std(axis=0) + 1e-5)

        comb_label = {}
        for output_name in self._output_names_:
            train_labels = train_h5[output_name][:]
            test_labels = test_h5[output_name][:]
            comb_label[output_name] = np.append(train_labels, test_labels, axis=0)

        train_iter = mx.io.NDArrayIter(data=comb_data, label=comb_label, batch_size=batch_size)

        test_iter = None

        return train_iter, test_iter, data_mean, data_std

    def load_preprocessed_data(self, batch_size, preproc_lib):
        train_h5, test_h5 = self.load_h5_files()
        wrapper = importlib.import_module(preproc_lib)
        instance = getattr(wrapper, preproc_lib)()
        instance.init()