import mxnet as mx
import logging
import os
import errno
import shutil
import h5py
import sys
import numpy as np

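# Custom initializer that fills a parameter with a fixed constant array; it is
# used below to freeze the training-data mean and standard deviation into the graph.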
@mx.init.register
class MyConstant(mx.init.Initializer):
    def __init__(self, value):
        super(MyConstant, self).__init__(value=value)
        self.value = value
    def _init_weight(self, _, arr):
        arr[:] = mx.nd.array(self.value)

class CNNCreator_VGG16:

    module = None
    _data_dir_ = "data/VGG16/"
    _model_dir_ = "model/VGG16/"
    _model_prefix_ = "model"
    _input_names_ = ['data']
    _input_shapes_ = [(3,224,224)]
    _output_names_ = ['predictions_label']


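    # Restores the newest checkpoint found in the model directory, if any, and
    # returns the epoch to resume from (0 when no checkpoint exists).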
    def load(self, context):
        lastEpoch = 0
        param_file = None

        try:
            os.remove(self._model_dir_ + self._model_prefix_ + "_newest-0000.params")
        except OSError:
            pass
        try:
            os.remove(self._model_dir_ + self._model_prefix_ + "_newest-symbol.json")
        except OSError:
            pass

        if os.path.isdir(self._model_dir_):
            for file in os.listdir(self._model_dir_):
                if ".params" in file and self._model_prefix_ in file:
                    epochStr = file.replace(".params","").replace(self._model_prefix_ + "-","")
                    epoch = int(epochStr)
                    if epoch > lastEpoch:
                        lastEpoch = epoch
                        param_file = file
        if param_file is None:
            return 0
        else:
            logging.info("Loading checkpoint: " + param_file)
            # mx.mod.Module.load is a static factory method, so the loaded module
            # has to be assigned back for the checkpoint to take effect.
            self.module = mx.mod.Module.load(prefix=self._model_dir_ + self._model_prefix_,
                                             epoch=lastEpoch,
                                             data_names=self._input_names_,
                                             label_names=self._output_names_,
                                             context=context)
            return lastEpoch


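    # Creates NDArrayIter instances for the train and test HDF5 datasets and
    # computes the per-pixel mean and standard deviation used for normalization.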
    def load_data(self, batch_size):
        train_h5, test_h5 = self.load_h5_files()

        data_mean = train_h5[self._input_names_[0]][:].mean(axis=0)
        data_std = train_h5[self._input_names_[0]][:].std(axis=0) + 1e-5

        train_iter = mx.io.NDArrayIter(train_h5[self._input_names_[0]],
                                       train_h5[self._output_names_[0]],
                                       batch_size=batch_size,
                                       data_name=self._input_names_[0],
                                       label_name=self._output_names_[0])
        test_iter = None
        if test_h5 is not None:
            test_iter = mx.io.NDArrayIter(test_h5[self._input_names_[0]],
                                          test_h5[self._output_names_[0]],
                                          batch_size=batch_size,
                                          data_name=self._input_names_[0],
                                          label_name=self._output_names_[0])
        return train_iter, test_iter, data_mean, data_std

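    # Opens train.h5 (required) and test.h5 (optional) and verifies that both
    # contain the expected input and label datasets.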
    def load_h5_files(self):
        train_h5 = None
        test_h5 = None
        train_path = self._data_dir_ + "train.h5"
        test_path = self._data_dir_ + "test.h5"
        if os.path.isfile(train_path):
            train_h5 = h5py.File(train_path, 'r')
            if not (self._input_names_[0] in train_h5 and self._output_names_[0] in train_h5):
                logging.error("The HDF5 file '" + os.path.abspath(train_path) + "' has to contain the datasets: "
                              + "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
                sys.exit(1)
            if os.path.isfile(test_path):
                test_h5 = h5py.File(test_path, 'r')
                if not (self._input_names_[0] in test_h5 and self._output_names_[0] in test_h5):
                    logging.error("The HDF5 file '" + os.path.abspath(test_path) + "' has to contain the datasets: "
                                  + "'" + self._input_names_[0] + "', '" + self._output_names_[0] + "'")
                    sys.exit(1)
            else:
                logging.warning("Couldn't load test set. File '" + os.path.abspath(test_path) + "' does not exist.")
            return train_h5, test_h5
        else:
            logging.error("Data loading failure. File '" + os.path.abspath(train_path) + "' does not exist.")
            sys.exit(1)

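    # Builds the requested training loss on top of the network output; 'params'
    # carries optional loss settings such as 'margin', 'sparse_label' or 'rho'.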
    def loss_function(self, loss, params):
        label = mx.symbol.var(name=self._output_names_[0])
        prediction = self.module.symbol.get_children()[0]

        margin = params['margin'] if 'margin' in params else 1.0
        sparseLabel = params['sparse_label'] if 'sparse_label' in params else True

        if loss == 'softmax_cross_entropy':
            fromLogits = params['from_logits'] if 'from_logits' in params else False
            if not fromLogits:
                prediction = mx.symbol.log_softmax(data=prediction, axis=1)
            if sparseLabel:
                loss_func = mx.symbol.mean(-mx.symbol.pick(prediction, label, axis=-1, keepdims=True), axis=0, exclude=True)
            else:
                label = mx.symbol.reshape_like(label, prediction)
                loss_func = mx.symbol.mean(-mx.symbol.sum(prediction * label, axis=-1, keepdims=True), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="softmax_cross_entropy")
        elif loss == 'cross_entropy':
            prediction = mx.symbol.log(prediction)
            if sparseLabel:
                loss_func = mx.symbol.mean(-mx.symbol.pick(prediction, label, axis=-1, keepdims=True), axis=0, exclude=True)
            else:
                label = mx.symbol.reshape_like(label, prediction)
                loss_func = mx.symbol.mean(-mx.symbol.sum(prediction * label, axis=-1, keepdims=True), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="cross_entropy")
        elif loss == 'sigmoid_binary_cross_entropy':
            loss_func = mx.symbol.LogisticRegressionOutput(data=prediction, name=self.module.symbol.name)
        elif loss == 'l1':
            loss_func = mx.symbol.MAERegressionOutput(data=prediction, name=self.module.symbol.name)
        elif loss == 'l2':
            label = mx.symbol.reshape_like(label, prediction)
            loss_func = mx.symbol.mean(mx.symbol.square((label - prediction) / 2), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="L2")
        elif loss == 'huber':
            rho = params['rho'] if 'rho' in params else 1
            label = mx.symbol.reshape_like(label, prediction)
            loss_func = mx.symbol.abs(label - prediction)
            loss_func = mx.symbol.where(loss_func > rho, loss_func - 0.5 * rho, (0.5 / rho) * mx.symbol.square(loss_func))
            loss_func = mx.symbol.mean(loss_func, axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="huber")
        elif loss == 'hinge':
            label = mx.symbol.reshape_like(label, prediction)
            loss_func = mx.symbol.mean(mx.symbol.relu(margin - prediction * label), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="hinge")
        elif loss == 'squared_hinge':
            label = mx.symbol.reshape_like(label, prediction)
            loss_func = mx.symbol.mean(mx.symbol.square(mx.symbol.relu(margin - prediction * label)), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="squared_hinge")
        elif loss == 'logistic':
            labelFormat = params['label_format'] if 'label_format' in params else 'signed'
            if labelFormat not in ["binary", "signed"]:
                logging.error("label_format can only be signed or binary")
            label = mx.symbol.reshape_like(label, prediction)
            if labelFormat == 'signed':
                label = (label + 1.0)/2.0
            loss_func = mx.symbol.relu(prediction) - prediction * label
            loss_func = loss_func + mx.symbol.Activation(-mx.symbol.abs(prediction), act_type="softrelu")
            loss_func = mx.symbol.MakeLoss(mx.symbol.mean(loss_func, 0, exclude=True), name="logistic")
        elif loss == 'kullback_leibler':
            fromLogits = params['from_logits'] if 'from_logits' in params else True
            if not fromLogits:
                prediction = mx.symbol.log_softmax(prediction, axis=1)
            loss_func = mx.symbol.mean(label * (mx.symbol.log(label) - prediction), axis=0, exclude=True)
            loss_func = mx.symbol.MakeLoss(loss_func, name="kullback_leibler")
        else:
            logging.error("Invalid loss parameter.")

        return loss_func

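    # Full training entry point: loads the data, attaches the loss, optionally
    # resumes from a checkpoint and periodically writes checkpoints while fitting.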
    def train(self, batch_size=64,
              num_epoch=10,
              eval_metric='acc',
              loss='softmax_cross_entropy',
              loss_params={},
              optimizer='adam',
              optimizer_params=(('learning_rate', 0.001),),
              load_checkpoint=True,
              context='gpu',
              checkpoint_period=5,
              normalize=True):
        if context == 'gpu':
            mx_context = mx.gpu()
        elif context == 'cpu':
            mx_context = mx.cpu()
        else:
            logging.error("Context argument is '" + context + "'. Only 'cpu' and 'gpu are valid arguments'.")

        if 'weight_decay' in optimizer_params:
            optimizer_params['wd'] = optimizer_params['weight_decay']
            del optimizer_params['weight_decay']
        if 'learning_rate_decay' in optimizer_params:
            min_learning_rate = 1e-08
            if 'learning_rate_minimum' in optimizer_params:
                min_learning_rate = optimizer_params['learning_rate_minimum']
                del optimizer_params['learning_rate_minimum']
            optimizer_params['lr_scheduler'] = mx.lr_scheduler.FactorScheduler(
                optimizer_params['step_size'],
                factor=optimizer_params['learning_rate_decay'],
                stop_factor_lr=min_learning_rate)
            del optimizer_params['step_size']
            del optimizer_params['learning_rate_decay']

        train_iter, test_iter, data_mean, data_std = self.load_data(batch_size)
        if self.module is None:
            if normalize:
                self.construct(mx_context, data_mean, data_std)
            else:
                self.construct(mx_context)

        loss_func = self.loss_function(loss=loss, params=loss_params)

        self.module = mx.mod.Module(
            symbol=mx.symbol.Group([loss_func, mx.symbol.BlockGrad(self.module.symbol.get_children()[0], name="pred")]),
            data_names=self._input_names_,
            label_names=self._output_names_,
            context=mx_context)

        begin_epoch = 0
        if load_checkpoint:
            begin_epoch = self.load(mx_context)
        else:
            if os.path.isdir(self._model_dir_):
                shutil.rmtree(self._model_dir_)

        try:
            os.makedirs(self._model_dir_)
        except OSError:
            if not os.path.isdir(self._model_dir_):
                raise

        metric = mx.metric.create(eval_metric, output_names=['pred_output'])

        self.module.fit(
            train_data=train_iter,
            eval_metric=metric,
            eval_data=test_iter,
            optimizer=optimizer,
            optimizer_params=optimizer_params,
            batch_end_callback=mx.callback.Speedometer(batch_size),
            epoch_end_callback=mx.callback.do_checkpoint(prefix=self._model_dir_ + self._model_prefix_, period=checkpoint_period),
            begin_epoch=begin_epoch,
            num_epoch=num_epoch + begin_epoch)
        self.module.save_checkpoint(self._model_dir_ + self._model_prefix_, num_epoch + begin_epoch)
        self.module.save_checkpoint(self._model_dir_ + self._model_prefix_ + '_newest', 0)


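    # Assembles the VGG16 symbol graph (13 convolutional layers followed by three
    # fully connected layers) and creates the module, optionally normalizing the
    # input with the training-data mean and standard deviation.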
    def construct(self, context, data_mean=None, data_std=None):
        data = mx.sym.var("data",
                          shape=(0,3,224,224))
        # data, output shape: {[3,224,224]}

        if data_mean is not None:
            assert data_std is not None
            _data_mean_ = mx.sym.Variable("_data_mean_", shape=(3,224,224), init=MyConstant(value=data_mean.tolist()))
            _data_mean_ = mx.sym.BlockGrad(_data_mean_)
            _data_std_ = mx.sym.Variable("_data_std_", shape=(3,224,224), init=MyConstant(value=data_std.tolist()))
            _data_std_ = mx.sym.BlockGrad(_data_std_)
            data = mx.symbol.broadcast_sub(data, _data_mean_)
            data = mx.symbol.broadcast_div(data, _data_std_)
        conv1_ = mx.symbol.pad(data=data,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv1_ = mx.symbol.Convolution(data=conv1_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=64,
                                       no_bias=False,
                                       name="conv1_")
        # conv1_, output shape: {[64,224,224]}

        relu1_ = mx.symbol.Activation(data=conv1_,
                                      act_type='relu',
                                      name="relu1_")

        conv2_ = mx.symbol.pad(data=relu1_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv2_ = mx.symbol.Convolution(data=conv2_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=64,
                                       no_bias=False,
                                       name="conv2_")
        # conv2_, output shape: {[64,224,224]}

        relu2_ = mx.symbol.Activation(data=conv2_,
                                      act_type='relu',
                                      name="relu2_")

        pool2_ = mx.symbol.Pooling(data=relu2_,
                                   kernel=(2,2),
                                   pool_type="max",
                                   stride=(2,2),
                                   name="pool2_")
        # pool2_, output shape: {[64,112,112]}

        conv3_ = mx.symbol.pad(data=pool2_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv3_ = mx.symbol.Convolution(data=conv3_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=128,
                                       no_bias=False,
                                       name="conv3_")
        # conv3_, output shape: {[128,112,112]}

        relu3_ = mx.symbol.Activation(data=conv3_,
                                      act_type='relu',
                                      name="relu3_")

        conv4_ = mx.symbol.pad(data=relu3_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv4_ = mx.symbol.Convolution(data=conv4_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=128,
                                       no_bias=False,
                                       name="conv4_")
        # conv4_, output shape: {[128,112,112]}

        relu4_ = mx.symbol.Activation(data=conv4_,
                                      act_type='relu',
                                      name="relu4_")

        pool4_ = mx.symbol.Pooling(data=relu4_,
                                   kernel=(2,2),
                                   pool_type="max",
                                   stride=(2,2),
                                   name="pool4_")
        # pool4_, output shape: {[128,56,56]}

        conv5_ = mx.symbol.pad(data=pool4_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv5_ = mx.symbol.Convolution(data=conv5_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=256,
                                       no_bias=False,
                                       name="conv5_")
        # conv5_, output shape: {[256,56,56]}

        relu5_ = mx.symbol.Activation(data=conv5_,
                                      act_type='relu',
                                      name="relu5_")

        conv6_ = mx.symbol.pad(data=relu5_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv6_ = mx.symbol.Convolution(data=conv6_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=256,
                                       no_bias=False,
                                       name="conv6_")
        # conv6_, output shape: {[256,56,56]}

        relu6_ = mx.symbol.Activation(data=conv6_,
                                      act_type='relu',
                                      name="relu6_")

        conv7_ = mx.symbol.pad(data=relu6_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv7_ = mx.symbol.Convolution(data=conv7_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=256,
                                       no_bias=False,
                                       name="conv7_")
        # conv7_, output shape: {[256,56,56]}

        relu7_ = mx.symbol.Activation(data=conv7_,
                                      act_type='relu',
                                      name="relu7_")

        pool7_ = mx.symbol.Pooling(data=relu7_,
                                   kernel=(2,2),
                                   pool_type="max",
                                   stride=(2,2),
                                   name="pool7_")
        # pool7_, output shape: {[256,28,28]}

        conv8_ = mx.symbol.pad(data=pool7_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv8_ = mx.symbol.Convolution(data=conv8_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=512,
                                       no_bias=False,
                                       name="conv8_")
        # conv8_, output shape: {[512,28,28]}

        relu8_ = mx.symbol.Activation(data=conv8_,
                                      act_type='relu',
                                      name="relu8_")

        conv9_ = mx.symbol.pad(data=relu8_,
                               mode='constant',
                               pad_width=(0,0,0,0,1,1,1,1),
                               constant_value=0)
        conv9_ = mx.symbol.Convolution(data=conv9_,
                                       kernel=(3,3),
                                       stride=(1,1),
                                       num_filter=512,
                                       no_bias=False,
                                       name="conv9_")
        # conv9_, output shape: {[512,28,28]}

        relu9_ = mx.symbol.Activation(data=conv9_,
                                      act_type='relu',
                                      name="relu9_")

        conv10_ = mx.symbol.pad(data=relu9_,
                                mode='constant',
                                pad_width=(0,0,0,0,1,1,1,1),
                                constant_value=0)
        conv10_ = mx.symbol.Convolution(data=conv10_,
                                        kernel=(3,3),
                                        stride=(1,1),
                                        num_filter=512,
                                        no_bias=False,
                                        name="conv10_")
        # conv10_, output shape: {[512,28,28]}

        relu10_ = mx.symbol.Activation(data=conv10_,
                                       act_type='relu',
                                       name="relu10_")

        pool10_ = mx.symbol.Pooling(data=relu10_,
                                    kernel=(2,2),
                                    pool_type="max",
                                    stride=(2,2),
                                    name="pool10_")
        # pool10_, output shape: {[512,14,14]}

        conv11_ = mx.symbol.pad(data=pool10_,
                                mode='constant',
                                pad_width=(0,0,0,0,1,1,1,1),
                                constant_value=0)
        conv11_ = mx.symbol.Convolution(data=conv11_,
                                        kernel=(3,3),
                                        stride=(1,1),
                                        num_filter=512,
                                        no_bias=False,
                                        name="conv11_")
        # conv11_, output shape: {[512,14,14]}

        relu11_ = mx.symbol.Activation(data=conv11_,
                                       act_type='relu',
                                       name="relu11_")

        conv12_ = mx.symbol.pad(data=relu11_,
                                mode='constant',
                                pad_width=(0,0,0,0,1,1,1,1),
                                constant_value=0)
        conv12_ = mx.symbol.Convolution(data=conv12_,
                                        kernel=(3,3),
                                        stride=(1,1),
                                        num_filter=512,
                                        no_bias=False,
                                        name="conv12_")
        # conv12_, output shape: {[512,14,14]}

        relu12_ = mx.symbol.Activation(data=conv12_,
                                       act_type='relu',
                                       name="relu12_")

        conv13_ = mx.symbol.pad(data=relu12_,
                                mode='constant',
                                pad_width=(0,0,0,0,1,1,1,1),
                                constant_value=0)
        conv13_ = mx.symbol.Convolution(data=conv13_,
                                        kernel=(3,3),
                                        stride=(1,1),
                                        num_filter=512,
                                        no_bias=False,
                                        name="conv13_")
        # conv13_, output shape: {[512,14,14]}

        relu13_ = mx.symbol.Activation(data=conv13_,
                                       act_type='relu',
                                       name="relu13_")

        pool13_ = mx.symbol.Pooling(data=relu13_,
                                    kernel=(2,2),
                                    pool_type="max",
                                    stride=(2,2),
                                    name="pool13_")
        # pool13_, output shape: {[512,7,7]}

        fc13_ = mx.symbol.flatten(data=pool13_)
        fc13_ = mx.symbol.FullyConnected(data=fc13_,
                                         num_hidden=4096,
                                         no_bias=False,
                                         name="fc13_")
        relu14_ = mx.symbol.Activation(data=fc13_,
                                       act_type='relu',
                                       name="relu14_")

        dropout14_ = mx.symbol.Dropout(data=relu14_,
                                       p=0.5,
                                       name="dropout14_")
        fc14_ = mx.symbol.FullyConnected(data=dropout14_,
                                         num_hidden=4096,
                                         no_bias=False,
                                         name="fc14_")
        relu15_ = mx.symbol.Activation(data=fc14_,
                                       act_type='relu',
                                       name="relu15_")

        dropout15_ = mx.symbol.Dropout(data=relu15_,
                                       p=0.5,
                                       name="dropout15_")
        fc15_ = mx.symbol.FullyConnected(data=dropout15_,
                                         num_hidden=1000,
                                         no_bias=False,
                                         name="fc15_")
        softmax15_ = mx.symbol.softmax(data=fc15_,
                                       axis=1,
                                       name="softmax15_")
        predictions = mx.symbol.SoftmaxOutput(data=softmax15_,
                                              name="predictions")

        self.module = mx.mod.Module(symbol=mx.symbol.Group([predictions]),
                                    data_names=self._input_names_,
                                    label_names=self._output_names_,
                                    context=context)
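
# Example usage (a minimal sketch): assumes an HDF5 file data/VGG16/train.h5 with
# the datasets 'data' and 'predictions_label' (and optionally test.h5 alongside it).
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    vgg16 = CNNCreator_VGG16()
    vgg16.train(batch_size=64,
                num_epoch=10,
                context='cpu',
                optimizer='adam',
                optimizer_params={'learning_rate': 0.001})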