Commit 1cf71de8 authored by lr119628

[update] running version

parent f78d9a0a
@@ -544,8 +544,8 @@ class Net_${networkInstruction?index}(gluon.HybridBlock):
         with self.name_scope():
-            #if operations is None:
-            #    operations={'dummy':nn.Dense(units = 10)}
-            self.data_shape = <#list networkInstruction.body.getAdaLayer().get().outputTypes as type>(${tc.join(type.dimensions, ",")})</#list>
-            self.classes = prod(list(self.data_shape))
+            self.data_shape = ${tc.getDefinedOutputDimension()}
+            self.classes = int(prod(list(self.data_shape)))
+            if operations is None:
+                operations={'dummy':nn.Dense(units = 10)}
             else:
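The template now takes the output shape from tc.getDefinedOutputDimension() and wraps prod(...) in int(). The cast is worth noting: numpy's prod returns a numpy integer, and int() normalizes it to a plain Python int before it is used as a unit count. A minimal sketch of what the generated lines evaluate to, assuming an example output dimension of (10, 1, 1):

from numpy import prod

data_shape = (10, 1, 1)                   # assumed example of the generated tuple
classes = int(prod(list(data_shape)))     # numpy.prod returns numpy.int64; int() makes it a plain int
assert classes == 10 and isinstance(classes, int)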
@@ -555,16 +555,15 @@ class Net_${networkInstruction?index}(gluon.HybridBlock):
                 self.candidate_complexities[name] = operation.get_complexity()
         self.out = nn.Dense(units=self.classes,activation=None,flatten=False)

-    def hybrid_forward(self,F,x):
+    def hybrid_forward(self, F, x):
         res_list = []
         for name in self.op_names:
             res_list.append(self.__getattribute__(name)(x))
         if not res_list:
             res_list = [F.identity(x)]
         res = tuple(res_list)
-        y = F.concat(*res,dim=1)
+        y = F.concat(*res, dim=1)
         y = self.out(y)
-        y = F.reshape(y,shape = self.data_shape)
         return y

     def get_candidate_complexity(self):
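With the fixed reshape removed, hybrid_forward now returns whatever shape the final Dense layer produces: the candidate outputs are concatenated on the feature axis and projected. A minimal sketch of the concat step, assuming the imperative mx.nd API stands in for F and two dummy candidate operations:

import mxnet as mx

F = mx.nd
x = F.ones((2, 4))                    # assumed (batch, features) input
res = (F.relu(x), F.identity(x))      # stand-ins for the registered candidate operations
y = F.concat(*res, dim=1)             # shape (2, 8): outputs joined along axis 1
print(y.shape)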
@@ -401,6 +401,7 @@ class AdaLoss(Loss):
         reg_term = F.sum(((self.lamb * self.c_complexities) + self.beta) * l1)
         return F.add(cl, reg_term)

+
 def fitComponent(trainIter: mx.io.NDArrayIter, trainer: mx.gluon.Trainer, epochs: int, component: gluon.HybridBlock,
                  loss_class: gluon.loss, loss_params: dict) -> None:
     """
@@ -429,6 +430,7 @@ def get_trainer(optimizer: str, parameters: dict, optimizer_params: dict) -> mx.gluon.Trainer:
     trainer = mx.gluon.Trainer(parameters, optimizer, optimizer_params)
     return trainer

+
 def fit(loss: gluon.loss.Loss,
         optimizer: str,
         epochs: int,
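get_trainer is a thin wrapper around mx.gluon.Trainer, as its body shows. A usage sketch with an assumed stand-in network and learning rate:

import mxnet as mx
from mxnet import gluon

net = gluon.nn.Dense(10)        # assumed stand-in for a candidate component
net.initialize()
trainer = get_trainer('adam', net.collect_params(), {'learning_rate': 0.001})
# equivalent to mx.gluon.Trainer(net.collect_params(), 'adam', {'learning_rate': 0.001})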
@@ -472,8 +474,7 @@ def fit(loss: gluon.loss.Loss,
     # train candidate 0
     c0_trainer = get_trainer(optimizer, c0.collect_params(), optimizer_params)
     fitComponent(trainIter=train_iter, trainer=c0_trainer, epochs=epochs, component=c0,
-                 loss_class=CandidateTrainingloss, loss_params={'loss': loss, 'candidate': c0, 'logging': logging},
-                 logging=logging)
+                 loss_class=CandidateTrainingloss, loss_params={'loss': loss, 'candidate': c0})

     # train candidate 1
     c1_trainer = get_trainer(optimizer, c1.collect_params(), optimizer_params)
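The logging argument disappears from both loss_params and the call itself, so the loss class is constructed from exactly the keywords shown. Presumably fitComponent builds the loss as loss_class(**loss_params); under that assumption the call above amounts to:

# assumed expansion inside fitComponent, i.e. loss_class(**loss_params)
loss_fn = CandidateTrainingloss(loss=loss, candidate=c0)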
@@ -533,8 +534,8 @@ def fit(loss: gluon.loss.Loss,
             model_operations[operation.name] = operation
         cg.update()
         round_msg = 'AdaNet:round: {}/{} finished,'.format(rnd + 1, T)
-        score_msg = 'current model score:{:.5f} improvement {:.5f}%'.format(model_score.asscalar(),
-                                                                            (1-(model_score / old_score).asscalar())*100)
+        improvement = (1 - (model_score / old_score).asscalar()) * 100
+        score_msg = 'current model score:{:.5f} improvement {:.5f}%'.format(model_score.asscalar(), improvement)
         logging.info(round_msg + score_msg)

     return model
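Extracting improvement into its own variable also makes the sign convention easier to see: the value is positive when the new score is lower (better) than the old one. A toy check with assumed plain-float scores in place of NDArrays:

old_score, model_score = 0.50, 0.45       # assumed scores, lower is better
improvement = (1 - (model_score / old_score)) * 100
print(improvement)                        # ~10.0: the score dropped by about 10%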
@@ -129,8 +129,7 @@ class CandidateHull(gluon.HybridBlock):
         if self.out:
             x = self.out(x)
         x = self.finalOut(x)
-        return F.reshape(x,shape = self.model_shape)
+        return x


 class BuildingBlock(gluon.HybridBlock):
     """
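Like the Net_ change above, CandidateHull now returns its output unreshaped. One plausible motivation (an assumption, the commit message does not say): reshaping to a fixed model_shape bakes in a single total element count and breaks as soon as the batch dimension changes. A minimal sketch of that failure mode with assumed shapes:

import mxnet as mx

x = mx.nd.ones((32, 10))                  # assumed (batch, classes) output of finalOut
try:
    mx.nd.reshape(x, shape=(10, 1, 1))    # fixed per-model shape has no batch axis
except mx.base.MXNetError:
    print('reshape fails: 32*10 elements cannot fit shape (10, 1, 1)')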