Skip to content
Snippets Groups Projects
Commit 24d8c0ef authored by Gonzalo Martin Garcia's avatar Gonzalo Martin Garcia
Browse files

commit to switch branches

parent fb2e6b86
Branches
No related tags found
No related merge requests found
...@@ -155,9 +155,9 @@ def cdm_trainer(model, ...@@ -155,9 +155,9 @@ def cdm_trainer(model,
optimizer_state_dict = checkpoint['optimizer'] optimizer_state_dict = checkpoint['optimizer']
optimizer.load_state_dict(optimizer_state_dict) optimizer.load_state_dict(optimizer_state_dict)
# load learning rate schedule state # load learning rate schedule state
scheduler_state_dict = checkpoint['scheduler'] #scheduler_state_dict = checkpoint['scheduler']
scheduler.load_state_dict(scheduler_state_dict) #scheduler.load_state_dict(scheduler_state_dict)
scheduler.last_epoch = last_epoch #scheduler.last_epoch = last_epoch
# load ema model state # load ema model state
if ema_training: if ema_training:
ema.module.load_state_dict(checkpoint['ema']) ema.module.load_state_dict(checkpoint['ema'])
...@@ -179,8 +179,8 @@ def cdm_trainer(model, ...@@ -179,8 +179,8 @@ def cdm_trainer(model,
with wandb.init(project='Unconditional Landscapes', name=run_name, entity='deep-lab-', id=run_name, resume=True) as run: with wandb.init(project='Unconditional Landscapes', name=run_name, entity='deep-lab-', id=run_name, resume=True) as run:
# Log some info # Log some info
run.config.learning_rate = learning_rate #run.config.learning_rate = learning_rate
#run.config.update({"learning_rate": learning_rate}, allow_val_change=True) run.config.update({"learning_rate": learning_rate}, allow_val_change=True)
run.config.optimizer = optimizer.__class__.__name__ run.config.optimizer = optimizer.__class__.__name__
#run.watch(model.net) #run.watch(model.net)
......
0% Loading
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.