From 24d8c0ef205fce87539c18cd9f9f218c84c9e062 Mon Sep 17 00:00:00 2001
From: gonzalomartingarcia0 <gonzalomartingarcia0@gmail.com>
Date: Fri, 21 Jul 2023 11:54:44 +0200
Subject: [PATCH] trainer: skip LR scheduler restore on resume; log learning_rate via config.update

Comment out restoring the LR scheduler state (and its last_epoch) when
resuming from a checkpoint, so the schedule restarts from its initial
state. Also switch the W&B learning-rate logging from attribute
assignment to run.config.update(..., allow_val_change=True) so a
resumed run accepts a changed value.

---
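The first hunk stops restoring the LR scheduler when resuming from a
checkpoint; the optimizer and EMA state are still restored. A minimal
sketch of that resume path, assuming a hypothetical helper name
load_training_state and checkpoint_path argument (the real code lives
inline in cdm_trainer):

    import torch

    def load_training_state(checkpoint_path, optimizer, scheduler=None, ema=None):
        # Load the saved training state from disk.
        checkpoint = torch.load(checkpoint_path)
        # The optimizer state is restored exactly as before.
        optimizer.load_state_dict(checkpoint['optimizer'])
        # The scheduler state (and last_epoch) is intentionally NOT
        # restored, so the LR schedule restarts from its initial state:
        # scheduler.load_state_dict(checkpoint['scheduler'])
        # EMA weights are still restored when EMA training is enabled.
        if ema is not None:
            ema.module.load_state_dict(checkpoint['ema'])
        return checkpoint
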
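The second hunk activates the config.update() call that was already
present as a comment. On a resumed run, assigning a different value to
an already-stored config key raises a config error; passing
allow_val_change=True permits the change. A self-contained sketch,
where run_name and learning_rate are hypothetical placeholders for the
values the trainer supplies:

    import wandb

    run_name = "example-run"   # hypothetical placeholder
    learning_rate = 1e-4       # hypothetical placeholder

    with wandb.init(project='Unconditional Landscapes', name=run_name,
                    entity='deep-lab-', id=run_name, resume=True) as run:
        # Plain assignment (run.config.learning_rate = ...) fails on a
        # resumed run when the stored value differs; update() with
        # allow_val_change=True accepts the new value.
        run.config.update({"learning_rate": learning_rate},
                          allow_val_change=True)
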
 trainer/train.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/trainer/train.py b/trainer/train.py
index ac5dce8..9fe1525 100644
--- a/trainer/train.py
+++ b/trainer/train.py
@@ -155,9 +155,9 @@ def cdm_trainer(model,
             optimizer_state_dict = checkpoint['optimizer']
             optimizer.load_state_dict(optimizer_state_dict)
             # load learning rate schedule state
-            scheduler_state_dict = checkpoint['scheduler']
-            scheduler.load_state_dict(scheduler_state_dict)
-            scheduler.last_epoch = last_epoch
+            #scheduler_state_dict = checkpoint['scheduler']
+            #scheduler.load_state_dict(scheduler_state_dict)
+            #scheduler.last_epoch = last_epoch
             # load ema model state
             if ema_training:
                 ema.module.load_state_dict(checkpoint['ema'])     
@@ -179,8 +179,8 @@ def cdm_trainer(model,
     with wandb.init(project='Unconditional Landscapes', name=run_name, entity='deep-lab-', id=run_name, resume=True) as run: 
         
         # Log some info
-        run.config.learning_rate = learning_rate
-        #run.config.update({"learning_rate": learning_rate}, allow_val_change=True)
+        #run.config.learning_rate = learning_rate
+        run.config.update({"learning_rate": learning_rate}, allow_val_change=True)
         run.config.optimizer = optimizer.__class__.__name__
         #run.watch(model.net)
         
-- 
GitLab