diff --git a/scheduler/scheduler/tasks.py b/scheduler/scheduler/tasks.py
index 9a3e1d0f2336a0388627ea0c827183d3dbb5e825..20351a0bda08df4cff9e38591b97911f1edc848c 100644
--- a/scheduler/scheduler/tasks.py
+++ b/scheduler/scheduler/tasks.py
@@ -21,20 +21,14 @@ def append_job_log(result_token, job_id, job_output, succeeded=True):
         json={"result_token": result_token, "log": job_output, "succeeded": succeeded},
     )
     if not response.ok:
-        print(response.status_code)
-        print("Failed to send job log to backend.")
+        print(f"Failed to send job log to backend - returned status code {response.status_code}")
 
 
 def run_results_retention(analytics_token):
     url = f"{RIGHTS_ENGINE_BACKEND}/api/v1/provider/results-retention"
-    print(url)
-    try:
-        response = requests.post(url, headers={"Authorization": f"Basic {analytics_token}"})
-        if not response.ok:
-            print(response.status_code)
-            print("Failed execute results retention")
-    except Exception as e:
-        print(e)
+    response = requests.post(url, headers={"Authorization": f"Basic {analytics_token}"})
+    if not response.ok:
+        raise RuntimeError("Failed to execute results retention, rights engine request returned status code " + str(response.status_code))
 
 
 @shared_task(name="celery_tasks.run_analysis")
@@ -42,25 +36,31 @@ def run_analysis(repo_url, job_id, result_token, analytics_token):
     directory = f"job_{job_id}_repo"
     try:
         git.Repo.clone_from(repo_url, directory)
+    except Exception as e:
+        message = f"Repo {repo_url} inaccessible ({e})."
+        append_job_log(result_token, job_id, message, False)
+        if os.path.exists(directory):
+            shutil.rmtree(directory)
+        return f"Job with id {job_id} failed - {message}"
 
-        if not Path(f"{directory}/main.py").is_file():
-            append_job_log(result_token, job_id, "main.py doesn't exist", False)
-            return f"Job with id {job_id} failed. main.py doesn't exist"
+    if not Path(f"{directory}/main.py").is_file():
+        append_job_log(result_token, job_id, "main.py doesn't exist", False)
+        return f"Job with id {job_id} failed. main.py doesn't exist"
 
-        cmd = f"python3 {directory}/main.py {analytics_token} {RIGHTS_ENGINE_BACKEND}"
-        p = Popen(
-            cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True
-        )
-        stdout, stderr = p.communicate()
-        print("Job log: ", stdout.decode("utf-8"))
-        append_job_log(result_token, job_id, stdout.decode("utf-8"))
-        shutil.rmtree(directory)
+    cmd = f"python3 {directory}/main.py {analytics_token} {RIGHTS_ENGINE_BACKEND}"
+    p = Popen(
+        cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True
+    )
+    stdout, stderr = p.communicate()
+    print("Job log: ", stdout.decode("utf-8"))
+    append_job_log(result_token, job_id, stdout.decode("utf-8"))
+    shutil.rmtree(directory)
 
+    try:
         run_results_retention(analytics_token)
-        return f"Job with id {job_id} succeeded."
     except Exception as e:
-        append_job_log(result_token, job_id, "Cloning repository failed.", False)
-        if os.path.exists(directory):
-            shutil.rmtree(directory)
-        print(e)
-        return f"Job with id {job_id} failed - Repo {repo_url} inaccessable."
+        message = f"failed while running results retention: {e}"
+        append_job_log(result_token, job_id, message, False)
+        return f"Job with id {job_id} {message}"
+    return f"Job with id {job_id} succeeded."
+