diff --git a/backend/danswer/background/celery/tasks/indexing/tasks.py b/backend/danswer/background/celery/tasks/indexing/tasks.py
index ed370c5bcdb0..197856d2d0a3 100644
--- a/backend/danswer/background/celery/tasks/indexing/tasks.py
+++ b/backend/danswer/background/celery/tasks/indexing/tasks.py
@@ -640,12 +640,16 @@ def connector_indexing_proxy_task(
             continue
 
         if job.status == "error":
+            exit_code: int | None = None
+            if job.process:
+                exit_code = job.process.exitcode
             task_logger.error(
                 "Indexing watchdog - spawned task exceptioned: "
                 f"attempt={index_attempt_id} "
                 f"tenant={tenant_id} "
                 f"cc_pair={cc_pair_id} "
                 f"search_settings={search_settings_id} "
+                f"exit_code={exit_code} "
                 f"error={job.exception()}"
             )
 
diff --git a/backend/danswer/background/indexing/job_client.py b/backend/danswer/background/indexing/job_client.py
index 602ec4294c0b..a31b7b3119e8 100644
--- a/backend/danswer/background/indexing/job_client.py
+++ b/backend/danswer/background/indexing/job_client.py
@@ -82,7 +82,7 @@ class SimpleJob:
             return "running"
         elif self.process.exitcode is None:
             return "cancelled"
-        elif self.process.exitcode > 0:
+        elif self.process.exitcode != 0:
             return "error"
         else:
             return "finished"
@@ -123,7 +123,8 @@ class SimpleJobClient:
         self._cleanup_completed_jobs()
         if len(self.jobs) >= self.n_workers:
             logger.debug(
-                f"No available workers to run job. Currently running '{len(self.jobs)}' jobs, with a limit of '{self.n_workers}'."
+                f"No available workers to run job. "
+                f"Currently running '{len(self.jobs)}' jobs, with a limit of '{self.n_workers}'."
             )
             return None
 
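Context on the `exitcode > 0` -> `exitcode != 0` change (not part of the diff): `multiprocessing.Process.exitcode` is negative when the child is terminated by a signal (the value is -N for signal N), so the old comparison classified signal-killed workers (e.g. an OOM kill) as "finished" rather than "error". A minimal standalone sketch of that behavior, assuming a Unix host:

    # Demonstrates multiprocessing exit-code semantics: a child killed by a
    # signal reports a negative exitcode, which `> 0` misses but `!= 0` catches.
    import multiprocessing as mp
    import time


    def _sleep_forever() -> None:
        time.sleep(60)


    if __name__ == "__main__":
        p = mp.Process(target=_sleep_forever)
        p.start()
        p.kill()  # sends SIGKILL on Unix, simulating e.g. an OOM kill
        p.join()
        print(p.exitcode)        # -9 (negative of the signal number)
        print(p.exitcode > 0)    # False -> old check would have said "finished"
        print(p.exitcode != 0)   # True  -> new check correctly reports "error"

With the new comparison such a worker falls into the "error" branch, and the watchdog log line above now also records the exit code so these cases are visible in the logs.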