Merge pull request #3059 from danswer-ai/bugfix/sentry_indexing

add sentry to spawned indexing task
rkuo-danswer authored 2024-11-05 16:51:23 -08:00, committed by GitHub


@@ -4,6 +4,7 @@ from http import HTTPStatus
 from time import sleep
 import redis
+import sentry_sdk
 from celery import Celery
 from celery import shared_task
 from celery import Task
@@ -50,6 +51,7 @@ from danswer.utils.variable_functionality import global_version
 from shared_configs.configs import INDEXING_MODEL_SERVER_HOST
 from shared_configs.configs import INDEXING_MODEL_SERVER_PORT
 from shared_configs.configs import MULTI_TENANT
+from shared_configs.configs import SENTRY_DSN
 logger = setup_logger()
@@ -484,6 +486,18 @@ def connector_indexing_task(
     that the task transitioned to a "READY" state but the generator_complete_key doesn't exist.
     This will cause the primary worker to abort the indexing attempt and clean up.
     """
+    # Since connector_indexing_proxy_task spawns a new process using this function as
+    # the entrypoint, we init Sentry here.
+    if SENTRY_DSN:
+        sentry_sdk.init(
+            dsn=SENTRY_DSN,
+            traces_sample_rate=0.1,
+        )
+        logger.info("Sentry initialized")
+    else:
+        logger.debug("Sentry DSN not provided, skipping Sentry initialization")
+
     logger.info(
         f"Indexing spawned task starting: attempt={index_attempt_id} "
         f"tenant={tenant_id} "