remove more logs for clarity

This commit is contained in:
pablodanswer 2024-09-26 16:58:26 -07:00
parent 1f12b074df
commit 516f1840ce
5 changed files with 1 addition and 15 deletions

View File

@ -208,7 +208,6 @@ def verify_sso_token(token: str) -> dict:
)
return payload
except jwt.PyJWTError:
print("error decodi")
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
)
@ -221,7 +220,6 @@ async def get_or_create_user(email: str, user_id: str) -> User:
async with get_async_session_context() as session:
async with get_user_db_context(session) as user_db:
existing_user = await user_db.get_by_email(email)
print(email)
if existing_user:
return existing_user
@ -236,7 +234,6 @@ async def get_or_create_user(email: str, user_id: str) -> User:
"is_active": True,
"is_superuser": False,
"is_verified": True,
# "tenant_id": uuid.UUID(tenant_id),
}
created_user: User = await user_db.create(new_user)

View File

@ -311,9 +311,6 @@ def stream_chat_message_objects(
)
try:
print("LLM OVVERIDE ")
print(new_msg_req.llm_override)
print(chat_session.llm_override)
llm, fast_llm = get_llms_for_persona(
persona=persona,
db_session=db_session,

View File

@ -130,7 +130,6 @@ def instantiate_connector(
credential: Credential,
db_session: Session,
) -> BaseConnector:
print(f"connector_specific_config: {connector_specific_config}")
connector_class = identify_connector_class(source, input_type)
connector = connector_class(**connector_specific_config)
new_credentials = connector.load_credentials(credential.credential_json)

View File

@ -171,8 +171,6 @@ class MultiTenantVespaIndex(DocumentIndex):
schema = add_ngrams_to_schema(schema) if needs_reindexing else schema
zip_dict[f"schemas/{index_name}.sd"] = schema.encode("utf-8")
print("appenidng hte sceam")
print(zip_dict.keys())
zip_file = in_memory_zip_from_file_bytes(zip_dict)
headers = {"Content-Type": "application/zip"}

View File

@ -32,22 +32,17 @@ def get_llms_for_persona(
temperature_override = llm_override.temperature if llm_override else None
provider_name = model_provider_override or persona.llm_model_provider_override
print("PROVIDER NAME IS", provider_name)
print("model provider override is", model_provider_override)
print("persona llm model provider override is", persona.llm_model_provider_override)
if not provider_name:
print("GETTING DEFAULT LLMS")
return get_default_llms(
temperature=temperature_override or GEN_AI_TEMPERATURE,
additional_headers=additional_headers,
db_session=db_session,
)
print("PROVIDER NAME IS", provider_name)
llm_provider = fetch_provider(db_session, provider_name)
if not llm_provider:
print("NO LLM PROVIDER FOUND")
raise ValueError("No LLM provider found")
model = model_version_override or persona.llm_model_version_override