Mirror of https://github.com/open-webui/open-webui.git (synced 2025-03-17 21:32:42 +01:00)
refactor: replace print statements with logging for better error tracking
parent 6fedd72e39 · commit 0e5d5ecb81
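The change is mechanical and repo-wide: each touched module gets a logger bound to its own name with a per-subsystem level from `SRC_LOG_LEVELS`, and bare `print()` calls become leveled log calls (`info` for progress, `debug` for dumps, `warning` for recoverable oddities, `error`/`exception` for failures). A minimal sketch of the pattern, using `run_migrations` from the first hunk below (the level key varies by module, and the alembic config path is assumed for the sketch):

```python
import logging

from open_webui.env import SRC_LOG_LEVELS  # per-subsystem level map used across the repo

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])  # other modules use "MODELS", "RAG", "OAUTH", ...


def run_migrations():
    log.info("Running migrations")  # was: print("Running migrations")
    try:
        from alembic import command
        from alembic.config import Config

        alembic_cfg = Config("alembic.ini")  # path assumed for this sketch
        command.upgrade(alembic_cfg, "head")
    except Exception as e:
        # was: print(f"Error: {e}"); log.exception also records the traceback
        log.exception(f"Error running migrations: {e}")
```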
@@ -44,7 +44,7 @@ logging.getLogger("uvicorn.access").addFilter(EndpointFilter())

 # Function to run the alembic migrations
 def run_migrations():
-    print("Running migrations")
+    log.info("Running migrations")
     try:
         from alembic import command
         from alembic.config import Config
@@ -57,7 +57,7 @@ def run_migrations():

         command.upgrade(alembic_cfg, "head")
     except Exception as e:
-        print(f"Error: {e}")
+        log.exception(f"Error running migrations: {e}")


 run_migrations()
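The choice of `log.exception` inside `except` blocks is what makes this an error-tracking improvement rather than a cosmetic one: it is equivalent to `log.error(..., exc_info=True)`, so the record carries the full traceback instead of just `str(e)`. A self-contained illustration of the difference:

```python
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

try:
    1 / 0
except Exception as e:
    log.error(f"Error: {e}")      # one line: "Error: division by zero"
    log.exception(f"Error: {e}")  # same line, followed by the full traceback
```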
@@ -1094,7 +1094,7 @@ try:
     banners = json.loads(os.environ.get("WEBUI_BANNERS", "[]"))
     banners = [BannerModel(**banner) for banner in banners]
 except Exception as e:
-    print(f"Error loading WEBUI_BANNERS: {e}")
+    log.exception(f"Error loading WEBUI_BANNERS: {e}")
     banners = []

 WEBUI_BANNERS = PersistentConfig("WEBUI_BANNERS", "ui.banners", banners)
@@ -1,3 +1,4 @@
+import logging
 import json
 import time
 import uuid
@@ -5,7 +6,7 @@ from typing import Optional

 from open_webui.internal.db import Base, get_db
 from open_webui.models.tags import TagModel, Tag, Tags
-
+from open_webui.env import SRC_LOG_LEVELS

 from pydantic import BaseModel, ConfigDict
 from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON
@@ -16,6 +17,8 @@ from sqlalchemy.sql import exists
 # Chat DB Schema
 ####################

+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MODELS"])

 class Chat(Base):
     __tablename__ = "chat"
@@ -670,7 +673,7 @@ class ChatTable:
             # Perform pagination at the SQL level
             all_chats = query.offset(skip).limit(limit).all()

-            print(len(all_chats))
+            log.info(f"The number of chats: {len(all_chats)}")

             # Validate and return chats
             return [ChatModel.model_validate(chat) for chat in all_chats]
@@ -731,7 +734,7 @@ class ChatTable:
             query = db.query(Chat).filter_by(user_id=user_id)
             tag_id = tag_name.replace(" ", "_").lower()

-            print(db.bind.dialect.name)
+            log.info(f"DB dialect name: {db.bind.dialect.name}")
             if db.bind.dialect.name == "sqlite":
                 # SQLite JSON1 querying for tags within the meta JSON field
                 query = query.filter(
@@ -752,7 +755,7 @@ class ChatTable:
             )

             all_chats = query.all()
-            print("all_chats", all_chats)
+            log.debug(f"all_chats: {all_chats}")
             return [ChatModel.model_validate(chat) for chat in all_chats]

     def add_chat_tag_by_id_and_user_id_and_tag_name(
@@ -810,7 +813,7 @@ class ChatTable:
             count = query.count()

             # Debugging output for inspection
-            print(f"Count of chats for tag '{tag_name}':", count)
+            log.info(f"Count of chats for tag '{tag_name}': {count}")

             return count
@@ -118,7 +118,7 @@ class FeedbackTable:
                 else:
                     return None
         except Exception as e:
-            print(e)
+            log.exception(f"Error creating a new feedback: {e}")
             return None

     def get_feedback_by_id(self, id: str) -> Optional[FeedbackModel]:
@@ -119,7 +119,7 @@ class FilesTable:
                 else:
                     return None
         except Exception as e:
-            print(f"Error creating tool: {e}")
+            log.exception(f"Error inserting a new file: {e}")
             return None

     def get_file_by_id(self, id: str) -> Optional[FileModel]:
@@ -82,7 +82,7 @@ class FolderTable:
                 else:
                     return None
         except Exception as e:
-            print(e)
+            log.exception(f"Error inserting a new folder: {e}")
             return None

     def get_folder_by_id_and_user_id(
@@ -105,7 +105,7 @@ class FunctionsTable:
                 else:
                     return None
         except Exception as e:
-            print(f"Error creating tool: {e}")
+            log.exception(f"Error creating a new function: {e}")
             return None

     def get_function_by_id(self, id: str) -> Optional[FunctionModel]:
@@ -170,7 +170,7 @@ class FunctionsTable:
             function = db.get(Function, id)
             return function.valves if function.valves else {}
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error getting function valves by id {id}: {e}")
             return None

     def update_function_valves_by_id(
@@ -202,7 +202,7 @@ class FunctionsTable:

             return user_settings["functions"]["valves"].get(id, {})
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error getting user values by id {id} and user id {user_id}: {e}")
             return None

     def update_user_valves_by_id_and_user_id(
@@ -225,7 +225,7 @@ class FunctionsTable:

             return user_settings["functions"]["valves"][id]
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error updating user valves by id {id} and user_id {user_id}: {e}")
             return None

     def update_function_by_id(self, id: str, updated: dict) -> Optional[FunctionModel]:
backend/open_webui/models/models.py · 5 changes · Normal file → Executable file
@@ -166,7 +166,7 @@ class ModelsTable:
                 else:
                     return None
         except Exception as e:
-            print(e)
+            log.exception(f"Failed to insert a new model: {e}")
             return None

     def get_all_models(self) -> list[ModelModel]:
@@ -246,8 +246,7 @@ class ModelsTable:
                 db.refresh(model)
                 return ModelModel.model_validate(model)
         except Exception as e:
-            print(e)
-
+            log.exception(f"Failed to update the model by id {id}: {e}")
             return None

     def delete_model_by_id(self, id: str) -> bool:
@@ -61,7 +61,7 @@ class TagTable:
                 else:
                     return None
         except Exception as e:
-            print(e)
+            log.exception(f"Error inserting a new tag: {e}")
             return None

     def get_tag_by_name_and_user_id(
@@ -131,7 +131,7 @@ class ToolsTable:
                 else:
                     return None
         except Exception as e:
-            print(f"Error creating tool: {e}")
+            log.exception(f"Error creating a new tool: {e}")
             return None

     def get_tool_by_id(self, id: str) -> Optional[ToolModel]:
@@ -175,7 +175,7 @@ class ToolsTable:
             tool = db.get(Tool, id)
             return tool.valves if tool.valves else {}
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error getting tool valves by id {id}: {e}")
             return None

     def update_tool_valves_by_id(self, id: str, valves: dict) -> Optional[ToolValves]:
@@ -204,7 +204,7 @@ class ToolsTable:

             return user_settings["tools"]["valves"].get(id, {})
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error getting user values by id {id} and user_id {user_id}: {e}")
             return None

     def update_user_valves_by_id_and_user_id(
@@ -227,7 +227,7 @@ class ToolsTable:

             return user_settings["tools"]["valves"][id]
         except Exception as e:
-            print(f"An error occurred: {e}")
+            log.exception(f"Error updating user valves by id {id} and user_id {user_id}: {e}")
             return None

     def update_tool_by_id(self, id: str, updated: dict) -> Optional[ToolModel]:
@@ -1,13 +1,19 @@
 import os
+import logging
 import torch
 import numpy as np
 from colbert.infra import ColBERTConfig
 from colbert.modeling.checkpoint import Checkpoint

+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])
+

 class ColBERT:
     def __init__(self, name, **kwargs) -> None:
-        print("ColBERT: Loading model", name)
+        log.info(f"ColBERT: Loading model {name}")
         self.device = "cuda" if torch.cuda.is_available() else "cpu"

         DOCKER = kwargs.get("env") == "docker"
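One pitfall this file illustrates: stdlib `logging` uses %-style lazy arguments, so a direct transliteration like `log.info("ColBERT: Loading model", name)` fails at emit time ("not all arguments converted during string formatting") because the message has no placeholder for `name`. Either an f-string, as in the hunk above, or the lazy form is safe:

```python
# Lazy %-style: the message is only formatted if the record is actually emitted.
log.info("ColBERT: Loading model %s", name)
```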
@@ -81,7 +81,7 @@ def query_doc(

         return result
     except Exception as e:
-        print(e)
+        log.exception(f"Error querying doc {collection_name} with limit {k}: {e}")
         raise e


@@ -94,7 +94,7 @@ def get_doc(collection_name: str, user: UserModel = None):

         return result
     except Exception as e:
-        print(e)
+        log.exception(f"Error getting doc {collection_name}: {e}")
         raise e


@@ -530,7 +530,7 @@ def generate_openai_batch_embeddings(
         else:
             raise "Something went wrong :/"
     except Exception as e:
-        print(e)
+        log.exception(f"Error generating openai batch embeddings: {e}")
         return None


@@ -564,7 +564,7 @@ def generate_ollama_batch_embeddings(
         else:
             raise "Something went wrong :/"
     except Exception as e:
-        print(e)
+        log.exception(f"Error generating ollama batch embeddings: {e}")
         return None
backend/open_webui/retrieval/vector/dbs/chroma.py · 7 changes · Normal file → Executable file
@@ -1,4 +1,5 @@
 import chromadb
+import logging
 from chromadb import Settings
 from chromadb.utils.batch_utils import create_batches

@@ -16,6 +17,10 @@ from open_webui.config import (
     CHROMA_CLIENT_AUTH_PROVIDER,
     CHROMA_CLIENT_AUTH_CREDENTIALS,
 )
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])


 class ChromaClient:
@@ -103,7 +108,7 @@ class ChromaClient:
             )
             return None
         except Exception as e:
-            print(e)
+            log.exception(f"Error querying collection {collection} with limit {limit}: {e}")
             return None

     def get(self, collection_name: str) -> Optional[GetResult]:
@@ -1,7 +1,7 @@
 from pymilvus import MilvusClient as Client
 from pymilvus import FieldSchema, DataType
 import json
-
+import logging
 from typing import Optional

 from open_webui.retrieval.vector.main import VectorItem, SearchResult, GetResult
@@ -10,6 +10,10 @@ from open_webui.config import (
     MILVUS_DB,
     MILVUS_TOKEN,
 )
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])


 class MilvusClient:
@@ -168,7 +172,7 @@ class MilvusClient:
         try:
             # Loop until there are no more items to fetch or the desired limit is reached
             while remaining > 0:
-                print("remaining", remaining)
+                log.info(f"remaining: {remaining}")
                 current_fetch = min(
                     max_limit, remaining
                 )  # Determine how many items to fetch in this iteration
@@ -195,10 +199,10 @@ class MilvusClient:
                 if results_count < current_fetch:
                     break

-            print(all_results)
+            log.debug(all_results)
             return self._result_to_get_result([all_results])
         except Exception as e:
-            print(e)
+            log.exception(f"Error querying collection {collection_name} with limit {limit}: {e}")
             return None

     def get(self, collection_name: str) -> Optional[GetResult]:
@@ -1,4 +1,5 @@
 from typing import Optional, List, Dict, Any
+import logging
 from sqlalchemy import (
     cast,
     column,
@@ -24,9 +25,14 @@ from sqlalchemy.exc import NoSuchTableError
 from open_webui.retrieval.vector.main import VectorItem, SearchResult, GetResult
 from open_webui.config import PGVECTOR_DB_URL, PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH

+from open_webui.env import SRC_LOG_LEVELS
+
 VECTOR_LENGTH = PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH
 Base = declarative_base()

+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])
+
 class DocumentChunk(Base):
     __tablename__ = "document_chunk"
@@ -82,10 +88,10 @@ class PgvectorClient:
                 )
             )
             self.session.commit()
-            print("Initialization complete.")
+            log.info("Initialization complete.")
         except Exception as e:
             self.session.rollback()
-            print(f"Error during initialization: {e}")
+            log.exception(f"Error during initialization: {e}")
             raise

     def check_vector_length(self) -> None:
@@ -150,12 +156,12 @@ class PgvectorClient:
                 new_items.append(new_chunk)
             self.session.bulk_save_objects(new_items)
             self.session.commit()
-            print(
+            log.info(
                 f"Inserted {len(new_items)} items into collection '{collection_name}'."
             )
         except Exception as e:
             self.session.rollback()
-            print(f"Error during insert: {e}")
+            log.exception(f"Error during insert: {e}")
             raise

     def upsert(self, collection_name: str, items: List[VectorItem]) -> None:
@@ -184,10 +190,10 @@ class PgvectorClient:
                 )
                 self.session.add(new_chunk)
             self.session.commit()
-            print(f"Upserted {len(items)} items into collection '{collection_name}'.")
+            log.info(f"Upserted {len(items)} items into collection '{collection_name}'.")
         except Exception as e:
             self.session.rollback()
-            print(f"Error during upsert: {e}")
+            log.exception(f"Error during upsert: {e}")
             raise

     def search(
@@ -278,7 +284,7 @@ class PgvectorClient:
                 ids=ids, distances=distances, documents=documents, metadatas=metadatas
             )
         except Exception as e:
-            print(f"Error during search: {e}")
+            log.exception(f"Error during search: {e}")
             return None

     def query(
@@ -310,7 +316,7 @@ class PgvectorClient:
                 metadatas=metadatas,
             )
         except Exception as e:
-            print(f"Error during query: {e}")
+            log.exception(f"Error during query: {e}")
             return None

     def get(
@@ -334,7 +340,7 @@ class PgvectorClient:

             return GetResult(ids=ids, documents=documents, metadatas=metadatas)
         except Exception as e:
-            print(f"Error during get: {e}")
+            log.exception(f"Error during get: {e}")
             return None

     def delete(
@@ -356,22 +362,22 @@ class PgvectorClient:
             )
             deleted = query.delete(synchronize_session=False)
             self.session.commit()
-            print(f"Deleted {deleted} items from collection '{collection_name}'.")
+            log.info(f"Deleted {deleted} items from collection '{collection_name}'.")
         except Exception as e:
             self.session.rollback()
-            print(f"Error during delete: {e}")
+            log.exception(f"Error during delete: {e}")
             raise

     def reset(self) -> None:
         try:
             deleted = self.session.query(DocumentChunk).delete()
             self.session.commit()
-            print(
+            log.info(
                 f"Reset complete. Deleted {deleted} items from 'document_chunk' table."
             )
         except Exception as e:
             self.session.rollback()
-            print(f"Error during reset: {e}")
+            log.exception(f"Error during reset: {e}")
             raise

     def close(self) -> None:
@@ -387,9 +393,9 @@ class PgvectorClient:
             )
             return exists
         except Exception as e:
-            print(f"Error checking collection existence: {e}")
+            log.exception(f"Error checking collection existence: {e}")
             return False

     def delete_collection(self, collection_name: str) -> None:
         self.delete(collection_name)
-        print(f"Collection '{collection_name}' deleted.")
+        log.info(f"Collection '{collection_name}' deleted.")
@@ -1,4 +1,5 @@
 from typing import Optional
+import logging

 from qdrant_client import QdrantClient as Qclient
 from qdrant_client.http.models import PointStruct
@@ -6,9 +7,13 @@ from qdrant_client.models import models

 from open_webui.retrieval.vector.main import VectorItem, SearchResult, GetResult
 from open_webui.config import QDRANT_URI, QDRANT_API_KEY
+from open_webui.env import SRC_LOG_LEVELS

 NO_LIMIT = 999999999

+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])
+

 class QdrantClient:
     def __init__(self):
@@ -49,7 +54,7 @@ class QdrantClient:
             ),
         )

-        print(f"collection {collection_name_with_prefix} successfully created!")
+        log.info(f"collection {collection_name_with_prefix} successfully created!")

     def _create_collection_if_not_exists(self, collection_name, dimension):
         if not self.has_collection(collection_name=collection_name):
@@ -120,7 +125,7 @@ class QdrantClient:
             )
             return self._result_to_get_result(points.points)
         except Exception as e:
-            print(e)
+            log.exception(f"Error querying a collection '{collection_name}': {e}")
             return None

     def get(self, collection_name: str) -> Optional[GetResult]:
@@ -71,7 +71,7 @@ from pydub.utils import mediainfo
 def is_mp4_audio(file_path):
     """Check if the given file is an MP4 audio file."""
     if not os.path.isfile(file_path):
-        print(f"File not found: {file_path}")
+        log.error(f"File not found: {file_path}")
         return False

     info = mediainfo(file_path)
@@ -88,7 +88,7 @@ def convert_mp4_to_wav(file_path, output_path):
     """Convert MP4 audio file to WAV format."""
     audio = AudioSegment.from_file(file_path, format="mp4")
     audio.export(output_path, format="wav")
-    print(f"Converted {file_path} to {output_path}")
+    log.info(f"Converted {file_path} to {output_path}")


 def set_faster_whisper_model(model: str, auto_update: bool = False):
@@ -266,7 +266,6 @@ async def speech(request: Request, user=Depends(get_verified_user)):
     payload["model"] = request.app.state.config.TTS_MODEL

     try:
-        # print(payload)
         timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT)
         async with aiohttp.ClientSession(
             timeout=timeout, trust_env=True
@@ -468,7 +467,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):


 def transcribe(request: Request, file_path):
-    print("transcribe", file_path)
+    log.info(f"transcribe: {file_path}")
     filename = os.path.basename(file_path)
     file_dir = os.path.dirname(file_path)
     id = filename.split(".")[0]
@@ -613,7 +613,7 @@ async def get_admin_details(request: Request, user=Depends(get_current_user)):
     admin_email = request.app.state.config.ADMIN_EMAIL
     admin_name = None

-    print(admin_email, admin_name)
+    log.info(f"Admin details - Email: {admin_email}, Name: {admin_name}")

     if admin_email:
         admin = Users.get_user_by_email(admin_email)
@@ -273,7 +273,7 @@ async def get_html_file_content_by_id(id: str, user=Depends(get_verified_user)):

     # Check if the file already exists in the cache
     if file_path.is_file():
-        print(f"file_path: {file_path}")
+        log.info(f"file_path: {file_path}")
         return FileResponse(file_path)
     else:
         raise HTTPException(
@@ -1,4 +1,5 @@
 import os
+import logging
 from pathlib import Path
 from typing import Optional

@@ -13,6 +14,11 @@ from open_webui.config import CACHE_DIR
 from open_webui.constants import ERROR_MESSAGES
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from open_webui.utils.auth import get_admin_user, get_verified_user
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])
+

 router = APIRouter()

@@ -79,7 +85,7 @@ async def create_new_function(
             detail=ERROR_MESSAGES.DEFAULT("Error creating function"),
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Failed to create a new function: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=ERROR_MESSAGES.DEFAULT(e),
@@ -183,7 +189,7 @@ async def update_function_by_id(
         FUNCTIONS[id] = function_module

         updated = {**form_data.model_dump(exclude={"id"}), "type": function_type}
-        print(updated)
+        log.debug(updated)

         function = Functions.update_function_by_id(id, updated)

@@ -299,7 +305,7 @@ async def update_function_valves_by_id(
             Functions.update_function_valves_by_id(id, valves.model_dump())
             return valves.model_dump()
         except Exception as e:
-            print(e)
+            log.exception(f"Error updating function values by id {id}: {e}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=ERROR_MESSAGES.DEFAULT(e),
@@ -388,7 +394,7 @@ async def update_function_user_valves_by_id(
             )
             return user_valves.model_dump()
         except Exception as e:
-            print(e)
+            log.exception(f"Error updating function user valves by id {id}: {e}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=ERROR_MESSAGES.DEFAULT(e),
backend/open_webui/routers/groups.py · 16 changes · Normal file → Executable file
@@ -1,7 +1,7 @@
 import os
 from pathlib import Path
 from typing import Optional
-
+import logging

 from open_webui.models.users import Users
 from open_webui.models.groups import (
@@ -14,7 +14,13 @@ from open_webui.models.groups import (
 from open_webui.config import CACHE_DIR
 from open_webui.constants import ERROR_MESSAGES
 from fastapi import APIRouter, Depends, HTTPException, Request, status
+
 from open_webui.utils.auth import get_admin_user, get_verified_user
+from open_webui.env import SRC_LOG_LEVELS
+
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])

 router = APIRouter()

@@ -37,7 +43,7 @@ async def get_groups(user=Depends(get_verified_user)):


 @router.post("/create", response_model=Optional[GroupResponse])
-async def create_new_function(form_data: GroupForm, user=Depends(get_admin_user)):
+async def create_new_group(form_data: GroupForm, user=Depends(get_admin_user)):
     try:
         group = Groups.insert_new_group(user.id, form_data)
         if group:
@@ -48,7 +54,7 @@ async def create_new_function(form_data: GroupForm, user=Depends(get_admin_user)
             detail=ERROR_MESSAGES.DEFAULT("Error creating group"),
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Error creating a new group: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=ERROR_MESSAGES.DEFAULT(e),
@@ -94,7 +100,7 @@ async def update_group_by_id(
             detail=ERROR_MESSAGES.DEFAULT("Error updating group"),
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Error updating group {id}: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=ERROR_MESSAGES.DEFAULT(e),
@@ -118,7 +124,7 @@ async def delete_group_by_id(id: str, user=Depends(get_admin_user)):
             detail=ERROR_MESSAGES.DEFAULT("Error deleting group"),
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Error deleting group {id}: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=ERROR_MESSAGES.DEFAULT(e),
@@ -351,7 +351,7 @@ def get_models(request: Request, user=Depends(get_verified_user)):
             if model_node_id:
                 model_list_key = None

-                print(workflow[model_node_id]["class_type"])
+                log.info(workflow[model_node_id]["class_type"])
                 for key in info[workflow[model_node_id]["class_type"]]["input"][
                     "required"
                 ]:
@@ -614,7 +614,7 @@ def add_files_to_knowledge_batch(
     )

     # Get files content
-    print(f"files/batch/add - {len(form_data)} files")
+    log.info(f"files/batch/add - {len(form_data)} files")
     files: List[FileModel] = []
     for form in form_data:
         file = Files.get_file_by_id(form.file_id)
@@ -777,7 +777,7 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
     if r is not None:
         try:
             res = await r.json()
-            print(res)
+            log.error(res)
             if "error" in res:
                 detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
         except Exception:
@@ -101,7 +101,7 @@ async def process_pipeline_inlet_filter(request, payload, user, models):
                 if "detail" in res:
                     raise Exception(response.status, res["detail"])
     except Exception as e:
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

     return payload

@@ -153,7 +153,7 @@ async def process_pipeline_outlet_filter(request, payload, user, models):
             except Exception:
                 pass
     except Exception as e:
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

     return payload

@@ -196,7 +196,7 @@ async def upload_pipeline(
     file: UploadFile = File(...),
     user=Depends(get_admin_user),
 ):
-    print("upload_pipeline", urlIdx, file.filename)
+    log.info(f"upload_pipeline: urlIdx={urlIdx}, filename={file.filename}")
     # Check if the uploaded file is a python file
     if not (file.filename and file.filename.endswith(".py")):
         raise HTTPException(
@@ -231,7 +231,7 @@ async def upload_pipeline(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         status_code = status.HTTP_404_NOT_FOUND
@@ -282,7 +282,7 @@ async def add_pipeline(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         if r is not None:
@@ -327,7 +327,7 @@ async def delete_pipeline(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         if r is not None:
@@ -361,7 +361,7 @@ async def get_pipelines(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         if r is not None:
@@ -400,7 +400,7 @@ async def get_pipeline_valves(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         if r is not None:
@@ -440,7 +440,7 @@ async def get_pipeline_valves_spec(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
         if r is not None:
@@ -482,7 +482,7 @@ async def update_pipeline_valves(
         return {**data}
     except Exception as e:
         # Handle connection error here
-        print(f"Connection error: {e}")
+        log.exception(f"Connection error: {e}")

         detail = None
@@ -1553,11 +1553,11 @@ def reset_upload_dir(user=Depends(get_admin_user)) -> bool:
                 elif os.path.isdir(file_path):
                     shutil.rmtree(file_path)  # Remove the directory
             except Exception as e:
-                print(f"Failed to delete {file_path}. Reason: {e}")
+                log.exception(f"Failed to delete {file_path}. Reason: {e}")
         else:
-            print(f"The directory {folder} does not exist")
+            log.warning(f"The directory {folder} does not exist")
     except Exception as e:
-        print(f"Failed to process the directory {folder}. Reason: {e}")
+        log.exception(f"Failed to process the directory {folder}. Reason: {e}")
     return True
@@ -1,3 +1,4 @@
+import logging
 from pathlib import Path
 from typing import Optional

@@ -15,6 +16,10 @@ from fastapi import APIRouter, Depends, HTTPException, Request, status
 from open_webui.utils.tools import get_tools_specs
 from open_webui.utils.auth import get_admin_user, get_verified_user
 from open_webui.utils.access_control import has_access, has_permission
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])


 router = APIRouter()
@@ -111,7 +116,7 @@ async def create_new_tools(
             detail=ERROR_MESSAGES.DEFAULT("Error creating tools"),
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Failed to load the tool by id {form_data.id}: {e}")
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=ERROR_MESSAGES.DEFAULT(str(e)),
@@ -193,7 +198,7 @@ async def update_tools_by_id(
         "specs": specs,
     }

-    print(updated)
+    log.debug(updated)
     tools = Tools.update_tool_by_id(id, updated)

     if tools:
@@ -343,7 +348,7 @@ async def update_tools_valves_by_id(
             Tools.update_tool_valves_by_id(id, valves.model_dump())
             return valves.model_dump()
         except Exception as e:
-            print(e)
+            log.exception(f"Failed to update tool valves by id {id}: {e}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=ERROR_MESSAGES.DEFAULT(str(e)),
@@ -421,7 +426,7 @@ async def update_tools_user_valves_by_id(
             )
             return user_valves.model_dump()
         except Exception as e:
-            print(e)
+            log.exception(f"Failed to update user valves by id {id}: {e}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=ERROR_MESSAGES.DEFAULT(str(e)),
@@ -1,4 +1,5 @@
 import black
+import logging
 import markdown

 from open_webui.models.chats import ChatTitleMessagesForm
@@ -13,11 +14,14 @@ from open_webui.utils.misc import get_gravatar_url
 from open_webui.utils.pdf_generator import PDFGenerator
 from open_webui.utils.auth import get_admin_user, get_verified_user
 from open_webui.utils.code_interpreter import execute_code_jupyter
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])

 router = APIRouter()


 @router.get("/gravatar")
 async def get_gravatar(email: str, user=Depends(get_verified_user)):
     return get_gravatar_url(email)
@@ -96,7 +100,7 @@ async def download_chat_as_pdf(
             headers={"Content-Disposition": "attachment;filename=chat.pdf"},
         )
     except Exception as e:
-        print(e)
+        log.exception(f"Error generating PDF: {e}")
         raise HTTPException(status_code=400, detail=str(e))
@@ -1,6 +1,7 @@
 import os
 import shutil
 import json
+import logging
 from abc import ABC, abstractmethod
 from typing import BinaryIO, Tuple

@@ -27,6 +28,11 @@ from open_webui.constants import ERROR_MESSAGES
 from azure.identity import DefaultAzureCredential
 from azure.storage.blob import BlobServiceClient
 from azure.core.exceptions import ResourceNotFoundError
+from open_webui.env import SRC_LOG_LEVELS
+
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])


 class StorageProvider(ABC):
@@ -71,7 +77,7 @@ class LocalStorageProvider(StorageProvider):
         if os.path.isfile(file_path):
             os.remove(file_path)
         else:
-            print(f"File {file_path} not found in local storage.")
+            log.warning(f"File {file_path} not found in local storage.")

     @staticmethod
     def delete_all_files() -> None:
@@ -85,9 +91,9 @@ class LocalStorageProvider(StorageProvider):
                 elif os.path.isdir(file_path):
                     shutil.rmtree(file_path)  # Remove the directory
             except Exception as e:
-                print(f"Failed to delete {file_path}. Reason: {e}")
+                log.exception(f"Failed to delete {file_path}. Reason: {e}")
         else:
-            print(f"Directory {UPLOAD_DIR} not found in local storage.")
+            log.warning(f"Directory {UPLOAD_DIR} not found in local storage.")


 class S3StorageProvider(StorageProvider):
@@ -14,14 +14,17 @@ from typing import Optional, Union, List, Dict
 from open_webui.models.users import Users

 from open_webui.constants import ERROR_MESSAGES
-from open_webui.env import WEBUI_SECRET_KEY, TRUSTED_SIGNATURE_KEY, STATIC_DIR
+from open_webui.env import WEBUI_SECRET_KEY, TRUSTED_SIGNATURE_KEY, STATIC_DIR, SRC_LOG_LEVELS

 from fastapi import Depends, HTTPException, Request, Response, status
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
 from passlib.context import CryptContext


 logging.getLogger("passlib").setLevel(logging.ERROR)

+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["OAUTH"])
+
 SESSION_SECRET = WEBUI_SECRET_KEY
 ALGORITHM = "HS256"
@@ -50,7 +53,7 @@ def verify_signature(payload: str, signature: str) -> bool:
 def override_static(path: str, content: str):
     # Ensure path is safe
     if "/" in path or ".." in path:
-        print(f"Invalid path: {path}")
+        log.error(f"Invalid path: {path}")
         return

     file_path = os.path.join(STATIC_DIR, path)
@@ -82,11 +85,11 @@ def get_license_data(app, key):

             return True
         else:
-            print(
+            log.error(
                 f"License: retrieval issue: {getattr(res, 'text', 'unknown error')}"
             )
     except Exception as ex:
-        print(f"License: Uncaught Exception: {ex}")
+        log.exception(f"License: Uncaught Exception: {ex}")
     return False
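This module also shows the complementary technique for noisy dependencies: instead of filtering a library's output, raise its logger's threshold so that lower-level records never propagate to the root handlers, while the module's own logger keeps its `SRC_LOG_LEVELS["OAUTH"]` level. As an isolated snippet:

```python
import logging

# Quiet a chatty dependency: only ERROR and above from passlib reach the handlers.
logging.getLogger("passlib").setLevel(logging.ERROR)
```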
@@ -66,7 +66,7 @@ async def generate_direct_chat_completion(
     user: Any,
     models: dict,
 ):
-    print("generate_direct_chat_completion")
+    log.info("generate_direct_chat_completion")

     metadata = form_data.pop("metadata", {})

@@ -103,7 +103,7 @@ async def generate_direct_chat_completion(
         }
     )

-    print("res", res)
+    log.info(f"res: {res}")

     if res.get("status", False):
         # Define a generator to stream responses
@@ -432,7 +432,7 @@ async def chat_action(request: Request, action_id: str, form_data: dict, user: A
                     )
                 )
             except Exception as e:
-                print(e)
+                log.exception(f"Failed to get user values: {e}")

         params = {**params, "__user__": __user__}
@@ -1,6 +1,12 @@
 import inspect
+import logging

 from open_webui.utils.plugin import load_function_module_by_id
 from open_webui.models.functions import Functions
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])


 def get_sorted_filter_ids(model):
@@ -80,7 +86,7 @@ async def process_filter_functions(
                     )
                 )
             except Exception as e:
-                print(e)
+                log.exception(f"Failed to get user values: {e}")

             # Execute handler
             if inspect.iscoroutinefunction(handler):
@@ -89,7 +95,7 @@ async def process_filter_functions(
                 form_data = handler(**params)

         except Exception as e:
-            print(f"Error in {filter_type} handler {filter_id}: {e}")
+            log.exception(f"Error in {filter_type} handler {filter_id}: {e}")
             raise e

         # Handle file cleanup for inlet
@@ -1855,7 +1855,8 @@ async def process_chat_response(
                         }
                     )

-                    print(content_blocks, serialize_content_blocks(content_blocks))
+                    log.info(f"content_blocks={content_blocks}")
+                    log.info(f"serialize_content_blocks={serialize_content_blocks(content_blocks)}")

                     try:
                         res = await generate_chat_completion(
@@ -1926,7 +1927,7 @@ async def process_chat_response(

                     await background_tasks_handler()
                 except asyncio.CancelledError:
-                    print("Task was cancelled!")
+                    log.warning("Task was cancelled!")
                     await event_emitter({"type": "task-cancelled"})

                     if not ENABLE_REALTIME_CHAT_SAVE:
@@ -2,13 +2,17 @@ import hashlib
 import re
 import time
 import uuid
+import logging
 from datetime import timedelta
 from pathlib import Path
 from typing import Callable, Optional


 import collections.abc
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])

 def deep_update(d, u):
     for k, v in u.items():
@@ -412,7 +416,7 @@ def parse_ollama_modelfile(model_text):
             elif param_type is bool:
                 value = value.lower() == "true"
         except Exception as e:
-            print(e)
+            log.exception(f"Failed to parse parameter {param}: {e}")
             continue

         data["params"][param] = value
@@ -45,7 +45,7 @@ def extract_frontmatter(content):
             frontmatter[key.strip()] = value.strip()

     except Exception as e:
-        print(f"An error occurred: {e}")
+        log.exception(f"Failed to extract frontmatter: {e}")
         return {}

     return frontmatter
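Every module above pulls its level from `SRC_LOG_LEVELS` in `open_webui.env`. A plausible minimal shape for that map, assuming it is driven by a global level plus optional per-subsystem environment variables (the variable names here are illustrative, not necessarily the repo's exact ones):

```python
import logging
import os

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "INFO").upper()

SRC_LOG_LEVELS = {}
for source in ["MAIN", "MODELS", "RAG", "OAUTH"]:
    # e.g. RAG_LOG_LEVEL=DEBUG would override the global level for retrieval modules
    level = os.environ.get(f"{source}_LOG_LEVEL", GLOBAL_LOG_LEVEL).upper()
    if not hasattr(logging, level):
        level = GLOBAL_LOG_LEVEL  # fall back on unrecognized level names
    SRC_LOG_LEVELS[source] = level
```

`Logger.setLevel` accepts a level name as a string, so the modules above can pass these values straight through.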