Mirror of https://github.com/danswer-ai/danswer.git (synced 2025-09-19 20:24:32 +02:00)

Commit: add agent search frontend
backend/chat_packets.log (new file, 370 lines)
File diff suppressed because one or more lines are too long
backend/onyx/agent_search/pro_search_b/main/nodes.py (new file, 1086 lines)
File diff suppressed because it is too large
@@ -404,7 +404,7 @@ class DefaultMultiLLM(LLM):
             # streaming choice
             stream=stream,
             # model params
-            temperature=self._temperature,
+            temperature=0,
             timeout=self._timeout,
             # For now, we don't support parallel tool calls
             # NOTE: we can't pass this in if tools are not specified
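Pinning `temperature=0` instead of the user-configured `self._temperature` makes decoding greedy, which the agent-search flow presumably wants for reproducible intermediate steps. A minimal, self-contained sketch of why temperature 0 is the deterministic limit (plain softmax math, not Onyx code):

import math

def sample_distribution(logits: list[float], temperature: float) -> list[float]:
    """Softmax over logits / temperature; temperature -> 0 approaches argmax."""
    if temperature == 0:
        # Greedy decoding: all probability mass on the single best token.
        best = max(range(len(logits)), key=lambda i: logits[i])
        return [1.0 if i == best else 0.0 for i in range(len(logits))]
    scaled = [logit / temperature for logit in logits]
    peak = max(scaled)
    exps = [math.exp(s - peak) for s in scaled]  # shift for numeric stability
    total = sum(exps)
    return [e / total for e in exps]

print(sample_distribution([2.0, 1.0, 0.5], temperature=1.0))  # spread out
print(sample_distribution([2.0, 1.0, 0.5], temperature=0))    # [1.0, 0.0, 0.0]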
@@ -112,7 +112,7 @@ class PersonaSnapshot(BaseModel):
     uploaded_image_id: str | None = None
     is_default_persona: bool
     search_start_date: datetime | None = None
-    labels: list["PersonaLabelSnapshot"]
+    labels: list["PersonaLabelSnapshot"] = []
 
     @classmethod
     def from_model(
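Defaulting `labels` to `[]` keeps validation from failing on payloads that omit the field. Pydantic copies field defaults per instance, so the usual mutable-default pitfall does not apply. A sketch with hypothetical stand-in models (not the real `PersonaSnapshot`):

from pydantic import BaseModel

class LabelSnapshot(BaseModel):
    name: str

class Snapshot(BaseModel):
    id: int
    # The [] default is copied per instance by Pydantic, so instances
    # never share the same underlying list.
    labels: list[LabelSnapshot] = []

a = Snapshot(id=1)  # validates even though "labels" is absent
a.labels.append(LabelSnapshot(name="x"))
b = Snapshot(id=2)
print(a.labels)  # [LabelSnapshot(name='x')]
print(b.labels)  # [] -- unaffected by mutations on a.labels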
@@ -185,6 +185,7 @@ def get_chat_session(
     user: User | None = Depends(current_chat_accesssible_user),
     db_session: Session = Depends(get_session),
 ) -> ChatSessionDetailResponse:
+    print("get_chat_session called")
     user_id = user.id if user is not None else None
     try:
         chat_session = get_chat_session_by_id(
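For context, the endpoint resolves its user and database session through FastAPI's `Depends` injection; the new `print` is debug output that would normally be a `logger.debug` call. A minimal sketch of the injection pattern with hypothetical dependencies (not Onyx's actual `current_chat_accesssible_user`):

from fastapi import Depends, FastAPI

app = FastAPI()

def get_current_user() -> str | None:
    # Stand-in for the real auth dependency: in the actual app this reads
    # auth state from the request; None means an anonymous caller.
    return None

@app.get("/chat/{session_id}")
def get_chat_session(
    session_id: int,
    user: str | None = Depends(get_current_user),  # injected per request
) -> dict:
    return {"session_id": session_id, "user": user}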
@@ -213,6 +214,8 @@ def get_chat_session(
         # we need the tool call objs anyways, so just fetch them in a single call
         prefetch_tool_calls=True,
     )
+    for message in session_messages:
+        translate_db_message_to_chat_message_detail(message)
 
     return ChatSessionDetailResponse(
         chat_session_id=session_id,
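`prefetch_tool_calls=True` suggests the tool-call rows are eager-loaded together with the messages rather than one lazy query per message (the N+1 pattern the comment alludes to); note also that the new loop discards the return value of `translate_db_message_to_chat_message_detail`. A sketch of the eager-loading idea in SQLAlchemy 2.0 style, with hypothetical models in place of Onyx's:

from sqlalchemy import ForeignKey, select
from sqlalchemy.orm import (
    DeclarativeBase, Mapped, Session, mapped_column, relationship, selectinload,
)

class Base(DeclarativeBase):
    pass

class ChatMessage(Base):
    __tablename__ = "chat_message"
    id: Mapped[int] = mapped_column(primary_key=True)
    tool_calls: Mapped[list["ToolCall"]] = relationship(back_populates="message")

class ToolCall(Base):
    __tablename__ = "tool_call"
    id: Mapped[int] = mapped_column(primary_key=True)
    message_id: Mapped[int] = mapped_column(ForeignKey("chat_message.id"))
    message: Mapped[ChatMessage] = relationship(back_populates="tool_calls")

def get_session_messages(db: Session, prefetch_tool_calls: bool = False):
    stmt = select(ChatMessage)
    if prefetch_tool_calls:
        # One extra SELECT ... WHERE message_id IN (...) instead of a
        # lazy query fired per message when .tool_calls is touched.
        stmt = stmt.options(selectinload(ChatMessage.tool_calls))
    return db.scalars(stmt).all()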
@@ -427,6 +430,8 @@ def handle_new_chat_message(
                 ),
                 is_connected=is_connected_func,
             ):
+                # with open('chat_packets.log', 'a') as log_file:
+                #     log_file.write(json.dumps(packet) + '\n')
                 yield json.dumps(packet) if isinstance(packet, dict) else packet
 
         except Exception as e:
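The commented-out `open('chat_packets.log', ...)` block is leftover packet-capture debugging, and it appears to be what produced the 370-line `backend/chat_packets.log` committed above. The live line serializes each dict packet to JSON and passes pre-serialized strings through; a self-contained sketch of that shape:

import json
from collections.abc import Iterator

Packet = dict | str

def stream_packets(packets: Iterator[Packet]) -> Iterator[str]:
    """Serialize dict packets to JSON; pass already-serialized strings through."""
    for packet in packets:
        yield json.dumps(packet) if isinstance(packet, dict) else packet

for line in stream_packets(iter([{"type": "message_start"}, '{"type": "delta"}'])):
    print(line)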
@@ -29,8 +29,9 @@ class LongTermLogger:
         try:
             # Create directory if it doesn't exist
             os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
-        except Exception as e:
-            logger.error(f"Error creating directory for long-term logs: {e}")
+        except Exception:
+            # logger.error(f"Error creating directory for long-term logs: {e}")
+            pass
 
     def _cleanup_old_files(self, category_path: Path) -> None:
         try:
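This hunk and the two below all replace logged errors with bare `except Exception: pass`, keeping the old `logger.error` call only as a comment. If silently ignoring failures is really the intent, `contextlib.suppress` states it more directly; a sketch under that assumption:

import contextlib
import os

log_file_path = "/var/log/onyx/long_term/app.log"  # hypothetical path

# Equivalent to try/except Exception: pass, but says so explicitly.
with contextlib.suppress(Exception):
    os.makedirs(os.path.dirname(log_file_path), exist_ok=True)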
@@ -47,10 +48,13 @@ class LongTermLogger:
                     continue
                 try:
                     file.unlink()
-                except Exception as e:
-                    logger.error(f"Error deleting old log file {file}: {e}")
-        except Exception as e:
-            logger.error(f"Error during log rotation cleanup: {e}")
+                except Exception:
+                    pass
+                    # logger.error(f"Error deleting old log file {file
+                    # }: {e}")
+        except Exception:
+            pass
+            # logger.error(f"Error during log rotation cleanup: {e}")
 
     def _record(self, message: Any, category: str) -> None:
         category_path = self.log_file_path / category
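For reference, `_cleanup_old_files` is a count-based rotation: keep the newest files in a category directory and unlink the rest. A self-contained sketch of the pattern; the glob pattern and cap are assumptions, not Onyx's actual values:

from pathlib import Path

MAX_FILES_PER_CATEGORY = 50  # assumed cap, not the real constant

def cleanup_old_files(category_path: Path) -> None:
    # Newest first by modification time; everything past the cap is deleted.
    files = sorted(
        category_path.glob("*.json"),
        key=lambda f: f.stat().st_mtime,
        reverse=True,
    )
    for file in files[MAX_FILES_PER_CATEGORY:]:
        try:
            file.unlink()
        except OSError:
            # A concurrent writer may have removed it already; skip it.
            continue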
@@ -73,8 +77,9 @@ class LongTermLogger:
             with open(file_path, "w+") as f:
                 # default allows us to "ignore" unserializable objects
                 json.dump(final_record, f, default=lambda x: str(x))
-        except Exception as e:
-            logger.error(f"Error recording log: {e}")
+        except Exception:
+            # logger.error(f"Error recording log: {e}")
+            pass
 
     def record(self, message: JSON_ro, category: str = "default") -> None:
         try:
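The `default=lambda x: str(x)` hook (equivalent to `default=str`) is what lets `_record` serialize arbitrary payloads: when `json.dump` meets a value it cannot encode, it stringifies it instead of raising `TypeError`. A quick demonstration:

import json
from datetime import datetime
from uuid import uuid4

record = {
    "time": datetime(2025, 1, 1, 12, 0),  # not natively JSON-serializable
    "id": uuid4(),                        # neither is a UUID
    "message": "hello",
}

# Without default=..., json.dumps raises TypeError on the datetime/UUID.
print(json.dumps(record, default=str))
# e.g. {"time": "2025-01-01 12:00:00", "id": "...", "message": "hello"}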