pablodanswer 2024-09-16 12:26:05 -07:00
parent d684fb116d
commit fbc5008259
4 changed files with 8 additions and 19 deletions


@@ -946,4 +946,5 @@ def stream_chat_message(
         is_connected=is_connected,
     )
     for obj in objects:
+        print(obj)
         yield get_json_line(obj.model_dump())
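For context on the line being changed here: each streamed object is serialized as one JSON document per line, which is what the name get_json_line suggests. A minimal sketch of such a helper, assuming it simply JSON-encodes the dumped model and appends a newline (the actual danswer implementation may differ):

    import json

    def get_json_line(json_dict: dict) -> str:
        # One JSON document per line (NDJSON) lets the client parse the
        # stream incrementally instead of waiting for the full body.
        return json.dumps(json_dict) + "\n"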


@@ -272,9 +272,9 @@ class Answer:
                     stop_reason=StreamStopReason.CONTEXT_LENGTH
                 )
-                if not tool_call_chunk:
-                    logger.info("Skipped tool call but generated message")
-                    return
+            if not tool_call_chunk:
+                logger.info("Skipped tool call but generated message")
+                return
             tool_call_requests = tool_call_chunk.tool_calls
@@ -328,7 +328,6 @@ class Answer:
                 self._update_prompt_builder_for_search_tool(prompt_builder, [])
             elif tool.name == ImageGenerationTool._NAME:
-                print("\n----\nUpdating image prompt user message\n----\n")
                 img_urls = [
                     img_generation_result["url"]
                     for img_generation_result in tool_runner.tool_final_result().tool_result
@@ -339,8 +338,6 @@ class Answer:
                 )
             )
-        print("now stremign wie fianl results")
         yield tool_runner.tool_final_result()
         # Update message history with tool call and response
@@ -379,17 +376,11 @@ class Answer:
                 )
             )
-        print("\n----\nBuilding final prompt with Tool call summary\n----\n")
         # Generate response based on updated message history
         prompt = prompt_builder.build(tool_call_summary=tool_call_summary)
         response_content = ""
-        for content in self._process_llm_stream(
-            prompt=prompt,
-            tools=None
-            # tools=[tool.tool_definition() for tool in self.tools],
-        ):
+        for content in self._process_llm_stream(prompt=prompt, tools=None):
             if isinstance(content, str):
                 response_content += content
             yield content
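Two things are happening in the rewritten loop: the call passes tools=None, so the final answer is generated without offering the LLM another round of tool calls, and each streamed token is both accumulated into response_content and forwarded to the caller. Stripped of the surrounding class, the accumulate-and-yield pattern looks like this; stream_and_accumulate and the sink list are illustrative names, not danswer APIs:

    from collections.abc import Iterator

    def stream_and_accumulate(
        token_stream: Iterator[str], sink: list[str]
    ) -> Iterator[str]:
        # Forward each token to the caller while also collecting it, so the
        # full response text is available afterwards (e.g. for persistence).
        for token in token_stream:
            sink.append(token)
            yield token

    # Usage:
    #   parts: list[str] = []
    #   for token in stream_and_accumulate(llm_stream, parts):
    #       ...  # emit token to the client
    #   full_response = "".join(parts)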
@@ -765,5 +756,4 @@ class Answer:
         if not self.is_connected():
             logger.debug("Answer stream has been cancelled")
         self._is_cancelled = not self.is_connected()
         return self._is_cancelled
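The lines above implement cooperative cancellation: is_connected is a callback supplied by the HTTP layer, and the answer generator polls it to decide whether to keep streaming. A stripped-down sketch of the same pattern; StreamState is an illustrative stand-in, not the real Answer class:

    import logging
    from collections.abc import Callable

    logger = logging.getLogger(__name__)

    class StreamState:
        def __init__(self, is_connected: Callable[[], bool]) -> None:
            self.is_connected = is_connected
            self._is_cancelled = False

        @property
        def is_cancelled(self) -> bool:
            # Mirrors the diff above: re-check the connection on each poll
            # and remember the result for the caller.
            if not self.is_connected():
                logger.debug("Answer stream has been cancelled")
            self._is_cancelled = not self.is_connected()
            return self._is_cancelled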


@@ -12,7 +12,7 @@ from danswer.utils.threadpool_concurrency import run_functions_tuples_in_parallel
 class ToolRunner:
-    def __init__(self, tool: Tool, args: dict[str, Any], llm: LLM):
+    def __init__(self, tool: Tool, args: dict[str, Any], llm: LLM | None = None):
         self.tool = tool
         self.args = args
         self._llm = llm
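The signature change makes the llm argument optional, so call sites that run a tool without ever consulting the LLM no longer have to thread one through. A hypothetical call site; the tool and args are illustrative:

    # Before: ToolRunner(tool=some_tool, args=args, llm=llm) was mandatory.
    # After: the llm can simply be omitted.
    runner = ToolRunner(tool=some_tool, args={"query": "quarterly report"})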
@@ -49,12 +49,9 @@ class ToolRunner:
 def check_which_tools_should_run_for_non_tool_calling_llm(
     tools: list[Tool], query: str, history: list[PreviousMessage], llm: LLM
 ) -> list[dict[str, Any] | None]:
-    print(len(tools))
     tool_args_list: list[tuple[Callable[..., Any], tuple[Any, ...]]] = [
         (tool.get_args_for_non_tool_calling_llm, (query, history, llm))
         for tool in tools
     ]
-    print(tool_args_list)
-    print(tools)
-    print(len(tool_args_list))
     return run_functions_tuples_in_parallel(tool_args_list)
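run_functions_tuples_in_parallel, imported at the top of this file from danswer.utils.threadpool_concurrency, fans the per-tool argument-generation calls out to worker threads. Its real implementation is not shown in this diff; here is a minimal sketch of the behavior the call above relies on, namely running each (function, args) pair concurrently and returning the results in input order:

    from collections.abc import Callable
    from concurrent.futures import ThreadPoolExecutor
    from typing import Any

    def run_functions_tuples_in_parallel(
        functions_with_args: list[tuple[Callable[..., Any], tuple[Any, ...]]],
    ) -> list[Any]:
        if not functions_with_args:
            return []
        # Submit every call to a thread pool, then collect the results in
        # the same order the (function, args) tuples were given.
        with ThreadPoolExecutor(max_workers=len(functions_with_args)) as pool:
            futures = [pool.submit(fn, *args) for fn, args in functions_with_args]
            return [future.result() for future in futures]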


@@ -1142,6 +1142,7 @@ export function ChatPage({
           if (!packet) {
             continue;
           }
+          console.log(packet);
           if (!initialFetchDetails) {
             if (!Object.hasOwn(packet, "user_message_id")) {
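On the wire, each packet logged here is one line of the JSON-lines stream that the backend's get_json_line produces. For completeness, a Python sketch of a client consuming that stream; the endpoint URL and payload shape are illustrative, not the real API:

    import json
    import requests

    def stream_chat_packets(url: str, payload: dict):
        # Each non-empty response line is one JSON packet; empty lines are
        # skipped before parsing, much like the falsy-packet guard above.
        with requests.post(url, json=payload, stream=True) as resp:
            resp.raise_for_status()
            for raw_line in resp.iter_lines():
                if not raw_line:
                    continue
                yield json.loads(raw_line)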