Merge branch 'dev' into validate-rag-urls

Rory 2025-02-03 20:44:59 -06:00
commit 1a1cc1514f
3 changed files with 40 additions and 37 deletions

View File

@@ -901,7 +901,7 @@ async def process_chat_response(
         if message:
             messages = get_message_list(message_map, message.get("id"))
 
-            if tasks:
+            if tasks and messages:
                 if TASKS.TITLE_GENERATION in tasks:
                     if tasks[TASKS.TITLE_GENERATION]:
                         res = await generate_title(
@@ -1090,6 +1090,14 @@ async def process_chat_response(
     task_id = str(uuid4())  # Create a unique task ID.
     model_id = form_data.get("model", "")
 
+    Chats.upsert_message_to_chat_by_id_and_message_id(
+        metadata["chat_id"],
+        metadata["message_id"],
+        {
+            "model": model_id,
+        },
+    )
+
     # Handle as a background task
     async def post_response_handler(response, events):
         def serialize_content_blocks(content_blocks, raw=False):
@@ -1388,6 +1396,7 @@ async def process_chat_response(
                     retries += 1
                     log.debug(f"Attempt count: {retries}")
 
+                    output = ""
                     try:
                         if content_blocks[-1]["attributes"].get("type") == "code":
                             output = await event_caller(

View File

@@ -20,7 +20,7 @@ def get_message_list(messages, message_id):
    current_message = messages.get(message_id)
 
    if not current_message:
-        return f"Message ID {message_id} not found in the history."
+        return None
 
    # Reconstruct the chain by following the parentId links
    message_list = []

View File

@@ -827,14 +827,14 @@
 		}
 	};
 
-	const createMessagesList = (history, responseMessageId) => {
+	const createMessagesList = (responseMessageId) => {
 		if (responseMessageId === null) {
 			return [];
 		}
 
 		const message = history.messages[responseMessageId];
 		if (message?.parentId) {
-			return [...createMessagesList(history, message.parentId), message];
+			return [...createMessagesList(message.parentId), message];
 		} else {
 			return [message];
 		}
@@ -896,7 +896,7 @@
 	};
 
 	const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
-		const messages = createMessagesList(history, responseMessageId);
+		const messages = createMessagesList(responseMessageId);
 
 		const res = await chatAction(localStorage.token, actionId, {
 			model: modelId,
@@ -965,7 +965,7 @@
 		const modelId = selectedModels[0];
 		const model = $models.filter((m) => m.id === modelId).at(0);
 
-		const messages = createMessagesList(history, history.currentId);
+		const messages = createMessagesList(history.currentId);
 		const parentMessage = messages.length !== 0 ? messages.at(-1) : null;
 
 		const userMessageId = uuidv4();
@@ -1210,12 +1210,7 @@
 			);
 			history.messages[message.id] = message;
 
-			await chatCompletedHandler(
-				chatId,
-				message.model,
-				message.id,
-				createMessagesList(history, message.id)
-			);
+			await chatCompletedHandler(chatId, message.model, message.id, createMessagesList(message.id));
 		}
 
 		console.log(data);
@@ -1231,7 +1226,7 @@
 	const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
 		console.log('submitPrompt', userPrompt, $chatId);
 
-		const messages = createMessagesList(history, history.currentId);
+		const messages = createMessagesList(history.currentId);
 		const _selectedModels = selectedModels.map((modelId) =>
 			$models.map((m) => m.id).includes(modelId) ? modelId : ''
 		);
@@ -1330,7 +1325,7 @@
 		saveSessionSelectedModels();
 
-		sendPrompt(userPrompt, userMessageId, { newChat: true });
+		await sendPrompt(userPrompt, userMessageId, { newChat: true });
 	};
 
 	const sendPrompt = async (
@@ -1338,6 +1333,17 @@
 		parentId: string,
 		{ modelId = null, modelIdx = null, newChat = false } = {}
 	) => {
+		// Create new chat if newChat is true and first user message
+		if (
+			newChat &&
+			history.messages[history.currentId].parentId === null &&
+			history.messages[history.currentId].role === 'user'
+		) {
+			await initChatHandler();
+		} else {
+			await saveChatHandler($chatId);
+		}
+
 		// If modelId is provided, use it, else use selected model
 		let selectedModelIds = modelId
 			? [modelId]
@@ -1347,19 +1353,6 @@
 		// Create response messages for each selected model
 		const responseMessageIds: Record<PropertyKey, string> = {};
 
-		const _chatId = JSON.parse(JSON.stringify($chatId));
-
-		// Create new chat if newChat is true and first user message
-		if (
-			newChat &&
-			history.messages[history.currentId].parentId === null &&
-			history.messages[history.currentId].role === 'user'
-		) {
-			await initChatHandler();
-		} else {
-			await saveChatHandler(_chatId);
-		}
 
 		for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
 			const model = $models.filter((m) => m.id === modelId).at(0);
@@ -1397,18 +1390,19 @@
 		await tick();
 
 		// Save chat after all messages have been created
-		saveChatHandler(_chatId);
+		await saveChatHandler($chatId);
+		const _chatId = JSON.parse(JSON.stringify($chatId));
 
 		await Promise.all(
 			selectedModelIds.map(async (modelId, _modelIdx) => {
 				console.log('modelId', modelId);
 				const model = $models.filter((m) => m.id === modelId).at(0);
 				if (model) {
-					const messages = createMessagesList(history, parentId);
+					const messages = createMessagesList(parentId);
 					// If there are image files, check if model is vision capable
 					const hasImages = messages.some((message) =>
-						message?.files?.some((file) => file.type === 'image')
+						message.files?.some((file) => file.type === 'image')
 					);
 
 					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
@@ -1450,7 +1444,7 @@
 					const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
 
 					scrollToBottom();
-					await sendPromptSocket(history, model, responseMessageId, _chatId);
+					await sendPromptSocket(model, responseMessageId, _chatId);
 
 					if (chatEventEmitter) clearInterval(chatEventEmitter);
 				} else {
@@ -1463,7 +1457,7 @@
 		chats.set(await getChatList(localStorage.token, $currentChatPage));
 	};
 
-	const sendPromptSocket = async (history, model, responseMessageId, _chatId) => {
+	const sendPromptSocket = async (model, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
 		const userMessage = history.messages[responseMessage.parentId];
@@ -1513,7 +1507,7 @@
 							}`
 						}
 					: undefined,
-				...createMessagesList(history, responseMessageId).map((message) => ({
+				...createMessagesList(responseMessageId).map((message) => ({
 					...message,
 					content: removeDetails(message.content, ['reasoning', 'code_interpreter'])
 				}))
@@ -1748,7 +1742,7 @@
 			.at(0);
 
 		if (model) {
-			await sendPromptSocket(history, model, responseMessage.id, _chatId);
+			await sendPromptSocket(model, responseMessage.id, _chatId);
 		}
 	}
 };
@@ -1809,7 +1803,7 @@
 			system: $settings.system ?? undefined,
 			params: params,
 			history: history,
-			messages: createMessagesList(history, history.currentId),
+			messages: createMessagesList(history.currentId),
 			tags: [],
 			timestamp: Date.now()
 		});
@@ -1831,7 +1825,7 @@
 		chat = await updateChatById(localStorage.token, _chatId, {
 			models: selectedModels,
 			history: history,
-			messages: createMessagesList(history, history.currentId),
+			messages: createMessagesList(history.currentId),
 			params: params,
 			files: chatFiles
 		});
@@ -1939,7 +1933,7 @@
 	{/if}
 
 	<div class="flex flex-col flex-auto z-10 w-full">
-		{#if $settings?.landingPageMode === 'chat' || createMessagesList(history, history.currentId).length > 0}
+		{#if $settings?.landingPageMode === 'chat' || createMessagesList(history.currentId).length > 0}
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
				id="messages-container"