mirror of
https://github.com/open-webui/open-webui.git
synced 2025-04-11 21:39:07 +02:00
Merge pull request #11358 from OrenZhang/i18n_translation
i18n(common): add i18n translation
This commit is contained in:
commit
7a1cf1095a
@ -179,7 +179,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Tooltip content="Verify Connection" className="self-end -mb-1">
|
||||
<Tooltip content={$i18n.t('Verify Connection')} className="self-end -mb-1">
|
||||
<button
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
|
@ -387,8 +387,12 @@
|
||||
<div class="flex items-center relative">
|
||||
<Tooltip
|
||||
content={BYPASS_EMBEDDING_AND_RETRIEVAL
|
||||
? 'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
: 'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'}
|
||||
? $i18n.t(
|
||||
'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
)
|
||||
: $i18n.t(
|
||||
'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'
|
||||
)}
|
||||
>
|
||||
<Switch bind:state={BYPASS_EMBEDDING_AND_RETRIEVAL} />
|
||||
</Tooltip>
|
||||
@ -625,8 +629,12 @@
|
||||
<div class="flex items-center relative">
|
||||
<Tooltip
|
||||
content={RAG_FULL_CONTEXT
|
||||
? 'Inject entire contents as context for comprehensive processing, this is recommended for complex queries.'
|
||||
: 'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'}
|
||||
? $i18n.t(
|
||||
'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
)
|
||||
: $i18n.t(
|
||||
'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'
|
||||
)}
|
||||
>
|
||||
<Switch bind:state={RAG_FULL_CONTEXT} />
|
||||
</Tooltip>
|
||||
|
@ -462,8 +462,12 @@
|
||||
<div class="flex items-center relative">
|
||||
<Tooltip
|
||||
content={webConfig.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL
|
||||
? 'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
: 'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'}
|
||||
? $i18n.t(
|
||||
'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
)
|
||||
: $i18n.t(
|
||||
'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'
|
||||
)}
|
||||
>
|
||||
<Switch bind:state={webConfig.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL} />
|
||||
</Tooltip>
|
||||
|
@ -73,10 +73,13 @@
|
||||
<div class="text-2xl font-medium capitalize">{channel.name}</div>
|
||||
|
||||
<div class=" text-gray-500">
|
||||
This channel was created on {dayjs(channel.created_at / 1000000).format(
|
||||
'MMMM D, YYYY'
|
||||
)}. This is the very beginning of the {channel.name}
|
||||
channel.
|
||||
{$i18n.t(
|
||||
'This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.',
|
||||
{
|
||||
createdAt: dayjs(channel.created_at / 1000000).format('MMMM D, YYYY'),
|
||||
channelName: channel.name
|
||||
}
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
|
@ -441,7 +441,9 @@
|
||||
|
||||
{#if ($config?.features?.enable_code_execution ?? true) && (lang.toLowerCase() === 'python' || lang.toLowerCase() === 'py' || (lang === '' && checkPythonCode(code)))}
|
||||
{#if executing}
|
||||
<div class="run-code-button bg-none border-none p-1 cursor-not-allowed">Running</div>
|
||||
<div class="run-code-button bg-none border-none p-1 cursor-not-allowed">
|
||||
{$i18n.t('Running')}
|
||||
</div>
|
||||
{:else if run}
|
||||
<button
|
||||
class="flex gap-1 items-center run-code-button bg-none border-none bg-gray-50 hover:bg-gray-100 dark:bg-gray-850 dark:hover:bg-gray-800 transition rounded-md px-1.5 py-0.5"
|
||||
|
@ -748,7 +748,9 @@
|
||||
onSourceClick={async (id, idx) => {
|
||||
console.log(id, idx);
|
||||
let sourceButton = document.getElementById(`source-${message.id}-${idx}`);
|
||||
const sourcesCollapsible = document.getElementById(`collapsible-${message.id}`);
|
||||
const sourcesCollapsible = document.getElementById(
|
||||
`collapsible-${message.id}`
|
||||
);
|
||||
|
||||
if (sourceButton) {
|
||||
sourceButton.click();
|
||||
|
@ -87,8 +87,12 @@
|
||||
<div>
|
||||
<Tooltip
|
||||
content={enableFullContent
|
||||
? 'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
: 'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'}
|
||||
? $i18n.t(
|
||||
'Inject the entire content as context for comprehensive processing, this is recommended for complex queries.'
|
||||
)
|
||||
: $i18n.t(
|
||||
'Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.'
|
||||
)}
|
||||
>
|
||||
<div class="flex items-center gap-1.5 text-xs">
|
||||
{#if enableFullContent}
|
||||
|
@ -113,8 +113,8 @@
|
||||
}
|
||||
}}
|
||||
>
|
||||
<option class=" text-gray-700" value="private" selected>Private</option>
|
||||
<option class=" text-gray-700" value="public" selected>Public</option>
|
||||
<option class=" text-gray-700" value="private" selected>{$i18n.t('Private')}</option>
|
||||
<option class=" text-gray-700" value="public" selected>{$i18n.t('Public')}</option>
|
||||
</select>
|
||||
|
||||
<div class=" text-xs text-gray-400 font-medium">
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "الإفتراضي Prompt الاقتراحات",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "الإفتراضي صلاحيات المستخدم",
|
||||
"Delete": "حذف",
|
||||
"Delete a model": "حذف الموديل",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "قم بتضمين علامة `-api` عند تشغيل Stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "معلومات",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "إدخال الأوامر",
|
||||
"Install from Github URL": "التثبيت من عنوان URL لجيثب",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "أخر 30 يوم",
|
||||
"Previous 7 days": "أخر 7 أيام",
|
||||
"Private": "",
|
||||
"Profile Image": "صورة الملف الشخصي",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "موجه (على سبيل المثال: أخبرني بحقيقة ممتعة عن الإمبراطورية الرومانية)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "مطالبات",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com \"{{searchValue}}\" أسحب من ",
|
||||
"Pull a model from Ollama.com": "Ollama.com سحب الموديل من ",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "الثيم",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "وهذا يضمن حفظ محادثاتك القيمة بشكل آمن في قاعدة بياناتك الخلفية. شكرًا لك!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "المتغير",
|
||||
"variable to have them replaced with clipboard content.": "متغير لاستبدالها بمحتوى الحافظة.",
|
||||
"Verify Connection": "",
|
||||
"Version": "إصدار",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Промпт Предложения по подразбиране",
|
||||
"Default to 389 or 636 if TLS is enabled": "По подразбиране 389 или 636, ако TLS е активиран",
|
||||
"Default to ALL": "По подразбиране за ВСИЧКИ",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Роля на потребителя по подразбиране",
|
||||
"Delete": "Изтриване",
|
||||
"Delete a model": "Изтриване на модел",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Информация",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Въведете команди",
|
||||
"Install from Github URL": "Инсталиране от URL адреса на Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Незабавно автоматично изпращане след гласова транскрипция",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Наказание за присъствие",
|
||||
"Previous 30 days": "Предишните 30 дни",
|
||||
"Previous 7 days": "Предишните 7 дни",
|
||||
"Private": "",
|
||||
"Profile Image": "Профилна снимка",
|
||||
"Prompt": "Промпт",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Промпт (напр. Кажи ми забавен факт за Римската империя)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Промптът е актуализиран успешно",
|
||||
"Prompts": "Промптове",
|
||||
"Prompts Access": "Достъп до промптове",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Извади \"{{searchValue}}\" от Ollama.com",
|
||||
"Pull a model from Ollama.com": "Издърпайте модел от Ollama.com",
|
||||
"Query Generation Prompt": "Промпт за генериране на запитвания",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Тема",
|
||||
"Thinking...": "Мисля...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Това действие не може да бъде отменено. Желаете ли да продължите?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Това гарантира, че ценните ви разговори се запазват сигурно във вашата бекенд база данни. Благодарим ви!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Това е експериментална функция, може да не работи според очакванията и подлежи на промяна по всяко време.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Клапаните са актуализирани успешно",
|
||||
"variable": "променлива",
|
||||
"variable to have them replaced with clipboard content.": "променлива, за да бъдат заменени със съдържанието от клипборда.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Версия",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Версия {{selectedVersion}} от {{totalVersions}}",
|
||||
"View Replies": "Преглед на отговорите",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "ডিফল্ট প্রম্পট সাজেশন",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "ইউজারের ডিফল্ট পদবি",
|
||||
"Delete": "মুছে ফেলুন",
|
||||
"Delete a model": "একটি মডেল মুছে ফেলুন",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webui চালু করার সময় `--api` ফ্ল্যাগ সংযুক্ত করুন",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "তথ্য",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "ইনপুট কমান্ডস",
|
||||
"Install from Github URL": "Github URL থেকে ইনস্টল করুন",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "পূর্ব ৩০ দিন",
|
||||
"Previous 7 days": "পূর্ব ৭ দিন",
|
||||
"Private": "",
|
||||
"Profile Image": "প্রোফাইল ইমেজ",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "প্রম্প্ট (উদাহরণস্বরূপ, আমি রোমান ইমপার্টের সম্পর্কে একটি উপস্থিতি জানতে বল)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "প্রম্পটসমূহ",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com থেকে \"{{searchValue}}\" টানুন",
|
||||
"Pull a model from Ollama.com": "Ollama.com থেকে একটি টেনে আনুন আনুন",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "থিম",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "এটা নিশ্চিত করে যে, আপনার গুরুত্বপূর্ণ আলোচনা নিরাপদে আপনার ব্যাকএন্ড ডেটাবেজে সংরক্ষিত আছে। ধন্যবাদ!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "ভেরিয়েবল",
|
||||
"variable to have them replaced with clipboard content.": "ক্লিপবোর্ডের কন্টেন্ট দিয়ে যেই ভেরিয়েবল রিপ্লেস করা যাবে।",
|
||||
"Verify Connection": "",
|
||||
"Version": "ভার্সন",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Suggeriments d'indicació per defecte",
|
||||
"Default to 389 or 636 if TLS is enabled": "Per defecte 389 o 636 si TLS està habilitat",
|
||||
"Default to ALL": "Per defecte TOTS",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Rol d'usuari per defecte",
|
||||
"Delete": "Eliminar",
|
||||
"Delete a model": "Eliminar un model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inclou `--api` quan executis stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "Influeix amb la rapidesa amb què l'algoritme respon als comentaris del text generat. Una taxa d'aprenentatge més baixa donarà lloc a ajustos més lents, mentre que una taxa d'aprenentatge més alta farà que l'algorisme sigui més sensible.",
|
||||
"Info": "Informació",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Entra comandes",
|
||||
"Install from Github URL": "Instal·lar des de l'URL de Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Enviament automàtic després de la transcripció de veu",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Penalització de presència",
|
||||
"Previous 30 days": "30 dies anteriors",
|
||||
"Previous 7 days": "7 dies anteriors",
|
||||
"Private": "",
|
||||
"Profile Image": "Imatge de perfil",
|
||||
"Prompt": "Indicació",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Indicació (p.ex. Digues-me quelcom divertit sobre l'Imperi Romà)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Indicació actualitzada correctament",
|
||||
"Prompts": "Indicacions",
|
||||
"Prompts Access": "Accés a les indicacions",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Obtenir \"{{searchValue}}\" de Ollama.com",
|
||||
"Pull a model from Ollama.com": "Obtenir un model d'Ollama.com",
|
||||
"Query Generation Prompt": "Indicació per a generació de consulta",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Pensant...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Aquesta acció no es pot desfer. Vols continuar?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Això assegura que les teves converses valuoses queden desades de manera segura a la teva base de dades. Gràcies!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Aquesta és una funció experimental, és possible que no funcioni com s'espera i està subjecta a canvis en qualsevol moment.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "Aquesta opció controla quants tokens es conserven en actualitzar el context. Per exemple, si s'estableix en 2, es conservaran els darrers 2 tokens del context de conversa. Preservar el context pot ajudar a mantenir la continuïtat d'una conversa, però pot reduir la capacitat de respondre a nous temes.",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Valves actualitat correctament",
|
||||
"variable": "variable",
|
||||
"variable to have them replaced with clipboard content.": "variable per tenir-les reemplaçades amb el contingut del porta-retalls.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versió",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versió {{selectedVersion}} de {{totalVersions}}",
|
||||
"View Replies": "Veure les respostes",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Default nga prompt nga mga sugyot",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Default nga Papel sa Gumagamit",
|
||||
"Delete": "",
|
||||
"Delete a model": "Pagtangtang sa usa ka template",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Iapil ang `--api` nga bandila kung nagdagan nga stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Pagsulod sa input commands",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Mga aghat",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "Pagkuha ug template gikan sa Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Kini nagsiguro nga ang imong bililhon nga mga panag-istoryahanay luwas nga natipig sa imong backend database. ",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "variable",
|
||||
"variable to have them replaced with clipboard content.": "variable aron pulihan kini sa mga sulud sa clipboard.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Bersyon",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Výchozí návrhy promptů",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Výchozí uživatelská role",
|
||||
"Delete": "Smazat",
|
||||
"Delete a model": "Odstranit model.",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Při spuštění stable-diffusion-webui zahrňte příznak `--api`.",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Vstupní příkazy",
|
||||
"Install from Github URL": "Instalace z URL adresy Githubu",
|
||||
"Instant Auto-Send After Voice Transcription": "Okamžité automatické odeslání po přepisu hlasu",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Předchozích 30 dnů",
|
||||
"Previous 7 days": "Předchozích 7 dní",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilový obrázek",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (např. Řekni mi zábavný fakt o Římské říši)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompty",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Stáhněte \"{{searchValue}}\" z Ollama.com",
|
||||
"Pull a model from Ollama.com": "Stáhněte model z Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Téma",
|
||||
"Thinking...": "Přemýšlím...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Tuto akci nelze vrátit zpět. Přejete si pokračovat?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "To zajišťuje, že vaše cenné konverzace jsou bezpečně uloženy ve vaší backendové databázi. Děkujeme!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Jedná se o experimentální funkci, nemusí fungovat podle očekávání a může být kdykoliv změněna.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Ventily byly úspěšně aktualizovány.",
|
||||
"variable": "proměnná",
|
||||
"variable to have them replaced with clipboard content.": "proměnnou, aby byl jejich obsah nahrazen obsahem schránky.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Verze",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Verze {{selectedVersion}} z {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Standardforslag til prompt",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Brugers rolle som standard",
|
||||
"Delete": "Slet",
|
||||
"Delete a model": "Slet en model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inkluder `--api` flag, når du kører stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Inputkommandoer",
|
||||
"Install from Github URL": "Installer fra Github URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Øjeblikkelig automatisk afsendelse efter stemmetransskription",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Seneste 30 dage",
|
||||
"Previous 7 days": "Seneste 7 dage",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilbillede",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (f.eks. Fortæl mig en sjov kendsgerning om Romerriget)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Hent \"{{searchValue}}\" fra Ollama.com",
|
||||
"Pull a model from Ollama.com": "Hent en model fra Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Tænker...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Denne handling kan ikke fortrydes. Vil du fortsætte?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Dette sikrer, at dine værdifulde samtaler gemmes sikkert i din backend-database. Tak!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Dette er en eksperimentel funktion, den fungerer muligvis ikke som forventet og kan ændres når som helst.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Ventiler opdateret.",
|
||||
"variable": "variabel",
|
||||
"variable to have them replaced with clipboard content.": "variabel for at få dem erstattet med indholdet af udklipsholderen.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Version {{selectedVersion}} af {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Prompt-Vorschläge",
|
||||
"Default to 389 or 636 if TLS is enabled": "Standardmäßig auf 389 oder 636 setzen, wenn TLS aktiviert ist",
|
||||
"Default to ALL": "Standardmäßig auf ALLE setzen",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Standardbenutzerrolle",
|
||||
"Delete": "Löschen",
|
||||
"Delete a model": "Ein Modell löschen",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Fügen Sie beim Ausführen von stable-diffusion-webui die Option `--api` hinzu",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Eingabebefehle",
|
||||
"Install from Github URL": "Installiere von der Github-URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Spracherkennung direkt absenden",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Vorherige 30 Tage",
|
||||
"Previous 7 days": "Vorherige 7 Tage",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilbild",
|
||||
"Prompt": "Prompt",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (z. B. \"Erzähle mir eine interessante Tatsache über das Römische Reich\")",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt erfolgreich aktualisiert",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "Prompt-Zugriff",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "\"{{searchValue}}\" von Ollama.com beziehen",
|
||||
"Pull a model from Ollama.com": "Modell von Ollama.com beziehen",
|
||||
"Query Generation Prompt": "Abfragegenerierungsprompt",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Design",
|
||||
"Thinking...": "Denke nach...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Diese Aktion kann nicht rückgängig gemacht werden. Möchten Sie fortfahren?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Dies stellt sicher, dass Ihre wertvollen Unterhaltungen sicher in Ihrer Backend-Datenbank gespeichert werden. Vielen Dank!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Dies ist eine experimentelle Funktion, sie funktioniert möglicherweise nicht wie erwartet und kann jederzeit geändert werden.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Valves erfolgreich aktualisiert",
|
||||
"variable": "Variable",
|
||||
"variable to have them replaced with clipboard content.": "Variable, um den Inhalt der Zwischenablage beim Nutzen des Prompts zu ersetzen.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Version {{selectedVersion}} von {{totalVersions}}",
|
||||
"View Replies": "Antworten anzeigen",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Default Prompt Suggestions",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Default User Role",
|
||||
"Delete": "",
|
||||
"Delete a model": "Delete a model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Include `--api` flag when running stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Input commands",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Promptos",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "Pull a wowdel from Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Theme much theme",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "This ensures that your valuable conversations are securely saved to your backend database. Thank you! Much secure!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "variable very variable",
|
||||
"variable to have them replaced with clipboard content.": "variable to have them replaced with clipboard content. Very replace.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version much version",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Προεπιλεγμένες Προτάσεις Προτροπής",
|
||||
"Default to 389 or 636 if TLS is enabled": "Προεπιλογή στο 389 ή 636 εάν είναι ενεργοποιημένο το TLS",
|
||||
"Default to ALL": "Προεπιλογή σε ΟΛΑ",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Προεπιλεγμένος Ρόλος Χρήστη",
|
||||
"Delete": "Διαγραφή",
|
||||
"Delete a model": "Διαγραφή ενός μοντέλου",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Συμπεριλάβετε το flag `--api` όταν τρέχετε το stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Πληροφορίες",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Εισαγωγή εντολών",
|
||||
"Install from Github URL": "Εγκατάσταση από URL Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Άμεση Αυτόματη Αποστολή μετά τη μεταγραφή φωνής",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Προηγούμενες 30 ημέρες",
|
||||
"Previous 7 days": "Προηγούμενες 7 ημέρες",
|
||||
"Private": "",
|
||||
"Profile Image": "Εικόνα Προφίλ",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Προτροπή (π.χ. Πες μου ένα διασκεδαστικό γεγονός για την Ρωμαϊκή Αυτοκρατορία)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Η προτροπή ενημερώθηκε με επιτυχία",
|
||||
"Prompts": "Προτροπές",
|
||||
"Prompts Access": "Πρόσβαση Προτροπών",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Τραβήξτε \"{{searchValue}}\" από το Ollama.com",
|
||||
"Pull a model from Ollama.com": "Τραβήξτε ένα μοντέλο από το Ollama.com",
|
||||
"Query Generation Prompt": "Προτροπή Δημιουργίας Ερωτήσεων",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Θέμα",
|
||||
"Thinking...": "Σκέφτομαι...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Αυτή η ενέργεια δεν μπορεί να αναιρεθεί. Θέλετε να συνεχίσετε;",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Αυτό διασφαλίζει ότι οι πολύτιμες συνομιλίες σας αποθηκεύονται με ασφάλεια στη βάση δεδομένων backend σας. Ευχαριστούμε!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Αυτή είναι μια πειραματική λειτουργία, μπορεί να μην λειτουργεί όπως αναμένεται και υπόκειται σε αλλαγές οποιαδήποτε στιγμή.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Οι βαλβίδες ενημερώθηκαν με επιτυχία",
|
||||
"variable": "μεταβλητή",
|
||||
"variable to have them replaced with clipboard content.": "μεταβλητή να αντικατασταθούν με το περιεχόμενο του πρόχειρου.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Έκδοση",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Έκδοση {{selectedVersion}} από {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "",
|
||||
"Delete": "",
|
||||
"Delete a model": "",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "",
|
||||
"variable to have them replaced with clipboard content.": "",
|
||||
"Verify Connection": "",
|
||||
"Version": "",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "",
|
||||
"Delete": "",
|
||||
"Delete a model": "",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "",
|
||||
"variable to have them replaced with clipboard content.": "",
|
||||
"Verify Connection": "",
|
||||
"Version": "",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Sugerencias de mensajes por defecto",
|
||||
"Default to 389 or 636 if TLS is enabled": "Predeterminado a 389 o 636 si TLS está habilitado",
|
||||
"Default to ALL": "Predeterminado a TODOS",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Rol por defecto para usuarios",
|
||||
"Delete": "Borrar",
|
||||
"Delete a model": "Borra un modelo",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Incluir el indicador `--api` al ejecutar stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Información",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Ingresar comandos",
|
||||
"Install from Github URL": "Instalar desde la URL de Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Auto-Enviar Después de la Transcripción de Voz",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Últimos 30 días",
|
||||
"Previous 7 days": "Últimos 7 días",
|
||||
"Private": "",
|
||||
"Profile Image": "Imagen de perfil",
|
||||
"Prompt": "Prompt",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (por ejemplo, cuéntame una cosa divertida sobre el Imperio Romano)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt actualizado exitosamente",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "Acceso a Prompts",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Extraer \"{{searchValue}}\" de Ollama.com",
|
||||
"Pull a model from Ollama.com": "Obtener un modelo de Ollama.com",
|
||||
"Query Generation Prompt": "Prompt de generación de consulta",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Pensando...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Esta acción no se puede deshacer. ¿Desea continuar?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Esto garantiza que sus valiosas conversaciones se guarden de forma segura en su base de datos en el backend. ¡Gracias!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Esta es una característica experimental que puede no funcionar como se esperaba y está sujeto a cambios en cualquier momento.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Valves actualizados con éxito",
|
||||
"variable": "variable",
|
||||
"variable to have them replaced with clipboard content.": "variable para reemplazarlos con el contenido del portapapeles.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versión",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versión {{selectedVersion}} de {{totalVersions}}",
|
||||
"View Replies": "Ver respuestas",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Prompt Iradokizun Lehenetsiak",
|
||||
"Default to 389 or 636 if TLS is enabled": "Lehenetsi 389 edo 636 TLS gaituta badago",
|
||||
"Default to ALL": "Lehenetsi GUZTIAK",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Erabiltzaile Rol Lehenetsia",
|
||||
"Delete": "Ezabatu",
|
||||
"Delete a model": "Ezabatu eredu bat",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Sartu `--api` bandera stable-diffusion-webui exekutatzean",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informazioa",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Sartu komandoak",
|
||||
"Install from Github URL": "Instalatu Github URLtik",
|
||||
"Instant Auto-Send After Voice Transcription": "Bidalketa Automatiko Berehalakoa Ahots Transkripzioaren Ondoren",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Aurreko 30 egunak",
|
||||
"Previous 7 days": "Aurreko 7 egunak",
|
||||
"Private": "",
|
||||
"Profile Image": "Profil irudia",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt-a (adib. Kontatu datu dibertigarri bat Erromatar Inperioari buruz)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt-a ongi eguneratu da",
|
||||
"Prompts": "Prompt-ak",
|
||||
"Prompts Access": "Prompt sarbidea",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ekarri \"{{searchValue}}\" Ollama.com-etik",
|
||||
"Pull a model from Ollama.com": "Ekarri modelo bat Ollama.com-etik",
|
||||
"Query Generation Prompt": "Kontsulta sortzeko prompt-a",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Gaia",
|
||||
"Thinking...": "Pentsatzen...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Ekintza hau ezin da desegin. Jarraitu nahi duzu?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Honek zure elkarrizketa baliotsuak modu seguruan zure backend datu-basean gordeko direla ziurtatzen du. Eskerrik asko!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Hau funtzionalitate esperimental bat da, baliteke espero bezala ez funtzionatzea eta edozein unetan aldaketak izatea.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Balbulak ongi eguneratu dira",
|
||||
"variable": "aldagaia",
|
||||
"variable to have them replaced with clipboard content.": "aldagaia arbeleko edukiarekin ordezkatzeko.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Bertsioa",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "{{totalVersions}}-tik {{selectedVersion}}. bertsioa",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "پیشنهادات پرامپت پیش فرض",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "نقش کاربر پیش فرض",
|
||||
"Delete": "حذف",
|
||||
"Delete a model": "حذف یک مدل",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "فلگ `--api` را هنکام اجرای stable-diffusion-webui استفاده کنید.",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "اطلاعات",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "ورودی دستورات",
|
||||
"Install from Github URL": "نصب از ادرس Github",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 روز قبل",
|
||||
"Previous 7 days": "7 روز قبل",
|
||||
"Private": "",
|
||||
"Profile Image": "تصویر پروفایل",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "پیشنهاد (برای مثال: به من بگوید چیزی که برای من یک کاربرد داره درباره ایران)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "پرامپت\u200cها",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "بازگرداندن \"{{searchValue}}\" از Ollama.com",
|
||||
"Pull a model from Ollama.com": "دریافت یک مدل از Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "پوسته",
|
||||
"Thinking...": "در حال فکر...",
|
||||
"This action cannot be undone. Do you wish to continue?": "این اقدام قابل بازگردانی نیست. برای ادامه اطمینان دارید؟",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "این تضمین می کند که مکالمات ارزشمند شما به طور ایمن در پایگاه داده بکند ذخیره می شود. تشکر!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "متغیر",
|
||||
"variable to have them replaced with clipboard content.": "متغیر برای جایگزینی آنها با محتوای بریده\u200cدان.",
|
||||
"Verify Connection": "",
|
||||
"Version": "نسخه",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "نسخهٔ {{selectedVersion}} از {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Oletuskehotteiden ehdotukset",
|
||||
"Default to 389 or 636 if TLS is enabled": "Oletus 389 tai 636, jos TLS on käytössä",
|
||||
"Default to ALL": "Oletus KAIKKI",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Oletuskäyttäjärooli",
|
||||
"Delete": "Poista",
|
||||
"Delete a model": "Poista malli",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Sisällytä `--api`-lippu ajettaessa stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Tiedot",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Syötekäskyt",
|
||||
"Install from Github URL": "Asenna Github-URL:stä",
|
||||
"Instant Auto-Send After Voice Transcription": "Heti automaattinen lähetys äänitunnistuksen jälkeen",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Edelliset 30 päivää",
|
||||
"Previous 7 days": "Edelliset 7 päivää",
|
||||
"Private": "",
|
||||
"Profile Image": "Profiilikuva",
|
||||
"Prompt": "Kehote",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Kehote (esim. Kerro hauska fakta Rooman valtakunnasta)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Kehote päivitetty onnistuneesti",
|
||||
"Prompts": "Kehotteet",
|
||||
"Prompts Access": "Kehoitteiden käyttöoikeudet",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Lataa \"{{searchValue}}\" Ollama.comista",
|
||||
"Pull a model from Ollama.com": "Lataa malli Ollama.comista",
|
||||
"Query Generation Prompt": "Kyselytulosten luontikehote",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Teema",
|
||||
"Thinking...": "Ajattelee...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Tätä toimintoa ei voi peruuttaa. Haluatko jatkaa?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Tämä varmistaa, että arvokkaat keskustelusi tallennetaan turvallisesti backend-tietokantaasi. Kiitos!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Tämä on kokeellinen ominaisuus, se ei välttämättä toimi odotetulla tavalla ja se voi muuttua milloin tahansa.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Venttiilit päivitetty onnistuneesti",
|
||||
"variable": "muuttuja",
|
||||
"variable to have them replaced with clipboard content.": "muuttuja korvataan leikepöydän sisällöllä.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versio",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versio {{selectedVersion}} / {{totalVersions}}",
|
||||
"View Replies": "Näytä vastaukset",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Suggestions de prompts par défaut",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Rôle utilisateur par défaut",
|
||||
"Delete": "Supprimer",
|
||||
"Delete a model": "Supprimer un modèle",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inclure le drapeau `--api` lorsque vous exécutez stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Entrez les commandes",
|
||||
"Install from Github URL": "Installer depuis l'URL GitHub",
|
||||
"Instant Auto-Send After Voice Transcription": "Envoi automatique instantané après transcription vocale",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 derniers jours",
|
||||
"Previous 7 days": "7 derniers jours",
|
||||
"Private": "",
|
||||
"Profile Image": "Image de profil",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (par ex. Dites-moi un fait amusant à propos de l'Empire romain)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Récupérer « {{searchValue}} » depuis Ollama.com",
|
||||
"Pull a model from Ollama.com": "Télécharger un modèle depuis Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Thème",
|
||||
"Thinking...": "En train de réfléchir...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Cette action ne peut pas être annulée. Souhaitez-vous continuer ?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Cela garantit que vos conversations précieuses soient sauvegardées en toute sécurité dans votre base de données backend. Merci !",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Il s'agit d'une fonctionnalité expérimentale, elle peut ne pas fonctionner comme prévu et est sujette à modification à tout moment.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Les vannes ont été mises à jour avec succès",
|
||||
"variable": "variable",
|
||||
"variable to have them replaced with clipboard content.": "variable pour qu'elles soient remplacées par le contenu du presse-papiers.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version améliorée",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Suggestions de prompts par défaut",
|
||||
"Default to 389 or 636 if TLS is enabled": "Par défaut à 389 ou 636 si TLS est activé",
|
||||
"Default to ALL": "Par défaut à TOUS",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Rôle utilisateur par défaut",
|
||||
"Delete": "Supprimer",
|
||||
"Delete a model": "Supprimer un modèle",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inclure le drapeau `--api` lorsque vous exécutez stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Commandes d'entrée",
|
||||
"Install from Github URL": "Installer depuis une URL GitHub",
|
||||
"Instant Auto-Send After Voice Transcription": "Envoi automatique après la transcription",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Pénalité de présence",
|
||||
"Previous 30 days": "30 derniers jours",
|
||||
"Previous 7 days": "7 derniers jours",
|
||||
"Private": "",
|
||||
"Profile Image": "Image de profil",
|
||||
"Prompt": "Prompt",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (par ex. Dites-moi un fait amusant à propos de l'Empire romain)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt mis à jour avec succès",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "Accès aux prompts",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Récupérer « {{searchValue}} » depuis Ollama.com",
|
||||
"Pull a model from Ollama.com": "Télécharger un modèle depuis Ollama.com",
|
||||
"Query Generation Prompt": "Prompt de génération de requête",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Thème",
|
||||
"Thinking...": "En train de réfléchir...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Cette action ne peut pas être annulée. Souhaitez-vous continuer ?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Cela garantit que vos conversations précieuses soient sauvegardées en toute sécurité dans votre base de données backend. Merci !",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Il s'agit d'une fonctionnalité expérimentale, elle peut ne pas fonctionner comme prévu et est sujette à modification à tout moment.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Les vannes ont été mises à jour avec succès",
|
||||
"variable": "variable",
|
||||
"variable to have them replaced with clipboard content.": "variable pour qu'elles soient remplacées par le contenu du presse-papiers.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version:",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Version {{selectedVersion}} de {{totalVersions}}",
|
||||
"View Replies": "Voir les réponses",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "הצעות ברירת מחדל לפקודות",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "תפקיד משתמש ברירת מחדל",
|
||||
"Delete": "מחק",
|
||||
"Delete a model": "מחק מודל",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "כלול את הדגל `--api` בעת הרצת stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "מידע",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "פקודות קלט",
|
||||
"Install from Github URL": "התקן מכתובת URL של Github",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 הימים הקודמים",
|
||||
"Previous 7 days": "7 הימים הקודמים",
|
||||
"Private": "",
|
||||
"Profile Image": "תמונת פרופיל",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "פקודה (למשל, ספר לי עובדה מעניינת על האימפריה הרומית)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "פקודות",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "משוך \"{{searchValue}}\" מ-Ollama.com",
|
||||
"Pull a model from Ollama.com": "משוך מודל מ-Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "נושא",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "פעולה זו מבטיחה שהשיחות בעלות הערך שלך יישמרו באופן מאובטח במסד הנתונים העורפי שלך. תודה!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "משתנה",
|
||||
"variable to have them replaced with clipboard content.": "משתנה להחליפו ב- clipboard תוכן.",
|
||||
"Verify Connection": "",
|
||||
"Version": "גרסה",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "डिफ़ॉल्ट प्रॉम्प्ट सुझाव",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "डिफ़ॉल्ट उपयोगकर्ता भूमिका",
|
||||
"Delete": "डिलीट",
|
||||
"Delete a model": "एक मॉडल हटाएँ",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webui चलाते समय `--api` ध्वज शामिल करें",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "सूचना-विषयक",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "इनपुट क命",
|
||||
"Install from Github URL": "Github URL से इंस्टॉल करें",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "पिछले 30 दिन",
|
||||
"Previous 7 days": "पिछले 7 दिन",
|
||||
"Private": "",
|
||||
"Profile Image": "प्रोफ़ाइल छवि",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "प्रॉम्प्ट (उदाहरण के लिए मुझे रोमन साम्राज्य के बारे में एक मजेदार तथ्य बताएं)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "प्रॉम्प्ट",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "\"{{searchValue}}\" को Ollama.com से खींचें",
|
||||
"Pull a model from Ollama.com": "Ollama.com से एक मॉडल खींचें",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "थीम",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "यह सुनिश्चित करता है कि आपकी मूल्यवान बातचीत आपके बैकएंड डेटाबेस में सुरक्षित रूप से सहेजी गई है। धन्यवाद!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "वेरिएबल",
|
||||
"variable to have them replaced with clipboard content.": "उन्हें क्लिपबोर्ड सामग्री से बदलने के लिए वेरिएबल।",
|
||||
"Verify Connection": "",
|
||||
"Version": "संस्करण",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Zadani prijedlozi prompta",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Zadana korisnička uloga",
|
||||
"Delete": "Izbriši",
|
||||
"Delete a model": "Izbriši model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Uključite zastavicu `--api` prilikom pokretanja stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informacije",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Unos naredbi",
|
||||
"Install from Github URL": "Instaliraj s Github URL-a",
|
||||
"Instant Auto-Send After Voice Transcription": "Trenutačno automatsko slanje nakon glasovne transkripcije",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Prethodnih 30 dana",
|
||||
"Previous 7 days": "Prethodnih 7 dana",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilna slika",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (npr. Reci mi zanimljivost o Rimskom carstvu)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompti",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Povucite \"{{searchValue}}\" s Ollama.com",
|
||||
"Pull a model from Ollama.com": "Povucite model s Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Razmišljam",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ovo osigurava da su vaši vrijedni razgovori sigurno spremljeni u bazu podataka. Hvala vam!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Ovo je eksperimentalna značajka, možda neće funkcionirati prema očekivanjima i podložna je promjenama u bilo kojem trenutku.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "varijabla",
|
||||
"variable to have them replaced with clipboard content.": "varijabla za zamjenu sadržajem međuspremnika.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Verzija",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Alapértelmezett prompt javaslatok",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Alapértelmezett felhasználói szerep",
|
||||
"Delete": "Törlés",
|
||||
"Delete a model": "Modell törlése",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Add hozzá a `--api` kapcsolót a stable-diffusion-webui futtatásakor",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Információ",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Beviteli parancsok",
|
||||
"Install from Github URL": "Telepítés Github URL-ről",
|
||||
"Instant Auto-Send After Voice Transcription": "Azonnali automatikus küldés hangfelismerés után",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Előző 30 nap",
|
||||
"Previous 7 days": "Előző 7 nap",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilkép",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (pl. Mondj egy érdekes tényt a Római Birodalomról)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Promptok",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "\"{{searchValue}}\" letöltése az Ollama.com-ról",
|
||||
"Pull a model from Ollama.com": "Modell letöltése az Ollama.com-ról",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Téma",
|
||||
"Thinking...": "Gondolkodik...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Ez a művelet nem vonható vissza. Szeretné folytatni?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ez biztosítja, hogy értékes beszélgetései biztonságosan mentésre kerüljenek a backend adatbázisban. Köszönjük!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Ez egy kísérleti funkció, lehet, hogy nem a várt módon működik és bármikor változhat.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Szelepek sikeresen frissítve",
|
||||
"variable": "változó",
|
||||
"variable to have them replaced with clipboard content.": "változó, hogy a vágólap tartalmával helyettesítse őket.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Verzió",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "{{selectedVersion}}. verzió a {{totalVersions}}-ból",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Saran Permintaan Default",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Peran Pengguna Default",
|
||||
"Delete": "Menghapus",
|
||||
"Delete a model": "Menghapus model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Sertakan bendera `--api` saat menjalankan stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Perintah masukan",
|
||||
"Install from Github URL": "Instal dari URL Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Kirim Otomatis Instan Setelah Transkripsi Suara",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 hari sebelumnya",
|
||||
"Previous 7 days": "7 hari sebelumnya",
|
||||
"Private": "",
|
||||
"Profile Image": "Gambar Profil",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Permintaan (mis. Ceritakan sebuah fakta menarik tentang Kekaisaran Romawi)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompt",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Tarik \"{{searchValue}}\" dari Ollama.com",
|
||||
"Pull a model from Ollama.com": "Tarik model dari Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Berpikir",
|
||||
"This action cannot be undone. Do you wish to continue?": "Tindakan ini tidak dapat dibatalkan. Apakah Anda ingin melanjutkan?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ini akan memastikan bahwa percakapan Anda yang berharga disimpan dengan aman ke basis data backend. Terima kasih!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Ini adalah fitur eksperimental, mungkin tidak berfungsi seperti yang diharapkan dan dapat berubah sewaktu-waktu.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Katup berhasil diperbarui",
|
||||
"variable": "variabel",
|
||||
"variable to have them replaced with clipboard content.": "variabel untuk diganti dengan konten papan klip.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versi",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Moltaí Leid Réamhshocraithe",
|
||||
"Default to 389 or 636 if TLS is enabled": "Réamhshocrú go 389 nó 636 má tá TLS cumasaithe",
|
||||
"Default to ALL": "Réamhshocrú do GACH",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Ról Úsáideora Réamhshocraithe",
|
||||
"Delete": "Scrios",
|
||||
"Delete a model": "Scrios múnla",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Cuir bratach `--api` san áireamh agus webui cobhsaí-scaipthe á rith",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Eolas",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Orduithe ionchuir",
|
||||
"Install from Github URL": "Suiteáil ó Github URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Seoladh Uathoibríoch Láithreach Tar éis",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Pionós Láithreacht",
|
||||
"Previous 30 days": "30 lá roimhe seo",
|
||||
"Previous 7 days": "7 lá roimhe seo",
|
||||
"Private": "",
|
||||
"Profile Image": "Íomhá Próifíl",
|
||||
"Prompt": "Leid",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Leid (m.sh. inis dom fíric spraíúil faoin Impireacht Rómhánach)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "D'éirigh leis an leid a nuashonrú",
|
||||
"Prompts": "Leabhair",
|
||||
"Prompts Access": "Rochtain ar Chuirí",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Tarraing \"{{searchValue}}\" ó Ollama.com",
|
||||
"Pull a model from Ollama.com": "Tarraing múnla ó Ollama.com",
|
||||
"Query Generation Prompt": "Cuirí Ginearáil Ceisteanna",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Téama",
|
||||
"Thinking...": "Ag smaoineamh...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Ní féidir an gníomh seo a chur ar ais. Ar mhaith leat leanúint ar aghaidh?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Cinntíonn sé seo go sábhálfar do chomhráite luachmhara go daingean i do bhunachar sonraí cúltaca Go raibh maith agat!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Is gné turgnamhach í seo, b'fhéidir nach bhfeidhmeoidh sé mar a bhíothas ag súil leis agus tá sé faoi réir athraithe ag am ar bith.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Comhlaí nuashonraíodh",
|
||||
"variable": "athraitheach",
|
||||
"variable to have them replaced with clipboard content.": "athróg chun ábhar gearrthaisce a chur in ionad iad.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Leagan",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Leagan {{selectedVersion}} de {{totalVersions}}",
|
||||
"View Replies": "Féach ar Fhreagraí",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Suggerimenti prompt predefiniti",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Ruolo utente predefinito",
|
||||
"Delete": "Elimina",
|
||||
"Delete a model": "Elimina un modello",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Includi il flag `--api` quando esegui stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informazioni",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Comandi di input",
|
||||
"Install from Github URL": "Eseguire l'installazione dall'URL di Github",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Ultimi 30 giorni",
|
||||
"Previous 7 days": "Ultimi 7 giorni",
|
||||
"Private": "",
|
||||
"Profile Image": "Immagine del profilo",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (ad esempio Dimmi un fatto divertente sull'Impero Romano)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompt",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Estrai \"{{searchValue}}\" da Ollama.com",
|
||||
"Pull a model from Ollama.com": "Estrai un modello da Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ciò garantisce che le tue preziose conversazioni siano salvate in modo sicuro nel tuo database backend. Grazie!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "variabile",
|
||||
"variable to have them replaced with clipboard content.": "variabile per farli sostituire con il contenuto degli appunti.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versione",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "デフォルトのプロンプトの提案",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "デフォルトのユーザー役割",
|
||||
"Delete": "削除",
|
||||
"Delete a model": "モデルを削除",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webuiを実行する際に`--api`フラグを含める",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "情報",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "入力コマンド",
|
||||
"Install from Github URL": "Github URLからインストール",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "前の30日間",
|
||||
"Previous 7 days": "前の7日間",
|
||||
"Private": "",
|
||||
"Profile Image": "プロフィール画像",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "プロンプト(例:ローマ帝国についての楽しい事を教えてください)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "プロンプト",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com から \"{{searchValue}}\" をプル",
|
||||
"Pull a model from Ollama.com": "Ollama.com からモデルをプル",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "テーマ",
|
||||
"Thinking...": "思考中...",
|
||||
"This action cannot be undone. Do you wish to continue?": "このアクションは取り消し不可です。続けますか?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "これは、貴重な会話がバックエンドデータベースに安全に保存されることを保証します。ありがとうございます!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "実験的機能であり正常動作しない場合があります。",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "変数",
|
||||
"variable to have them replaced with clipboard content.": "クリップボードの内容に置き換える変数。",
|
||||
"Verify Connection": "",
|
||||
"Version": "バージョン",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "ნაგულისხმევი მოთხოვნის მინიშნებები",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "მომხმარებლის ნაგულისხმევი როლი",
|
||||
"Delete": "წაშლა",
|
||||
"Delete a model": "მოდელის წაშლა",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "`--api` ალმის ჩასმა stable-diffusion-webui-ის გამოყენებისას",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "ინფორმაცია",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "შეიყვანეთ ბრძანებები",
|
||||
"Install from Github URL": "დაყენება Github-ის ბმულიდან",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "წინა 30 დღე",
|
||||
"Previous 7 days": "წინა 7 დღე",
|
||||
"Private": "",
|
||||
"Profile Image": "პროფილის სურათი",
|
||||
"Prompt": "ბრძანების შეყვანის შეხსენება",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (მაგ. მითხარი სახალისო ფაქტი რომის იმპერიის შესახებ)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "მოთხოვნები",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "\"{{searchValue}}\"-ის გადმოწერა Ollama.com-იდან",
|
||||
"Pull a model from Ollama.com": "მოდელის გადმოწერა Ollama.com-დან",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "თემა",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "ეს უზრუნველყოფს, რომ თქვენი ღირებული საუბრები უსაფრთხოდ შეინახება თქვენს უკანაბოლო მონაცემთა ბაზაში. მადლობა!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "ცვლადი",
|
||||
"variable to have them replaced with clipboard content.": "ცვლადი მისი ბუფერის მნიშვნელობით ჩასანაცვლებლად.",
|
||||
"Verify Connection": "",
|
||||
"Version": "ვერსია",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "기본 프롬프트 제안",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "기본 사용자 역할",
|
||||
"Delete": "삭제",
|
||||
"Delete a model": "모델 삭제",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webui를 실행 시 `--api` 플래그를 포함하세요",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "정보",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "명령어 입력",
|
||||
"Install from Github URL": "Github URL에서 설치",
|
||||
"Instant Auto-Send After Voice Transcription": "음성 변환 후 즉시 자동 전송",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "이전 30일",
|
||||
"Previous 7 days": "이전 7일",
|
||||
"Private": "",
|
||||
"Profile Image": "프로필 이미지",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "프롬프트 (예: 로마 황제에 대해 재미있는 사실을 알려주세요)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "성공적으로 프롬프트를 수정했습니다",
|
||||
"Prompts": "프롬프트",
|
||||
"Prompts Access": "프롬프트 접근",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com에서 \"{{searchValue}}\" 가져오기",
|
||||
"Pull a model from Ollama.com": "Ollama.com에서 모델 가져오기(pull)",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "테마",
|
||||
"Thinking...": "생각 중...",
|
||||
"This action cannot be undone. Do you wish to continue?": "이 액션은 되돌릴 수 없습니다. 계속 하시겠습니까?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "이렇게 하면 소중한 대화 내용이 백엔드 데이터베이스에 안전하게 저장됩니다. 감사합니다!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "이것은 실험적 기능으로, 예상대로 작동하지 않을 수 있으며 언제든지 변경될 수 있습니다.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "성공적으로 밸브가 업데이트되었습니다",
|
||||
"variable": "변수",
|
||||
"variable to have them replaced with clipboard content.": "변수를 사용하여 클립보드 내용으로 바꾸세요.",
|
||||
"Verify Connection": "",
|
||||
"Version": "버전",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "버전 {{totalVersions}}의 {{selectedVersion}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Numatytieji užklausų pasiūlymai",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Numatytoji naudotojo rolė",
|
||||
"Delete": "ištrinti",
|
||||
"Delete a model": "Ištrinti modėlį",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Pridėti `--api` kai vykdomas stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informacija",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Įvesties komandos",
|
||||
"Install from Github URL": "Instaliuoti Github nuorodą",
|
||||
"Instant Auto-Send After Voice Transcription": "Siųsti iškart po balso transkripcijos",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Paskutinės 30 dienų",
|
||||
"Previous 7 days": "Paskutinės 7 dienos",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilio nuotrauka",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Užklausa (pvz. supaprastink šį laišką)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Užklausos",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Rasti \"{{searchValue}}\" iš Ollama.com",
|
||||
"Pull a model from Ollama.com": "Gauti modelį iš Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Mąsto...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Šis veiksmas negali būti atšauktas. Ar norite tęsti?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Tai užtikrina, kad Jūsų pokalbiai saugiai saugojami duomenų bazėje. Ačiū!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Tai eksperimentinė funkcija ir gali veikti nevisada.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Įeitys atnaujintos sėkmingai",
|
||||
"variable": "kintamasis",
|
||||
"variable to have them replaced with clipboard content.": "kintamoji pakeičiama kopijuoklės turiniu.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versija",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Cadangan Gesaan Lalai",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Peranan Pengguna Lalai",
|
||||
"Delete": "Padam",
|
||||
"Delete a model": "Padam Model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Sertakan bendera `-- api ` semasa menjalankan stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Maklumat",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Masukkan Arahan",
|
||||
"Install from Github URL": "Pasang daripada URL Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Hantar Secara Automatik Dengan Segera Selepas Transkripsi Suara",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 hari sebelumnya",
|
||||
"Previous 7 days": "7 hari sebelumnya",
|
||||
"Private": "",
|
||||
"Profile Image": "Imej Profail",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Gesaan (cth Beritahu saya fakta yang menyeronokkan tentang Kesultanan Melaka)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Gesaan",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Tarik \"{{ searchValue }}\" daripada Ollama.com",
|
||||
"Pull a model from Ollama.com": "Tarik model dari Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Berfikir...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Tindakan ini tidak boleh diubah semula kepada asal. Adakah anda ingin teruskan",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ini akan memastikan bahawa perbualan berharga anda disimpan dengan selamat ke pangkalan data 'backend' anda. Terima kasih!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "ni adalah ciri percubaan, ia mungkin tidak berfungsi seperti yang diharapkan dan tertakluk kepada perubahan pada bila-bila masa.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "'Valves' berjaya dikemaskini",
|
||||
"variable": "pembolehubah",
|
||||
"variable to have them replaced with clipboard content.": "pembolehubah untuk ia digantikan dengan kandungan papan klip.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versi",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Standard forslag til ledetekster",
|
||||
"Default to 389 or 636 if TLS is enabled": "Velg 389 eller 636 som standard hvis TLS er aktivert",
|
||||
"Default to ALL": "Velg ALL som standard",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Standard brukerrolle",
|
||||
"Delete": "Slett",
|
||||
"Delete a model": "Slett en modell",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inkluder flagget --api når du kjører stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Inntast kommandoer",
|
||||
"Install from Github URL": "Installer fra GitHub-URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Øyeblikkelig automatisk sending etter taletranskripsjon",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Straff for opptreden",
|
||||
"Previous 30 days": "Siste 30 dager",
|
||||
"Previous 7 days": "Siste 7 dager",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilbilde",
|
||||
"Prompt": "Ledetekst",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Ledetekst (f.eks. Fortell meg noe morsomt om romerriket)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Ledetekst oppdatert",
|
||||
"Prompts": "Ledetekster",
|
||||
"Prompts Access": "Tilgang til ledetekster",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Hent {{searchValue}} fra Ollama.com",
|
||||
"Pull a model from Ollama.com": "Hent en modell fra Ollama.com",
|
||||
"Query Generation Prompt": "Ledetekst for genering av spørringer",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Tenker ...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Denne handlingen kan ikke angres. Vil du fortsette?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Dette sikrer at de verdifulle samtalene dine lagres sikkert i backend-databasen din. Takk!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Dette er en eksperimentell funksjon. Det er mulig den ikke fungerer som forventet, og den kan endres når som helst.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Ventilene er oppdatert",
|
||||
"variable": "variabel",
|
||||
"variable to have them replaced with clipboard content.": "variabel for å erstatte dem med utklippstavleinnhold.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versjon",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Version {{selectedVersion}} av {{totalVersions}}",
|
||||
"View Replies": "Vis svar",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Standaard Prompt Suggesties",
|
||||
"Default to 389 or 636 if TLS is enabled": "Standaard 389 of 636 als TLS is ingeschakeld",
|
||||
"Default to ALL": "Standaar op ALL",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Standaard gebruikersrol",
|
||||
"Delete": "Verwijderen",
|
||||
"Delete a model": "Verwijder een model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Voeg `--api` vlag toe bij het uitvoeren van stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Voer commando's in",
|
||||
"Install from Github URL": "Installeren vanaf Github-URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Direct automatisch verzenden na spraaktranscriptie",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Afgelopen 30 dagen",
|
||||
"Previous 7 days": "Afgelopen 7 dagen",
|
||||
"Private": "",
|
||||
"Profile Image": "Profielafbeelding",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (bv. Vertel me een leuke gebeurtenis over het Romeinse Rijk)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt succesvol bijgewerkt",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "Prompttoegang",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Haal \"{{searchValue}}\" uit Ollama.com",
|
||||
"Pull a model from Ollama.com": "Haal een model van Ollama.com",
|
||||
"Query Generation Prompt": "Vraaggeneratieprompt",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Thema",
|
||||
"Thinking...": "Aan het denken...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Deze actie kan niet ongedaan worden gemaakt. Wilt u doorgaan?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Dit zorgt ervoor dat je waardevolle gesprekken veilig worden opgeslagen in je backend database. Dank je wel!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Dit is een experimentele functie, het kan functioneren zoals verwacht en kan op elk moment veranderen.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Kleppen succesvol bijgewerkt",
|
||||
"variable": "variabele",
|
||||
"variable to have them replaced with clipboard content.": "variabele om ze te laten vervangen door klembord inhoud.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versie",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versie {{selectedVersion}} van {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "ਮੂਲ ਪ੍ਰੰਪਟ ਸੁਝਾਅ",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "ਮੂਲ ਉਪਭੋਗਤਾ ਭੂਮਿਕਾ",
|
||||
"Delete": "ਮਿਟਾਓ",
|
||||
"Delete a model": "ਇੱਕ ਮਾਡਲ ਮਿਟਾਓ",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "ਸਟੇਬਲ-ਡਿਫਿਊਸ਼ਨ-ਵੈਬਯੂਆਈ ਚਲਾਉਣ ਸਮੇਂ `--api` ਝੰਡਾ ਸ਼ਾਮਲ ਕਰੋ",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "ਜਾਣਕਾਰੀ",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "ਇਨਪੁਟ ਕਮਾਂਡਾਂ",
|
||||
"Install from Github URL": "Github URL ਤੋਂ ਇੰਸਟਾਲ ਕਰੋ",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "ਪਿਛਲੇ 30 ਦਿਨ",
|
||||
"Previous 7 days": "ਪਿਛਲੇ 7 ਦਿਨ",
|
||||
"Private": "",
|
||||
"Profile Image": "ਪ੍ਰੋਫਾਈਲ ਚਿੱਤਰ",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "ਪ੍ਰੰਪਟ (ਉਦਾਹਰਣ ਲਈ ਮੈਨੂੰ ਰੋਮਨ ਸਾਮਰਾਜ ਬਾਰੇ ਇੱਕ ਮਜ਼ੇਦਾਰ ਤੱਥ ਦੱਸੋ)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "ਪ੍ਰੰਪਟ",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "ਓਲਾਮਾ.ਕਾਮ ਤੋਂ \"{{searchValue}}\" ਖਿੱਚੋ",
|
||||
"Pull a model from Ollama.com": "ਓਲਾਮਾ.ਕਾਮ ਤੋਂ ਇੱਕ ਮਾਡਲ ਖਿੱਚੋ",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "ਥੀਮ",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "ਇਹ ਯਕੀਨੀ ਬਣਾਉਂਦਾ ਹੈ ਕਿ ਤੁਹਾਡੀਆਂ ਕੀਮਤੀ ਗੱਲਾਂ ਤੁਹਾਡੇ ਬੈਕਐਂਡ ਡਾਟਾਬੇਸ ਵਿੱਚ ਸੁਰੱਖਿਅਤ ਤੌਰ 'ਤੇ ਸੰਭਾਲੀਆਂ ਗਈਆਂ ਹਨ। ਧੰਨਵਾਦ!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "ਵੈਰੀਏਬਲ",
|
||||
"variable to have them replaced with clipboard content.": "ਕਲਿੱਪਬੋਰਡ ਸਮੱਗਰੀ ਨਾਲ ਬਦਲਣ ਲਈ ਵੈਰੀਏਬਲ।",
|
||||
"Verify Connection": "",
|
||||
"Version": "ਵਰਜਨ",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Domyślne propozycje wpisów",
|
||||
"Default to 389 or 636 if TLS is enabled": "Domyślnie użyj 389 lub 636, jeśli TLS jest włączony",
|
||||
"Default to ALL": "Domyślne dla wszystkich",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Domyślna rola użytkownika",
|
||||
"Delete": "Usuń",
|
||||
"Delete a model": "Usuń model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Użyj flagi `--api` podczas uruchamiania stable-diffusion-webui.",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informacje",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Wprowadź polecenia",
|
||||
"Install from Github URL": "Instalacja z adresu URL serwisu Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Automatyczne natychmiastowe wysyłanie po transkrypcji głosowej",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "Kara za obecność",
|
||||
"Previous 30 days": "Ostatnie 30 dni",
|
||||
"Previous 7 days": "Ostatnie 7 dni",
|
||||
"Private": "",
|
||||
"Profile Image": "Zdjęcie profilowe",
|
||||
"Prompt": "Wprowadź podpowiedź: ",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (np. podaj ciekawostkę o Imperium Rzymskim)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Podpowiedź została zaktualizowana pomyślnie.",
|
||||
"Prompts": "Podpowiedzi",
|
||||
"Prompts Access": "Dostęp do podpowiedzi",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Pobierz \"{{searchValue}}\" z Ollama.com",
|
||||
"Pull a model from Ollama.com": "Pobierz model z Ollama.com",
|
||||
"Query Generation Prompt": "Podpowiedź do generowania zapytań",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Motyw",
|
||||
"Thinking...": "Myślę...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Czy na pewno chcesz kontynuować? Ta akcja nie może zostać cofnięta.",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "To gwarantuje, że Twoje wartościowe rozmowy są bezpiecznie zapisywane w bazie danych backendowej. Dziękujemy!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "To jest funkcja eksperymentalna, może nie działać zgodnie z oczekiwaniami i jest podatna na zmiany w dowolnym momencie.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Zawory zaktualizowane pomyślnie",
|
||||
"variable": "zmienna",
|
||||
"variable to have them replaced with clipboard content.": "Zmienna, która ma zostać zastąpiona zawartością schowka.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Wersja",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Wersja {{selectedVersion}} z {{totalVersions}}",
|
||||
"View Replies": "Wyświetl odpowiedzi",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Sugestões de Prompt Padrão",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "Padrão para TODOS",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Padrão para novos usuários",
|
||||
"Delete": "Excluir",
|
||||
"Delete a model": "Excluir um modelo",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Incluir a flag `--api` ao executar stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informação",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Comandos de entrada",
|
||||
"Install from Github URL": "Instalar da URL do Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Envio Automático Instantâneo Após Transcrição de Voz",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Últimos 30 dias",
|
||||
"Previous 7 days": "Últimos 7 dias",
|
||||
"Private": "",
|
||||
"Profile Image": "Imagem de Perfil",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (por exemplo, Diga-me um fato divertido sobre o Império Romano)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt atualizado com sucesso",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "Acessar prompts",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Obter \"{{searchValue}}\" de Ollama.com",
|
||||
"Pull a model from Ollama.com": "Obter um modelo de Ollama.com",
|
||||
"Query Generation Prompt": "Prompt de Geração de Consulta",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Pensando...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Esta ação não pode ser desfeita. Você deseja continuar?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Isso garante que suas conversas valiosas sejam salvas com segurança no banco de dados do backend. Obrigado!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Esta é uma funcionalidade experimental, pode não funcionar como esperado e está sujeita a alterações a qualquer momento.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Válvulas atualizadas com sucesso",
|
||||
"variable": "variável",
|
||||
"variable to have them replaced with clipboard content.": "variável para ser substituída pelo conteúdo da área de transferência.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versão",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versão {{selectedVersion}} de {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Sugestões de Prompt Padrão",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Função de Utilizador Padrão",
|
||||
"Delete": "Apagar",
|
||||
"Delete a model": "Apagar um modelo",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inclua a flag `--api` ao executar stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informação",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Comandos de entrada",
|
||||
"Install from Github URL": "Instalar a partir do URL do Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Enviar automaticamente depois da transcrição da voz",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Últimos 30 dias",
|
||||
"Previous 7 days": "Últimos 7 dias",
|
||||
"Private": "",
|
||||
"Profile Image": "Imagem de Perfil",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (ex.: Dê-me um facto divertido sobre o Império Romano)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompts",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Puxar \"{{searchValue}}\" do Ollama.com",
|
||||
"Pull a model from Ollama.com": "Puxar um modelo do Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "A pensar...",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Isto garante que suas conversas valiosas sejam guardadas com segurança na sua base de dados de backend. Obrigado!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Isto é um recurso experimental, pode não funcionar conforme o esperado e está sujeito a alterações a qualquer momento.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "variável",
|
||||
"variable to have them replaced with clipboard content.": "variável para que sejam substituídos pelo conteúdo da área de transferência.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versão",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Sugestii de Prompt Implicite",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Rolul Implicit al Utilizatorului",
|
||||
"Delete": "Șterge",
|
||||
"Delete a model": "Șterge un model",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Includeți flag-ul `--api` când rulați stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Informații",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Comenzi de intrare",
|
||||
"Install from Github URL": "Instalează de la URL-ul Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Trimitere Automată Instantanee După Transcrierea Vocii",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Ultimele 30 de zile",
|
||||
"Previous 7 days": "Ultimele 7 zile",
|
||||
"Private": "",
|
||||
"Profile Image": "Imagine de Profil",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (de ex. Spune-mi un fapt amuzant despre Imperiul Roman)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompturi",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Extrage \"{{searchValue}}\" de pe Ollama.com",
|
||||
"Pull a model from Ollama.com": "Extrage un model de pe Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Temă",
|
||||
"Thinking...": "Gândește...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Această acțiune nu poate fi anulată. Doriți să continuați?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Acest lucru asigură că conversațiile dvs. valoroase sunt salvate în siguranță în baza de date a backend-ului dvs. Mulțumim!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Aceasta este o funcție experimentală, poate să nu funcționeze așa cum vă așteptați și este supusă schimbării în orice moment.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Valve actualizate cu succes",
|
||||
"variable": "variabilă",
|
||||
"variable to have them replaced with clipboard content.": "variabilă pentru a fi înlocuite cu conținutul clipboard-ului.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Versiune",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Versiunea {{selectedVersion}} din {{totalVersions}}",
|
||||
"View Replies": "Vezi răspunsurile",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Предложения промптов по умолчанию",
|
||||
"Default to 389 or 636 if TLS is enabled": "По умолчанию 389 или 636, если TLS включен.",
|
||||
"Default to ALL": "По умолчанию ВСЕ",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Роль пользователя по умолчанию",
|
||||
"Delete": "Удалить",
|
||||
"Delete a model": "Удалить модель",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Добавьте флаг `--api` при запуске stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Информация",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Введите команды",
|
||||
"Install from Github URL": "Установка с URL-адреса Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Мгновенная автоматическая отправка после расшифровки голоса",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Предыдущие 30 дней",
|
||||
"Previous 7 days": "Предыдущие 7 дней",
|
||||
"Private": "",
|
||||
"Profile Image": "Изображение профиля",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Промпт (например, Расскажи мне интересный факт о Римской империи)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Промпты",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Загрузить \"{{searchValue}}\" с Ollama.com",
|
||||
"Pull a model from Ollama.com": "Загрузить модель с Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Тема",
|
||||
"Thinking...": "Думаю...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Это действие нельзя отменить. Вы хотите продолжить?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Это обеспечивает сохранение ваших ценных разговоров в безопасной базе данных на вашем сервере. Спасибо!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Это экспериментальная функция, она может работать не так, как ожидалось, и может быть изменена в любое время.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Вентили успешно обновлены",
|
||||
"variable": "переменная",
|
||||
"variable to have them replaced with clipboard content.": "переменную, чтобы заменить их содержимым буфера обмена.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Версия",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Predvolené návrhy promptov",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Predvolená rola užívateľa",
|
||||
"Delete": "Odstrániť",
|
||||
"Delete a model": "Odstrániť model.",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Pri spustení stable-diffusion-webui zahrňte príznak `--api`.",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Info",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Vstupné príkazy",
|
||||
"Install from Github URL": "Inštalácia z URL adresy Githubu",
|
||||
"Instant Auto-Send After Voice Transcription": "Okamžité automatické odoslanie po prepisu hlasu",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Predchádzajúcich 30 dní",
|
||||
"Previous 7 days": "Predchádzajúcich 7 dní",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilový obrázok",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (napr. Povedz mi zábavnú skutočnosť o Rímskej ríši)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompty",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Stiahnite \"{{searchValue}}\" z Ollama.com",
|
||||
"Pull a model from Ollama.com": "Stiahnite model z Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Téma",
|
||||
"Thinking...": "Premýšľam...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Túto akciu nie je možné vrátiť späť. Prajete si pokračovať?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Týmto je zaistené, že vaše cenné konverzácie sú bezpečne uložené vo vašej backendovej databáze. Ďakujeme!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Toto je experimentálna funkcia, nemusí fungovať podľa očakávania a môže byť kedykoľvek zmenená.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Ventily boli úspešne aktualizované.",
|
||||
"variable": "premenná",
|
||||
"variable to have them replaced with clipboard content.": "premennú, aby bol ich obsah nahradený obsahom schránky.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Verzia",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Verzia {{selectedVersion}} z {{totalVersions}}",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Подразумевани предлози упита",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Подразумевана улога корисника",
|
||||
"Delete": "Обриши",
|
||||
"Delete a model": "Обриши модел",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Укључи `--api` заставицу при покретању stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Инфо",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Унеси наредбе",
|
||||
"Install from Github URL": "Инсталирај из Гитхуб УРЛ адресе",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Претходних 30 дана",
|
||||
"Previous 7 days": "Претходних 7 дана",
|
||||
"Private": "",
|
||||
"Profile Image": "Слика профила",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Упит (нпр. „подели занимљивост о Римском царству“)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Упит измењен успешно",
|
||||
"Prompts": "Упити",
|
||||
"Prompts Access": "Приступ упитима",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Повуците \"{{searchValue}}\" са Ollama.com",
|
||||
"Pull a model from Ollama.com": "Повуците модел са Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Тема",
|
||||
"Thinking...": "Размишљам...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Ова радња се не може опозвати. Да ли желите наставити?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Ово осигурава да су ваши вредни разговори безбедно сачувани у вашој бекенд бази података. Хвала вам!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Вентили успешно ажурирани",
|
||||
"variable": "променљива",
|
||||
"variable to have them replaced with clipboard content.": "променљива за замену са садржајем оставе.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Издање",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "Погледај одговоре",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Standardinstruktionsförslag",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Standardanvändarroll",
|
||||
"Delete": "Radera",
|
||||
"Delete a model": "Ta bort en modell",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Inkludera flaggan `--api` när du kör stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Information",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Indatakommandon",
|
||||
"Install from Github URL": "Installera från Github-URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Skicka automatiskt efter rösttranskribering",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Föregående 30 dagar",
|
||||
"Previous 7 days": "Föregående 7 dagar",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilbild",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Instruktion (t.ex. Berätta en kuriosa om Romerska Imperiet)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Instruktioner",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ladda ner \"{{searchValue}}\" från Ollama.com",
|
||||
"Pull a model from Ollama.com": "Ladda ner en modell från Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Tänker...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Denna åtgärd kan inte ångras. Vill du fortsätta?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Detta säkerställer att dina värdefulla samtal sparas säkert till din backend-databas. Tack!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Detta är en experimentell funktion som kanske inte fungerar som förväntat och som kan komma att ändras när som helst.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "variabel",
|
||||
"variable to have them replaced with clipboard content.": "variabel för att få dem ersatta med urklippsinnehåll.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Version {{selectedVersion}} av {{totalVersions}}",
|
||||
"View Replies": "Se svar",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "คำแนะนำพรอมต์ค่าเริ่มต้น",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "บทบาทผู้ใช้ค่าเริ่มต้น",
|
||||
"Delete": "ลบ",
|
||||
"Delete a model": "ลบโมเดล",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "รวมแฟลก `--api` เมื่อเรียกใช้ stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "ข้อมูล",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "คำสั่งป้อนข้อมูล",
|
||||
"Install from Github URL": "ติดตั้งจาก URL ของ Github",
|
||||
"Instant Auto-Send After Voice Transcription": "ส่งอัตโนมัติทันทีหลังจากการถอดเสียง",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 วันที่ผ่านมา",
|
||||
"Previous 7 days": "7 วันที่ผ่านมา",
|
||||
"Private": "",
|
||||
"Profile Image": "รูปโปรไฟล์",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "พรอมต์ (เช่น บอกข้อเท็จจริงที่น่าสนุกเกี่ยวกับจักรวรรดิโรมัน)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "พรอมต์",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "ธีม",
|
||||
"Thinking...": "กำลังคิด...",
|
||||
"This action cannot be undone. Do you wish to continue?": "การกระทำนี้ไม่สามารถย้อนกลับได้ คุณต้องการดำเนินการต่อหรือไม่?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "สิ่งนี้ทำให้มั่นใจได้ว่าการสนทนาที่มีค่าของคุณจะถูกบันทึกอย่างปลอดภัยในฐานข้อมูลแบ็กเอนด์ของคุณ ขอบคุณ!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "นี่เป็นฟีเจอร์ทดลอง อาจไม่ทำงานตามที่คาดไว้และอาจมีการเปลี่ยนแปลงได้ตลอดเวลา",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "อัปเดตวาล์วเรียบร้อยแล้ว",
|
||||
"variable": "ตัวแปร",
|
||||
"variable to have them replaced with clipboard content.": "ตัวแปรเพื่อให้แทนที่ด้วยเนื้อหาคลิปบอร์ด",
|
||||
"Verify Connection": "",
|
||||
"Version": "เวอร์ชัน",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "",
|
||||
"Delete": "",
|
||||
"Delete a model": "",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "",
|
||||
"Pull a model from Ollama.com": "",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "",
|
||||
"variable": "",
|
||||
"variable to have them replaced with clipboard content.": "",
|
||||
"Verify Connection": "",
|
||||
"Version": "",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Varsayılan Prompt Önerileri",
|
||||
"Default to 389 or 636 if TLS is enabled": "TLS etkinse 389 veya 636'ya varsayılan olarak",
|
||||
"Default to ALL": "TÜMÜ'nü varsayılan olarak",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Varsayılan Kullanıcı Rolü",
|
||||
"Delete": "Sil",
|
||||
"Delete a model": "Bir modeli sil",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webui çalıştırılırken `--api` bayrağını dahil edin",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Bilgi",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Giriş komutları",
|
||||
"Install from Github URL": "Github URL'sinden yükleyin",
|
||||
"Instant Auto-Send After Voice Transcription": "Ses Transkripsiyonundan Sonra Anında Otomatik Gönder",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Önceki 30 gün",
|
||||
"Previous 7 days": "Önceki 7 gün",
|
||||
"Private": "",
|
||||
"Profile Image": "Profil Fotoğrafı",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (örn. Roma İmparatorluğu hakkında ilginç bir bilgi verin)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Prompt başarıyla güncellendi",
|
||||
"Prompts": "Promptlar",
|
||||
"Prompts Access": "Promptlara Erişim",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com'dan \"{{searchValue}}\" çekin",
|
||||
"Pull a model from Ollama.com": "Ollama.com'dan bir model çekin",
|
||||
"Query Generation Prompt": "Sorgu Oluşturma Promptu",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "Düşünüyor...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Bu eylem geri alınamaz. Devam etmek istiyor musunuz?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Bu, önemli konuşmalarınızın güvenli bir şekilde arkayüz veritabanınıza kaydedildiğini garantiler. Teşekkür ederiz!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Bu deneysel bir özelliktir, beklendiği gibi çalışmayabilir ve her an değişiklik yapılabilir.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Valvler başarıyla güncellendi",
|
||||
"variable": "değişken",
|
||||
"variable to have them replaced with clipboard content.": "panodaki içerikle değiştirilmesi için değişken.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Sürüm",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Sürüm {{selectedVersion}} / {{totalVersions}}",
|
||||
"View Replies": "Yanıtları Görüntüle",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Пропозиції промтів замовчуванням",
|
||||
"Default to 389 or 636 if TLS is enabled": "За замовчуванням використовується 389 або 636, якщо TLS увімкнено.",
|
||||
"Default to ALL": "За замовчуванням — ВСІ.",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Роль користувача за замовчуванням",
|
||||
"Delete": "Видалити",
|
||||
"Delete a model": "Видалити модель",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Включіть прапор `--api` при запуску stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Інфо",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Команди вводу",
|
||||
"Install from Github URL": "Встановіть з URL-адреси Github",
|
||||
"Instant Auto-Send After Voice Transcription": "Миттєва автоматична відправка після транскрипції голосу",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "Попередні 30 днів",
|
||||
"Previous 7 days": "Попередні 7 днів",
|
||||
"Private": "",
|
||||
"Profile Image": "Зображення профілю",
|
||||
"Prompt": "Підказка",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Підказка (напр., розкажіть мені цікавий факт про Римську імперію)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "Підказку успішно оновлено",
|
||||
"Prompts": "Промти",
|
||||
"Prompts Access": "Доступ до підказок",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Завантажити \"{{searchValue}}\" з Ollama.com",
|
||||
"Pull a model from Ollama.com": "Завантажити модель з Ollama.com",
|
||||
"Query Generation Prompt": "Підказка для генерації запиту",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Тема",
|
||||
"Thinking...": "Думаю...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Цю дію не можна скасувати. Ви бажаєте продовжити?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Це забезпечує збереження ваших цінних розмов у безпечному бекенд-сховищі. Дякуємо!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Це експериментальна функція, вона може працювати не так, як очікувалося, і може бути змінена в будь-який час.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Клапани успішно оновлено",
|
||||
"variable": "змінна",
|
||||
"variable to have them replaced with clipboard content.": "змінна, щоб замінити їх вмістом буфера обміну.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Версія",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "Версія {{selectedVersion}} з {{totalVersions}}",
|
||||
"View Replies": "Переглянути відповіді",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "ڈیفالٹ پرامپٹ تجاویز",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "ڈیفالٹ صارف کا کردار",
|
||||
"Delete": "حذف کریں",
|
||||
"Delete a model": "ایک ماڈل حذف کریں",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "اسٹیبل-ڈیفیوژن-ویب یو آئی چلانے کے دوران `--api` فلیگ شامل کریں",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "معلومات",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "کمانڈز داخل کریں",
|
||||
"Install from Github URL": "گِٹ حب یو آر ایل سے انسٹال کریں",
|
||||
"Instant Auto-Send After Voice Transcription": "آواز کی نقل کے بعد فوری خودکار بھیجنا",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "پچھلے 30 دن",
|
||||
"Previous 7 days": "پچھلے 7 دن",
|
||||
"Private": "",
|
||||
"Profile Image": "پروفائل تصویر",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "سوال کریں (مثلاً: مجھے رومن سلطنت کے بارے میں کوئی دلچسپ حقیقت بتائیں)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "پرومپٹس",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Ollama.com سے \"{{searchValue}}\" کو کھینچیں",
|
||||
"Pull a model from Ollama.com": "Ollama.com سے ماڈل حاصل کریں",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "تھیم",
|
||||
"Thinking...": "سوچ رہا ہے...",
|
||||
"This action cannot be undone. Do you wish to continue?": "یہ عمل واپس نہیں کیا جا سکتا کیا آپ جاری رکھنا چاہتے ہیں؟",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "یہ یقینی بناتا ہے کہ آپ کی قیمتی گفتگو محفوظ طریقے سے آپ کے بیک اینڈ ڈیٹا بیس میں محفوظ کی گئی ہیں شکریہ!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "یہ ایک تجرباتی خصوصیت ہے، یہ متوقع طور پر کام نہ کر سکتی ہو اور کسی بھی وقت تبدیل کی جا سکتی ہے",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "والو کامیابی کے ساتھ اپ ڈیٹ ہو گئے",
|
||||
"variable": "متغیر",
|
||||
"variable to have them replaced with clipboard content.": "انہیں کلپ بورڈ کے مواد سے تبدیل کرنے کے لیے متغیر",
|
||||
"Verify Connection": "",
|
||||
"Version": "ورژن",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "ورژن {{selectedVersion}} کا {{totalVersions}} میں سے",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "Đề xuất prompt mặc định",
|
||||
"Default to 389 or 636 if TLS is enabled": "",
|
||||
"Default to ALL": "",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "Vai trò mặc định",
|
||||
"Delete": "Xóa",
|
||||
"Delete a model": "Xóa mô hình",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Bao gồm flag `--api` khi chạy stable-diffusion-webui",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "",
|
||||
"Info": "Thông tin",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "Nhập các câu lệnh",
|
||||
"Install from Github URL": "Cài đặt từ Github URL",
|
||||
"Instant Auto-Send After Voice Transcription": "Tự động gửi ngay lập tức sau khi phiên dịch giọng nói",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "",
|
||||
"Previous 30 days": "30 ngày trước",
|
||||
"Previous 7 days": "7 ngày trước",
|
||||
"Private": "",
|
||||
"Profile Image": "Ảnh đại diện",
|
||||
"Prompt": "",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "Prompt (ví dụ: Hãy kể cho tôi một sự thật thú vị về Đế chế La Mã)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "",
|
||||
"Prompts": "Prompt",
|
||||
"Prompts Access": "",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "Tải \"{{searchValue}}\" từ Ollama.com",
|
||||
"Pull a model from Ollama.com": "Tải mô hình từ Ollama.com",
|
||||
"Query Generation Prompt": "",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "Chủ đề",
|
||||
"Thinking...": "Đang suy luận...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Hành động này không thể được hoàn tác. Bạn có muốn tiếp tục không?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Điều này đảm bảo rằng các nội dung chat có giá trị của bạn được lưu an toàn vào cơ sở dữ liệu backend của bạn. Cảm ơn bạn!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Đây là tính năng thử nghiệm, có thể không hoạt động như mong đợi và có thể thay đổi bất kỳ lúc nào.",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "Đã cập nhật Valves thành công",
|
||||
"variable": "biến",
|
||||
"variable to have them replaced with clipboard content.": "biến để có chúng được thay thế bằng nội dung clipboard.",
|
||||
"Verify Connection": "",
|
||||
"Version": "Version",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "",
|
||||
"View Replies": "",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "默认提示词建议",
|
||||
"Default to 389 or 636 if TLS is enabled": "如果启用 TLS,则默认为 389 或 636",
|
||||
"Default to ALL": "默认为 ALL",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "默认进行分段检索以提取重点和相关内容 (推荐)",
|
||||
"Default User Role": "默认用户角色",
|
||||
"Delete": "删除",
|
||||
"Delete a model": "删除一个模型",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "运行 stable-diffusion-webui 时包含 `--api` 参数",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "影响算法对生成文本反馈的响应速度。较低的学习率将导致调整更慢,而较高的学习率将使算法反应更灵敏。",
|
||||
"Info": "信息",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "注入整个内容作为上下文进行综合处理,适用于复杂查询",
|
||||
"Input commands": "输入命令",
|
||||
"Install from Github URL": "从 Github URL 安装",
|
||||
"Instant Auto-Send After Voice Transcription": "语音转录文字后即时自动发送",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "重复惩罚(Presence Penalty)",
|
||||
"Previous 30 days": "过去 30 天",
|
||||
"Previous 7 days": "过去 7 天",
|
||||
"Private": "私有",
|
||||
"Profile Image": "用户头像",
|
||||
"Prompt": "提示词 (Prompt)",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "提示(例如:给我讲一个关于罗马帝国的趣事。)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "提示词更新成功",
|
||||
"Prompts": "提示词",
|
||||
"Prompts Access": "访问提示词",
|
||||
"Public": "公共",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "从 Ollama.com 拉取 \"{{searchValue}}\"",
|
||||
"Pull a model from Ollama.com": "从 Ollama.com 拉取一个模型",
|
||||
"Query Generation Prompt": "查询生成提示词",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "主题",
|
||||
"Thinking...": "正在思考...",
|
||||
"This action cannot be undone. Do you wish to continue?": "此操作无法撤销。是否确认继续?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "此频道创建于{{createdAt}},这里是{{channelName}}频道的开始",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "这将确保您的宝贵对话被安全地保存到后台数据库中。感谢!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "这是一个实验功能,可能不会如预期那样工作,而且可能随时发生变化。",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "此选项控制刷新上下文时保留多少 Token。例如,如果设置为 2,则将保留对话上下文的最后 2 个 Token。保留上下文有助于保持对话的连续性,但可能会降低响应新主题的能力。",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "值更新成功",
|
||||
"variable": "变量",
|
||||
"variable to have them replaced with clipboard content.": "变量将被剪贴板内容替换。",
|
||||
"Verify Connection": "验证连接",
|
||||
"Version": "版本",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "版本 {{selectedVersion}}/{{totalVersions}}",
|
||||
"View Replies": "查看回复",
|
||||
|
@ -270,6 +270,7 @@
|
||||
"Default Prompt Suggestions": "預設提示詞建議",
|
||||
"Default to 389 or 636 if TLS is enabled": "如果啓用了 TLS 則預設為 389 或 636",
|
||||
"Default to ALL": "預設到所有",
|
||||
"Default to segmented retrieval for focused and relevant content extraction, this is recommended for most cases.": "",
|
||||
"Default User Role": "預設使用者角色",
|
||||
"Delete": "刪除",
|
||||
"Delete a model": "刪除模型",
|
||||
@ -583,6 +584,7 @@
|
||||
"Include `--api` flag when running stable-diffusion-webui": "執行 stable-diffusion-webui 時包含 `--api` 參數",
|
||||
"Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive.": "影響算法對生成文本回饋的反應速度。較低的學習率會導致調整速度較慢,而較高的學習率會使算法反應更靈敏。",
|
||||
"Info": "資訊",
|
||||
"Inject the entire content as context for comprehensive processing, this is recommended for complex queries.": "",
|
||||
"Input commands": "輸入命令",
|
||||
"Install from Github URL": "從 GitHub URL 安裝",
|
||||
"Instant Auto-Send After Voice Transcription": "語音轉錄後立即自動傳送",
|
||||
@ -806,6 +808,7 @@
|
||||
"Presence Penalty": "在場懲罰",
|
||||
"Previous 30 days": "過去 30 天",
|
||||
"Previous 7 days": "過去 7 天",
|
||||
"Private": "",
|
||||
"Profile Image": "個人檔案圖片",
|
||||
"Prompt": "提示詞",
|
||||
"Prompt (e.g. Tell me a fun fact about the Roman Empire)": "提示詞(例如:告訴我關於羅馬帝國的一些趣事)",
|
||||
@ -815,6 +818,7 @@
|
||||
"Prompt updated successfully": "提示詞更新成功",
|
||||
"Prompts": "提示詞",
|
||||
"Prompts Access": "提示詞存取",
|
||||
"Public": "",
|
||||
"Pull \"{{searchValue}}\" from Ollama.com": "從 Ollama.com 下載「{{searchValue}}」",
|
||||
"Pull a model from Ollama.com": "從 Ollama.com 下載模型",
|
||||
"Query Generation Prompt": "查詢生成提示詞",
|
||||
@ -1009,6 +1013,7 @@
|
||||
"Theme": "主題",
|
||||
"Thinking...": "正在思考...",
|
||||
"This action cannot be undone. Do you wish to continue?": "此操作無法復原。您確定要繼續進行嗎?",
|
||||
"This channel was created on {{createdAt}}. This is the very beginning of the {{channelName}} channel.": "",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "這確保您寶貴的對話會安全地儲存到您的後端資料庫。謝謝!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "這是一個實驗性功能,它可能無法如預期運作,並且可能會隨時變更。",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "此選項控制在刷新上下文時保留多少 token。例如,如果設定為 2,則會保留對話上下文的最後 2 個 token。保留上下文有助於保持對話的連貫性,但也可能降低對新主題的回應能力。",
|
||||
@ -1118,6 +1123,7 @@
|
||||
"Valves updated successfully": "閥門更新成功",
|
||||
"variable": "變數",
|
||||
"variable to have them replaced with clipboard content.": "變數,以便將其替換為剪貼簿內容。",
|
||||
"Verify Connection": "",
|
||||
"Version": "版本",
|
||||
"Version {{selectedVersion}} of {{totalVersions}}": "第 {{selectedVersion}} 版,共 {{totalVersions}} 版",
|
||||
"View Replies": "檢視回覆",