import json
import os
import re

from nostr_sdk import Tag

from nostr_dvm.interfaces.dvmtaskinterface import DVMTaskInterface, process_venv
from nostr_dvm.utils.admin_utils import AdminConfig
from nostr_dvm.utils.definitions import EventDefinitions
from nostr_dvm.utils.dvmconfig import DVMConfig, build_default_config
from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag
from nostr_dvm.utils.nostr_utils import get_referenced_event_by_id, get_events_by_ids, get_event_by_id

"""
This File contains a Module to summarize text, based on a prompt, using the Unleashed.chat API.

Accepted Inputs: Prompt (text)
Outputs: Generated text
"""


class SummarizationUnleashedChat(DVMTaskInterface):
    """NIP90 DVM task that summarizes text via the Unleashed.chat
    OpenAI-compatible API (dolphin-2.2.1-mistral-7b, streamed)."""

    KIND: int = EventDefinitions.KIND_NIP90_SUMMARIZE_TEXT
    TASK: str = "text-to-text"
    FIX_COST: float = 10
    dependencies = [("nostr-dvm", "nostr-dvm"),
                    ("openai", "openai")]

    def __init__(self, name, dvm_config: DVMConfig, nip89config: NIP89Config,
                 admin_config: AdminConfig = None, options=None):
        dvm_config.SCRIPT = os.path.abspath(__file__)
        super().__init__(name, dvm_config, nip89config, admin_config, options)

    def is_input_supported(self, tags, client=None, dvm_config=None):
        """Return True if every 'i' tag is of type text, event or job."""
        for tag in tags:
            if tag.as_vec()[0] == 'i':
                input_type = tag.as_vec()[2]
                if input_type != "event" and input_type != "job" and input_type != "text":
                    return False

        return True

    def create_request_from_nostr_event(self, event, client=None, dvm_config=None):
        """Collect the prompt text from the request's 'i' tags and build the request form.

        - "text" inputs are appended directly.
        - "event" inputs are collected and resolved in one batch at the end.
        - "job" inputs resolve a previous DVM result; a content-discovery result
          (a JSON list of e-tags) REPLACES the prompt with the referenced notes' content.
        URLs are stripped from the final prompt before it is stored.
        """
        request_form = {"jobID": event.id().to_hex() + "_" + self.NAME.replace(" ", "")}
        prompt = ""
        collect_events = []
        nostr_mode = True

        for tag in event.tags():
            if tag.as_vec()[0] == 'i':
                input_type = tag.as_vec()[2]
                if input_type == "text":
                    prompt += tag.as_vec()[1] + "\n"
                elif input_type == "event":
                    # Resolved in a single get_events_by_ids call below.
                    collect_events.append(tag.as_vec()[1])
                elif input_type == "job":
                    evt = get_referenced_event_by_id(event_id=tag.as_vec()[1], client=client,
                                                     kinds=[EventDefinitions.KIND_NIP90_RESULT_EXTRACT_TEXT,
                                                            EventDefinitions.KIND_NIP90_RESULT_SUMMARIZE_TEXT,
                                                            EventDefinitions.KIND_NIP90_RESULT_TRANSLATE_TEXT,
                                                            EventDefinitions.KIND_NIP90_RESULT_CONTENT_DISCOVERY],
                                                     dvm_config=dvm_config)
                    if evt is None:
                        print("Event not found")
                        raise Exception("Referenced job result event not found")

                    if evt.kind() == EventDefinitions.KIND_NIP90_RESULT_CONTENT_DISCOVERY:
                        # The result content is a JSON list of e-tags; fetch each
                        # referenced note. Use distinct names so the outer loop's
                        # `tag` / `evt` are not clobbered.
                        result_list = json.loads(evt.content())
                        prompt = ""
                        for result_tag in result_list:
                            e_tag = Tag.parse(result_tag)
                            referenced = get_event_by_id(e_tag.as_vec()[1], client=client, config=dvm_config)
                            prompt += referenced.content() + "\n"
                    else:
                        prompt = evt.content()

        evts = get_events_by_ids(collect_events, client=client, config=dvm_config)
        if evts is not None:
            for evt in evts:
                prompt += evt.content() + "\n"

        # URLs add noise to a summary, drop them.
        prompt = re.sub(r'http\S+', '', prompt)
        options = {
            "prompt": prompt,
            "nostr": nostr_mode,
        }
        request_form['options'] = json.dumps(options)

        return request_form

    def process(self, request_form):
        """Stream a summary of the collected prompt from Unleashed.chat and return it."""
        from openai import OpenAI

        options = DVMTaskInterface.set_options(request_form)

        unleashed_key = os.environ.get("UNLEASHED_API_KEY")
        if not unleashed_key:
            # Fail loudly instead of the opaque TypeError os.environ raises on None.
            raise Exception("UNLEASHED_API_KEY environment variable is not set")

        # The openai client reads OPENAI_API_KEY from the environment, so swap in
        # the Unleashed key and ALWAYS restore the previous state afterwards —
        # the original only restored it on the success path.
        previous_key = os.environ.get("OPENAI_API_KEY")
        os.environ["OPENAI_API_KEY"] = unleashed_key
        try:
            client = OpenAI(
                base_url='https://unleashed.chat/api/v1',
            )

            print('Models:\n')
            for model in client.models.list():
                print('- ' + model.id)

            # Truncate to keep the request within the model's context window.
            content = "Summarize the following notes: " + str(options["prompt"])[:4000]
            normal_stream = client.chat.completions.create(
                messages=[
                    {
                        'role': 'user',
                        'content': content,
                    }
                ],
                model='dolphin-2.2.1-mistral-7b',
                stream=True,
                extra_body={
                    'nostr_mode': options["nostr"],
                },
            )

            print('\nChat response: ', end='')

            result = ""
            for chunk in normal_stream:
                delta = chunk.choices[0].delta.content
                # Role-only and final stream chunks carry content=None; the
                # original `result += ...` raised TypeError on them.
                if delta:
                    result += delta
                    print(delta, end='')

            return result

        except Exception as e:
            print("Error in Module: " + str(e))
            raise
        finally:
            if previous_key is not None:
                os.environ["OPENAI_API_KEY"] = previous_key
            else:
                os.environ.pop("OPENAI_API_KEY", None)
# We build an example here that we can call by either calling this file directly from the main directory,
# or by adding it to our playground. You can call the example and adjust it to your needs or redefine it in the
# playground or elsewhere
def build_example(name, identifier, admin_config):
    """Build a configured SummarizationUnleashedChat instance for the playground."""
    dvm_config = build_default_config(identifier)
    dvm_config.SEND_FEEDBACK_EVENTS = True
    # Share the lightning address with the caller-supplied admin config.
    admin_config.LUD16 = dvm_config.LN_ADDRESS

    nip89info = {
        "name": name,
        "image": "https://unleashed.chat/_app/immutable/assets/hero.pehsu4x_.jpeg",
        "about": "I summarize Text with https://unleashed.chat",
        "encryptionSupported": True,
        "cashuAccepted": True,
        "nip90Params": {}
    }

    nip89config = NIP89Config()
    nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["image"])
    nip89config.CONTENT = json.dumps(nip89info)

    # The DVM itself runs with a fresh AdminConfig that does not rebroadcast NIP89;
    # the passed-in admin_config only receives the lightning address above.
    instance_admin_config = AdminConfig()
    instance_admin_config.REBROADCAST_NIP89 = False

    return SummarizationUnleashedChat(name=name, dvm_config=dvm_config,
                                      nip89config=nip89config, admin_config=instance_admin_config)


if __name__ == '__main__':
    process_venv(SummarizationUnleashedChat)
"@vuepic/vue-datepicker"; import {timestamp} from "@vueuse/core"; import {post_note, schedule, copyinvoice, copyurl, sleep, nextInput, get_user_infos, createBolt11Lud16} from "../components/helper/Helper.vue" +import StringUtil from "@/components/helper/string.ts"; @@ -391,7 +392,7 @@ const submitHandler = async () => {
{{ dvm.about }}
+ +