First version of the LLM DVM (with a replaceable backend)

This commit is contained in:
Believethehype 2023-12-11 19:41:00 +01:00
parent 331af99c89
commit a03efeab09
13 changed files with 153 additions and 15 deletions

View File

@@ -10,7 +10,8 @@ import tasks.textextraction_pdf as textextraction_pdf
import tasks.textextraction_google as textextraction_google
import tasks.translation_google as translation_google
import tasks.translation_libretranslate as translation_libretranslate
from tasks import imagegeneration_replicate_sdxl, videogeneration_replicate_svd, trending_notes_nostrband
from tasks import imagegeneration_replicate_sdxl, videogeneration_replicate_svd, trending_notes_nostrband, \
    textgeneration_llmlite
from utils.admin_utils import AdminConfig
from utils.backend_utils import keep_alive
@@ -136,6 +137,9 @@ def playground():
    bot_config.SUPPORTED_DVMS.append(trending)
    trending.run()
    ollama = textgeneration_llmlite.build_example("LLM", "llmlite", admin_config)
    bot_config.SUPPORTED_DVMS.append(ollama)
    ollama.run()

    # Run the bot
    Bot(bot_config)
    # Keep the main function alive for libraries that require it, like openai

View File

@@ -201,7 +201,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Advanced Nostr Search", "discovery_content_search", admin_config)
    dvm.run()

View File

@@ -130,7 +130,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Media Bringer", "media_converter", admin_config)
    dvm.run()

View File

@@ -224,8 +224,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Bygones", "discovery_inactive_follows", admin_config)
    dvm.run()

View File

@@ -170,8 +170,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Dall-E 3", "dalle3", admin_config)
    dvm.run()

View File

@@ -167,8 +167,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Stable Diffusion XL", "replicate_sdxl", admin_config)
    dvm.run()

View File

@@ -178,7 +178,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Transcriptor", "speech_recognition", admin_config)
    dvm.run()

View File

@@ -133,7 +133,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("PDF Extractor", "pdf_extractor", admin_config)
    dvm.run()

View File

@@ -0,0 +1,148 @@
import json
import os
from io import BytesIO
from pathlib import Path
import dotenv
import requests
from litellm import completion
from interfaces.dvmtaskinterface import DVMTaskInterface
from utils.admin_utils import AdminConfig
from utils.backend_utils import keep_alive
from utils.definitions import EventDefinitions
from utils.dvmconfig import DVMConfig
from utils.nip89_utils import NIP89Config, check_and_set_d_tag
from utils.nostr_utils import check_and_set_private_key
from utils.output_utils import upload_media_to_hoster
from utils.zap_utils import get_price_per_sat, check_and_set_ln_bits_keys
from nostr_sdk import Keys
"""
This File contains a Module to transform Text input on OpenAI's servers with DALLE-3 and receive results back.
Accepted Inputs: Prompt (text)
Outputs: An url to an Image
"""
class TextGenerationOLLAMA(DVMTaskInterface):
    KIND: int = EventDefinitions.KIND_NIP90_GENERATE_TEXT
    TASK: str = "text-to-text"
    FIX_COST: float = 0

    def __init__(self, name, dvm_config: DVMConfig, nip89config: NIP89Config,
                 admin_config: AdminConfig = None, options=None):
        super().__init__(name, dvm_config, nip89config, admin_config, options)
        if options is not None and options.get("server"):
            self.options["server"] = options["server"]

    def is_input_supported(self, tags):
        for tag in tags:
            if tag.as_vec()[0] == 'i':
                input_value = tag.as_vec()[1]
                input_type = tag.as_vec()[2]
                if input_type != "text":
                    return False
        return True

    def create_request_from_nostr_event(self, event, client=None, dvm_config=None):
        request_form = {"jobID": event.id().to_hex() + "_" + self.NAME.replace(" ", "")}
        prompt = ""
        model = "ollama/llama2-uncensored"  # or e.g. "ollama/nous-hermes"; this requires a running OLLAMA instance
        # model = "gpt-4-1106-preview"  # this will call ChatGPT and requires an OpenAI API key set in .env
        server = "http://localhost:11434"

        for tag in event.tags():
            if tag.as_vec()[0] == 'i':
                input_type = tag.as_vec()[2]
                if input_type == "text":
                    prompt = tag.as_vec()[1]

        options = {
            "prompt": prompt,
            "model": model,
            "server": server
        }
        request_form['options'] = json.dumps(options)
        return request_form

    def process(self, request_form):
        options = DVMTaskInterface.set_options(request_form)
        try:
            if options["model"].startswith("ollama"):
                response = completion(
                    model=options["model"],
                    messages=[{"content": options["prompt"], "role": "user"}],
                    api_base=options["server"]
                )
                print(response.choices[0].message.content)
                return response.choices[0].message.content
            else:
                response = completion(
                    model=options["model"],
                    messages=[{"content": options["prompt"], "role": "user"}],
                )
                print(response.choices[0].message.content)
                return response.choices[0].message.content
        except Exception as e:
            print("Error in Module: " + str(e))
            raise Exception(e)

# We build an example here that we can call by either calling this file directly from the main directory,
# or by adding it to our playground. You can call the example and adjust it to your needs or redefine it in the
# playground or elsewhere.
def build_example(name, identifier, admin_config):
    dvm_config = DVMConfig()
    dvm_config.PRIVATE_KEY = check_and_set_private_key(identifier)
    npub = Keys.from_sk_str(dvm_config.PRIVATE_KEY).public_key().to_bech32()
    invoice_key, admin_key, wallet_id, user_id, lnaddress = check_and_set_ln_bits_keys(identifier, npub)
    dvm_config.LNBITS_INVOICE_KEY = invoice_key
    dvm_config.LNBITS_ADMIN_KEY = admin_key  # The dvm might pay failed jobs back
    dvm_config.LNBITS_URL = os.getenv("LNBITS_HOST")
    admin_config.LUD16 = lnaddress

    nip90params = {
        "size": {
            "required": False,
            "values": ["1024:1024", "1024x1792", "1792x1024"]
        }
    }
    nip89info = {
        "name": name,
        "image": "https://image.nostr.build/c33ca6fc4cc038ca4adb46fdfdfda34951656f87ee364ef59095bae1495ce669.jpg",
        "about": "I use an LLM connected via OLLAMA",
        "encryptionSupported": True,
        "cashuAccepted": True,
        "nip90Params": nip90params
    }
    nip89config = NIP89Config()
    nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY,
                                           nip89info["image"])
    nip89config.CONTENT = json.dumps(nip89info)

    # We add an optional AdminConfig for this one, and tell the dvm to rebroadcast its NIP89
    return TextGenerationOLLAMA(name=name, dvm_config=dvm_config, nip89config=nip89config, admin_config=admin_config)

if __name__ == '__main__':
    env_path = Path('.env')
    if env_path.is_file():
        print(f'loading environment from {env_path.resolve()}')
        dotenv.load_dotenv(env_path, verbose=True, override=True)
    else:
        raise FileNotFoundError(f'.env file not found at {env_path} ')

    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    dvm = build_example("LLM", "llmlite", admin_config)
    dvm.run()
    keep_alive()
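
Because the completion call goes through litellm, the backend is replaceable by changing the model string set in create_request_from_nostr_event; no other code changes are needed. A minimal sketch, assuming an OPENAI_API_KEY entry in the same .env file the __main__ block already loads (litellm reads that variable for OpenAI models):

    # in create_request_from_nostr_event:
    # model = "ollama/llama2-uncensored"   # local backend, completion() is pointed at api_base=server
    model = "gpt-4-1106-preview"           # hosted backend, litellm routes the call using OPENAI_API_KEY from .env

Other providers supported by litellm should work the same way, as long as their credentials are present in the environment.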

View File

@@ -160,7 +160,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Google Translator", "google_translator", admin_config)
    dvm.run()

View File

@@ -161,7 +161,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Libre Translator", "libre_translator", admin_config)
    dvm.run()

View File

@@ -149,7 +149,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Trending Notes on Nostr.band", "trending_notes_nostrband", admin_config)
    dvm.run()

View File

@@ -156,7 +156,6 @@ if __name__ == '__main__':
    admin_config = AdminConfig()
    admin_config.REBROADCAST_NIP89 = False
    admin_config.UPDATE_PROFILE = False
    admin_config.LUD16 = ""
    dvm = build_example("Stable Video Diffusion", "replicate_svd", admin_config)
    dvm.run()