auto generate keys/dtags, auto update user profiles, sdimg2img

Believethehype
2023-11-30 15:30:49 +01:00
parent 023ba6d55f
commit 028c32f42d
12 changed files with 493 additions and 155 deletions

main.py

@@ -1,26 +1,24 @@
import os
import signal
import sys
import time
from pathlib import Path
from threading import Thread
import dotenv
from nostr_sdk import Keys
from bot.bot import Bot
from playground import build_pdf_extractor, build_googletranslator, build_unstable_diffusion, build_sketcher, \
build_dalle, \
build_whisperx, build_libretranslator, build_external_dvm, build_media_converter, build_inactive_follows_finder
build_whisperx, build_libretranslator, build_external_dvm, build_media_converter, build_inactive_follows_finder, \
build_image_converter
from utils.definitions import EventDefinitions
from utils.dvmconfig import DVMConfig
from utils.nostr_utils import check_and_set_private_key
def run_nostr_dvm_with_local_config():
# We will run an optional bot that can communicate with the DVMs
# Note this is very basic for now and still under development
bot_config = DVMConfig()
bot_config.PRIVATE_KEY = os.getenv("BOT_PRIVATE_KEY")
bot_config.PRIVATE_KEY = check_and_set_private_key("bot")
bot_config.LNBITS_INVOICE_KEY = os.getenv("LNBITS_INVOICE_KEY")
bot_config.LNBITS_ADMIN_KEY = os.getenv("LNBITS_ADMIN_KEY") # The bot will forward zaps for us, use responsibly
bot_config.LNBITS_URL = os.getenv("LNBITS_HOST")
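The new check_and_set_private_key helper comes from utils/nostr_utils.py, which is not part of this diff. Presumably it looks up a per-identity key in the .env file and, when none is stored yet, generates a fresh one and writes it back, which is what the commit title means by auto-generating keys. A minimal sketch under that assumption (the env-variable naming, key encoding and dotenv handling are guesses, not the actual implementation):

import os
import dotenv
from nostr_sdk import Keys

def check_and_set_private_key(identifier: str) -> str:
    # Hypothetical lookup scheme: e.g. BOT_PRIVATE_KEY, PDF_EXTRACTOR_PRIVATE_KEY, ...
    env_name = identifier.upper() + "_PRIVATE_KEY"
    existing = os.getenv(env_name)
    if existing is not None and existing != "":
        return existing
    # Nothing stored yet: generate a key and persist it to .env for the next run
    new_key = Keys.generate().secret_key().to_hex()
    dotenv.set_key(dotenv.find_dotenv(), env_name, new_key)
    return new_key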
@@ -29,38 +27,46 @@ def run_nostr_dvm_with_local_config():
# You can add arbitrary DVMs there and instantiate them here
# Spawn DVM1 Kind 5000: A local Text Extractor from PDFs
pdfextractor = build_pdf_extractor("PDF Extractor")
pdfextractor = build_pdf_extractor("PDF Extractor", "pdf_extractor")
# If we don't add it to the bot, the bot will not provide access to the DVM
pdfextractor.run()
# Spawn DVM2 Kind 5002: Local Text Translation (Google), calling the free Google API.
translator = build_googletranslator("Google Translator")
translator = build_googletranslator("Google Translator", "google_translator")
bot_config.SUPPORTED_DVMS.append(translator) # We add translator to the bot
translator.run()
# Spawn DVM3 Kind 5002: Local Text Translation (Libre), calling the free LibreTranslate API, as an alternative.
# This will only run and appear on the bot if an endpoint is set in the .env
if os.getenv("LIBRE_TRANSLATE_ENDPOINT") is not None and os.getenv("LIBRE_TRANSLATE_ENDPOINT") != "":
libre_translator = build_libretranslator("Libre Translator")
libre_translator = build_libretranslator("Libre Translator", "libre_translator")
bot_config.SUPPORTED_DVMS.append(libre_translator) # We add translator to the bot
libre_translator.run()
# Spawn DVM3 Kind 5100: Image Generation. This one uses a specific backend called nova-server.
# If you want to use it, see the instructions in backends/nova_server
if os.getenv("NOVA_SERVER") is not None and os.getenv("NOVA_SERVER") != "":
unstable_artist = build_unstable_diffusion("Unstable Diffusion")
unstable_artist = build_unstable_diffusion("Unstable Diffusion", "unstable_diffusion")
bot_config.SUPPORTED_DVMS.append(unstable_artist) # We add unstable Diffusion to the bot
unstable_artist.run()
# Spawn DVM4, another instance of text-to-image, as before, but using a different private key, model and LoRA this time.
if os.getenv("NOVA_SERVER") is not None and os.getenv("NOVA_SERVER") != "":
sketcher = build_sketcher("Sketcher")
sketcher = build_sketcher("Sketcher", "sketcher")
bot_config.SUPPORTED_DVMS.append(sketcher) # We also add Sketcher to the bot
sketcher.run()
# Spawn DVM5, image-to-image.
if os.getenv("NOVA_SERVER") is not None and os.getenv("NOVA_SERVER") != "":
imageconverter = build_image_converter("Image Converter Inkpunk", "image_converter_inkpunk")
bot_config.SUPPORTED_DVMS.append(imageconverter) # We also add the Image Converter to the bot
imageconverter.run()
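build_image_converter, like the other builders above, now takes a second identifier argument ("image_converter_inkpunk" here). The builders themselves live in playground.py and are not shown in this diff; presumably the identifier seeds the DVM's auto-generated key and doubles as the d tag of its NIP-89 announcement. A rough sketch under that assumption, using hypothetical names, not the real builder:

from utils.dvmconfig import DVMConfig
from utils.nostr_utils import check_and_set_private_key

def build_image_converter_sketch(name: str, identifier: str) -> DVMConfig:
    # Hypothetical illustration only; the real builder in playground.py wires up a
    # complete DVM that runs a stable-diffusion image-to-image (img2img) job on nova-server.
    # `name` would be the human-readable label used in the NIP-89 announcement content.
    dvm_config = DVMConfig()
    # The identifier seeds the DVM's own key, auto-generated and stored on first run,
    # and is a natural "d" tag for its kind-31990 (NIP-89) announcement, so a
    # re-announcement replaces the previous one instead of adding a duplicate.
    dvm_config.PRIVATE_KEY = check_and_set_private_key(identifier)
    return dvm_config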
# Spawn DVM5, another script on nova-server calling WhisperX to transcribe media files
if os.getenv("NOVA_SERVER") is not None and os.getenv("NOVA_SERVER") != "":
whisperer = build_whisperx("Whisperer")
whisperer = build_whisperx("Whisperer", "whisperx")
bot_config.SUPPORTED_DVMS.append(whisperer) # We also add the Whisperer to the bot
whisperer.run()
@@ -68,7 +74,7 @@ def run_nostr_dvm_with_local_config():
per call. Make sure you have enough balance and that the DVM's cost is set higher than what you pay yourself, unless, you know,
# you're being generous.
if os.getenv("OPENAI_API_KEY") is not None and os.getenv("OPENAI_API_KEY") != "":
dalle = build_dalle("Dall-E 3")
dalle = build_dalle("Dall-E 3", "dalle3")
bot_config.SUPPORTED_DVMS.append(dalle)
dalle.run()
@@ -82,7 +88,8 @@ def run_nostr_dvm_with_local_config():
tasktiger_external.SUPPORTS_ENCRYPTION = False # If the DVM does not support encrypted events, just send a regular event and mark it with a p tag. Other DVMs might initially answer
bot_config.SUPPORTED_DVMS.append(tasktiger_external)
# Don't run it, it's on someone else's machine and we simply make the bot aware of it.
# Don't run it, it's on someone else's machine, and we simply make the bot aware of it.
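As the SUPPORTS_ENCRYPTION comment above notes, a DVM that cannot handle encrypted requests gets a plain NIP-90 job event addressed to it with a p tag instead. Purely as an illustration of that event shape (the kind, input and pubkey below are example values, not taken from this diff):

import json
import time

# Example shape of an unencrypted NIP-90 job request. The "p" tag addresses one
# specific DVM; without encryption, anyone reading the relay can see the job.
plain_job_request = {
    "kind": 5002,                                # text translation, as used above
    "created_at": int(time.time()),
    "content": "",
    "tags": [
        ["i", "hello world", "text"],            # job input
        ["param", "language", "es"],             # task parameter: target language
        ["p", "<hex pubkey of the addressed DVM>"],
    ],
}
print(json.dumps(plain_job_request, indent=2))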
# DVM 8: Another external DVM for recommendations:
ymhm_external = build_external_dvm(name="External DVM: You might have missed",
@@ -95,16 +102,18 @@ def run_nostr_dvm_with_local_config():
bot_config.SUPPORTED_DVMS.append(ymhm_external)
# Spawn DVM9: A Media Grabber/Converter
media_bringer = build_media_converter("Media Bringer")
bot_config.SUPPORTED_DVMS.append(media_bringer) # We also add Sketcher to the bot
media_bringer = build_media_converter("Media Bringer", "media_converter")
bot_config.SUPPORTED_DVMS.append(media_bringer)
media_bringer.run()
#Spawn DVM10 Discover inactive followers
discover_inactive = build_inactive_follows_finder("Bygones")
bot_config.SUPPORTED_DVMS.append(discover_inactive) # We also add Sketcher to the bot
# Spawn DVM10 Discover inactive followers
discover_inactive = build_inactive_follows_finder("Bygones", "discovery_inactive_follows")
bot_config.SUPPORTED_DVMS.append(discover_inactive)
discover_inactive.run()
Bot(bot_config)
# Keep the main function alive for libraries that require it, like openai
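The keep-alive referred to in this comment is typically just a sleep loop that leaves the bot and DVM threads running until the process is interrupted. A minimal sketch using the modules already imported at the top of main.py (the exact loop in the file may differ):

try:
    while True:
        time.sleep(60)  # the bot and all DVMs run in their own threads
except KeyboardInterrupt:
    # Ctrl+C: ask the whole process, including worker threads, to shut down
    os.kill(os.getpid(), signal.SIGTERM)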