add popular tweets dvm

This commit is contained in:
Believethehype 2024-12-10 13:48:21 +01:00
parent bcfb3c29d1
commit d25746b286
5 changed files with 611 additions and 4 deletions

View File

@ -158,7 +158,7 @@ class DicoverContentCurrentlyPopularGallery(DVMTaskInterface):
ids_str).since(since)
dbopts = SyncOptions().direction(SyncDirection.DOWN)
await cli.sync(filtreactions, dbopts)
#await cli.sync(filtreactions, dbopts)
await cli.sync(filter_nip22, dbopts)
filter2 = Filter().ids(ids)

View File

@ -0,0 +1,331 @@
import json
import os
from datetime import timedelta
from nostr_sdk import Timestamp, Tag, Keys, Options, SecretKey, NostrSigner, NostrDatabase, \
ClientBuilder, Filter, SyncOptions, SyncDirection, init_logger, LogLevel, Kind
from nostr_dvm.interfaces.dvmtaskinterface import DVMTaskInterface, process_venv
from nostr_dvm.utils import definitions
from nostr_dvm.utils.admin_utils import AdminConfig
from nostr_dvm.utils.definitions import EventDefinitions
from nostr_dvm.utils.dvmconfig import DVMConfig, build_default_config
from nostr_dvm.utils.nip88_utils import NIP88Config, check_and_set_d_tag_nip88, check_and_set_tiereventid_nip88
from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag, create_amount_tag
from nostr_dvm.utils.output_utils import post_process_list_to_events
"""
This file contains a module to discover currently popular short notes ("tweets")
Accepted Inputs: none
Outputs: A list of events
Params: None
"""
class DicoverContentCurrentlyPopularTweets(DVMTaskInterface):
KIND: Kind = EventDefinitions.KIND_NIP90_CONTENT_DISCOVERY
TASK: str = "discover-content"
FIX_COST: float = 0
dvm_config: DVMConfig
request_form = None
last_schedule: int
min_reactions = 2
db_since = 10 * 3600
db_name = "db/nostr_default_recent_notes.db"
search_list = []
avoid_list = []
must_list = []
personalized = False
result = ""
database = None
async def init_dvm(self, name, dvm_config: DVMConfig, nip89config: NIP89Config, nip88config: NIP88Config = None,
admin_config: AdminConfig = None, options=None):
if dvm_config.DATABASE is not None:
self.database = dvm_config.DATABASE
self.request_form = {"jobID": "generic"}
opts = {
"max_results": 200,
}
self.request_form['options'] = json.dumps(opts)
dvm_config.SCRIPT = os.path.abspath(__file__)
if self.options.get("personalized"):
self.personalized = bool(self.options.get("personalized"))
self.last_schedule = Timestamp.now().as_secs()
if self.options.get("db_name"):
self.db_name = self.options.get("db_name")
if self.options.get("db_since"):
self.db_since = int(self.options.get("db_since"))
self.avoid_list = ["http", "nostr:nevent", "nostr:note"]
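# Notes whose content contains any of these substrings (links or quoted/embedded events) are
# skipped in calculate_result, so only plain short text notes qualify as "tweets".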
use_logger = False
if use_logger:
init_logger(LogLevel.DEBUG)
if self.dvm_config.UPDATE_DATABASE:
await self.sync_db()
if not self.personalized:
self.result = await self.calculate_result(self.request_form)
async def is_input_supported(self, tags, client=None, dvm_config=None):
for tag in tags:
if tag.as_vec()[0] == 'i':
input_value = tag.as_vec()[1]
input_type = tag.as_vec()[2]
if input_type != "text":
return False
return True
async def create_request_from_nostr_event(self, event, client=None, dvm_config=None):
self.dvm_config = dvm_config
request_form = {"jobID": event.id().to_hex()}
# default values
max_results = 200
user = event.author().to_hex()
for tag in event.tags().to_vec():
if tag.as_vec()[0] == 'i':
input_type = tag.as_vec()[2]
elif tag.as_vec()[0] == 'param':
param = tag.as_vec()[1]
if param == "max_results": # check for param type
max_results = int(tag.as_vec()[2])
elif param == "user": # check for param type
user = (tag.as_vec()[2])
options = {
"max_results": max_results,
"request_event_id": event.id().to_hex(),
"request_event_author": event.author().to_hex()
}
request_form['options'] = json.dumps(options)
self.request_form = request_form
return request_form
async def process(self, request_form):
# if the dvm supports individual results, recalculate it every time for the request
if self.personalized:
return await self.calculate_result(request_form)
# otherwise return the cached result that gets refreshed on every scheduled update; in this case, on each database update.
else:
return self.result
async def post_process(self, result, event):
"""Overwrite the interface function to return a social client readable format, if requested"""
for tag in event.tags().to_vec():
if tag.as_vec()[0] == 'output':
format = tag.as_vec()[1]
if format == "text/plain": # check for output type
result = post_process_list_to_events(result)
# if not text/plain, don't post-process
return result
async def calculate_result(self, request_form):
from nostr_sdk import Filter
from types import SimpleNamespace
ns = SimpleNamespace()
options = self.set_options(request_form)
if self.database is None:
self.database = NostrDatabase.lmdb(self.db_name)
timestamp_since = Timestamp.now().as_secs() - self.db_since
since = Timestamp.from_secs(timestamp_since)
filter1 = Filter().kind(definitions.EventDefinitions.KIND_NOTE).since(since)
events = await self.database.query([filter1])
if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events.to_vec())) + " Events")
ns.finallist = {}
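# Popularity scoring: for every short (< 211 characters) top-level note that passes the avoid_list
# filter, count the zaps, reactions, reposts and replies it received since `since`; notes with at
# least min_reactions are kept and later sorted by that count.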
for event in events.to_vec():
if len(event.content()) < 211:
# if any(ele in event.content().lower() for ele in self.search_list):
if not any(ele in event.content().lower() for ele in self.avoid_list):
# only look for top-level events, not replies
is_reply = False
for tag in event.tags().to_vec():
if tag.as_vec()[0] == 'e':
is_reply = True
if is_reply:
continue
filt = Filter().kinds(
[definitions.EventDefinitions.KIND_ZAP, definitions.EventDefinitions.KIND_REACTION,
definitions.EventDefinitions.KIND_REPOST,
definitions.EventDefinitions.KIND_NOTE]).event(event.id()).since(since)
reactions = await self.database.query([filt])
if len(reactions.to_vec()) >= self.min_reactions:
ns.finallist[event.id().to_hex()] = len(reactions.to_vec())
result_list = []
finallist_sorted = sorted(ns.finallist.items(), key=lambda x: x[1], reverse=True)[:int(options["max_results"])]
for entry in finallist_sorted:
# print(EventId.parse(entry[0]).to_bech32() + "/" + EventId.parse(entry[0]).to_hex() + ": " + str(entry[1]))
e_tag = Tag.parse(["e", entry[0]])
result_list.append(e_tag.as_vec())
if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
len(result_list)) + " fitting events.")
# await cli.shutdown()
return json.dumps(result_list)
async def schedule(self, dvm_config):
if dvm_config.SCHEDULE_UPDATES_SECONDS == 0:
return 0
else:
if Timestamp.now().as_secs() >= self.last_schedule + dvm_config.SCHEDULE_UPDATES_SECONDS:
if self.dvm_config.UPDATE_DATABASE:
await self.sync_db()
self.last_schedule = Timestamp.now().as_secs()
self.result = await self.calculate_result(self.request_form)
return 1
async def sync_db(self):
try:
sk = SecretKey.from_hex(self.dvm_config.PRIVATE_KEY)
keys = Keys.parse(sk.to_hex())
database = NostrDatabase.lmdb(self.db_name)
cli = ClientBuilder().signer(NostrSigner.keys(keys)).database(database).build()
for relay in self.dvm_config.SYNC_DB_RELAY_LIST:
await cli.add_relay(relay)
await cli.connect()
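# Sync strategy: reconcile (SyncDirection.DOWN) recent notes, reactions and zaps from the
# SYNC_DB_RELAY_LIST relays into the local LMDB database, then delete everything older than
# db_since so the database does not grow unbounded.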
timestamp_since = Timestamp.now().as_secs() - self.db_since
since = Timestamp.from_secs(timestamp_since)
filter1 = Filter().kinds(
[definitions.EventDefinitions.KIND_NOTE, definitions.EventDefinitions.KIND_REACTION,
definitions.EventDefinitions.KIND_ZAP]).since(since) # Notes, reactions, zaps
# filter = Filter().author(keys.public_key())
if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
self.db_since) + " seconds.. this might take a while..")
dbopts = SyncOptions().direction(SyncDirection.DOWN)
await cli.sync(filter1, dbopts)
await cli.database().delete(Filter().until(Timestamp.from_secs(
Timestamp.now().as_secs() - self.db_since))) # Clear old events so db doesn't get too full.
await cli.shutdown()
if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
print("[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(
self.db_since) + " seconds..")
except Exception as e:
print(e)
# We build an example here that can be run either by calling this file directly from the main directory,
# or by adding it to our playground. You can adjust the example to your needs or redefine it in the
# playground or elsewhere.
def build_example(name, identifier, admin_config, options, image, description, update_rate=600, cost=0,
processing_msg=None, update_db=True):
dvm_config = build_default_config(identifier)
dvm_config.USE_OWN_VENV = False
dvm_config.SHOWLOG = True
dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate # default 600 = every 10 minutes
dvm_config.UPDATE_DATABASE = update_db
# Activate these to use a subscription-based model instead
# dvm_config.SUBSCRIPTION_REQUIRED = True
# dvm_config.SUBSCRIPTION_DAILY_COST = 1
dvm_config.FIX_COST = cost
dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
admin_config.LUD16 = dvm_config.LN_ADDRESS
# Add NIP89
nip89info = {
"name": name,
"picture": image,
"about": description,
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": False,
"personalized": False,
"amount": create_amount_tag(cost),
"nip90Params": {
"max_results": {
"required": False,
"values": [],
"description": "The number of maximum results to return (default currently 100)"
}
}
}
nip89config = NIP89Config()
nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip89config.CONTENT = json.dumps(nip89info)
return DicoverContentCurrentlyPopularTweets(name=name, dvm_config=dvm_config, nip89config=nip89config,
admin_config=admin_config, options=options)
def build_example_subscription(name, identifier, admin_config, options, image, description, processing_msg=None,
update_db=True):
dvm_config = build_default_config(identifier)
dvm_config.USE_OWN_VENV = False
dvm_config.SHOWLOG = True
dvm_config.SCHEDULE_UPDATES_SECONDS = 600 # Every 10 minutes
dvm_config.UPDATE_DATABASE = update_db
# Activate these to use a subscription-based model instead
dvm_config.FIX_COST = 0
dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
admin_config.LUD16 = dvm_config.LN_ADDRESS
# Add NIP89
nip89info = {
"name": name,
"picture": image,
"about": description,
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": False,
"subscription": True,
"personalized": False,
"nip90Params": {
"max_results": {
"required": False,
"values": [],
"description": "The number of maximum results to return (default currently 100)"
}
}
}
nip89config = NIP89Config()
nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip89config.CONTENT = json.dumps(nip89info)
nip88config = NIP88Config()
nip88config.DTAG = check_and_set_d_tag_nip88(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip88config.TIER_EVENT = check_and_set_tiereventid_nip88(identifier, "1")
nip89config.NAME = name
nip88config.IMAGE = nip89info["picture"]
nip88config.TITLE = name
nip88config.AMOUNT_DAILY = 100
nip88config.AMOUNT_MONTHLY = 2000
nip88config.CONTENT = "Subscribe to the DVM for unlimited use during your subscription"
nip88config.PERK1DESC = "Unlimited requests"
nip88config.PERK2DESC = "Support NostrDVM & NostrSDK development"
nip88config.PAYMENT_VERIFIER_PUBKEY = "5b5c045ecdf66fb540bdf2049fe0ef7f1a566fa427a4fe50d400a011b65a3a7e"
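# NIP-88 subscription settings: tier event id, daily/monthly amounts (presumably denominated in
# sats) and the pubkey that verifies incoming subscription payments.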
# admin_config.FETCH_NIP88 = True
# admin_config.EVENTID = "63a791cdc7bf78c14031616963105fce5793f532bb231687665b14fb6d805fdb"
# admin_config.PRIVKEY = dvm_config.PRIVATE_KEY
return DicoverContentCurrentlyPopularTweets(name=name, dvm_config=dvm_config, nip89config=nip89config,
nip88config=nip88config,
admin_config=admin_config,
options=options)
if __name__ == '__main__':
process_venv(DicoverContentCurrentlyPopularTweets)

View File

@ -18,6 +18,7 @@ from nostr_dvm.tasks.content_discovery_currently_popular_gallery import DicoverC
from nostr_dvm.tasks.content_discovery_currently_popular_mostr import DicoverContentCurrentlyPopularMostr
from nostr_dvm.tasks.content_discovery_currently_popular_nonfollowers import DicoverContentCurrentlyPopularNonFollowers
from nostr_dvm.tasks.content_discovery_currently_popular_topic import DicoverContentCurrentlyPopularbyTopic
from nostr_dvm.tasks.content_discovery_currently_popular_tweets import DicoverContentCurrentlyPopularTweets
from nostr_dvm.tasks.content_discovery_latest_one_per_follower import Discoverlatestperfollower
from nostr_dvm.tasks.content_discovery_update_db_only import DicoverContentDBUpdateScheduler
from nostr_dvm.tasks.discovery_trending_notes_nostrband import TrendingNotesNostrBand
@ -104,6 +105,51 @@ def build_db_scheduler(name, identifier, admin_config, options, image, descripti
admin_config=admin_config, options=options)
def build_example_tweets(name, identifier, admin_config, options, image, description, update_rate=600, cost=0,
processing_msg=None, update_db=True, database=None):
dvm_config = build_default_config(identifier)
dvm_config.USE_OWN_VENV = False
dvm_config.SHOWLOG = True
dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate # default 600 = every 10 minutes
dvm_config.UPDATE_DATABASE = update_db
dvm_config.FIX_COST = cost
dvm_config.LOGLEVEL = LogLevel.INFO
dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
dvm_config.AVOID_OUTBOX_RELAY_LIST = AVOID_OUTBOX_RELAY_LIST
dvm_config.SYNC_DB_RELAY_LIST = SYNC_DB_RELAY_LIST
dvm_config.RELAY_LIST = RELAY_LIST
dvm_config.DATABASE = database
dvm_config.SEND_FEEDBACK_EVENTS = False
admin_config.LUD16 = dvm_config.LN_ADDRESS
# Add NIP89
nip89info = {
"name": name,
"picture": image,
"about": description,
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": dvm_config.ENABLE_NUTZAP,
"personalized": False,
"amount": create_amount_tag(cost),
"nip90Params": {
"max_results": {
"required": False,
"values": [],
"description": "The number of maximum results to return (default currently 100)"
}
}
}
nip89config = NIP89Config()
nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip89config.CONTENT = json.dumps(nip89info)
return DicoverContentCurrentlyPopularTweets(name=name, dvm_config=dvm_config, nip89config=nip89config,
admin_config=admin_config, options=options)
def build_example_gallery(name, identifier, admin_config, options, image, cost=0, update_rate=180, processing_msg=None,
update_db=True):
dvm_config = build_default_config(identifier)
@ -1105,9 +1151,42 @@ def playground():
update_db=update_db)
discovery_one_per_follow.run()
# Popular Tweets
admin_config = AdminConfig()
admin_config.REBROADCAST_NIP89 = rebroadcast_NIP89
admin_config.REBROADCAST_NIP65_RELAY_LIST = rebroadcast_NIP65_Relay_List
admin_config.UPDATE_PROFILE = update_profile
options = {
"db_name": "db/nostr_recent_notes.db",
"db_since": 24 * 60 * 60 * 4, # 48h since gmt,
"personalized": False,
"logger": False}
image = "https://image.nostr.build/53536b3eccb03fdb127849b79f85b0b6ecb241d12068b65f52afe4a4650d5318.jpg"
description = "I show popular tweets with < 210 symbols"
custom_processing_msg = ["Tweets are short notes"]
cost = 0
update_db = False # we use the DB scheduler above for a shared database; alternatively, let the DVM manage its own DB
discovery_tweets = build_example_tweets("Popular Tweets",
"discovery_content_tweets",
admin_config, options,
image=image,
description=description,
update_rate=global_update_rate,
cost=cost,
processing_msg=custom_processing_msg,
update_db=update_db,
database=DATABASE)
discovery_tweets.run()
# Popular Global
admin_config_global_popular = AdminConfig()
admin_config_global_popular.REBROADCAST_NIP89 = rebroadcast_NIP89
admin_config_global_popular.REBROADCAST_NIP89 = True
admin_config_global_popular.REBROADCAST_NIP65_RELAY_LIST = rebroadcast_NIP65_Relay_List
admin_config_global_popular.UPDATE_PROFILE = update_profile
# admin_config_global_popular.DELETE_NIP89 = True

View File

@ -9,7 +9,7 @@ from nostr_dvm.utils.admin_utils import AdminConfig
from nostr_dvm.utils.dvmconfig import build_default_config
from nostr_dvm.utils.nip89_utils import create_amount_tag, NIP89Config, check_and_set_d_tag
rebroadcast_NIP89 = False # Announce NIP89 on startup
rebroadcast_NIP89 = True # Announce NIP89 on startup
rebroadcast_NIP65_Relay_List = False
update_profile = False
@ -33,7 +33,7 @@ def build_example_gallery(name, identifier, admin_config, options, image, cost=0
nip89info = {
"name": name,
"picture": image,
"about": "I show popular notes from the Olas feed",
"about": "I show popular pictures from the Olas feed",
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": dvm_config.ENABLE_NUTZAP,

tests/discovery_tweets.py Normal file (197 additions)
View File

@ -0,0 +1,197 @@
import asyncio
import json
from pathlib import Path
import dotenv
from nostr_sdk import init_logger, LogLevel
# os.environ["RUST_BACKTRACE"] = "full"
from nostr_dvm.tasks.content_discovery_currently_popular_tweets import DicoverContentCurrentlyPopularTweets
from nostr_dvm.tasks.content_discovery_update_db_only import DicoverContentDBUpdateScheduler
from nostr_dvm.utils.admin_utils import AdminConfig
from nostr_dvm.utils.database_utils import init_db
from nostr_dvm.utils.dvmconfig import build_default_config
from nostr_dvm.utils.nip89_utils import create_amount_tag, NIP89Config, check_and_set_d_tag
from nostr_dvm.utils.outbox_utils import AVOID_OUTBOX_RELAY_LIST
rebroadcast_NIP89 = False # Announce NIP89 on startup. Only do this if you know what you're doing.
rebroadcast_NIP65_Relay_List = True
update_profile = False
global_update_rate = 180 # set this high on first sync so the db can fully sync before another process tries to.
use_logger = True
log_level = LogLevel.ERROR
max_sync_duration_in_h = 24
RELAY_LIST = ["wss://nostr.mom",
"wss://relay.primal.net",
"wss://nostr.oxtr.dev",
]
SYNC_DB_RELAY_LIST = ["wss://relay.damus.io",
"wss://relay.primal.net",
"wss://nostr.oxtr.dev"
]
if use_logger:
init_logger(log_level)
def build_db_scheduler(name, identifier, admin_config, options, image, description, update_rate=600, cost=0,
processing_msg=None, update_db=True, database=None):
dvm_config = build_default_config(identifier)
dvm_config.USE_OWN_VENV = False
dvm_config.SHOWLOG = True
dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate # default 600 = every 10 minutes
dvm_config.UPDATE_DATABASE = update_db
dvm_config.LOGLEVEL = LogLevel.INFO
dvm_config.SYNC_DB_RELAY_LIST = SYNC_DB_RELAY_LIST
dvm_config.RELAY_LIST = RELAY_LIST
dvm_config.DATABASE = database
dvm_config.WOT_FILTERING = True
dvm_config.FIX_COST = cost
dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
admin_config.LUD16 = dvm_config.LN_ADDRESS
# Add NIP89
nip89info = {
"name": name,
"picture": image,
"about": description,
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": dvm_config.ENABLE_NUTZAP,
"personalized": False,
"amount": create_amount_tag(cost),
"nip90Params": {
"max_results": {
"required": False,
"values": [],
"description": "The number of maximum results to return (default currently 100)"
}
}
}
nip89config = NIP89Config()
nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip89config.CONTENT = json.dumps(nip89info)
return DicoverContentDBUpdateScheduler(name=name, dvm_config=dvm_config, nip89config=nip89config,
admin_config=admin_config, options=options)
def build_example_tweets(name, identifier, admin_config, options, image, description, update_rate=600, cost=0,
processing_msg=None, update_db=True, database=None):
dvm_config = build_default_config(identifier)
dvm_config.USE_OWN_VENV = False
dvm_config.SHOWLOG = True
dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate # default 600 = every 10 minutes
dvm_config.UPDATE_DATABASE = update_db
dvm_config.FIX_COST = cost
dvm_config.LOGLEVEL = LogLevel.INFO
dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
dvm_config.AVOID_OUTBOX_RELAY_LIST = AVOID_OUTBOX_RELAY_LIST
dvm_config.SYNC_DB_RELAY_LIST = SYNC_DB_RELAY_LIST
dvm_config.RELAY_LIST = RELAY_LIST
dvm_config.DATABASE = database
dvm_config.SEND_FEEDBACK_EVENTS = False
admin_config.LUD16 = dvm_config.LN_ADDRESS
# Add NIP89
nip89info = {
"name": name,
"picture": image,
"about": description,
"lud16": dvm_config.LN_ADDRESS,
"supportsEncryption": True,
"acceptsNutZaps": dvm_config.ENABLE_NUTZAP,
"personalized": False,
"amount": create_amount_tag(cost),
"nip90Params": {
"max_results": {
"required": False,
"values": [],
"description": "The number of maximum results to return (default currently 100)"
}
}
}
nip89config = NIP89Config()
nip89config.DTAG = check_and_set_d_tag(identifier, name, dvm_config.PRIVATE_KEY, nip89info["picture"])
nip89config.CONTENT = json.dumps(nip89info)
return DicoverContentCurrentlyPopularTweets(name=name, dvm_config=dvm_config, nip89config=nip89config,
admin_config=admin_config, options=options)
def playground():
main_db = "db/nostr_recent_notes.db"
main_db_limit = 1024 # in mb
database = asyncio.run(init_db(main_db, wipe=True, limit=main_db_limit, print_filesize=True))
# DB Scheduler, do not announce, just use it to update the DB for the other DVMs.
admin_config_db_scheduler = AdminConfig()
options_db = {
"db_name": main_db,
"db_since": max_sync_duration_in_h * 60 * 60, # 48h since gmt,
"personalized": False,
"max_db_size": main_db_limit,
"logger": False}
image = ""
about = "I just update the Database based on my schedule"
db_scheduler = build_db_scheduler("DB Scheduler",
"db_scheduler",
admin_config_db_scheduler, options_db,
image=image,
description=about,
update_rate=global_update_rate,
cost=0,
update_db=True,
database=database)
db_scheduler.run()
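# The scheduler above keeps the shared LMDB database fresh, so the Popular Tweets DVM below runs
# with update_db=False and the same database handle and only reads from it.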
# Popular Tweets
admin_config = AdminConfig()
admin_config.REBROADCAST_NIP89 = rebroadcast_NIP89
admin_config.REBROADCAST_NIP65_RELAY_LIST = rebroadcast_NIP65_Relay_List
admin_config.UPDATE_PROFILE = update_profile
options = {
"db_name": "db/nostr_recent_notes.db",
"db_since": 24 * 60 * 60 * 4, # 48h since gmt,
"personalized": False,
"logger": False}
image = "https://image.nostr.build/53536b3eccb03fdb127849b79f85b0b6ecb241d12068b65f52afe4a4650d5318.jpg"
description = "I show popular tweets."
custom_processing_msg = ["Tweets are short notes"]
cost = 0
update_db = False # we use the DB scheduler above for a shared database; alternatively, let the DVM manage its own DB
discovery_tweets = build_example_tweets("Popular Tweets",
"discovery_content_tweets",
admin_config, options,
image=image,
description=description,
update_rate=global_update_rate,
cost=cost,
processing_msg=custom_processing_msg,
update_db=update_db,
database=database)
discovery_tweets.run()
if __name__ == '__main__':
env_path = Path('.env')
if not env_path.is_file():
with open('.env', 'w') as f:
print("Writing new .env file")
f.write('')
if env_path.is_file():
print(f'loading environment from {env_path.resolve()}')
dotenv.load_dotenv(env_path, verbose=True, override=True)
else:
raise FileNotFoundError(f'.env file not found at {env_path} ')
playground()