introduce log levels to reduce logging on demand, improve WoT

Believethehype
2024-06-13 10:49:09 +02:00
parent 78dd3ae94a
commit b9dc440d99
11 changed files with 184 additions and 99 deletions
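
The core of the commit is a verbosity gate: most print calls are now wrapped in a comparison against a LogLevel stored on the DVM config (dvm_config.LOGLEVEL), so chatty per-event output only appears when the configured level is high enough. Below is a minimal, self-contained sketch of that pattern; the Verbosity enum and the log_* helpers are illustrative stand-ins, not nostr-dvm code, and the real code compares nostr_sdk.LogLevel values directly as shown in the diffs that follow.

# Illustrative sketch of the gating pattern introduced below (not nostr-dvm code).
from enum import IntEnum


class Verbosity(IntEnum):  # hypothetical stand-in for nostr_sdk.LogLevel
    ERROR = 1
    INFO = 2
    DEBUG = 3


class ConfigSketch:
    LOGLEVEL = Verbosity.INFO  # builders below set this per DVM, e.g. to INFO


def log_debug(config, message):
    # Per-event details are only printed when DEBUG verbosity is configured.
    if config.LOGLEVEL.value >= Verbosity.DEBUG.value:
        print(message)


def log_info(config, message):
    if config.LOGLEVEL.value >= Verbosity.INFO.value:
        print(message)


cfg = ConfigSketch()
log_debug(cfg, "raw event json ...")   # suppressed at INFO
log_info(cfg, "Received new Request")  # still printed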

========== changed file 1 of 11 ==========

@@ -38,7 +38,6 @@ class DVM:
     def __init__(self, dvm_config, admin_config=None):
         asyncio.run(self.run_dvm(dvm_config, admin_config))

     async def run_dvm(self, dvm_config, admin_config):
@@ -57,7 +56,7 @@ class DVM:
         self.job_list = []
         self.jobs_on_hold_list = []
         pk = self.keys.public_key()
-        print(bcolors.GREEN + "[" + self.dvm_config.NIP89.NAME + "] " + "Nostr DVM public key: " + str(
+        print(bcolors.BLUE + "[" + self.dvm_config.NIP89.NAME + "] " + "Nostr DVM public key: " + str(
             pk.to_bech32()) + " Hex: " +
             str(pk.to_hex()) + " Supported DVM tasks: " +
             ', '.join(p.NAME + ":" + p.TASK for p in self.dvm_config.SUPPORTED_DVMS) + bcolors.ENDC)
@@ -82,7 +81,8 @@ class DVM:
         keys = self.keys

         async def handle(self, relay_url, subscription_id, nostr_event: Event):
-            print(nostr_event.as_json())
+            if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                print(nostr_event.as_json())
             if EventDefinitions.KIND_NIP90_EXTRACT_TEXT.as_u64() <= nostr_event.kind().as_u64() <= EventDefinitions.KIND_NIP90_GENERIC.as_u64():
                 await handle_nip90_job_event(nostr_event)
             elif nostr_event.kind().as_u64() == EventDefinitions.KIND_ZAP.as_u64():
@@ -110,7 +110,8 @@ class DVM:
                 p_tag_str = tag.as_vec()[1]
                 if p_tag_str != "" and p_tag_str != self.dvm_config.PUBLIC_KEY:
-                    print("[" + self.dvm_config.NIP89.NAME + "] No public request, also not addressed to me.")
+                    if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                        print("[" + self.dvm_config.NIP89.NAME + "] No public request, also not addressed to me.")
                     return

             # check if task is supported by the current DVM
@@ -126,9 +127,9 @@ class DVM:
                 await send_job_status_reaction(nip90_event, "error", client=self.client, dvm_config=self.dvm_config)
                 print("[" + self.dvm_config.NIP89.NAME + "] Request by blacklisted user, skipped")
                 return

-            print(
-                bcolors.MAGENTA + "[" + self.dvm_config.NIP89.NAME + "] Received new Request: " + task + " from " + user.name + bcolors.ENDC)
+            if self.dvm_config.LOGLEVEL.value >= LogLevel.INFO.value:
+                print(
+                    bcolors.MAGENTA + "[" + self.dvm_config.NIP89.NAME + "] Received new Request: " + task + " from " + user.name + bcolors.ENDC)
             duration = input_data_file_duration(nip90_event, dvm_config=self.dvm_config, client=self.client)
             amount = get_amount_per_task(task, self.dvm_config, duration)
             if amount is None:
@@ -158,8 +159,9 @@ class DVM:
                                                       self.dvm_config)
                 subscription_status = await nip88_has_active_subscription(PublicKey.parse(user.npub),
-                                                                          self.dvm_config.NIP88.DTAG, self.client,
-                                                                          self.dvm_config.PUBLIC_KEY)
+                                                                          self.dvm_config.NIP88.DTAG,
+                                                                          self.client,
+                                                                          self.dvm_config.PUBLIC_KEY)
                 if subscription_status["isActive"]:
                     await send_job_status_reaction(nip90_event, "subscription-required", True, amount,
@@ -201,9 +203,10 @@ class DVM:
             if (user.iswhitelisted or task_is_free or cashu_redeemed) and (
                     p_tag_str == "" or p_tag_str ==
                     self.dvm_config.PUBLIC_KEY):
-                print(
-                    "[" + self.dvm_config.NIP89.NAME + "] Free task or Whitelisted for task " + task +
-                    ". Starting processing..")
+                if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                    print(
+                        "[" + self.dvm_config.NIP89.NAME + "] Free task or Whitelisted for task " + task +
+                        ". Starting processing..")
                 if dvm_config.SEND_FEEDBACK_EVENTS:
                     await send_job_status_reaction(nip90_event, "processing", True, 0,
@@ -278,7 +281,8 @@ class DVM:
             else:
-                print("[" + self.dvm_config.NIP89.NAME + "] Job addressed to someone else, skipping..")
+                if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                    print("[" + self.dvm_config.NIP89.NAME + "] Job addressed to someone else, skipping..")
         # else:
         #     print("[" + self.dvm_config.NIP89.NAME + "] Task " + task + " not supported on this DVM, skipping..")
@@ -500,9 +504,12 @@ class DVM:
             # send_event(reply_event, client=self.client, dvm_config=self.dvm_config)
             await send_event_outbox(reply_event, client=self.client, dvm_config=self.dvm_config)
-            print(bcolors.GREEN + "[" + self.dvm_config.NIP89.NAME + "] " + str(
-                original_event.kind().as_u64() + 1000) + " Job Response event sent: " + reply_event.as_json() + bcolors.ENDC)
+            if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                print(bcolors.GREEN + "[" + self.dvm_config.NIP89.NAME + "] " + str(
+                    original_event.kind().as_u64() + 1000) + " Job Response event sent: " + reply_event.as_json() + bcolors.ENDC)
+            elif self.dvm_config.LOGLEVEL.value >= LogLevel.INFO.value:
+                print(bcolors.GREEN + "[" + self.dvm_config.NIP89.NAME + "] " + str(
+                    original_event.kind().as_u64() + 1000) + " Job Response event sent: " + reply_event.id().to_hex() + bcolors.ENDC)

         async def send_job_status_reaction(original_event, status, is_paid=True, amount=0, client=None,
                                            content=None,
@@ -612,8 +619,13 @@ class DVM:
             # send_event(reaction_event, client=self.client, dvm_config=self.dvm_config)
             await send_event_outbox(reaction_event, client=self.client, dvm_config=self.dvm_config)
-            print(bcolors.YELLOW + "[" + self.dvm_config.NIP89.NAME + "]" + " Sent Kind " + str(
-                EventDefinitions.KIND_FEEDBACK.as_u64()) + " Reaction: " + status + " " + reaction_event.as_json() + bcolors.ENDC)
+            if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                print(bcolors.YELLOW + "[" + self.dvm_config.NIP89.NAME + "]" + " Sent Kind " + str(
+                    EventDefinitions.KIND_FEEDBACK.as_u64()) + " Reaction: " + status + " " + reaction_event.as_json() + bcolors.ENDC)
+            elif self.dvm_config.LOGLEVEL.value >= LogLevel.INFO.value:
+                print(bcolors.YELLOW + "[" + self.dvm_config.NIP89.NAME + "]" + " Sent Kind " + str(
+                    EventDefinitions.KIND_FEEDBACK.as_u64()) + " Reaction: " + status + " " + reaction_event.id().to_hex() + bcolors.ENDC)
             return reaction_event.as_json()

         async def do_work(job_event, amount):
@@ -687,7 +699,7 @@ class DVM:
                 return

-        #await self.client.handle_notifications(NotificationHandler)
+        # await self.client.handle_notifications(NotificationHandler)
         asyncio.create_task(self.client.handle_notifications(NotificationHandler()))

         while True:
@@ -696,7 +708,7 @@ class DVM:
             for job in self.job_list:
                 if job.bolt11 != "" and job.payment_hash != "" and not job.payment_hash is None and not job.is_paid:
-                    ispaid = check_bolt11_ln_bits_is_paid(job.payment_hash, se.dvm_config)
+                    ispaid = check_bolt11_ln_bits_is_paid(job.payment_hash, self.dvm_config)
                     if ispaid and job.is_paid is False:
                         print("is paid")
                         job.is_paid = True
@@ -704,9 +716,9 @@ class DVM:
                         job.is_paid = True
                         await send_job_status_reaction(job.event, "processing", True, 0,
                                                        content=self.dvm_config.CUSTOM_PROCESSING_MESSAGE,
                                                        client=self.client,
                                                        dvm_config=self.dvm_config)
                         print("[" + self.dvm_config.NIP89.NAME + "] doing work from joblist")
                         await do_work(job.event, amount)
                     elif ispaid is None:  # invoice expired
@@ -716,8 +728,8 @@ class DVM:
                         self.job_list.remove(job)
             for job in self.jobs_on_hold_list:
-                if await check_event_has_not_unfinished_job_input(job.event, False, client=se.client,
+                if await check_event_has_not_unfinished_job_input(job.event, False, client=self.client,
                                                                   dvmconfig=self.dvm_config):
                     await handle_nip90_job_event(nip90_event=job.event)
                     try:
                         self.jobs_on_hold_list.remove(job)

========== changed file 2 of 11 ==========

@@ -8,7 +8,7 @@ import sys
 from sys import platform
 from threading import Thread
 from venv import create

-from nostr_sdk import Keys, Kind
+from nostr_sdk import Keys, Kind, LogLevel

 from nostr_dvm.dvm import DVM
 from nostr_dvm.utils.admin_utils import AdminConfig
 from nostr_dvm.utils.dvmconfig import DVMConfig, build_default_config
@@ -17,6 +17,7 @@ from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag
 from nostr_dvm.utils.output_utils import post_process_result


 class DVMTaskInterface:
     NAME: str
     KIND: Kind
@@ -136,12 +137,13 @@ class DVMTaskInterface:
         return post_process_result(result, event)

     def set_options(self, request_form):
-        print("[" + self.dvm_config.NIP89.NAME + "] " + "Setting options...")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] " + "Setting options...")
         opts = []
         if request_form.get("options"):
             opts = json.loads(request_form["options"])
-            print("[" + self.dvm_config.NIP89.NAME + "] " + str(opts))
+            if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+                print("[" + self.dvm_config.NIP89.NAME + "] " + str(opts))
         return dict(opts)

     @staticmethod

========== changed file 3 of 11 ==========

@@ -14,6 +14,7 @@ from nostr_dvm.utils.nip88_utils import NIP88Config, check_and_set_d_tag_nip88,
 from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag, create_amount_tag
 from nostr_dvm.utils.output_utils import post_process_list_to_events

 """
 This File contains a Module to discover popular notes
 Accepted Inputs: none
@@ -112,7 +113,7 @@ class DicoverContentLatestLongForm(DVMTaskInterface):
         signer = NostrSigner.keys(keys)

         database = await NostrDatabase.sqlite(self.db_name)
-        print(self.db_name)
+        #print(self.db_name)
         cli = ClientBuilder().database(database).signer(signer).opts(opts).build()
         await cli.connect()
@@ -123,8 +124,8 @@ class DicoverContentLatestLongForm(DVMTaskInterface):
         filter1 = Filter().kind(definitions.EventDefinitions.KIND_LONGFORM).since(since)
         events = await cli.database().query([filter1])
-        print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
         ns.finallist = {}
         index = options["max_results"]
         for event in events:
@@ -142,8 +143,9 @@ class DicoverContentLatestLongForm(DVMTaskInterface):
             e_tag = Tag.parse(["e", entry[0]])
             result_list.append(e_tag.as_vec())
         await cli.shutdown()
-        print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
-            len(result_list)) + " fitting events.")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
+                len(result_list)) + " fitting events.")
         return json.dumps(result_list)

     def post_process(self, result, event):
@@ -189,15 +191,17 @@ class DicoverContentLatestLongForm(DVMTaskInterface):
         filter1 = Filter().kinds([definitions.EventDefinitions.KIND_LONGFORM]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print(
-            "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print(
+                "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 4 of 11 ==========

@@ -15,6 +15,7 @@ from nostr_dvm.utils.nip88_utils import NIP88Config, check_and_set_d_tag_nip88,
 from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag, create_amount_tag
 from nostr_dvm.utils.output_utils import post_process_list_to_events

 """
 This File contains a Module to discover popular notes
 Accepted Inputs: none
@@ -125,7 +126,8 @@ class DicoverContentCurrentlyPopular(DVMTaskInterface):
         filter1 = Filter().kind(definitions.EventDefinitions.KIND_NOTE).since(since)
         events = await database.query([filter1])
-        print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
         ns.finallist = {}
         for event in events:
             if event.created_at().as_secs() > timestamp_hour_ago:
@@ -146,8 +148,9 @@ class DicoverContentCurrentlyPopular(DVMTaskInterface):
             e_tag = Tag.parse(["e", entry[0]])
             result_list.append(e_tag.as_vec())
         #await cli.shutdown()
-        print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
-            len(result_list)) + " fitting events.")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
+                len(result_list)) + " fitting events.")
         return json.dumps(result_list)

     def post_process(self, result, event):
@@ -192,15 +195,17 @@ class DicoverContentCurrentlyPopular(DVMTaskInterface):
                                              definitions.EventDefinitions.KIND_ZAP]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print(
-            "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print(
+                "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 5 of 11 ==========

@@ -127,7 +127,8 @@ class DicoverContentCurrentlyPopularZaps(DVMTaskInterface):
         filter1 = Filter().kind(definitions.EventDefinitions.KIND_NOTE).since(since)
         events = await database.query([filter1])
-        print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")

         ns.finallist = {}
         for event in events:
@@ -189,9 +190,9 @@ class DicoverContentCurrentlyPopularZaps(DVMTaskInterface):
             # print(EventId.parse(entry[0]).to_bech32() + "/" + EventId.parse(entry[0]).to_hex() + ": " + str(entry[1]))
             e_tag = Tag.parse(["e", entry[0]])
             result_list.append(e_tag.as_vec())
-        print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
-            len(result_list)) + " fitting events.")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
+                len(result_list)) + " fitting events.")
         #await cli.shutdown()
@@ -243,15 +244,17 @@ class DicoverContentCurrentlyPopularZaps(DVMTaskInterface):
                                              definitions.EventDefinitions.KIND_ZAP]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print(
-            "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print(
+                "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 6 of 11 ==========

@@ -15,6 +15,7 @@ from nostr_dvm.utils.nip88_utils import NIP88Config, check_and_set_d_tag_nip88,
 from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag, create_amount_tag
 from nostr_dvm.utils.output_utils import post_process_list_to_events

 """
 This File contains a Module to discover popular notes
 Accepted Inputs: none
@@ -141,7 +142,8 @@ class DicoverContentCurrentlyPopularFollowers(DVMTaskInterface):
         filter1 = Filter().kind(definitions.EventDefinitions.KIND_NOTE).authors(followings).since(since)
         events = await cli.database().query([filter1])
-        print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")

         ns.finallist = {}
         for event in events:
@@ -162,8 +164,9 @@ class DicoverContentCurrentlyPopularFollowers(DVMTaskInterface):
             result_list.append(e_tag.as_vec())
         #await cli.connect()
         await cli.shutdown()
-        print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
-            len(result_list)) + " fitting events.")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
+                len(result_list)) + " fitting events.")
         return json.dumps(result_list)
@@ -212,15 +215,17 @@ class DicoverContentCurrentlyPopularFollowers(DVMTaskInterface):
                                              definitions.EventDefinitions.KIND_ZAP]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print("[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(
-            self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(
+                self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 7 of 11 ==========

@@ -149,7 +149,8 @@ class DicoverContentCurrentlyPopularbyTopic(DVMTaskInterface):
         filter1 = Filter().kind(definitions.EventDefinitions.KIND_NOTE).since(since)
         events = await database.query([filter1])
-        print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Considering " + str(len(events)) + " Events")

         ns.finallist = {}
         for event in events:
@@ -171,9 +172,9 @@ class DicoverContentCurrentlyPopularbyTopic(DVMTaskInterface):
             # print(EventId.parse(entry[0]).to_bech32() + "/" + EventId.parse(entry[0]).to_hex() + ": " + str(entry[1]))
             e_tag = Tag.parse(["e", entry[0]])
             result_list.append(e_tag.as_vec())
-        print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
-            len(result_list)) + " fitting events.")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Filtered " + str(
+                len(result_list)) + " fitting events.")
         #await cli.shutdown()
         return json.dumps(result_list)
@@ -209,15 +210,17 @@ class DicoverContentCurrentlyPopularbyTopic(DVMTaskInterface):
                                              definitions.EventDefinitions.KIND_ZAP]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.NIP89.NAME + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print(
-            "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print(
+                "[" + self.dvm_config.NIP89.NAME + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 8 of 11 ==========

@@ -15,6 +15,7 @@ from nostr_dvm.utils.nip88_utils import NIP88Config, check_and_set_d_tag_nip88,
 from nostr_dvm.utils.nip89_utils import NIP89Config, check_and_set_d_tag, create_amount_tag
 from nostr_dvm.utils.output_utils import post_process_list_to_events

 """
 This File contains a Module to update the database for content discovery dvms
 Accepted Inputs: none
@@ -141,15 +142,17 @@ class DicoverContentDBUpdateScheduler(DVMTaskInterface):
                                              definitions.EventDefinitions.KIND_ZAP]).since(since)  # Notes, reactions, zaps
         # filter = Filter().author(keys.public_key())
-        print("[" + self.dvm_config.IDENTIFIER + "] Syncing notes of the last " + str(
-            self.db_since) + " seconds.. this might take a while..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print("[" + self.dvm_config.IDENTIFIER + "] Syncing notes of the last " + str(
+                self.db_since) + " seconds.. this might take a while..")
         dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
         await cli.reconcile(filter1, dbopts)
         await cli.database().delete(Filter().until(Timestamp.from_secs(
             Timestamp.now().as_secs() - self.db_since)))  # Clear old events so db doesn't get too full.
         await cli.shutdown()
-        print(
-            "[" + self.dvm_config.IDENTIFIER + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")
+        if self.dvm_config.LOGLEVEL.value >= LogLevel.DEBUG.value:
+            print(
+                "[" + self.dvm_config.IDENTIFIER + "] Done Syncing Notes of the last " + str(self.db_since) + " seconds..")


 # We build an example here that we can call by either calling this file directly from the main directory,

========== changed file 9 of 11 ==========

@@ -1,6 +1,6 @@
 import os

-from nostr_sdk import Keys
+from nostr_sdk import Keys, LogLevel

 from nostr_dvm.utils.nip88_utils import NIP88Config
 from nostr_dvm.utils.nip89_utils import NIP89Config
@@ -8,7 +8,6 @@ from nostr_dvm.utils.nostr_utils import check_and_set_private_key
 from nostr_dvm.utils.output_utils import PostProcessFunctionType
 from nostr_dvm.utils.zap_utils import check_and_set_ln_bits_keys


 class DVMConfig:
     SUPPORTED_DVMS = []
     PRIVATE_KEY: str = ""
@@ -61,6 +60,7 @@ class DVMConfig:
     SCHEDULE_UPDATES_SECONDS = 0
     UPDATE_DATABASE = True  # DVMs that use a db manage their db by default. If a dvm should use the same db as another DVM, deactive it for those who do.
     CUSTOM_PROCESSING_MESSAGE = None
+    LOGLEVEL = LogLevel.DEBUG


 def build_default_config(identifier):
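
With DVMConfig now carrying a LOGLEVEL default of LogLevel.DEBUG, individual builders can quiet a single DVM without touching the rest. A short sketch of the intended override, assuming the imports shown in the surrounding diffs (nostr_sdk.LogLevel and build_default_config from nostr_dvm.utils.dvmconfig); the identifier string is made up for illustration.

# Sketch: quiet one DVM to INFO while the library default stays at DEBUG.
from nostr_sdk import LogLevel
from nostr_dvm.utils.dvmconfig import build_default_config

dvm_config = build_default_config("my_discovery_dvm")  # identifier is illustrative
dvm_config.LOGLEVEL = LogLevel.INFO  # per-event DEBUG prints are now skipped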

========== changed file 10 of 11 ==========

@@ -22,6 +22,7 @@ from nostr_dvm.utils.nip89_utils import create_amount_tag, NIP89Config, check_an
 from nostr_dvm.utils.nostr_utils import check_and_set_private_key
 from nostr_dvm.utils.zap_utils import check_and_set_ln_bits_keys

 rebroadcast_NIP89 = False  # Announce NIP89 on startup
 rebroadcast_NIP65_Relay_List = False
 update_profile = False
@@ -43,7 +44,7 @@ AVOID_PAID_OUTBOX_RELAY_LIST = ["wss://nostrelay.yeghro.site", "wss://nostr.wine
                                "wss://bitcoiner.social", "wss://relay.stoner.com", "wss://nostr.l00p.org", "wss://relay.nostr.ro", "wss://nostr.kollider.xyz",
                                "wss://relay.valera.co", "wss://relay.austrich.net", "wss://relay.nostrich.de", "wss://nostr.azte.co", "wss://nostr-relay.schnitzel.world",
                                "wss://relay.nostriches.org", "wss://happytavern.co", "wss://onlynotes.lol", "wss://offchain.pub", "wss://purplepag.es", "wss://relay.plebstr.com"
-                               "wss://poster.place/relay", "wss://relayable.org", "wss://bbb.santos.lol", "wss://relay.bitheaven.social"
+                               "wss://poster.place/relay", "wss://relayable.org", "wss://bbb.santos.lol", "wss://relay.bitheaven.social", "wss://theforest.nostr1.com"
@@ -59,6 +60,7 @@ def build_db_scheduler(name, identifier, admin_config, options, image, descripti
     dvm_config.SHOWLOG = True
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
+    dvm_config.LOGLEVEL = LogLevel.INFO
     # Activate these to use a subscription based model instead
     # dvm_config.SUBSCRIPTION_REQUIRED = True
     # dvm_config.SUBSCRIPTION_DAILY_COST = 1
@@ -99,6 +101,7 @@ def build_example_nostrband(name, identifier, admin_config, image, about, custom
     dvm_config.USE_OWN_VENV = False
     dvm_config.CUSTOM_PROCESSING_MESSAGE = custom_processing_msg
     dvm_config.AVOID_PAID_OUTBOX_RELAY_LIST = AVOID_PAID_OUTBOX_RELAY_LIST
+    dvm_config.LOGLEVEL = LogLevel.INFO
     #dvm_config.RELAY_LIST = ["wss://dvms.f7z.io",
     #                         "wss://nostr.mom", "wss://nostr.oxtr.dev", "wss://relay.nostr.bg"
     #                         ]
@@ -131,6 +134,7 @@ def build_longform(name, identifier, admin_config, options, cost=0, update_rate=
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
     dvm_config.AVOID_PAID_OUTBOX_RELAY_LIST = AVOID_PAID_OUTBOX_RELAY_LIST
+    dvm_config.LOGLEVEL = LogLevel.INFO
     # Activate these to use a subscription based model instead
     # dvm_config.SUBSCRIPTION_REQUIRED = True
     # dvm_config.SUBSCRIPTION_DAILY_COST = 1
@@ -180,6 +184,7 @@ def build_example_topic(name, identifier, admin_config, options, image, descript
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
     dvm_config.FIX_COST = cost
+    dvm_config.LOGLEVEL = LogLevel.INFO
     dvm_config.CUSTOM_PROCESSING_MESSAGE = processing_msg
     dvm_config.AVOID_PAID_OUTBOX_RELAY_LIST = AVOID_PAID_OUTBOX_RELAY_LIST
     #dvm_config.RELAY_LIST = ["wss://dvms.f7z.io",
@@ -219,6 +224,7 @@ def build_example_popular(name, identifier, admin_config, options, image, cost=0
                           update_db=True):
     dvm_config = build_default_config(identifier)
     dvm_config.USE_OWN_VENV = False
+    dvm_config.LOGLEVEL = LogLevel.INFO
     # dvm_config.SHOWLOG = True
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
@@ -264,6 +270,7 @@ def build_example_popular_followers(name, identifier, admin_config, options, ima
     dvm_config = build_default_config(identifier)
     dvm_config.USE_OWN_VENV = False
     dvm_config.SHOWLOG = True
+    dvm_config.LOGLEVEL = LogLevel.INFO
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every x seconds
     dvm_config.UPDATE_DATABASE = update_db
     dvm_config.FIX_COST = cost
@@ -309,6 +316,7 @@ def build_example_top_zapped(name, identifier, admin_config, options, image, cos
     dvm_config = build_default_config(identifier)
     dvm_config.USE_OWN_VENV = False
     dvm_config.SHOWLOG = True
+    dvm_config.LOGLEVEL = LogLevel.INFO
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
     dvm_config.FIX_COST = cost

========== changed file 11 of 11 ==========

@@ -78,7 +78,11 @@ async def analyse_users(user_ids=None):
     try:
         user_keys = []
         for npub in user_ids:
-            user_keys.append(PublicKey.parse(npub))
+            try:
+                user_keys.append(PublicKey.parse(npub))
+            except Exception as e:
+                print(npub)
+                print(e)

         database = await NostrDatabase.sqlite("db/nostr_followlists.db")
         followers_filter = Filter().authors(user_keys).kind(Kind(3))
@@ -94,8 +98,10 @@ async def analyse_users(user_ids=None):
             return allfriends
         else:
+            print("no followers")
             return []
-    except:
+    except Exception as e:
+        print(e)
         return []
@@ -117,12 +123,16 @@ def write_to_csv(friends, file="friends222.csv"):
             writer.writerow(row)


-def main(user_key):
-    create_csv = True
+def main(user_key, create_csv, get_profile = False, remove_followings_from_set = False):
     file = "db/friends223.csv"
+    # make sure key is in hex format
     if create_csv:
         # clear previous file
         try:
+            print("Deleting existing file, creating new one")
             os.remove(file)
         except:
             print("Creating new file")
@@ -130,23 +140,32 @@ def main(user_key):
         asyncio.run(sync_db())

-    # make sure key is in hex format
     user_id = PublicKey.parse(user_key).to_hex()
-    user_friends = asyncio.run(analyse_users([user_id]))
+    user_friends_level1 = asyncio.run(analyse_users([user_id]))
     friendlist = []
-    for npub in user_friends[0].friends:
+    for npub in user_friends_level1[0].friends:
         friendlist.append(npub)
     me = Friend(user_id, friendlist)
     write_to_csv([me], file)

-    # for every npub we follow, we look at the npubs they follow (this might take a while)
-    friendlist = []
-    for friend in user_friends:
-        for npub in friend.friends:
-            friendlist.append(npub)
-    users_friends = asyncio.run(analyse_users(friendlist))
-    write_to_csv(users_friends, file)
+    # for every npub we follow, we look at the npubs they follow (this might take a while)
+    friendlist2 = []
+    for friend in user_friends_level1:
+        for npub in friend.friends:
+            friendlist2.append(npub)
+    user_friends_level2 = asyncio.run(analyse_users(friendlist2))
+    write_to_csv(user_friends_level2, file)
+
+    friendlist3 = []
+    for friend in user_friends_level2:
+        for npub in friend.friends:
+            friendlist3.append(npub)
+    print(len(friendlist3))
+    user_friends_level3 = asyncio.run(analyse_users(friendlist3))
+    write_to_csv(user_friends_level3, file)

     df = pd.read_csv(file, sep=',')
@@ -156,19 +175,40 @@ def main(user_key):
     G_fb = nx.read_edgelist(file, delimiter=",", create_using=nx.DiGraph(), nodetype=str)
     print(G_fb)
     pr = nx.pagerank(G_fb)
-    sorted_nodes = sorted([(node, pagerank) for node, pagerank in pr.items()], key=lambda x: pr[x[0]],
-                          reverse=True)[:50]
+    # Use this to find people your followers follow
+    if remove_followings_from_set:
+        user_id = PublicKey.parse(user_key).to_hex()
+        user_friends_level1 = asyncio.run(analyse_users([user_id]))
+        friendlist = []
+        for npub in user_friends_level1[0].friends:
+            friendlist.append(npub)
+        sorted_nodes = sorted([(node, pagerank) for node, pagerank in pr.items() if node not in friendlist], key=lambda x: pr[x[0]],
+                              reverse=True)[:50]
+    else:
+        sorted_nodes = sorted([(node, pagerank) for node, pagerank in pr.items()], key=lambda x: pr[x[0]],
+                              reverse=True)[:50]

     for node in sorted_nodes:
-        # print(PublicKey.parse(node[0]).to_bech32() + "," + str(node[1]))
-        name, nip05, lud16 = asyncio.run(getmetadata(node[0]))
-        try:
-            pk = PublicKey.parse(node[0]).to_bech32()
-        except:
-            pk = node[0]
-        print(name + " (" + pk + ") " + "," + str(node[1]))
+        try:
+            pk = PublicKey.parse(node[0]).to_bech32()
+        except:
+            pk = node[0]
+
+        if get_profile:
+            name, nip05, lud16 = asyncio.run(getmetadata(node[0]))
+            print(name + " (" + pk + ") " + "," + str(node[1]))
+        else:
+            print(pk + "," + str(node[1]))


 user_id = "99bb5591c9116600f845107d31f9b59e2f7c7e09a1ff802e84f1d43da557ca64"
 #user_id = "npub1gcxzte5zlkncx26j68ez60fzkvtkm9e0vrwdcvsjakxf9mu9qewqlfnj5z"
-main(user_id)
+fetch_profiles = True
+create_csv = False
+remove_followings_from_set = True
+
+main(user_id, create_csv, fetch_profiles, remove_followings_from_set)
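
For context on the "improve WoT" half of the commit: the script's ranking boils down to PageRank over a directed follow graph, with an optional filter that drops accounts the user already follows (the new remove_followings_from_set branch). A toy, self-contained sketch of that idea, using made-up edges instead of the CSV edge list the script builds:

# Toy illustration of the ranking step: PageRank over a follow graph,
# then drop accounts already followed and keep the top suggestions.
import networkx as nx

me = "me"
my_follows = {"alice", "bob"}
edges = [
    ("me", "alice"), ("me", "bob"),
    ("alice", "carol"), ("bob", "carol"), ("bob", "dave"),
]

graph = nx.DiGraph(edges)
scores = nx.pagerank(graph)

suggestions = sorted(
    ((node, score) for node, score in scores.items()
     if node not in my_follows and node != me),
    key=lambda item: item[1],
    reverse=True,
)[:50]
print(suggestions)  # e.g. carol ranks above dave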