diff --git a/nostr_dvm/tasks/discovery_trending_notes_gleasonator.py b/nostr_dvm/tasks/discovery_trending_notes_gleasonator.py
index 285821b..8fffe9a 100644
--- a/nostr_dvm/tasks/discovery_trending_notes_gleasonator.py
+++ b/nostr_dvm/tasks/discovery_trending_notes_gleasonator.py
@@ -83,7 +83,7 @@ class TrendingNotesGleasonator(DVMTaskInterface):
         ltags = ["#e", "pub.ditto.trends"]
         authors = [PublicKey.parse("db0e60d10b9555a39050c258d460c5c461f6d18f467aa9f62de1a728b8a891a4")]
-        notes_filter = Filter().authors(authors).custom_tag(SingleLetterTag.lowercase(Alphabet.L), ltags)
+        notes_filter = Filter().authors(authors).kind(Kind(1985)).custom_tag(SingleLetterTag.lowercase(Alphabet.L), ltags)

         events = await cli.get_events_of([notes_filter], timedelta(seconds=10))
diff --git a/setup.py b/setup.py
index 1835e62..802b5b4 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
 from setuptools import setup, find_packages

-VERSION = '0.6.5'
+VERSION = '0.6.6'
 DESCRIPTION = 'A framework to build and run Nostr NIP90 Data Vending Machines'
 LONG_DESCRIPTION = ('A framework to build and run Nostr NIP90 Data Vending Machines. See the github repository for more information')
@@ -15,7 +15,9 @@ setup(
     packages=find_packages(include=['nostr_dvm', 'nostr_dvm.*']),
     install_requires=["nostr-sdk==0.32.1",
-                      "bech32",
+                      "bech32==1.2.0",
+                      "networkx==3.3",
+                      "scipy==1.13.1",
                       "pycryptodome==3.20.0",
                       "python-dotenv==1.0.0",
                       "emoji==2.8.0",
diff --git a/tests/discovery.py b/tests/discovery.py
index f81690d..0aa3d41 100644
--- a/tests/discovery.py
+++ b/tests/discovery.py
@@ -130,6 +130,7 @@ def build_longform(name, identifier, admin_config, options, cost=0, update_rate=
     dvm_config.SHOWLOG = True
     dvm_config.SCHEDULE_UPDATES_SECONDS = update_rate  # Every 10 minutes
     dvm_config.UPDATE_DATABASE = update_db
+    dvm_config.AVOID_PAID_OUTBOX_RELAY_LIST = AVOID_PAID_OUTBOX_RELAY_LIST
     # Activate these to use a subscription based model instead
     # dvm_config.SUBSCRIPTION_REQUIRED = True
     # dvm_config.SUBSCRIPTION_DAILY_COST = 1
diff --git a/tests/wot.py b/tests/wot.py
new file mode 100644
index 0000000..8cccc27
--- /dev/null
+++ b/tests/wot.py
@@ -0,0 +1,174 @@
+import asyncio
+import json
+from datetime import timedelta
+import os
+import csv
+import networkx as nx
+
+import pandas as pd
+
+import warnings
+
+warnings.filterwarnings('ignore')
+
+from nostr_sdk import RelayLimits, PublicKey, Options, Client, SecretKey, Keys, NostrSigner, RelayOptions, Filter, \
+    PublicKey, Kind, \
+    NegentropyOptions, NegentropyDirection, ClientBuilder, NostrDatabase, init_logger, LogLevel
+
+
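+# Builds a simple web-of-trust ranking for a given user:
+# 1. sync_db() mirrors kind-3 follow lists into a local SQLite database via negentropy.
+# 2. analyse_users() extracts the followed pubkeys ("p" tags) from those follow lists.
+# 3. write_to_csv() stores them as a follower,followee edge list.
+# 4. main() runs networkx PageRank over that graph and prints the top 50 profiles with metadata.
+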
+# init_logger(LogLevel.INFO)
+async def getmetadata(npub):
+    name = ""
+    nip05 = ""
+    lud16 = ""
+    try:
+        pk = PublicKey.parse(npub)
+    except:
+        return "", "", ""
+    opts = (Options().wait_for_send(False).send_timeout(timedelta(seconds=5)))
+    keys = Keys.parse("nsec1zmzllu40a7mr7ztl78uwfwslnp0pn0pww868adl05x52d4la237s6m8qfj")
+    signer = NostrSigner.keys(keys)
+    client = ClientBuilder().signer(signer).opts(opts).build()
+    await client.add_relay("wss://relay.damus.io")
+    await client.add_relay("wss://relay.primal.net")
+    await client.add_relay("wss://purplepag.es")
+    await client.connect()
+
+    profile_filter = Filter().kind(Kind(0)).author(pk).limit(1)
+    events = await client.get_events_of([profile_filter], timedelta(seconds=4))
+    if len(events) > 0:
+        try:
+            profile = json.loads(events[0].content())
+            if profile.get("name"):
+                name = profile['name']
+            if profile.get("nip05"):
+                nip05 = profile['nip05']
+            if profile.get("lud16"):
+                lud16 = profile['lud16']
+        except Exception as e:
+            print(e)
+    await client.shutdown()
+    return name, nip05, lud16
+
+
+async def sync_db():
+    opts = (Options().wait_for_send(False).send_timeout(timedelta(seconds=5)))
+    keys = Keys.parse("nsec1zmzllu40a7mr7ztl78uwfwslnp0pn0pww868adl05x52d4la237s6m8qfj")
+    signer = NostrSigner.keys(keys)
+    database = await NostrDatabase.sqlite("db/nostr_followlists.db")
+    cli = ClientBuilder().signer(signer).database(database).opts(opts).build()
+
+    await cli.add_relay("wss://relay.damus.io")  # TODO ADD MORE
+    # await cli.add_relay("wss://relay.primal.net")  # TODO ADD MORE
+    await cli.connect()
+
+    filter1 = Filter().kind(Kind(3))
+
+    # filter = Filter().author(keys.public_key())
+    print("Syncing Profile Database.. this might take a while..")
+    dbopts = NegentropyOptions().direction(NegentropyDirection.DOWN)
+    await cli.reconcile(filter1, dbopts)
+    print("Done Syncing Profile Database.")
+    await cli.shutdown()
+
+
+async def analyse_users(user_ids=None):
+    if user_ids is None:
+        user_ids = []
+    try:
+        user_keys = []
+        for npub in user_ids:
+            user_keys.append(PublicKey.parse(npub))
+
+        database = await NostrDatabase.sqlite("db/nostr_followlists.db")
+        followers_filter = Filter().authors(user_keys).kind(Kind(3))
+        followers = await database.query([followers_filter])
+        allfriends = []
+        if len(followers) > 0:
+            for follower in followers:
+                frens = []
+                for tag in follower.tags():
+                    if tag.as_vec()[0] == "p":
+                        frens.append(tag.as_vec()[1])
+                allfriends.append(Friend(follower.author().to_hex(), frens))
+
+            return allfriends
+        else:
+            return []
+    except:
+        return []
+
+
+class Friend(object):
+    def __init__(self, user_id, friends):
+        self.user_id = user_id
+        self.friends = friends
+
+
+def write_to_csv(friends, file="friends222.csv"):
+    with open(file, 'a') as f:
+        writer = csv.writer(f)
+        friendcounter = 0
+        for friend in friends:
+            print(friendcounter)
+            friendcounter += 1
+            for fren in friend.friends:
+                row = [friend.user_id, fren]
+                writer.writerow(row)
+
+
+def main(user_key):
+    create_csv = True
+    file = "db/friends223.csv"
+    if create_csv:
+        # clear previous file
+        try:
+            os.remove(file)
+        except:
+            print("Creating new file")
+        # sync the database, this might take a while if it's empty or hasn't been updated in a long time
+        asyncio.run(sync_db())
+
+
+        # make sure key is in hex format
+        user_id = PublicKey.parse(user_key).to_hex()
+        user_friends = asyncio.run(analyse_users([user_id]))
+        friendlist = []
+        for npub in user_friends[0].friends:
+            friendlist.append(npub)
+        me = Friend(user_id, friendlist)
+        write_to_csv([me], file)
+
+        # for every npub we follow, we look at the npubs they follow (this might take a while)
+        friendlist = []
+        for friend in user_friends:
+            for npub in friend.friends:
+                friendlist.append(npub)
+
+        users_friends = asyncio.run(analyse_users(friendlist))
+        write_to_csv(users_friends, file)
+
+
+    df = pd.read_csv(file, sep=',')
+    df.info()
+    df.tail()
+
+    G_fb = nx.read_edgelist(file, delimiter=",", create_using=nx.DiGraph(), nodetype=str)
+    print(G_fb)
+    pr = nx.pagerank(G_fb)
+    sorted_nodes = sorted([(node, pagerank) for node, pagerank in pr.items()], key=lambda x: pr[x[0]],
+                          reverse=True)[:50]
+    for node in sorted_nodes:
+        # print(PublicKey.parse(node[0]).to_bech32() + "," + str(node[1]))
+        name, nip05, lud16 = asyncio.run(getmetadata(node[0]))
+        try:
+            pk = PublicKey.parse(node[0]).to_bech32()
+        except:
+            pk = node[0]
+        print(name + " (" + pk + ") " + "," + str(node[1]))
+
+
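+# Seed pubkey for the analysis; hex and npub (bech32) forms both work, since PublicKey.parse() accepts either.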
= "npub1gcxzte5zlkncx26j68ez60fzkvtkm9e0vrwdcvsjakxf9mu9qewqlfnj5z" + +main(user_id)