Merge branch 'feature/archives'

This commit is contained in:
Daniele Tonon
2023-07-27 23:55:53 +02:00
9 changed files with 320 additions and 8 deletions

View File

@@ -4,6 +4,7 @@ package main
import (
"encoding/json"
"strings"
"time"
"github.com/dgraph-io/badger"
@@ -59,6 +60,38 @@ func (c *Cache) Get(key string) ([]byte, bool) {
return val, true
}
// GetPaginatedkeys returns the page-th page (1-based) of keys stored under
// "prefix:", size keys per page, with the "prefix:" part stripped.
// An out-of-range or non-positive page yields an empty slice.
func (c *Cache) GetPaginatedkeys(prefix string, page int, size int) []string {
	keys := []string{}
	if page < 1 || size < 1 {
		// Guard against negative/zero pages parsed from user-supplied URLs;
		// the start/end math below would misbehave otherwise.
		return keys
	}
	// Seek on the full "prefix:" so keys that merely share the prefix
	// letters (but use a different separator segment) are never matched.
	fullPrefix := []byte(prefix + ":")
	err := c.DB.View(func(txn *badger.Txn) error {
		opts := badger.DefaultIteratorOptions
		opts.PrefetchValues = false // key-only iteration; values are never read
		it := txn.NewIterator(opts)
		defer it.Close()
		start := (page-1)*size + 1 // first 1-based index on the requested page
		end := start + size - 1    // last 1-based index on the requested page
		index := 1
		for it.Seek(fullPrefix); it.ValidForPrefix(fullPrefix); it.Next() {
			if index > end {
				break
			}
			if index >= start {
				k := it.Item().Key()
				keys = append(keys, strings.TrimPrefix(string(k), prefix+":"))
			}
			index++
		}
		return nil
	})
	if err != nil {
		log.Fatal().Err(err).Msg("")
	}
	return keys
}
func (c *Cache) GetJSON(key string, recv any) bool {
b, ok := c.Get(key)
if !ok {

28
main.go
View File

@@ -1,11 +1,14 @@
package main
import (
"context"
"embed"
"fmt"
"html"
"net/http"
"os"
"text/template"
"time"
"github.com/kelseyhightower/envconfig"
"github.com/rs/zerolog"
@@ -30,6 +33,21 @@ var (
log = zerolog.New(os.Stderr).Output(zerolog.ConsoleWriter{Out: os.Stdout}).With().Timestamp().Logger()
)
// updateArchives refreshes the npubs and relays archives immediately and
// then once every 24 hours, until ctx is cancelled.
func updateArchives(ctx context.Context) {
	ticker := time.NewTicker(24 * time.Hour)
	defer ticker.Stop()
	for {
		loadNpubsArchive(ctx)
		loadRelaysArchive(ctx)
		// Wait for the next tick, but react to cancellation right away:
		// the previous time.Sleep ignored ctx for up to 24 hours.
		select {
		case <-ctx.Done():
			fmt.Println("Exit updateArchives gracefully...")
			return
		case <-ticker.C:
		}
	}
}
func main() {
err := envconfig.Process("", &s)
if err != nil {
@@ -39,6 +57,10 @@ func main() {
// initialize disk cache
defer cache.initialize()()
// initialize the function to update the npubs/relays archive
ctx := context.Background()
go updateArchives(ctx)
// initialize templates
// use a mapping to expressly link the templates and share them between more kinds/types
templateMapping["profile"] = "profile.html"
@@ -53,6 +75,7 @@ func main() {
"mdToHTML": mdToHTML,
"escapeString": html.EscapeString,
"sanitizeXSS": sanitizeXSS,
"trimProtocol": trimProtocol,
}
tmpl = template.Must(
@@ -65,10 +88,15 @@ func main() {
http.HandleFunc("/njump/image/", generate)
http.HandleFunc("/njump/proxy/", proxy)
http.Handle("/njump/static/", http.StripPrefix("/njump/", http.FileServer(http.FS(static))))
http.HandleFunc("/npubs-archive/", renderArchive)
http.HandleFunc("/relays-archive/", renderArchive)
http.HandleFunc("/", render)
log.Print("listening at http://0.0.0.0:" + s.Port)
if err := http.ListenAndServe("0.0.0.0:"+s.Port, nil); err != nil {
log.Fatal().Err(err).Msg("")
}
select {}
}

View File

@@ -34,6 +34,17 @@ var (
profiles = []string{
"wss://purplepag.es",
}
trustedPubKeys = []string{
"7bdef7be22dd8e59f4600e044aa53a1cf975a9dc7d27df5833bc77db784a5805", // dtonon
"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d", // fiatjaf
"97c70a44366a6535c145b333f973ea86dfdc2d7a99da618c40c64705ad98e322", // hodlbod
"ee11a5dff40c19a555f41fe42b48f00e618c91225622ae37b6c2bb67b76c4e49", // Michael Dilger
}
excludedRelays = []string{
"wss://filter.nostr.wine", // paid
}
)
func getRelay() string {
@@ -159,7 +170,7 @@ func getLastNotes(ctx context.Context, code string, limit int) []*nostr.Event {
for event := range events {
lastNotes = nostr.InsertEventIntoDescendingList(lastNotes, event)
}
return lastNotes
}
@@ -180,3 +191,40 @@ func relaysForPubkey(ctx context.Context, pubkey string, extraRelays ...string)
pubkeyRelays = unique(pubkeyRelays)
return pubkeyRelays
}
// contactsForPubkey returns the deduplicated list of pubkeys followed by
// pubkey, read from its kind-3 contact list. Results are cached for 6 hours
// under "cc:<pubkey>"; extraRelays are queried in addition to the pubkey's
// own relays and the always/profiles defaults.
func contactsForPubkey(ctx context.Context, pubkey string, extraRelays ...string) []string {
	pubkeyContacts := make([]string, 0, 100)
	if ok := cache.GetJSON("cc:"+pubkey, &pubkeyContacts); !ok {
		fmt.Printf("Searching contacts for %s\n", pubkey)
		ctx, cancel := context.WithTimeout(ctx, time.Millisecond*1500)
		relays := make([]string, 0, 12)
		// FIX: extraRelays was previously accepted but never used.
		relays = append(relays, extraRelays...)
		relays = append(relays, relaysForPubkey(ctx, pubkey, extraRelays...)...)
		relays = append(relays, always...)
		relays = append(relays, profiles...)
		ch := pool.SubManyEose(ctx, relays, nostr.Filters{
			{
				Kinds:   []int{3},
				Authors: []string{pubkey},
				Limit:   2,
			},
		})
		for event := range ch {
			for _, tag := range event.Tags {
				// Length guard: a malformed "p" tag without a value would
				// otherwise panic on tag[1].
				if len(tag) >= 2 && tag[0] == "p" {
					pubkeyContacts = append(pubkeyContacts, tag[1])
				}
			}
		}
		cancel()
		if len(pubkeyContacts) > 0 {
			cache.SetJSONWithTTL("cc:"+pubkey, pubkeyContacts, time.Hour*6)
		}
	}
	pubkeyContacts = unique(pubkeyContacts)
	return pubkeyContacts
}

View File

@@ -10,10 +10,11 @@ var cache = Cache{}
type Cache struct{}
func (c *Cache) initialize() func() { return func() {} }
func (c *Cache) Get(key string) ([]byte, bool) { return nil, false }
func (c *Cache) GetJSON(key string, recv any) bool { return false }
func (c *Cache) Set(key string, value []byte) {}
func (c *Cache) SetJSON(key string, value any) {}
func (c *Cache) SetWithTTL(key string, value []byte, ttl time.Duration) {}
func (c *Cache) SetJSONWithTTL(key string, value any, ttl time.Duration) {}
// No-op Cache stubs: initialize returns a no-op cleanup function, every
// getter reports a miss, and every setter discards its arguments.
// NOTE(review): presumably this file is swapped in when disk caching is
// disabled — confirm against the build setup.
func (c *Cache) initialize() func() { return func() {} }
func (c *Cache) Get(key string) ([]byte, bool) { return nil, false }
func (c *Cache) GetJSON(key string, recv any) bool { return false }
func (c *Cache) Set(key string, value []byte) {}
func (c *Cache) SetJSON(key string, value any) {}
func (c *Cache) SetWithTTL(key string, value []byte, ttl time.Duration) {}
func (c *Cache) SetJSONWithTTL(key string, value any, ttl time.Duration) {}
// GetPaginatedkeys always returns an empty page in the no-op cache.
func (c *Cache) GetPaginatedkeys(prefix string, page int, size int) []string { return []string{} }

73
render_archive.go Normal file
View File

@@ -0,0 +1,73 @@
package main
import (
"fmt"
"net/http"
"strconv"
"strings"
"github.com/nbd-wtf/go-nostr/nip19"
)
// renderArchive serves the paginated archive pages mounted at
// "/npubs-archive/<page>" and "/relays-archive/<page>", rendering
// templates/archive.html with 50 entries per page.
func renderArchive(w http.ResponseWriter, r *http.Request) {
	const resultsPerPage = 50

	// Parse the page number from the last path segment. Anything that is
	// not a positive integer (missing, garbage, zero, negative) falls back
	// to page 1 — the old code accepted negative pages straight from the URL.
	page := 1
	if lastIndex := strings.LastIndex(r.URL.Path, "/"); lastIndex != -1 {
		if pageInt, err := strconv.Atoi(r.URL.Path[lastIndex+1:]); err == nil && pageInt > 0 {
			page = pageInt
		}
	}

	area := strings.Split(r.URL.Path[1:], "/")[0]
	prefix := "ra"
	pathPrefix := "r/"
	title := "Nostr relays archive"
	if area == "npubs-archive" {
		prefix = "pa"
		pathPrefix = ""
		title = "Nostr npubs archive"
	}

	keys := cache.GetPaginatedkeys(prefix, page, resultsPerPage)

	data := make([]string, 0, len(keys))
	for _, key := range keys {
		if area == "npubs-archive" {
			// Stored keys are hex pubkeys; show them bech32-encoded.
			npub, _ := nip19.EncodePublicKey(key)
			data = append(data, npub)
		} else {
			data = append(data, key)
		}
	}

	prevPage := page - 1
	nextPage := page + 1
	if len(keys) == 0 {
		// Empty page: signal the template to hide both pagination links
		// (it compares against the string "0").
		prevPage = 0
		nextPage = 0
	}

	params := map[string]any{
		"title":         title,
		"pathPrefix":    pathPrefix,
		"data":          data,
		"paginationUrl": area,
		"nextPage":      fmt.Sprint(nextPage),
		"prevPage":      fmt.Sprint(prevPage),
	}

	w.Header().Set("Cache-Control", "max-age=86400")
	if err := tmpl.ExecuteTemplate(w, "archive.html", params); err != nil {
		log.Error().Err(err).Msg("error rendering")
		return
	}
}

View File

@@ -712,6 +712,15 @@ iframe {
color: #969696;
}
}
.container .column_content a.pagination {
color: #e32a6d;
}
.container .column_content a.pagination.next {
float: right;
}
.container .column_content a.pagination.prev {
float: left;
}
.container .column_clients {
position: -webkit-sticky;
position: sticky;

View File

@@ -657,6 +657,15 @@ iframe {
}
}
}
a.pagination {
color: $color-accent1;
&.next {
float: right;
}
&.prev {
float: left;
}
}
}
.column_clients {

62
templates/archive.html Normal file
View File

@@ -0,0 +1,62 @@
<!DOCTYPE html>
<html class="theme--default">
<head>
  <!-- charset must be declared inside <head>; it previously sat between
       <html> and <head>, which is invalid placement -->
  <meta charset="UTF-8" />
  <title>{{.title}}</title>
  {{template "head_common.html" }}
</head>
<body class="profile">
  {{template "top.html" .}}
  <div class="container_wrapper">
    <div class="container">
      <div class="column columnA">
        <div class="info-wrapper">
          {{.title}}
          <span class="display">&nbsp;</span>
        </div>
        <div class="pic-wrapper">
        </div>
      </div>
      <div class="column column_content">
        <div class="field info-wrapper">
          <h1 class="name">
            {{.title}}
          </h1>
        </div>
        <div class="field separator long"></div>
        <div class="field last_notes">
          {{range $index, $element := .data }}
          <a href="/{{$.pathPrefix}}{{$element | trimProtocol | escapeString}}" class="note">
            <div class="content">{{$element | escapeString}}</div>
          </a>
          {{end}}
        </div>
        <!-- "0" marks a missing prev/next page; see renderArchive -->
        {{if not (eq .prevPage "0")}}
        <a href="/{{.paginationUrl}}/{{.prevPage | escapeString}}" class="pagination prev"><< Prev page</a>
        {{end}}
        &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
        {{if not (eq .nextPage "0")}}
        <a href="/{{.paginationUrl}}/{{.nextPage | escapeString}}" class="pagination next">Next page >></a>
        {{end}}
      </div>
      <aside class="column column_clients"></aside>
    </div>
  </div>
  {{template "footer.html"}}
  <script>
    {{template "scripts.js"}}
  </script>
</body>
</html>

View File

@@ -1,11 +1,13 @@
package main
import (
"context"
"encoding/json"
"fmt"
"net/http"
"regexp"
"strings"
"time"
"github.com/gomarkdown/markdown"
"github.com/gomarkdown/markdown/html"
@@ -344,3 +346,50 @@ func trimProtocol(relay string) string {
relay = strings.TrimPrefix(relay, "ws:/") // Some browsers replace upfront '//' with '/'
return relay
}
// loadNpubsArchive refreshes the npubs archive: it gathers the contact
// lists of the trusted pubkeys and stores every followed pubkey under
// "pa:<pubkey>" with a 90-day TTL.
func loadNpubsArchive(ctx context.Context) {
	fmt.Println("Refreshing the npubs archive")
	archive := make([]string, 0, 500)
	for _, trusted := range trustedPubKeys {
		// Bound each lookup so one slow relay cannot stall the refresh.
		lookupCtx, cancel := context.WithTimeout(ctx, time.Second*4)
		archive = append(archive, contactsForPubkey(lookupCtx, trusted)...)
		cancel()
	}
	for _, contact := range unique(archive) {
		fmt.Printf("Adding contact %s\n", contact)
		cache.SetWithTTL("pa:"+contact, nil, time.Hour*24*90)
	}
}
// loadRelaysArchive refreshes the relays archive: it gathers the relays
// used by the trusted pubkeys and stores each one under "ra:<relay>" with
// a 7-day TTL. Excluded relays (e.g. paid ones) and relay URLs embedding a
// personalized npub query are skipped.
func loadRelaysArchive(ctx context.Context) {
	fmt.Println("Refreshing the relays archive")
	relaysArchive := make([]string, 0, 500)
	for _, pubkey := range trustedPubKeys {
		// Bound each lookup so one slow relay cannot stall the refresh.
		ctx, cancel := context.WithTimeout(ctx, time.Second*4)
		relaysArchive = append(relaysArchive, relaysForPubkey(ctx, pubkey)...)
		cancel()
	}
	relaysArchive = unique(relaysArchive)
	for _, relay := range relaysArchive {
		// FIX: the previous `continue` lived inside the inner loop and only
		// advanced that loop, so excluded relays were still cached.
		excluded := false
		for _, e := range excludedRelays {
			if strings.Contains(relay, e) {
				excluded = true
				break
			}
		}
		if excluded {
			fmt.Printf("Skipping relay %s\n", relay)
			continue
		}
		if strings.Contains(relay, "/npub1") {
			continue // skip relays with a personalized query, like filter.nostr.wine
		}
		fmt.Printf("Adding relay %s\n", relay)
		cache.SetWithTTL("ra:"+relay, nil, time.Hour*24*7)
	}
}