remove HTML archive pages, leave only the sitemaps.

fiatjaf
2024-06-12 15:30:41 -03:00
parent a7d29b151a
commit 1eb3c45755
4 changed files with 21 additions and 114 deletions
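
For context, here is a minimal, self-contained sketch of the code path that survives this commit: a request is classified into one of two archive areas and is always answered with an XML sitemap, with no HTML page and no pagination left. The urlset markup, the njump.me base URL, the port, and the stub entries are assumptions for illustration only; the real handler renders SitemapTemplate from keys pulled out of the cache, as the diff below shows.

// Sketch only: mirrors the post-commit flow (iota area constants, one fixed
// page of keys, XML-only output). The <urlset> markup and stub entries are
// assumptions; the real code renders SitemapTemplate from cached keys.
package main

import (
    "fmt"
    "net/http"
    "strings"
)

const (
    NPUBS_ARCHIVE = iota
    RELAYS_ARCHIVE
)

func renderArchiveSketch(w http.ResponseWriter, r *http.Request) {
    area, pathPrefix := NPUBS_ARCHIVE, ""
    if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
        area, pathPrefix = RELAYS_ARCHIVE, "r/"
    }

    // stand-in for cache.GetPaginatedKeys(prefix, page, 5000)
    var data []string
    switch area {
    case NPUBS_ARCHIVE:
        data = []string{"npub1exampleexampleexample"}
    case RELAYS_ARCHIVE:
        data = []string{"relay.example.com"}
    }

    w.Header().Add("content-type", "text/xml")
    fmt.Fprintln(w, `<?xml version="1.0" encoding="UTF-8"?>`)
    fmt.Fprintln(w, `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">`)
    for _, v := range data {
        fmt.Fprintf(w, "  <url><loc>https://njump.me/%s%s</loc></url>\n", pathPrefix, v)
    }
    fmt.Fprintln(w, "</urlset>")
}

func main() {
    http.HandleFunc("/npubs-archive.xml", renderArchiveSketch)
    http.HandleFunc("/relays-archive.xml", renderArchiveSketch)
    http.ListenAndServe(":8080", nil)
}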


@@ -1,48 +0,0 @@
-package main
-
-import "fmt"
-
-templ archiveTemplate(params ArchivePageParams) {
-    <!DOCTYPE html>
-    <html class="theme--default text-lg font-light print:text-base sm:text-xl">
-        <meta charset="UTF-8"/>
-        <head>
-            <title>{ params.Title }</title>
-            @headCommonTemplate(params.HeadParams)
-        </head>
-        <body class="mb-16 bg-white text-gray-600 dark:bg-neutral-900 dark:text-neutral-50 print:text-black">
-            @topTemplate(params.HeadParams)
-            <div class="mx-auto block px-4 sm:flex sm:items-center sm:justify-center sm:px-0">
-                <div class="w-full max-w-screen-2xl gap-10 overflow-visible print:w-full sm:flex sm:w-11/12 sm:px-4 md:w-10/12 lg:w-9/12 lg:gap-48vw">
-                    <div class="relative top-auto flex basis-1/5 sm:max-w-[20%] items-center self-start sm:sticky sm:top-8 sm:mt-8 sm:block sm:items-start">
-                        <div
-                            class="hidden text-2xl"
-                            _="on load or scroll from window or resize from window get #page_name then measure its top, height then if top is less than height / -2 or height is 0 remove .hidden otherwise add .hidden"
-                        >{ params.Title }</div>
-                    </div>
-                    <div class="w-full break-words break-all print:w-full basis-3/5">
-                        <div id="page_name" class="mb-6 leading-5">
-                            <h1 class="hidden sm:block text-2xl">{ params.Title }</h1>
-                        </div>
-                        <div class="mb-6 leading-5">
-                            for _, v:= range params.Data {
-                                <a class="block" href={ templ.URL("/" + params.PathPrefix + v) }>
-                                    { v }
-                                </a>
-                            }
-                        </div>
-                        <div class="flex justify-between">
-                            if params.PrevPage != 0 {
-                                <a href={ templ.URL(fmt.Sprintf("/%s/%d", params.PaginationUrl, params.PrevPage)) }>&lt;&lt; Prev page</a>
-                            }
-                            if params.NextPage != 0 {
-                                <a href={ templ.URL(fmt.Sprintf("/%s/%d", params.PaginationUrl, params.NextPage)) }>Next page &gt;&gt;</a>
-                            }
-                        </div>
-                    </div>
-                </div>
-            </div>
-            @footerTemplate()
-        </body>
-    </html>
-}


@@ -125,8 +125,6 @@ func main() {
mux.HandleFunc("/relays-archive.xml", renderArchive)
mux.HandleFunc("/npubs-archive.xml", renderArchive)
mux.HandleFunc("/services/oembed", renderOEmbed)
mux.HandleFunc("/relays-archive/", renderArchive)
mux.HandleFunc("/npubs-archive/", renderArchive)
mux.HandleFunc("/njump/image/", renderImage)
mux.HandleFunc("/njump/proxy/", proxy)
mux.HandleFunc("/robots.txt", renderRobots)


@@ -92,18 +92,6 @@ type AboutParams struct {
     HeadParams
 }
-
-type ArchivePageParams struct {
-    HeadParams
-    Title         string
-    PathPrefix    string
-    Data          []string
-    ModifiedAt    string
-    PaginationUrl string
-    NextPage      int
-    PrevPage      int
-}
-
 type EmbeddedNoteParams struct {
     Content   template.HTML
     CreatedAt string


@@ -11,17 +11,13 @@ import (
"github.com/nbd-wtf/go-nostr/nip19"
)
const (
NPUBS_ARCHIVE = iota
RELAYS_ARCHIVE = iota
)
func renderArchive(w http.ResponseWriter, r *http.Request) {
fmt.Println(r.URL.Path, "@.", r.Header.Get("user-agent"))
code := r.URL.Path[1:]
hostname := code[2:]
resultsPerPage := 50
isSitemap := false
if strings.HasSuffix(hostname, ".xml") {
isSitemap = true
resultsPerPage = 5000
}
lastIndex := strings.LastIndex(r.URL.Path, "/")
page := 1
@@ -37,42 +33,29 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
prefix := ""
pathPrefix := ""
title := ""
area := ""
var area int
if strings.HasPrefix(r.URL.Path[1:], "npubs-archive") {
area = "npubs-archive"
} else if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
area = "relays-archive"
}
if area == "npubs-archive" {
area = NPUBS_ARCHIVE
prefix = "pa:"
pathPrefix = ""
title = "Nostr npubs archive"
} else {
} else if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
area = RELAYS_ARCHIVE
prefix = "ra:"
pathPrefix = "r/"
title = "Nostr relays archive"
}
keys := cache.GetPaginatedKeys(prefix, page, resultsPerPage)
keys := cache.GetPaginatedKeys(prefix, page, 5000)
data := []string{}
for i := 0; i < len(keys); i++ {
if area == "npubs-archive" {
switch area {
case NPUBS_ARCHIVE:
npub, _ := nip19.EncodePublicKey(keys[i][3:])
data = append(data, npub)
} else {
case RELAYS_ARCHIVE:
data = append(data, trimProtocol(keys[i][3:]))
}
}
prevPage := page - 1
nextPage := page + 1
if len(keys) == 0 {
prevPage = 0
nextPage = 0
}
// Generate a random duration between 2 and 6 hours
minHours := 2
maxHours := 6
@@ -87,19 +70,6 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Cache-Control", "max-age=60")
}
if !isSitemap {
archiveTemplate(ArchivePageParams{
HeadParams: HeadParams{IsProfile: false},
Title: title,
PathPrefix: pathPrefix,
Data: data,
ModifiedAt: modifiedAt,
PaginationUrl: area,
NextPage: nextPage,
PrevPage: prevPage,
}).Render(r.Context(), w)
} else {
w.Header().Add("content-type", "text/xml")
w.Write([]byte(XML_HEADER))
SitemapTemplate.Render(w, &SitemapPage{
@@ -108,5 +78,4 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
             PathPrefix: pathPrefix,
             Data:       data,
         })
-    }
 }
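
If you want to exercise the endpoints that remain after this change, something like the following works against a running instance. The base URL and port are assumptions; the only behavior taken from the diff is that the handler answers with content-type text/xml.

// Sketch: fetch the two surviving sitemap endpoints and print what came back.
package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    base := "http://localhost:2999" // assumed address of a local njump instance
    for _, path := range []string{"/npubs-archive.xml", "/relays-archive.xml"} {
        resp, err := http.Get(base + path)
        if err != nil {
            fmt.Println(path, "error:", err)
            continue
        }
        body, _ := io.ReadAll(resp.Body)
        resp.Body.Close()
        fmt.Printf("%s -> %s, %d bytes, content-type %q\n",
            path, resp.Status, len(body), resp.Header.Get("content-type"))
    }
}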