Mirror of https://github.com/aljazceru/njump.git, synced 2025-12-17 14:24:27 +01:00
remove HTML archive pages, leave only the sitemaps.
@@ -1,48 +0,0 @@
-package main
-
-import "fmt"
-
-templ archiveTemplate(params ArchivePageParams) {
-	<!DOCTYPE html>
-	<html class="theme--default text-lg font-light print:text-base sm:text-xl">
-		<meta charset="UTF-8"/>
-		<head>
-			<title>{ params.Title }</title>
-			@headCommonTemplate(params.HeadParams)
-		</head>
-		<body class="mb-16 bg-white text-gray-600 dark:bg-neutral-900 dark:text-neutral-50 print:text-black">
-			@topTemplate(params.HeadParams)
-			<div class="mx-auto block px-4 sm:flex sm:items-center sm:justify-center sm:px-0">
-				<div class="w-full max-w-screen-2xl gap-10 overflow-visible print:w-full sm:flex sm:w-11/12 sm:px-4 md:w-10/12 lg:w-9/12 lg:gap-48vw">
-					<div class="relative top-auto flex basis-1/5 sm:max-w-[20%] items-center self-start sm:sticky sm:top-8 sm:mt-8 sm:block sm:items-start">
-						<div
-							class="hidden text-2xl"
-							_="on load or scroll from window or resize from window get #page_name then measure its top, height then if top is less than height / -2 or height is 0 remove .hidden otherwise add .hidden"
-						>{ params.Title }</div>
-					</div>
-					<div class="w-full break-words break-all print:w-full basis-3/5">
-						<div id="page_name" class="mb-6 leading-5">
-							<h1 class="hidden sm:block text-2xl">{ params.Title }</h1>
-						</div>
-						<div class="mb-6 leading-5">
-							for _, v := range params.Data {
-								<a class="block" href={ templ.URL("/" + params.PathPrefix + v) }>
-									{ v }
-								</a>
-							}
-						</div>
-						<div class="flex justify-between">
-							if params.PrevPage != 0 {
-								<a href={ templ.URL(fmt.Sprintf("/%s/%d", params.PaginationUrl, params.PrevPage)) }><< Prev page</a>
-							}
-							if params.NextPage != 0 {
-								<a href={ templ.URL(fmt.Sprintf("/%s/%d", params.PaginationUrl, params.NextPage)) }>Next page >></a>
-							}
-						</div>
-					</div>
-				</div>
-			</div>
-			@footerTemplate()
-		</body>
-	</html>
-}
main.go (2 changed lines)
@@ -125,8 +125,6 @@ func main() {
 	mux.HandleFunc("/relays-archive.xml", renderArchive)
 	mux.HandleFunc("/npubs-archive.xml", renderArchive)
 	mux.HandleFunc("/services/oembed", renderOEmbed)
-	mux.HandleFunc("/relays-archive/", renderArchive)
-	mux.HandleFunc("/npubs-archive/", renderArchive)
 	mux.HandleFunc("/njump/image/", renderImage)
 	mux.HandleFunc("/njump/proxy/", proxy)
 	mux.HandleFunc("/robots.txt", renderRobots)
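Note: the two deleted routes are the ones that served the paginated HTML archive pages, because in Go's net/http ServeMux a pattern ending in "/" matches the whole subtree under that path, while a pattern without a trailing slash matches only that exact path; the .xml sitemap routes are therefore untouched. A minimal, self-contained sketch (not code from this repository) illustrating that matching rule:

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Exact-path pattern: only /npubs-archive.xml reaches this handler.
	mux.HandleFunc("/npubs-archive.xml", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "sitemap")
	})
	// Subtree pattern (trailing slash): /npubs-archive/, /npubs-archive/2, ...
	// all reach this handler; this is the kind of route the commit removes.
	mux.HandleFunc("/npubs-archive/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "HTML archive page:", r.URL.Path)
	})
	log.Fatal(http.ListenAndServe(":8080", mux))
}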
pages.go (12 changed lines)
@@ -92,18 +92,6 @@ type AboutParams struct {
 	HeadParams
 }
 
-type ArchivePageParams struct {
-	HeadParams
-
-	Title         string
-	PathPrefix    string
-	Data          []string
-	ModifiedAt    string
-	PaginationUrl string
-	NextPage      int
-	PrevPage      int
-}
-
 type EmbeddedNoteParams struct {
 	Content   template.HTML
 	CreatedAt string
@@ -11,17 +11,13 @@ import (
 	"github.com/nbd-wtf/go-nostr/nip19"
 )
 
+const (
+	NPUBS_ARCHIVE  = iota
+	RELAYS_ARCHIVE = iota
+)
+
 func renderArchive(w http.ResponseWriter, r *http.Request) {
 	fmt.Println(r.URL.Path, "@.", r.Header.Get("user-agent"))
-	code := r.URL.Path[1:]
-	hostname := code[2:]
-	resultsPerPage := 50
-	isSitemap := false
-
-	if strings.HasSuffix(hostname, ".xml") {
-		isSitemap = true
-		resultsPerPage = 5000
-	}
-
 	lastIndex := strings.LastIndex(r.URL.Path, "/")
 	page := 1
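Note: both added constants are written "= iota", so they evaluate to 0 and 1: inside a single const block iota starts at 0 and increments by one per line. A tiny standalone illustration (not repository code):

package main

import "fmt"

const (
	NPUBS_ARCHIVE  = iota // iota is 0 on the first line
	RELAYS_ARCHIVE = iota // iota is 1 here; "= iota" could also be left implicit
)

func main() {
	fmt.Println(NPUBS_ARCHIVE, RELAYS_ARCHIVE) // Output: 0 1
}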
@@ -37,42 +33,29 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
 
 	prefix := ""
 	pathPrefix := ""
-	title := ""
-	area := ""
+	var area int
 	if strings.HasPrefix(r.URL.Path[1:], "npubs-archive") {
-		area = "npubs-archive"
-	} else if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
-		area = "relays-archive"
-	}
-
-	if area == "npubs-archive" {
+		area = NPUBS_ARCHIVE
 		prefix = "pa:"
 		pathPrefix = ""
-		title = "Nostr npubs archive"
-	} else {
+	} else if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
+		area = RELAYS_ARCHIVE
 		prefix = "ra:"
 		pathPrefix = "r/"
-		title = "Nostr relays archive"
 	}
 
-	keys := cache.GetPaginatedKeys(prefix, page, resultsPerPage)
+	keys := cache.GetPaginatedKeys(prefix, page, 5000)
 	data := []string{}
 	for i := 0; i < len(keys); i++ {
-		if area == "npubs-archive" {
+		switch area {
+		case NPUBS_ARCHIVE:
 			npub, _ := nip19.EncodePublicKey(keys[i][3:])
 			data = append(data, npub)
-		} else {
+		case RELAYS_ARCHIVE:
 			data = append(data, trimProtocol(keys[i][3:]))
 		}
 	}
 
-	prevPage := page - 1
-	nextPage := page + 1
-	if len(keys) == 0 {
-		prevPage = 0
-		nextPage = 0
-	}
-
 	// Generate a random duration between 2 and 6 hours
 	minHours := 2
 	maxHours := 6
@@ -87,19 +70,6 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
 		w.Header().Set("Cache-Control", "max-age=60")
 	}
 
-	if !isSitemap {
-		archiveTemplate(ArchivePageParams{
-			HeadParams: HeadParams{IsProfile: false},
-
-			Title:         title,
-			PathPrefix:    pathPrefix,
-			Data:          data,
-			ModifiedAt:    modifiedAt,
-			PaginationUrl: area,
-			NextPage:      nextPage,
-			PrevPage:      prevPage,
-		}).Render(r.Context(), w)
-	} else {
 		w.Header().Add("content-type", "text/xml")
 		w.Write([]byte(XML_HEADER))
 		SitemapTemplate.Render(w, &SitemapPage{
@@ -108,5 +78,4 @@ func renderArchive(w http.ResponseWriter, r *http.Request) {
 			PathPrefix: pathPrefix,
 			Data:       data,
 		})
-	}
 }
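Note: taken together, the hunks above leave renderArchive serving only the XML sitemaps. A rough, non-authoritative sketch of how the function reads after this commit, assembled purely from the "+" and context lines shown above; the page parsing, the Cache-Control logic, and error handling are elided, and cache.GetPaginatedKeys, trimProtocol, XML_HEADER, SitemapTemplate and SitemapPage are helpers defined elsewhere in the repository:

func renderArchive(w http.ResponseWriter, r *http.Request) {
	fmt.Println(r.URL.Path, "@.", r.Header.Get("user-agent"))

	// ... page is derived from the trailing path segment (unchanged code, not shown in the diff) ...
	page := 1

	prefix := ""
	pathPrefix := ""
	var area int
	if strings.HasPrefix(r.URL.Path[1:], "npubs-archive") {
		area = NPUBS_ARCHIVE
		prefix = "pa:"
		pathPrefix = ""
	} else if strings.HasPrefix(r.URL.Path[1:], "relays-archive") {
		area = RELAYS_ARCHIVE
		prefix = "ra:"
		pathPrefix = "r/"
	}

	// 5000 entries per sitemap page, as in the changed GetPaginatedKeys call above.
	keys := cache.GetPaginatedKeys(prefix, page, 5000)
	data := []string{}
	for i := 0; i < len(keys); i++ {
		switch area {
		case NPUBS_ARCHIVE:
			npub, _ := nip19.EncodePublicKey(keys[i][3:])
			data = append(data, npub)
		case RELAYS_ARCHIVE:
			data = append(data, trimProtocol(keys[i][3:]))
		}
	}

	w.Header().Add("content-type", "text/xml")
	w.Write([]byte(XML_HEADER))
	SitemapTemplate.Render(w, &SitemapPage{
		// ... remaining fields as in the surrounding, unchanged code ...
		PathPrefix: pathPrefix,
		Data:       data,
	})
}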