Add robots.txt

This commit is contained in:
Daniele Tonon
2023-09-11 01:42:17 +02:00
parent a269fd32cb
commit c4bad18ee5
3 changed files with 26 additions and 0 deletions

View File

@@ -69,6 +69,7 @@ func main() {
templateMapping["relay_sitemap"] = "sitemap.xml"
templateMapping["archive"] = "archive.html"
templateMapping["archive_sitemap"] = "sitemap.xml"
templateMapping["robots"] = "robots.txt"
funcMap := template.FuncMap{
"basicFormatting": basicFormatting,
@@ -87,6 +88,7 @@ func main() {
)
// routes
http.HandleFunc("/robots.txt", renderRobots)
http.HandleFunc("/njump/image/", generate)
http.HandleFunc("/njump/proxy/", proxy)
http.Handle("/njump/static/", http.StripPrefix("/njump/", http.FileServer(http.FS(static))))

19
render_robots.go Normal file
View File

@@ -0,0 +1,19 @@
package main
import (
"net/http"
)
// renderRobots is the HTTP handler for /robots.txt. It renders the
// "robots" template (resolved through templateMapping) against the
// configured canonical host and writes the result to the client.
func renderRobots(w http.ResponseWriter, r *http.Request) {
	// Let clients and intermediaries cache the response for one hour.
	w.Header().Set("Cache-Control", "max-age=3600")

	data := map[string]any{
		"CanonicalHost": s.CanonicalHost,
	}

	// templateMapping["robots"] points at templates/robots.txt.
	err := tmpl.ExecuteTemplate(w, templateMapping["robots"], data)
	if err != nil {
		log.Error().Err(err).Msg("error rendering")
	}
}

5
templates/robots.txt Normal file
View File

@@ -0,0 +1,5 @@
User-agent: *
Allow: /
Sitemap: https://{{.CanonicalHost}}/npubs-archive.xml
Sitemap: https://{{.CanonicalHost}}/relays-archive.xml