Add robots.txt checking

Still needs periodic cache refresh
2024-10-23 14:24:10 +03:00
parent 1ac250ca6e
commit 094394afc2
5 changed files with 108 additions and 37 deletions
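As a rough, hypothetical illustration of how the crawler side might consult the parsed rules, here is a sketch assuming an in-memory per-host cache. The names robotsCache and IsDisallowed are illustrative and not taken from this commit; the cache is filled once per host with no periodic refresh yet, matching the note above.

// Hypothetical sketch, not code from this commit: a per-host cache of the
// disallowed URL prefixes returned by ParseRobotsTxt, consulted before
// fetching a URL. As noted in the commit message, entries are never
// refreshed yet.
package gemini

import "strings"

// robotsCache maps a hostname to the disallowed gemini:// URL prefixes
// parsed from that host's robots.txt.
var robotsCache = map[string][]string{}

// IsDisallowed reports whether url matches any cached Disallow prefix
// for the given host.
func IsDisallowed(host, url string) bool {
	for _, prefix := range robotsCache[host] {
		if strings.HasPrefix(url, prefix) {
			return true
		}
	}
	return false
}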

@@ -1,8 +1,8 @@
 package gemini
 
 import (
-	"testing"
 	"reflect"
+	"testing"
 )
 
 func TestParseRobotsTxt(t *testing.T) {
@@ -15,6 +15,7 @@ Disallow: /admin/`
 	expected := []string{
 		"gemini://example.com/cgi-bin/wp.cgi/view",
 		"gemini://example.com/cgi-bin/wp.cgi/media",
+		"gemini://example.com/admin/",
 	}
 
 	result := ParseRobotsTxt(input, "example.com")
@@ -23,3 +24,13 @@ Disallow: /admin/`
 		t.Errorf("ParseRobotsTxt() = %v, want %v", result, expected)
 	}
 }
+
+func TestParseRobotsTxtEmpty(t *testing.T) {
+	input := ``
+
+	result := ParseRobotsTxt(input, "example.com")
+
+	if len(result) != 0 {
+		t.Errorf("ParseRobotsTxt() = %v, want empty []string", result)
+	}
+}
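For context, here is a minimal sketch of a ParseRobotsTxt that would satisfy the tests above, assuming the function simply collects every Disallow path and turns it into an absolute gemini:// URL for the host. The actual implementation in this commit may differ, for example by honouring User-agent sections.

// A minimal sketch, not the code from this commit: every Disallow path is
// prefixed with the capsule's gemini:// host. An empty robots.txt body
// yields an empty result, as the second test expects.
package gemini

import "strings"

// ParseRobotsTxt returns the disallowed URLs for domain, built from the
// Disallow lines of a robots.txt body.
func ParseRobotsTxt(content, domain string) []string {
	var disallowed []string
	for _, line := range strings.Split(content, "\n") {
		line = strings.TrimSpace(line)
		if !strings.HasPrefix(line, "Disallow:") {
			continue
		}
		path := strings.TrimSpace(strings.TrimPrefix(line, "Disallow:"))
		if path != "" {
			disallowed = append(disallowed, "gemini://"+domain+path)
		}
	}
	return disallowed
}

With rules like those in the first test, ParseRobotsTxt(input, "example.com") yields exactly the three expected URLs, and an empty body yields a zero-length slice.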