feat: Implement robots.txt parser
gemini/robots_test.go (new file, 25 lines)
@@ -0,0 +1,25 @@
package gemini

import (
	"reflect"
	"testing"
)

func TestParseRobotsTxt(t *testing.T) {
	input := `User-agent: *
Disallow: /cgi-bin/wp.cgi/view
Disallow: /cgi-bin/wp.cgi/media
User-agent: googlebot
Disallow: /admin/`

	expected := []string{
		"gemini://example.com/cgi-bin/wp.cgi/view",
		"gemini://example.com/cgi-bin/wp.cgi/media",
	}

	result := ParseRobotsTxt(input, "example.com")

	if !reflect.DeepEqual(result, expected) {
		t.Errorf("ParseRobotsTxt() = %v, want %v", result, expected)
	}
}
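The parser itself is not part of this diff; only the test is shown. For context, a minimal sketch of a ParseRobotsTxt that would satisfy the test above could look like the following. The line handling, trimming, and the host parameter name are assumptions; only the function signature and the expected output are taken from the test.

package gemini

import (
	"bufio"
	"strings"
)

// ParseRobotsTxt is a sketch only: it keeps the Disallow rules that apply to
// every crawler (User-agent: *) and returns them as absolute gemini:// URLs
// for the given host. The real implementation may handle more cases.
func ParseRobotsTxt(content string, host string) []string {
	var disallowed []string
	appliesToUs := false

	scanner := bufio.NewScanner(strings.NewReader(content))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		switch {
		case strings.HasPrefix(line, "User-agent:"):
			agent := strings.TrimSpace(strings.TrimPrefix(line, "User-agent:"))
			appliesToUs = agent == "*"
		case appliesToUs && strings.HasPrefix(line, "Disallow:"):
			path := strings.TrimSpace(strings.TrimPrefix(line, "Disallow:"))
			if path != "" {
				disallowed = append(disallowed, "gemini://"+host+path)
			}
		}
	}
	return disallowed
}

With this sketch the test passes: only the two Disallow paths under User-agent: * are collected, while /admin/ under User-agent: googlebot is ignored.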