import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/assert"
)

// robots is the fixture served by the test server: two user-agent groups
// followed by two sitemap directives.
var robots = `
# Group 1
User-agent: Googlebot
Disallow: /nogooglebot/

# Group 2
User-agent: *
Allow: /

Sitemap: http://www.example.com/sitemap.xml
Sitemap: http://www.example.com/sitemap2.xml
`

func TestRobotsClient_Httptest(t *testing.T) {
	// httptest.NewServer binds a real listener on a random local port and
	// serves the fixture for every request, so the client under test speaks
	// real HTTP without touching the network.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, robots)
	}))
	defer ts.Close()

	client := NewRobotsClient()
	robotRules, err := client.FetchRobots(ts.URL)
	assert.NoError(t, err)
	t.Logf("parsed rules: %+v", robotRules)

	expectedRobotRules := RobotRules{
		Rules: []Rule{
			{
				UserAgents: []string{"Googlebot"},
				Disallow:   []string{"/nogooglebot/"},
			},
			{
				UserAgents: []string{"*"},
				Allow:      []string{"/"},
			},
		},
		Sitemaps: []string{
			"http://www.example.com/sitemap.xml",
			"http://www.example.com/sitemap2.xml",
		},
	}
	assert.Equal(t, expectedRobotRules, robotRules)
}
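// The test above compiles only against a RobotsClient whose FetchRobots
// method returns a RobotRules value. The declarations below, which would
// live in their own file in the same package, are a minimal sketch of that
// production code: the names NewRobotsClient, FetchRobots, RobotRules, and
// Rule come from the test itself, but the line-by-line parsing and the
// "/robots.txt" path convention are assumptions, not the actual
// implementation.

import (
	"bufio"
	"net/http"
	"strings"
)

// Rule groups the directives that apply to one or more user agents.
type Rule struct {
	UserAgents []string
	Allow      []string
	Disallow   []string
}

// RobotRules is the parsed form of a robots.txt file.
type RobotRules struct {
	Rules    []Rule
	Sitemaps []string
}

// RobotsClient fetches and parses robots.txt files over HTTP.
type RobotsClient struct {
	httpClient *http.Client
}

// NewRobotsClient returns a client backed by http.DefaultClient.
func NewRobotsClient() *RobotsClient {
	return &RobotsClient{httpClient: http.DefaultClient}
}

// FetchRobots downloads and parses the robots.txt for the given site.
// Consecutive User-agent lines share one Rule; any other directive ends
// the run, so the next User-agent line starts a fresh group.
func (c *RobotsClient) FetchRobots(siteURL string) (RobotRules, error) {
	resp, err := c.httpClient.Get(siteURL + "/robots.txt") // assumed path convention
	if err != nil {
		return RobotRules{}, err
	}
	defer resp.Body.Close()

	var parsed RobotRules
	var cur *Rule
	inAgentRun := false // true while consuming consecutive User-agent lines

	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue // skip blanks and comments
		}
		key, value, found := strings.Cut(line, ":") // split on the first colon only
		if !found {
			continue
		}
		value = strings.TrimSpace(value)
		switch strings.ToLower(strings.TrimSpace(key)) {
		case "user-agent":
			if !inAgentRun {
				parsed.Rules = append(parsed.Rules, Rule{})
				cur = &parsed.Rules[len(parsed.Rules)-1]
				inAgentRun = true
			}
			cur.UserAgents = append(cur.UserAgents, value)
		case "allow":
			inAgentRun = false
			if cur != nil {
				cur.Allow = append(cur.Allow, value)
			}
		case "disallow":
			inAgentRun = false
			if cur != nil {
				cur.Disallow = append(cur.Disallow, value)
			}
		case "sitemap":
			parsed.Sitemaps = append(parsed.Sitemaps, value)
		}
	}
	return parsed, scanner.Err()
}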