package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	// DONE: Fetch URLs in parallel.
	// DONE: Don't fetch the same URL twice.
	ch := make(chan Fetched)
	n := 1 // number of fetches currently in flight
	cache := map[string]bool{url: true}
	go Fetch(url, depth, fetcher, ch)
	for n > 0 {
		n--
		fetched := <-ch
		if fetched.err != nil {
			fmt.Println(fetched.err)
			continue
		}
		fmt.Printf("found: %s %q\n", fetched.url, fetched.body)
		if fetched.depth > 1 {
			for _, u := range fetched.urls {
				if !cache[u] {
					n++
					cache[u] = true
					go Fetch(u, fetched.depth-1, fetcher, ch)
				}
			}
		}
	}
}

// Fetch fetches url with fetcher and sends the result on ch.
func Fetch(url string, depth int, fetcher Fetcher, ch chan Fetched) {
	body, urls, err := fetcher.Fetch(url)
	ch <- Fetched{url, depth, body, urls, err}
}

// Fetched carries the result of a single fetch back to Crawl.
type Fetched struct {
	url   string
	depth int
	body  string
	urls  []string
	err   error
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
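
// As a point of comparison, this exercise is often solved with
// sync.WaitGroup plus a mutex-guarded set instead of counting in-flight
// fetches over a channel. The sketch below is illustrative only and is
// kept inside a comment block so this file still compiles as-is:
// CrawlWG and SafeCache are hypothetical names, not part of the solution
// above, and the sketch additionally assumes `import "sync"`.
/*
// SafeCache is a set of visited URLs safe for concurrent use.
type SafeCache struct {
	mu   sync.Mutex
	seen map[string]bool
}

// Visit reports whether url was already seen, marking it seen if not.
func (c *SafeCache) Visit(url string) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.seen[url] {
		return true
	}
	c.seen[url] = true
	return false
}

// CrawlWG crawls the same pages as Crawl above, but signals completion
// through wg rather than sending results back on a channel.
func CrawlWG(url string, depth int, fetcher Fetcher, cache *SafeCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Visit(url) {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1) // register the child before spawning it, so Wait can't fire early
		go CrawlWG(u, depth-1, fetcher, cache, wg)
	}
}

// Usage, e.g. from main:
//	var wg sync.WaitGroup
//	cache := &SafeCache{seen: make(map[string]bool)}
//	wg.Add(1)
//	go CrawlWG("http://golang.org/", 4, fetcher, cache, &wg)
//	wg.Wait()
*/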