package main

import (
	"fmt"
	"net/http"
	"time"
)

var urls = []string{
	"https://splice.com/",
	"https://golang.org/",
	"https://matt.aimonetti.net/",
}

type HttpResponse struct {
	url      string
	response *http.Response
	err      error
}

// asyncHttpGets fetches all URLs concurrently and returns once every
// request has either completed or failed.
func asyncHttpGets(urls []string) []*HttpResponse {
	ch := make(chan *HttpResponse, len(urls)) // buffered so goroutines never block on send
	responses := []*HttpResponse{}
	for _, url := range urls {
		go func(url string) {
			fmt.Printf("Fetching %s \n", url)
			resp, err := http.Get(url)
			if err == nil {
				resp.Body.Close()
			}
			// Send the result even on error so the collecting loop
			// below sees exactly one message per URL.
			ch <- &HttpResponse{url, resp, err}
		}(url)
	}
	for {
		select {
		case r := <-ch:
			fmt.Printf("%s was fetched\n", r.url)
			responses = append(responses, r)
			if len(responses) == len(urls) {
				return responses
			}
		case <-time.After(50 * time.Millisecond):
			// Print a progress dot while waiting for slow requests.
			fmt.Printf(".")
		}
	}
}

func main() {
	results := asyncHttpGets(urls)
	for _, result := range results {
		if result.err != nil {
			fmt.Printf("%s error: %v\n", result.url, result.err)
			continue
		}
		fmt.Printf("%s status: %s\n", result.url, result.response.Status)
	}
}
@DudeFactory the problem was that one of the domains wasn't active anymore and I wasn't checking the error when fetching the URL.
I fixed the domain and added an error check after resp, err := http.Get(url), see: https://gist.github.com/mattetti/3798173#file-gistfile1-go-L28. Hopefully that helps clarify your confusion.
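The relevant goroutine body, matching the fixed code above (close the body only when the request succeeded, but always send a result):

	resp, err := http.Get(url)
	if err == nil {
		// Only close the body when the request actually succeeded;
		// on error resp is usually nil and closing its body would panic.
		resp.Body.Close()
	}
	ch <- &HttpResponse{url, resp, err} // always report, even on error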
Thanks. I tried only checking whether resp is nil.
Still doesn't work for me. For example, I added new domains:
"http://www.webmagnat.ro",
"http://nickelfreesolutions.com",
"http://scheepvaarttelefoongids.nl",
"http://tursan.net",
"http://plannersanonymous.com",
"http://saltstack.com",
"http://deconsquad.com",
"http://migom.com",
"http://tjprc.org",
"http://worklife.dk",
"http://food-hub.org",
So it gets stuck, and someone suggested I use the WaitGroup construct, but why does it work for you?
Stack Overflow question: https://stackoverflow.com/questions/60148016/how-send-n-get-requests-where-n-10-urls
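For reference, a minimal sketch of the suggested sync.WaitGroup approach (my illustration, not code from the gist; it reuses the HttpResponse type defined above, and fetchAll is a hypothetical name):

	import (
		"net/http"
		"sync"
	)

	func fetchAll(urls []string) []*HttpResponse {
		var wg sync.WaitGroup
		responses := make([]*HttpResponse, len(urls))
		for i, url := range urls {
			wg.Add(1)
			go func(i int, url string) {
				defer wg.Done() // mark this URL done even if the request fails
				resp, err := http.Get(url)
				if err == nil {
					resp.Body.Close()
				}
				// Each goroutine writes only its own slot, so no extra locking is needed.
				responses[i] = &HttpResponse{url, resp, err}
			}(i, url)
		}
		wg.Wait() // block until every goroutine has called Done
		return responses
	}

Unlike the channel version, this simply blocks until every request returns, so a hung server will stall it unless you also set a timeout on the http.Client.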
@DudeFactory I fixed the example, try it with your domains now. The problem was that my quick fix skipped writing to the channel in case of error, so I didn't have to check for an error when collecting results. The fix was to check for errors when reading from the channel and print the proper statement.
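For illustration, the broken quick fix looked roughly like this (a reconstruction, not the actual earlier code):

	resp, err := http.Get(url)
	if err != nil {
		// bug: nothing is sent on error, so the collecting loop
		// never reaches len(urls) and the program appears stuck
		return
	}
	resp.Body.Close()
	ch <- &HttpResponse{url, resp, err}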