package main | |
import ( | |
"fmt" | |
"net/http" | |
"time" | |
) | |
// urls is the default list of endpoints fetched concurrently by main.
var urls = []string{
	"https://splice.com/",
	"https://golang.org/",
	"https://matt.aimonetti.net/",
}
// HttpResponse pairs a fetched URL with either its *http.Response or the
// error returned by http.Get. Exactly one of response/err is meaningful:
// when err is non-nil, response is nil (http.Get's contract).
// NOTE(review): idiomatic Go would name this HTTPResponse, but renaming
// an exported type would break existing callers, so it is kept as-is.
type HttpResponse struct {
	url      string
	response *http.Response
	err      error
}
func asyncHttpGets(urls []string) []*HttpResponse { | |
ch := make(chan *HttpResponse, len(urls)) // buffered | |
responses := []*HttpResponse{} | |
for _, url := range urls { | |
go func(url string) { | |
fmt.Printf("Fetching %s \n", url) | |
resp, err := http.Get(url) | |
if err == nil { | |
resp.Body.Close() | |
} | |
ch <- &HttpResponse{url, resp, err} | |
}(url) | |
} | |
for { | |
select { | |
case r := <-ch: | |
fmt.Printf("%s was fetched\n", r.url) | |
responses = append(responses, r) | |
if len(responses) == len(urls) { | |
return responses | |
} | |
case <-time.After(50 * time.Millisecond): | |
fmt.Printf(".") | |
} | |
} | |
return responses | |
} | |
func main() { | |
results := asyncHttpGets(urls) | |
for _, result := range results { | |
if result.err != nil { | |
fmt.Printf("%s error: %v\n", result.url, | |
result.err) | |
continue | |
} | |
fmt.Printf("%s status: %s\n", result.url, | |
result.response.Status) | |
} | |
} |
Thank you!
Hello from the future.
The code doesn't work.
Error is:
./prog.go:46:2: unreachable code
Go vet exited.
Fetching http://pulsoconf.co/
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0xffffffff addr=0x0 pc=0x2b79aa]goroutine 6 [running]:
main.asyncHttpGets.func1(0x832180, 0x328cb4, 0x14, 0x864ed8)
/tmp/sandbox137476626/prog.go:28 +0xea
created by main.asyncHttpGets
/tmp/sandbox137476626/prog.go:25 +0xa0
I'm new to Go. I tried to understand the issue, but without any result. Could you please help?
@DudeFactory the problem was that one of the domains wasn't active anymore and I wasn't checking the error when fetching the URL.
I fixed the domain and added an error check after resp, err := http.Get(url)
see: https://gist.github.com/mattetti/3798173#file-gistfile1-go-L28
Hopefully that helps clarify your confusion.
@DudeFactory the problem was that one of the domains wasn't active anymore and I wasn't checking the error when fetching the URL.
I fixed the domain and added an error check after `resp, err := http.Get(url)`.
see: https://gist.github.com/mattetti/3798173#file-gistfile1-go-L28
Hopefully that helps clarify your confusion.
Thanks. I tried only checking whether resp is nil.
It still doesn't work for me. For example, I added these new domains:
"http://www.webmagnat.ro",
"http://nickelfreesolutions.com",
"http://scheepvaarttelefoongids.nl",
"http://tursan.net",
"http://plannersanonymous.com",
"http://saltstack.com",
"http://deconsquad.com",
"http://migom.com",
"http://tjprc.org",
"http://worklife.dk",
"http://food-hub.org",
So it gets stuck, and someone suggested I use the WaitGroup construct — but why does it work for you?
Stackoverflow question: https://stackoverflow.com/questions/60148016/how-send-n-get-requests-where-n-10-urls
@DudeFactory I fixed the example, try with your domains now. The problem was that my quick fix was skipping writing to the channel in case of error so I didn't have to check if there was an error. The fix was to check for errors when reading from the channel and print the proper statement.
How about fetching HTTP requests with a timeout?
This one does not work; it gives a runtime error.
https://gist.github.com/pranjal5215/18f95fa506d59db9c740
What I'm basically trying to do is fetch URLs with a timeout (I've kept a very small timeout because I want it to time out deliberately), but on timeout it throws a runtime error.
[I am New to go]