package main

import (
    "fmt"
    "net/http"
    "time"
)

var urls = []string{
    "https://splice.com/",
    "https://golang.org/",
    "https://matt.aimonetti.net/",
}

type HttpResponse struct {
    url      string
    response *http.Response
    err      error
}

func asyncHttpGets(urls []string) []*HttpResponse {
    ch := make(chan *HttpResponse, len(urls)) // buffered so senders never block
    responses := []*HttpResponse{}
    for _, url := range urls {
        go func(url string) {
            fmt.Printf("Fetching %s\n", url)
            resp, err := http.Get(url)
            if err == nil {
                // Close the body right away; we only report the status.
                resp.Body.Close()
            }
            ch <- &HttpResponse{url, resp, err}
        }(url)
    }
    for {
        select {
        case r := <-ch:
            fmt.Printf("%s was fetched\n", r.url)
            responses = append(responses, r)
            if len(responses) == len(urls) {
                return responses
            }
        case <-time.After(50 * time.Millisecond):
            // Print a progress dot while waiting for slow responses.
            fmt.Printf(".")
        }
    }
}

func main() {
    results := asyncHttpGets(urls)
    for _, result := range results {
        if result.err != nil {
            fmt.Printf("%s error: %v\n", result.url, result.err)
            continue
        }
        fmt.Printf("%s status: %s\n", result.url, result.response.Status)
    }
}
As @kr pointed out, a nicer version that doesn't require returning a slice is available here: https://gist.github.com/4013851/199db5624032fe503e9518b77375111ffb6ba54f
How about making HTTP requests with a timeout?
This one does not work; it gives a runtime error: https://gist.github.com/pranjal5215/18f95fa506d59db9c740
What I'm basically trying to do is fetch URLs with a timeout (I have kept the timeout very small; I want it to time out deliberately), but on timeout it throws a runtime error.
[I am new to Go]
Thank you!
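A common way to get a deliberate timeout without a panic is to set the Timeout field on an http.Client; on timeout the call returns a nil response and a non-nil error, so the error must be checked before touching resp. Here is a minimal sketch of that pattern (the URL and duration are placeholders, not the code from the linked gist):

package main

import (
    "fmt"
    "net/http"
    "time"
)

func main() {
    // Deliberately tiny timeout so the request is guaranteed to fail fast.
    client := &http.Client{Timeout: 50 * time.Millisecond}

    resp, err := client.Get("https://golang.org/")
    if err != nil {
        // On timeout resp is nil; dereferencing it here is what
        // produces the nil pointer panic.
        fmt.Println("request failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}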
Hello from the future.
The code doesn't work.
Error is:
./prog.go:46:2: unreachable code
Go vet exited.

Fetching http://pulsoconf.co/
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0xffffffff addr=0x0 pc=0x2b79aa]

goroutine 6 [running]:
main.asyncHttpGets.func1(0x832180, 0x328cb4, 0x14, 0x864ed8)
    /tmp/sandbox137476626/prog.go:28 +0xea
created by main.asyncHttpGets
    /tmp/sandbox137476626/prog.go:25 +0xa0
I'm new to Go; I tried to understand the issue, but without result. Could you please help?
@DudeFactory the problem was that one of the domains wasn't active anymore and I wasn't checking the error when fetching the URL.
I fixed the domain and added an error check after resp, err := http.Get(url).
See: https://gist.github.com/mattetti/3798173#file-gistfile1-go-L28
Hopefully that helps clear up the confusion.
Thanks. I tried only checking whether resp is nil.
It still doesn't work for me. For example, I added new domains:
"http://www.webmagnat.ro",
"http://nickelfreesolutions.com",
"http://scheepvaarttelefoongids.nl",
"http://tursan.net",
"http://plannersanonymous.com",
"http://saltstack.com",
"http://deconsquad.com",
"http://migom.com",
"http://tjprc.org",
"http://worklife.dk",
"http://food-hub.org",
It gets stuck, and someone suggested I use the WaitGroup construct, but why does it work for you?
Stack Overflow question: https://stackoverflow.com/questions/60148016/how-send-n-get-requests-where-n-10-urls
@DudeFactory I fixed the example; try it with your domains now. The problem was that my quick fix skipped writing to the channel in case of error, so I didn't have to check for an error when reading. The fix was to check for errors when reading from the channel and print the proper statement.
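For comparison, the WaitGroup approach mentioned above might look something like this minimal sketch (the URLs are placeholders taken from the list above; this illustrates the pattern and is not the gist's code):

package main

import (
    "fmt"
    "net/http"
    "sync"
)

func main() {
    urls := []string{
        "http://www.webmagnat.ro",
        "http://saltstack.com",
    }

    var wg sync.WaitGroup
    for _, url := range urls {
        wg.Add(1)
        go func(url string) {
            defer wg.Done()
            resp, err := http.Get(url)
            if err != nil {
                fmt.Printf("%s error: %v\n", url, err)
                return
            }
            resp.Body.Close()
            fmt.Printf("%s status: %s\n", url, resp.Status)
        }(url)
    }
    // Wait blocks until every goroutine has called Done,
    // so main cannot exit before all fetches finish.
    wg.Wait()
}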
Hi! It's nice to see more Go examples such as this one showing up in slide decks. :)
Couple of comments on this gist:
"for _, results :=" should be "for _, result :=". https://gist.github.com/4013851/ec1fc92967e607a487c220e14c75252828bd0606
Don't forget resp.Body.Close. https://gist.github.com/4013851/69dc3a64214afc3663998f4e8a74ec8693f8d070
time.After instead of default + time.Sleep will avoid blocking when there's useful work that could be done. https://gist.github.com/4013851/b4de0cb09ac3753dea97f8b459b10e0e35106a07
You could just as easily have main read the results off the channel. https://gist.github.com/4013851/199db5624032fe503e9518b77375111ffb6ba54f
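That last approach might look roughly like this minimal sketch (placeholder URLs; see the linked revision for the actual version), with main consuming results straight off the channel instead of a helper returning a slice:

package main

import (
    "fmt"
    "net/http"
)

type HttpResponse struct {
    url      string
    response *http.Response
    err      error
}

func main() {
    urls := []string{
        "https://splice.com/",
        "https://golang.org/",
    }

    ch := make(chan *HttpResponse, len(urls))
    for _, url := range urls {
        go func(url string) {
            resp, err := http.Get(url)
            if err == nil {
                resp.Body.Close()
            }
            ch <- &HttpResponse{url, resp, err}
        }(url)
    }

    // main reads results straight off the channel; no intermediate
    // slice is built up and returned by a helper function.
    for range urls {
        r := <-ch
        if r.err != nil {
            fmt.Printf("%s error: %v\n", r.url, r.err)
            continue
        }
        fmt.Printf("%s status: %s\n", r.url, r.response.Status)
    }
}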