@domgreen · Created September 20, 2017 22:08
A Tour of Go - Exercise: Web Crawler

package main

// https://play.golang.org/p/hc-EOu8nYJ

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
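
// A minimal sketch of how this interface might be satisfied against real
// pages (an illustration, not part of this gist; it assumes the "net/http"
// and "io" imports, and link extraction is elided, so no URLs are returned):
//
//	type httpFetcher struct{}
//
//	func (httpFetcher) Fetch(url string) (string, []string, error) {
//		resp, err := http.Get(url)
//		if err != nil {
//			return "", nil, err
//		}
//		defer resp.Body.Close()
//		body, err := io.ReadAll(resp.Body)
//		if err != nil {
//			return "", nil, err
//		}
//		// A real implementation would parse body for links here.
//		return string(body), nil, nil
//	}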

// OldCrawl is the original sequential solution: it uses fetcher to
// recursively crawl pages starting with url, to a maximum of depth.
func OldCrawl(url string, depth int, fetcher Fetcher) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		OldCrawl(u, depth-1, fetcher)
	}
	return
}
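
// The concurrent solution below keeps a mutex-guarded set of visited
// URLs and fans each page's links out to new goroutines.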

// SafeMap is a set of visited URLs that is safe for concurrent use.
type SafeMap struct {
	done map[string]bool
	mux  sync.Mutex
}

// Add records key as visited.
func (m *SafeMap) Add(key string) {
	m.mux.Lock()
	defer m.mux.Unlock()
	m.done[key] = true
}

// Contains reports whether key has been visited.
func (m *SafeMap) Contains(key string) bool {
	m.mux.Lock()
	defer m.mux.Unlock()
	return m.done[key]
}
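
// TryAdd is a sketch of an atomic check-and-set (an addition, not in the
// original gist): it marks key as visited and reports whether it was new.
// Calling it in place of the separate Contains and Add calls in Crawl
// would close the window in which two goroutines can both decide to
// crawl the same URL.
func (m *SafeMap) TryAdd(key string) bool {
	m.mux.Lock()
	defer m.mux.Unlock()
	if m.done[key] {
		return false
	}
	m.done[key] = true
	return true
}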

// Crawl uses fetcher to concurrently crawl pages starting with url,
// to a maximum of depth, skipping URLs that have already been visited.
// visited is passed as a pointer so the mutex inside is never copied.
func Crawl(url string, depth int, fetcher Fetcher, visited *SafeMap) {
	fmt.Printf("starting crawl %s %d\n", url, depth)
	if depth <= 0 {
		return
	}
	visited.Add(url)
	res := make(chan string)
	// Fetch in a separate goroutine, streaming unvisited links into the
	// channel; closing it lets the range below terminate.
	go func(out chan string) {
		defer close(out)
		_, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		for _, u := range urls {
			if !visited.Contains(u) {
				out <- u
			}
		}
	}(res)
	// Crawl each discovered link in its own goroutine; the deferred Wait
	// keeps this call alive until all child crawls finish. Note that the
	// separate Contains/Add calls leave a small window in which two
	// goroutines can still fetch the same URL.
	var wg sync.WaitGroup
	defer wg.Wait()
	for u := range res {
		wg.Add(1)
		go func(next string) {
			defer wg.Done()
			Crawl(next, depth-1, fetcher, visited)
		}(u)
	}
}

// OuterCrawl seeds the visited set and starts the crawl.
func OuterCrawl(url string, depth int, fetcher Fetcher) {
	s := SafeMap{done: make(map[string]bool)}
	Crawl(url, depth, fetcher, &s)
}

func main() {
	OuterCrawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://gogogo/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://gogogo/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://gogogo/": &fakeResult{
		"GoGoGo",
		[]string{
			"http://google.com/",
"http://github.com/",
		},
	},
}
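
// A quick illustration (not in the gist) of the canned fetcher's two
// paths — a known URL returns its body, an unknown one returns an error.
// Inside main one could add, with "http://nope/" as a made-up example:
//
//	body, _, _ := fetcher.Fetch("http://golang.org/")
//	fmt.Println(body) // The Go Programming Language
//	_, _, err := fetcher.Fetch("http://nope/")
//	fmt.Println(err) // not found: http://nope/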