Go Tour web crawler exercise
package main

import (
	"fmt"
	"sync"
	// time is no longer needed: the WaitGroup below replaces the sleep in main.
	// "time"
)
type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
// SafeCache records which URLs have already been fetched; the mutex makes
// it safe to use from many goroutines at once.
type SafeCache struct {
	v   map[string]bool
	mux sync.Mutex
}

// checkAndAdd marks key as visited and reports whether it had already been
// seen. The pointer receiver matters: a value receiver would copy the mutex
// and defeat the locking. Doing the check and the insert under a single lock
// also keeps two goroutines from both fetching the same URL.
func (sc *SafeCache) checkAndAdd(key string) bool {
	sc.mux.Lock()
	defer sc.mux.Unlock()
	if sc.v[key] {
		return true
	}
	sc.v[key] = true
	return false
}
var sc = SafeCache{v: make(map[string]bool)}
var wg sync.WaitGroup
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// URLs are fetched in parallel (one goroutine per link) and the shared
// SafeCache ensures no URL is fetched twice.
func Crawl(url string, depth int, fetcher Fetcher) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	// Check the cache and mark the URL in one step to avoid a double fetch.
	if sc.checkAndAdd(url) {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go Crawl(u, depth-1, fetcher)
	}
}
func main() {
	wg.Add(1)
	Crawl("https://golang.org/", 4, fetcher)
	// We have to wait for the spawned goroutines to finish, but sleeping in
	// the main goroutine is not ideal:
	// time.Sleep(5 * time.Second)
	// Blocking on a sync.WaitGroup is the better way.
	wg.Wait()
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}
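
// The variant below is an optional sketch that is never called by main and
// was not part of the original exercise solution; crawlWithSyncMap and
// visited are names added only for illustration. It shows how the same
// "fetch each URL once" guarantee can come from the standard library's
// sync.Map: LoadOrStore checks for and records a URL in one atomic call,
// so no hand-rolled mutex-guarded map is needed.
var visited sync.Map

func crawlWithSyncMap(url string, depth int, fetcher Fetcher) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	// loaded is true when another goroutine has already stored this URL.
	if _, loaded := visited.LoadOrStore(url, true); loaded {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawlWithSyncMap(u, depth-1, fetcher)
	}
}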