package main

import (
	"fmt"
	"io"       // used only by the hypothetical httpFetcher sketch below
	"net/http" // used only by the hypothetical httpFetcher sketch below
	"regexp"   // used only by the hypothetical httpFetcher sketch below
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// SafeUrlMap records which URLs have already been seen. The mutex makes the
// map safe to read and update from multiple goroutines.
type SafeUrlMap struct {
	mux sync.Mutex
	v   map[string]int
}

// crawlResult pairs a fetched URL with the body of its page.
type crawlResult struct {
	url  string
	body string
}

// merge fans in any number of crawlResult channels onto a single output
// channel, which is closed once every input channel has been drained.
func merge(cs ...chan crawlResult) chan crawlResult {
	var wg sync.WaitGroup
	out := make(chan crawlResult)
	output := func(c chan crawlResult) {
		for n := range c {
			out <- n
		}
		wg.Done()
	}
	wg.Add(len(cs))
	for _, c := range cs {
		go output(c)
	}
	// Close out only after every forwarding goroutine has finished.
	go func() {
		wg.Wait()
		close(out)
	}()
	return out
}

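// fanInDemo is a minimal sketch, not part of the original gist, showing the
// fan-in pattern that merge implements: several producer channels are
// multiplexed onto one output channel that closes once every input has been
// drained. The channel names and URLs here are illustrative only.
func fanInDemo() {
	a := make(chan crawlResult, 1)
	b := make(chan crawlResult, 1)
	a <- crawlResult{url: "https://example.com/a", body: "page a"}
	b <- crawlResult{url: "https://example.com/b", body: "page b"}
	close(a)
	close(b)
	for r := range merge(a, b) {
		fmt.Println("merged:", r.url)
	}
}
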
// crawl fetches url, emits its result on a buffered channel, recursively
// crawls any links that have not been seen before, and returns a single
// merged channel carrying every result from this subtree.
func crawl(url string, depth int, fetcher Fetcher, urlMap *SafeUrlMap) chan crawlResult {
	ch := make(chan crawlResult, 1)
	if depth <= 0 {
		close(ch)
		return ch
	}
	body, urls, err := fetcher.Fetch(url)
	fmt.Printf("fetched %s\n", url)
	if err != nil {
		fmt.Println(err)
		close(ch)
		return ch
	}
	ch <- crawlResult{url, body}
	close(ch)
	channels := []chan crawlResult{ch}
	for _, u := range urls {
		// Check and mark u under the lock so each URL is claimed exactly
		// once and is never fetched twice.
		urlMap.mux.Lock()
		_, seen := urlMap.v[u]
		if !seen {
			urlMap.v[u] = 1
		}
		urlMap.mux.Unlock()
		if !seen {
			channels = append(channels, crawl(u, depth-1, fetcher, urlMap))
		}
	}
	return merge(channels...)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// SafeUrlMap ensures no URL is fetched twice. Note that the recursive
// crawl calls are synchronous, so pages are still fetched one at a time;
// the goroutines in merge only fan the results back in concurrently.
func Crawl(url string, depth int, fetcher Fetcher) {
	urlMap := SafeUrlMap{v: make(map[string]int)}
	urlMap.v[url] = 1
	ch := crawl(url, depth, fetcher, &urlMap)
	for result := range ch {
		fmt.Printf("found: %s %q\n", result.url, result.body)
	}
}

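// crawlShallowDemo is a usage sketch, not part of the original gist: with
// depth 1, only the starting page is fetched, because every link found on
// it is crawled at depth 0 and crawl returns immediately when depth <= 0.
func crawlShallowDemo() {
	Crawl("https://golang.org/", 1, fetcher)
}
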
func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

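// httpFetcher is a hypothetical sketch, not part of the original gist, of a
// real Fetcher backed by net/http (hence the io, net/http, and regexp
// imports above). It scans the body for absolute href attributes with a
// naive regexp; a production crawler would use a proper HTML parser such as
// golang.org/x/net/html.
type httpFetcher struct{}

var hrefPattern = regexp.MustCompile(`href="(https?://[^"]+)"`)

func (httpFetcher) Fetch(url string) (string, []string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", nil, err
	}
	defer resp.Body.Close()
	data, err := io.ReadAll(resp.Body) // io.ReadAll needs Go 1.16+
	if err != nil {
		return "", nil, err
	}
	body := string(data)
	var urls []string
	for _, m := range hrefPattern.FindAllStringSubmatch(body, -1) {
		urls = append(urls, m[1])
	}
	return body, urls, nil
}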