Go Tour Exercise: Web Crawler
package main

import "fmt"

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
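
// For reference, a Fetcher backed by net/http could look roughly like the
// sketch below. httpFetcher is illustrative only and is not used in this
// file; it would need the "net/http" and "io/ioutil" imports, and real link
// extraction would also need an HTML parser such as golang.org/x/net/html.
//
//	type httpFetcher struct{}
//
//	func (httpFetcher) Fetch(url string) (string, []string, error) {
//		resp, err := http.Get(url)
//		if err != nil {
//			return "", nil, err
//		}
//		defer resp.Body.Close()
//		b, err := ioutil.ReadAll(resp.Body)
//		if err != nil {
//			return "", nil, err
//		}
//		return string(b), nil, nil // link extraction omitted
//	}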
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, end chan bool) {
	// channel used to hand the set of already-scheduled URLs between goroutines
	used_urls := make(chan map[string]bool)
	// channel used to hand the crawl-status map between goroutines
	crawled_urls := make(chan map[string]bool)
	// URLs that have already been scheduled for crawling
	used_urls_map := map[string]bool{url: true}
	// crawl status per URL: true for crawled, false for still in progress
	crawled_urls_map := map[string]bool{url: false}
	go _Crawl(url, depth, fetcher, used_urls, crawled_urls, end)
	// send on crawled_urls before used_urls: _Crawl receives from
	// crawled_urls first, so sending in the other order would deadlock
	crawled_urls <- crawled_urls_map
	used_urls <- used_urls_map
	return
}
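
// The two channels above are used as a hand-off: a goroutine that receives
// the shared maps may read and update them, and passes them on by sending
// them back. A minimal sketch of that hand-off pattern on its own, with an
// illustrative worker function that is not part of this solution:
//
//	func worker(baton chan map[string]bool, key string) {
//		m := <-baton  // take the shared map
//		m[key] = true // update it while holding it
//		baton <- m    // pass it on to the next goroutine
//	}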
// _Crawl fetches url, records its status in the shared maps, and spawns a
// goroutine for every newly discovered URL.
func _Crawl(url string, depth int, fetcher Fetcher, used_urls chan map[string]bool, crawled_urls chan map[string]bool, end chan bool) {
	crawled_urls_map := <-crawled_urls
	if depth <= 0 {
		crawled_urls_map[url] = true
		return
	}
	body, urls, err := fetcher.Fetch(url)
	used_urls_map := <-used_urls
	used_urls_map[url] = true
	if err != nil {
		fmt.Printf("not found: %s\n", url)
		crawled_urls_map[url] = true
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	crawled_urls_map[url] = true
	for _, u := range urls {
		if _, ok := used_urls_map[u]; !ok {
			// schedule u and hand the maps to the new goroutine
			used_urls_map[u] = true
			crawled_urls_map[u] = false
			go _Crawl(u, depth-1, fetcher, used_urls, crawled_urls, end)
			crawled_urls <- crawled_urls_map
			used_urls <- used_urls_map
		}
	}
	// once every URL in the status map is marked crawled, signal main to exit
	all_checked := true
	for u := range crawled_urls_map {
		all_checked = all_checked && crawled_urls_map[u]
	}
	if all_checked {
		end <- true
	}
}
func main() {
	end := make(chan bool, 1)
	Crawl("http://golang.org/", 4, fetcher, end)
	// block until a goroutine reports that every URL has been crawled
	<-end
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
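
For comparison, this Go Tour exercise is more commonly solved with a mutex-protected set and a sync.WaitGroup instead of handing maps around on channels. The sketch below is only an illustration, not part of the gist above: the names safeSet and crawlMutexed are made up, it reuses the Fetcher interface and the fetcher variable from the file, and it assumes "sync" is added to the import list.

type safeSet struct {
	mu   sync.Mutex
	seen map[string]bool
}

// visit marks url as seen and reports whether it had been seen before.
func (s *safeSet) visit(url string) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.seen[url] {
		return true
	}
	s.seen[url] = true
	return false
}

// crawlMutexed fetches url and crawls the links it finds, skipping URLs that
// have already been visited; wg.Wait in the caller blocks until every
// spawned goroutine has finished.
func crawlMutexed(url string, depth int, fetcher Fetcher, set *safeSet, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || set.visit(url) {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawlMutexed(u, depth-1, fetcher, set, wg)
	}
}

// Example use in place of the main above:
//	set := &safeSet{seen: make(map[string]bool)}
//	var wg sync.WaitGroup
//	wg.Add(1)
//	go crawlMutexed("http://golang.org/", 4, fetcher, set, &wg)
//	wg.Wait()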