@ngerakines
Created October 9, 2011 01:29
package main

import (
	"strings"
	"tour/wc"
)

// WordCount returns a map of each word in s to the number of times it appears.
func WordCount(s string) map[string]int {
	fields := make(map[string]int)
	for _, v := range strings.Fields(s) {
		if _, ok := fields[v]; ok {
			fields[v]++
		} else {
			fields[v] = 1
		}
	}
	return fields
}

func main() {
	wc.Test(WordCount)
}
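If the tour/wc test harness isn't available, a minimal standalone check of the same counting logic might look like the sketch below; the sample input is hypothetical, and on recent Go releases fmt prints map keys in sorted order.

package main

import (
	"fmt"
	"strings"
)

// countWords mirrors WordCount above: split s on whitespace and tally each word.
func countWords(s string) map[string]int {
	counts := make(map[string]int)
	for _, w := range strings.Fields(s) {
		counts[w]++
	}
	return counts
}

func main() {
	fmt.Println(countWords("go go gopher")) // map[go:2 gopher:1]
}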
package main

import "fmt"

// fibonacci returns a closure that yields successive Fibonacci numbers,
// advancing the captured a, b pair on each call.
func fibonacci() func() int {
	a, b := 0, 1
	return func() int {
		a, b = b, a+b
		return a
	}
}

func main() {
	f := fibonacci()
	for i := 0; i < 10; i++ {
		fmt.Println(f())
	}
}
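Each call to fibonacci returns a fresh closure with its own captured a, b pair, so separate generators advance independently. A small sketch of that behavior, not part of the original gist:

package main

import "fmt"

func fibonacci() func() int {
	a, b := 0, 1
	return func() int {
		a, b = b, a+b
		return a
	}
}

func main() {
	f, g := fibonacci(), fibonacci()
	fmt.Println(f(), f(), f()) // 1 1 2
	fmt.Println(g())           // 1: g keeps its own state
}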
package main

import "fmt"

// CrawlAction carries a URL through the worker: the URL to fetch going in,
// and the fetched body, discovered URLs, and any error coming back.
type CrawlAction struct {
	url  string
	body string
	urls []string
	err  error
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
// handle drains the urls channel, fetching each URL and sending the filled-in
// CrawlAction on the response channel; it returns once the queue is empty.
func handle(urls chan CrawlAction, response chan CrawlAction) {
	fmt.Println("handle created, waiting for urls")
	for {
		select {
		case action := <-urls:
			fmt.Printf("crawling %s\n", action.url)
			body, links, err := fetcher.Fetch(action.url)
			action.body = body
			action.urls = links
			action.err = err
			response <- action
		default:
			return
		}
	}
}
// Crawl uses fetcher to crawl pages starting with seedURL.
func Crawl(seedURL string) {
	// TODO: Fetch URLs in parallel (each round's fetches run sequentially
	// inside a single handle goroutine).
	// The urls map keeps each URL from being queued more than once:
	// 0 = discovered but not yet fetched, 1 = already queued.
	urls := make(map[string]int)
	urls[seedURL] = 0
	for {
		// Buffered queues are filled before handle starts, so a round of
		// more than 10 URLs would block here.
		urlQueue := make(chan CrawlAction, 10)
		responseQueue := make(chan CrawlAction, 10)
		crawlCount := 0
		// Queue every URL that has been discovered but not yet fetched.
		for url, v := range urls {
			if v == 0 {
				crawlCount++
				var ca CrawlAction
				ca.url = url
				urlQueue <- ca
				urls[url] = 1
			}
		}
		// Nothing new was discovered on the previous pass, so we're done.
		if crawlCount == 0 {
			close(urlQueue)
			close(responseQueue)
			return
		}
		go handle(urlQueue, responseQueue)
		// Collect one response per queued URL, recording any new links.
		for i := 0; i < crawlCount; i++ {
			response := <-responseQueue
			if response.err != nil {
				fmt.Println(response.err)
			} else {
				for _, newURL := range response.urls {
					if _, present := urls[newURL]; !present {
						fmt.Printf("found: %s %q\n", response.url, response.body)
						urls[newURL] = 0
					}
				}
			}
		}
		close(urlQueue)
	}
}

func main() {
	Crawl("http://golang.org/")
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := (*f)[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
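The TODO in Crawl asks for parallel fetching; one possible sketch, assuming it lives in the same file as the Fetcher interface and fetcher value above and that "sync" is added to the import block, guards the seen-set with a mutex and tracks in-flight fetches with a sync.WaitGroup:

// crawlParallel is a hypothetical alternative to Crawl: it fetches each URL
// at most once and fetches newly discovered URLs concurrently.
// Note: assumes it sits in the same file as Fetcher and fetcher above,
// with "sync" added to the imports.
func crawlParallel(seedURL string, f Fetcher) {
	var (
		mu   sync.Mutex
		seen = map[string]bool{seedURL: true}
		wg   sync.WaitGroup
	)
	var visit func(url string)
	visit = func(url string) {
		defer wg.Done()
		body, links, err := f.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range links {
			mu.Lock()
			if !seen[u] {
				seen[u] = true
				wg.Add(1)
				go visit(u)
			}
			mu.Unlock()
		}
	}
	wg.Add(1)
	go visit(seedURL)
	wg.Wait()
}

main could call crawlParallel("http://golang.org/", fetcher) in place of Crawl; the output lines are the same, but their order then depends on goroutine scheduling.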