@zyxar
Last active April 17, 2026 15:45
tour.golang exercise solutions
/* Exercise: Loops and Functions #43 */
package main

import (
    "fmt"
    "math"
)

func Sqrt(x float64) float64 {
    z := float64(2.)
    s := float64(0)
    for {
        z = z - (z*z-x)/(2*z)
        if math.Abs(s-z) < 1e-15 {
            break
        }
        s = z
    }
    return s
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(math.Sqrt(2))
}
/******************************************************************************************************/
/* Exercise: Maps #44 */
package main

import (
    "strings"
    "tour/wc"
)

func WordCount(s string) map[string]int {
    ss := strings.Fields(s)
    num := len(ss)
    ret := make(map[string]int)
    for i := 0; i < num; i++ {
        ret[ss[i]]++
    }
    return ret
}

func main() {
    wc.Test(WordCount)
}
/******************************************************************************************************/
/* Exercise: Slices #45 */
package main

import "tour/pic"

func Pic(dx, dy int) [][]uint8 {
    ret := make([][]uint8, dy)
    for i := 0; i < dy; i++ {
        ret[i] = make([]uint8, dx)
        for j := 0; j < dx; j++ {
            ret[i][j] = uint8(i^j + (i+j)/2)
        }
    }
    return ret
}

func main() {
    pic.Show(Pic)
}
/******************************************************************************************************/
/* Exercise: Fibonacci closure #46 */
package main

import "fmt"

// fibonacci is a function that returns
// a function that returns an int.
func fibonacci() func() int {
    var x, y = 0, 1
    return func() (z int) {
        z, x, y = x, y, x+y
        return
    }
}

func main() {
    f := fibonacci()
    for i := 0; i < 10; i++ {
        fmt.Println(f())
    }
}
/******************************************************************************************************/
/* Advanced Exercise: Complex cube roots #47 */
package main

import (
    "fmt"
    "math/cmplx"
)

func Cbrt(x complex128) complex128 {
    z := complex128(2)
    s := complex128(0)
    for {
        z = z - (cmplx.Pow(z, 3)-x)/(3*(z*z))
        if cmplx.Abs(s-z) < 1e-17 {
            break
        }
        s = z
    }
    return z
}

func main() {
    fmt.Println(Cbrt(2))
}
/******************************************************************************************************/
/* Exercise: Errors #57 */
package main

import (
    "fmt"
    "math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
    return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

func Sqrt(f float64) (float64, error) {
    if f < 0 {
        return 0, ErrNegativeSqrt(f)
    }
    // Newton iteration from exercise #43, now with the error check above.
    z, s := 2., 0.
    for {
        z = z - (z*z-f)/(2*z)
        if math.Abs(s-z) < 1e-15 {
            break
        }
        s = z
    }
    return s, nil
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(Sqrt(-2))
}
/******************************************************************************************************/
/* Exercise: Images #58 */
package main

import (
    "image"
    "image/color"
    "tour/pic"
)

type Image struct {
    Width, Height int
    colr          uint8
}

func (r *Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, r.Width, r.Height)
}

func (r *Image) ColorModel() color.Model {
    return color.RGBAModel
}

func (r *Image) At(x, y int) color.Color {
    return color.RGBA{r.colr + uint8(x), r.colr + uint8(y), 255, 255}
}

func main() {
    m := Image{100, 100, 128}
    pic.ShowImage(&m)
}
/******************************************************************************************************/
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */
package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = rot.r.Read(p)
    for i := 0; i < len(p); i++ {
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
            p[i] -= 13
        }
    }
    return
}

func main() {
    s := strings.NewReader(
        "Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r)
}
/******************************************************************************************************/
/* Exercise: Equivalent Binary Trees #67 */
package main

import (
    "fmt"
    "tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
    _walk(t, ch)
    close(ch)
}

func _walk(t *tree.Tree, ch chan int) {
    if t != nil {
        _walk(t.Left, ch)
        ch <- t.Value
        _walk(t.Right, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for i := range ch1 {
        if i != <-ch2 {
            return false
        }
    }
    return true
}

func main() {
    //tree.New(2)
    ch := make(chan int)
    go Walk(tree.New(1), ch)
    for v := range ch {
        fmt.Print(v)
    }
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}
/******************************************************************************************************/
/* Exercise: Web Crawler #69 */
package main

import (
    "fmt"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

var store map[string]bool

func Krawl(url string, fetcher Fetcher, Urls chan []string) {
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
    } else {
        fmt.Printf("found: %s %q\n", url, body)
    }
    Urls <- urls
}

func Crawl(url string, depth int, fetcher Fetcher) {
    Urls := make(chan []string)
    go Krawl(url, fetcher, Urls)
    band := 1
    store[url] = true // init for level 0 done
    for i := 0; i < depth; i++ {
        for band > 0 {
            band--
            next := <-Urls
            for _, url := range next {
                if _, done := store[url]; !done {
                    store[url] = true
                    band++
                    go Krawl(url, fetcher, Urls)
                }
            }
        }
    }
    return
}

func main() {
    store = make(map[string]bool)
    Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := (*f)[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}
@mpcjanssen

The rot13 solution assumes the underlying reader fills the requested number of bytes. This is not necessarily the case, so:

for i, b := range p

should be

for i:=0 ; i < n ; i++
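
A minimal sketch of Read with that fix applied (same rot13Reader as in the gist, transforming only the n bytes actually read):

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
    n, err = rot.r.Read(p)
    // Only rotate the bytes the underlying reader produced; p[n:] is stale.
    for i := 0; i < n; i++ {
        if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
            p[i] += 13
        } else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
            p[i] -= 13
        }
    }
    return
}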

@Tantalum73

Tantalum73 commented May 26, 2016

I want to share my solution for the image exercise (number 25) with you and everyone who stumbles upon this post.

package main

import (
    "golang.org/x/tour/pic"
    "image/color"
    "image"
)


type Image struct{
    Rect image.Rectangle
}

func (image Image) ColorModel() color.Model {
    return color.RGBAModel
}
func (i Image) Bounds() image.Rectangle {
    return i.Rect
}
func (image Image) At(x, y int) color.Color {
    return color.RGBA{uint8(x*y), uint8(y+x), 255, 255}
}

func main() {
    m := Image{image.Rect(0,0,300,300)}
    pic.ShowImage(m)
}

@hugdru

hugdru commented Sep 29, 2016

/* Exercise: Web Crawler #69 - https://tour.golang.org/concurrency/10 */

package main

import (
    "fmt"
    "sync"
)

type Set struct {
    v   map[string]bool
    mux sync.Mutex
}

func (set *Set) Register(key string) {
    set.mux.Lock()
    defer set.mux.Unlock()
    set.v[key] = true
}

func (set *Set) Exists(key string) bool {
    set.mux.Lock()
    defer set.mux.Unlock()
    return set.v[key]
}

type Fetcher interface {
    Fetch(url string) (body string, urls []string, err error)
}

func Crawl(url string, depth int, fetcher Fetcher, ch chan string) {
    set := Set{v: make(map[string]bool)}
    var wg sync.WaitGroup
    wg.Add(1)
    go crawl(url, depth, fetcher, &set, ch, &wg)
    wg.Wait()
    close(ch)
}

// crawl takes *Set (not Set by value) so all goroutines share a single mutex and map.
func crawl(url string, depth int, fetcher Fetcher, set *Set, ch chan string, wg *sync.WaitGroup) {
    defer wg.Done()

    if depth <= 0 || set.Exists(url) {
        return
    }

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    set.Register(url)
    ch <- url

    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        if set.Exists(u) {
            continue
        }
        wg.Add(1)
        go crawl(u, depth-1, fetcher, set, ch, wg)
    }
    return
}

func main() {
    ch := make(chan string)
    go Crawl("http://golang.org/", 4, fetcher, ch)
    for value := range ch {
        fmt.Println(value)
    }
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@mymtw

mymtw commented Oct 6, 2016

My solution for Stringers. It may not make much sense for a fixed [4]byte, but it would also work if IPAddr were a slice of bytes (dynamic size):

package main

import (
    "fmt"
    "strings"
    "strconv"
)


type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.
func (ipaddr IPAddr) String() string {
    var strs []string
    for _, v := range ipaddr {
        strs = append(
            strs, strconv.Itoa(int(v)))
    }
    return strings.Join(strs, ".")
}

func main() {
    hosts := map[string]IPAddr{
        "loopback":  {127, 0, 0, 1},
        "googleDNS": {8, 8, 8, 8},
    }
    for name, ip := range hosts {
        fmt.Printf("%v: %v\n", name, ip)
    }
}

@ppontryagin

ppontryagin commented Oct 21, 2016

/* Exercise: Loops and Functions #43 */

package go_play

import "math"

const (
    Delta = 0.01
)

// By dividing by x regardless of the scale of x 
// the difference should maintain a constant percentage via its fractional value.
// https://sites.google.com/a/stonybrook.edu/functional-programming-scala/lecture-1-5
func isGoodEnough(guess, x float64) bool {
    return math.Abs((guess * guess) - x) / x < Delta // divide by x 
}

func iter(zN, x float64) float64 {
    zN1 := zN - (zN * zN - x) / (2 * zN)
    return zN1
}

func Sqrt(x float64) float64 {
    x0 := x
    guess := x
    for ; !isGoodEnough(guess, x); guess = iter(x0, x) { // iterate until the guess is good enough
        x0 = guess
    }
    return guess
}
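
For example (my numbers, not the author's): with x = 1e20, an absolute test math.Abs(guess*guess-x) < 0.01 can never be satisfied in float64, since adjacent representable values of guess*guess at that magnitude differ by thousands; the relative test math.Abs(guess*guess-x)/x < Delta accepts a guess at the same 1% tolerance whether x is 2 or 1e20.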

@1e0ng

1e0ng commented Nov 30, 2016

@zyxar, for #67: even though the test cases provided in the question pass, that solution will not pass when ch2 has more elements than ch1, and it even panics when ch2 has fewer elements.
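
A sketch of a Same that tolerates streams of different lengths, using the comma-ok receive (my variant, assuming the Walk from the gist that closes its channel):

func Same(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)
    for {
        v1, ok1 := <-ch1
        v2, ok2 := <-ch2
        if !ok1 || !ok2 {
            return ok1 == ok2 // trees match only if both streams end together
        }
        if v1 != v2 {
            return false
        }
    }
}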

@mohigupt

mohigupt commented Dec 15, 2016

My solution to #69 (Web Crawler) using a Mutex.
My specific changes are only in the provided Crawl function (and, of course, the declaration of the Cache type).

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}


type Cache struct {
    uniqueUrl  map[string]int
    mutx       sync.Mutex    
}

var myCache = Cache{uniqueUrl:make(map[string]int)}
var wg sync.WaitGroup     // required to let all launched goroutines finish before main() exits

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {

    defer wg.Done()
    
    if depth <= 0 {
        return
    }
        
    // Check and mark under one lock, so two goroutines cannot
    // both see the URL as unvisited.
    myCache.mutx.Lock()
    _, visited := myCache.uniqueUrl[url]
    if visited {
        myCache.mutx.Unlock()
        return
    }
    myCache.uniqueUrl[url] = 1
    myCache.mutx.Unlock()
    
    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    for _, u := range urls {
        wg.Add(1)
        go Crawl(u, depth-1, fetcher)
    }
    return
}

func main() {
    wg.Add(1)
    Crawl("http://golang.org/", 4, fetcher)
    wg.Wait()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("***not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@tim-smart

tim-smart commented Feb 10, 2017

My crawler:

package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type Crawler struct {
	baseUrl string
	depth int
	fetcher *Fetcher
	
	cache map[string]bool
	active int
	
	mux sync.Mutex
}

func NewCrawler(baseUrl string, depth int, fetcher *Fetcher) *Crawler {
	return &Crawler{
		baseUrl: baseUrl,
		depth: depth,
		cache: make(map[string]bool),
		fetcher: fetcher,
	}
}

func (c *Crawler) RegisterWorker() {
	c.mux.Lock()
	c.active++
	c.mux.Unlock()
}

func (c *Crawler) DeregisterWorkerAndMaybeCloseChannel(ch chan<- string) {
	c.mux.Lock()

	c.active--
	if c.active <= 0 {
		close(ch)
	}
	
	c.mux.Unlock()
}

func (c *Crawler) CheckUrl(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()

	if _, ok := c.cache[url]; ok {
		return false
	}
	
	c.cache[url] = true
	return true
}

func (c *Crawler) Work(url string, currentDepth int, urlsChannel chan<- string) {
	if currentDepth <= 0 || !c.CheckUrl(url) {
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	
	_, urls, err := (*c.fetcher).Fetch(url)
	if err != nil {
		fmt.Println(err)
		c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
		return
	}
	urlsChannel <- url
	
	for _, subUrl := range urls {
		c.RegisterWorker()
		go c.Work(subUrl, currentDepth - 1, urlsChannel)
	}
	
	c.DeregisterWorkerAndMaybeCloseChannel(urlsChannel)
}

func (c *Crawler) Crawl(urlsChannel chan<- string) {
	c.RegisterWorker()
	go c.Work(c.baseUrl, c.depth, urlsChannel)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	urls := make(chan string)

	crawler := NewCrawler(url, depth, &fetcher)
	crawler.Crawl(urls)
	
	for u := range urls {
		fmt.Println("Got url:", u)
	}
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	time.Sleep(200 * time.Millisecond)
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@Gzure

Gzure commented Mar 30, 2017

My solution #69 WebCrawler using Mutex

package main

import (
	"fmt"
	"sync"
	"time"
)

type urlCache struct {
	m   map[string]bool
	mux sync.Mutex
}

func (uc *urlCache) Add(url string) {
	uc.mux.Lock()
	uc.m[url] = true
	uc.mux.Unlock()
}

func (uc *urlCache) Value(url string) bool {
	uc.mux.Lock()
	defer uc.mux.Unlock()
	return uc.m[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, uc *urlCache) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		return
	}
	if uc.Value(url) {
		//fmt.Println(url," is fetched")
		return 
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	uc.Add(url)
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		//fmt.Println(u)
		go Crawl(u, depth-1, fetcher, uc)
	}
	return
}

func main() {
	uc := urlCache{m: make(map[string]bool)}
	go Crawl("http://golang.org/", 4, fetcher, &uc)
	time.Sleep(time.Second)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@hirata-ita

I apologize that I didn't check all the above comments.
There are too many.

There are two problems for the Web Crawler:

  • mutual exclusion on the map
  • termination of the goroutines (Crawl): it is necessary to check that all the goroutines have terminated.

The solution for the mutual exclusion is straightforward: I used sync.Mutex.
For the termination, I used the Dijkstra & Scholten idea. It is simple and is detailed in "Termination Detection for Diffusing Computations", Information Processing Letters, Vol. 11, No. 1, Aug. 1980.

package main

import (
    "fmt"
    "sync"
)

// SafeMap is a map of URLs that is safe to use concurrently.
type SafeMap struct {
    v   map[string]int
    mux sync.Mutex
}

func (c *SafeMap) Ins(key string) {
    c.mux.Lock()
    c.v[key]++
    c.mux.Unlock()
}

func (c *SafeMap) HasValue(key string) bool {
    c.mux.Lock()
    defer c.mux.Unlock()
    _, ok := c.v[key]
    return ok
}

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, ch chan int) {
    defer func() { ch <- 1 }() // signal the parent that this subtree is done
    if depth <= 0 {
        return
    }
    ok := c.HasValue(url)
    if ok {
        //fmt.Println("URL already in the map", url)
        return
    } else {
        c.Ins(url)
    }

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)
    nUrls := len(urls)
    chs := make(chan int)
    for _, u := range urls {
        go Crawl(u, depth-1, fetcher, chs)
    }
    for i := 0; i < nUrls; i++ {
        <-chs
    }
}

var c = SafeMap{v: make(map[string]int)}

func main() {
    ch := make(chan int)
    go Crawl("http://golang.org/", 4, fetcher, ch)
    fmt.Println("Processed ", <-ch)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@cixuuz

cixuuz commented Jan 1, 2018

My solution using sync.Mutex

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

func crawl(url string, fetcher Fetcher, Urls chan []string) {
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("found: %s %q\n", url, body)
	}
	
	Urls <- urls
}

// SafeUrls is safe to use concurrently.
type SafeUrls struct {
	v   map[string]bool
	mux sync.Mutex
}

var Urls chan []string = make(chan []string)

// Set url visited for the given key.
func (c *SafeUrls) Add(key string) {
	c.mux.Lock()
	// Lock so only one goroutine at a time can access the map c.v.
	c.v[key] = true
	c.mux.Unlock()
}

// Value returns the current value of the urls for the given key.
func (c *SafeUrls) Value(key string) bool {
	c.mux.Lock()
	// Lock so only one goroutine at a time can access the map c.v.
	defer c.mux.Unlock()
	return c.v[key]
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	if depth <= 0 {
		return
	}
	
	go crawl(url, fetcher, Urls)
	
	urls := <- Urls
	for _, u := range urls {
		Crawl(u, depth-1, fetcher)
	}
	return
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@BnSmth

BnSmth commented Jan 11, 2018

Here is my solution to the crawler. It uses a mutex within the URL cache and a wait group in order to terminate once all goroutines have finished. I'm not sure whether this is idiomatic or not (especially the UrlCache and Crawler structs/methods); if anyone has any feedback I would appreciate it.

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

type UrlCache struct {
	urls map[string]bool
	mux  sync.Mutex
}

func (cache *UrlCache) Add(url string) {
	cache.mux.Lock()
	cache.urls[url] = true
	cache.mux.Unlock()
}

func (cache *UrlCache) Contains(url string) bool {
	cache.mux.Lock()
	defer cache.mux.Unlock()
	return cache.urls[url]
}

type Crawler struct {
	urlCache  UrlCache
	fetcher   Fetcher
	waitGroup sync.WaitGroup
}

func (crawler *Crawler) Crawl(url string, depth int) {
	defer crawler.waitGroup.Done()

	if depth <= 0 {
		return
	}

	if crawler.urlCache.Contains(url) {
		return
	}

	_, urls, err := crawler.fetcher.Fetch(url)
	if err != nil {
		fmt.Printf("crawl: error when fetching url %s %v\n", url, err)
		return
	}

	fmt.Printf("visited %s - %d urls found\n", url, len(urls))
	crawler.urlCache.Add(url)

	for _, u := range urls {
		crawler.waitGroup.Add(1)
		go crawler.Crawl(u, depth-1)
	}
}

func main() {
	crawler := Crawler{
		urlCache: UrlCache{urls: make(map[string]bool)},
		fetcher:  fetcher,
	}

	// Add on the crawler's own WaitGroup; copying a WaitGroup
	// into the struct after Add would trip go vet.
	crawler.waitGroup.Add(1)

	go crawler.Crawl("http://golang.org/", 4)
	crawler.waitGroup.Wait()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@findshrey

At line 61, the code:
ret[i] = make([]uint8, dx)
Can you tell me the reasoning behind using dx as the length of the slice? It doesn't say the length is dx, but the slice has dx elements. Kinda confused :/
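
For what it's worth, my reading (not an official answer): the picture has dy rows and dx columns, so the outer slice holds dy row slices and each row needs dx pixel values:

// dy rows, each dx pixels wide: ret[y][x] is row y, column x.
ret := make([][]uint8, dy) // one entry per row
for y := range ret {
    ret[y] = make([]uint8, dx) // one uint8 per column in this row
}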

@shipa988

If we know that the trees have 10 nodes, then this variant has a chance of working:
package main

import (
    "golang.org/x/tour/tree"
    "fmt"
    "sort"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
//var cnt1 chan int

func Walk(t *tree.Tree, ch chan int) {
    if t != nil {
        ch <- t.Value
        count := <-ch
        if count == 10 {
            close(ch)
            return
        }
        Walk(t.Right, ch)
        Walk(t.Left, ch)
    }
}

// Same determines whether the trees
// t1 and t2 contain the same values.

func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    go Walk(t1, ch1)
    ch2 := make(chan int)
    go Walk(t2, ch2)
    var n1 [10]int
    var n2 [10]int
    i1, i2 := 0, 0
    for i := range ch1 {
        n1[i1] = i
        i1++
        ch1 <- i1
    }
    for i := range ch2 {
        n2[i2] = i
        i2++
        ch2 <- i2
    }
    sort.Ints(n1[:])
    sort.Ints(n2[:])
    if n1 == n2 {
        return true
    } else {
        return false
    }
}

func main() {
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}

@shipa988

For the Crawler, I only used knowledge from previous lessons, such as a SafeCounter (no sync.WaitGroup):
package main

import (
    "fmt"
    "sync"
    "time"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

type SafeHashset struct {
    mux sync.Mutex
    h   map[string]bool
}

func (hashset *SafeHashset) Add(url string) bool {
    hashset.mux.Lock()
    defer hashset.mux.Unlock()
    if _, ok := hashset.h[url]; !ok {
        hashset.h[url] = true
        return true
    } else {
        return false
    }
}

type SafeCounter struct {
    mux sync.Mutex
    c   int
}

func (counter *SafeCounter) Inc(inc int) {
    counter.mux.Lock()
    defer counter.mux.Unlock()
    counter.c = counter.c + inc
}

func (counter *SafeCounter) Value() int {
    counter.mux.Lock()
    defer counter.mux.Unlock()
    return counter.c
}

//var ch chan struct{} = make(chan struct {})
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
var hashset = SafeHashset{h: make(map[string]bool)}
var count = SafeCounter{}

func Crawl(url string, depth int, fetcher Fetcher) {
    defer count.Inc(-1)
    if depth <= 0 {
        return
    }

    if hashset.Add(url) {
        body, urls, err := fetcher.Fetch(url)
        if err != nil {
            fmt.Println(err)
            return
        }
        fmt.Printf("found: %s %q\n", url, body)
        for _, u := range urls {
            count.Inc(1)
            go Crawl(u, depth-1, fetcher)
            //defer <-ch
        }
    }
}

func main() {
    count.Inc(1)
    Crawl("http://golang.org/", 4, fetcher)
    for {
        if count.Value() == 0 {
            break
        }
        time.Sleep(time.Second)
    }
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "http://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "http://golang.org/pkg/",
            "http://golang.org/cmd/",
        },
    },
    "http://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "http://golang.org/",
            "http://golang.org/cmd/",
            "http://golang.org/pkg/fmt/",
            "http://golang.org/pkg/os/",
        },
    },
    "http://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
    "http://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "http://golang.org/",
            "http://golang.org/pkg/",
        },
    },
}

@eugeneYWang

my binary tree #67 solution

package main

import (
	"golang.org/x/tour/tree"
	"fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int, layer int) {
	if t == nil { 
		return
	}
	
	Walk(t.Left, ch, layer + 1)
	ch <- t.Value
	Walk(t.Right, ch, layer + 1)
	
	if layer == 0 {
		close(ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	
	ch1 := make(chan int, 10)
	ch2 := make(chan int, 10)
	
	go Walk(t1, ch1, 0)
	go Walk(t2, ch2, 0)
	
	for {
		n1, ok1 := <- ch1
		n2, ok2 := <- ch2
		
		if n1 != n2 || ok1 != ok2 {
			return false
		} else if ok1 == ok2 && ok1 == false {
			return true
		}
	}
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(2)))
	fmt.Println(Same(tree.New(1), tree.New(1)))
}

@Aternus

Aternus commented Aug 14, 2022

IP Address Solution:

package tour

import "fmt"

type IPAddr [4]byte

func (ipAddr IPAddr) String() string {
	return fmt.Sprintf("%d.%d.%d.%d", ipAddr[0], ipAddr[1], ipAddr[2], ipAddr[3])
}

func IPAddress() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for name, ip := range hosts {
		fmt.Printf("%v: %v\n", name, ip)
	}
}

@Aternus

Aternus commented Aug 14, 2022

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

@Aternus

Aternus commented Aug 16, 2022

rot13Reader Solution:
❓ this solution saves an extra loop by returning io.EOF as early as possible.

package tour

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (r rot13Reader) Read(b []byte) (int, error) {
	n, err := r.r.Read(b)
	// short circuit: EOF
	if err == io.EOF {
		return 0, io.EOF
	}
	for k, v := range b {
		b[k] = rot13(v)
	}
	return n, err
}

func Rot13ReaderMain() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

func rot13(b byte) byte {
	if b >= 'A' && b <= 'Z' {
		b = 'A' + (b-'A'+13)%26
	} else if b >= 'a' && b <= 'z' {
		b = 'a' + (b-'a'+13)%26
	}
	return b
}
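
One caveat with the early return above (my note, not the author's): the io.Reader contract allows a reader to return n > 0 together with io.EOF, in which case the final n bytes would be dropped. A sketch that also handles that case, reusing the same rot13 helper:

func (r rot13Reader) Read(b []byte) (int, error) {
	n, err := r.r.Read(b)
	// Rotate only the n bytes actually read, even on the read that returns io.EOF.
	for i := 0; i < n; i++ {
		b[i] = rot13(b[i])
	}
	return n, err
}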

@Aternus

Aternus commented Aug 22, 2022

Images exercise solution:

package tour

import (
	"image"
	"image/color"
)

type Image struct {
	Width  int
	Height int
}

func (i Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (i Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, i.Width, i.Height)
}

func (i Image) At(x, y int) color.Color {
	r := uint8(x + y)
	g := uint8(x ^ y)
	b := uint8(x * y)
	//fmt.Println("R:", r, "G:", g, "B:", b)
	return color.RGBA{r, g, b, 255}
}

@AlbertoCoder

AlbertoCoder commented Sep 12, 2022

This is my solution to the Exercise "Stringers":

package main

import "fmt"

type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.

// What I did was I returned the call to Sprintf (in the fmt package) and formatted each of the four byte values in the array
// of bytes, separated with dots:

func (ip IPAddr) String() string {

	// The 'Sprintf' function formats according to a format specifier and returns the resulting string.
	// In other words: this String() method returns the string that 'Sprintf' builds from the four
	// byte values in the array, separated by dots:

	return fmt.Sprintf("%v.%v.%v.%v", ip[0], ip[1], ip[2], ip[3])

}

func main() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for nombre, ip := range hosts {
		fmt.Printf("%v: %v\n", nombre, ip)
	}
}

I hope it helps!

@AlbertoCoder

Excuse me:

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n,err = rot.r.Read(p)
	for i := 0; i < len(p); i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >='a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z'){
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

@americanstone

MIT PhD crawl solution

package main

import (
	"fmt"
	"sync"
)

type Cache struct {
	m sync.Mutex
	v map[string]uint
}

func (s *Cache) Exist(key string) bool {
	s.m.Lock()
	defer s.m.Unlock()
	if _, ok := s.v[key]; ok {
		s.v[key]++
		return true
	} else {
		s.v[key] = 1
		return false
	}
}

func (s *Cache) String() string {
	s.m.Lock()
	defer s.m.Unlock()
	var sb string
	for k, v := range s.v {
		sb += fmt.Sprintf("key: %s => value: %q", k, v)
	}
	return fmt.Sprintln(sb)
}

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

//
// Serial crawler
//
func SerialCrawl(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	fmt.Println("depth ", depth)
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	for _, u := range urls {
		//fmt.Printf("recursive crawl url %s\n", u)
		SerialCrawl(u, depth-1, fetcher, cache)
	}
}

//
// Concurrent crawler with shared state and Mutex
//

func ConcurrentMutex(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	if depth <= 0 {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	var done sync.WaitGroup
	for _, u := range urls {
		done.Add(1)
		//fmt.Printf("recursive crawl url %s\n", u)
		u2 := u
		go func() {
			defer done.Done()
			ConcurrentMutex(u2, depth-1, fetcher, cache)
		}()
		//go func(u string) {
		//	defer done.Done()
		//	ConcurrentMutex(u, depth -1,fetcher, cache)
		//}(u)
	}
	done.Wait()
}

//
// Concurrent crawler with channels
//

func worker(url string, depth uint, ch chan []string, fetcher Fetcher, cache *Cache) {
	//fmt.Printf("working on url %s depth %d\n", url, depth)
	body, urls, err := fetcher.Fetch(url)

	if err != nil {
		ch <- []string{}
	} else {
		fmt.Printf("Found: %s %q\n", url, body)
		ch <- urls
	}
}

func master(url string, ch chan []string, depth uint, fetcher Fetcher, cache *Cache) {
	n := 1
	copyDepth := depth
	for urls := range ch {
		// fmt.Printf("dep :%s\n", fmt.Sprintf("%q", copyDepth))
		if copyDepth == 0 {
			break
		}
		for _, url := range urls {
			if !cache.Exist(url) {
				n += 1
				go worker(url, depth, ch, fetcher, cache)
			}
		}
		depth--
		n -= 1
		if n == 0 {
			break
		}
	}
}

func ConcurrentChan(url string, depth uint, fetcher Fetcher, cache *Cache) {
	ch := make(chan []string)
	go func() {
		ch <- []string{url}
	}()
	master(url, ch, depth, fetcher, cache)
}

func main() {
	cache := Cache{v: make(map[string]uint)}
	SerialCrawl("https://golang.org/", 4, fetcher, &cache)

	ConcurrentMutex("https://golang.org/", 4, fetcher, &cache)

	ConcurrentChan("https://golang.org/", 4, fetcher, &cache)

	fmt.Printf("\nCached urls %q\n", cache.v)
}

type fakeFetcher map[string]*fakeResult // to avoid copy value

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@ViktorVoloshko

ViktorVoloshko commented Oct 13, 2023

My relatively simple solution for the crawler, without global variables or channels, using a mutex and a WaitGroup:

package main

import (
	"fmt"
	"sync"
)

type UrlCache struct {
	urls map[string]bool
	mu   sync.Mutex
}

func (uc *UrlCache) Add(url string) {
	uc.mu.Lock()
	uc.urls[url] = true
	uc.mu.Unlock()
}

func (uc *UrlCache) Get(url string) bool {
	uc.mu.Lock()
	defer uc.mu.Unlock()
	return uc.urls[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	cache := UrlCache{urls: make(map[string]bool)}
	wg.Add(1)
	go crawl(url, depth, fetcher, &cache, &wg)
	wg.Wait()
}

func crawl(url string, depth int, fetcher Fetcher, cache *UrlCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Get(url) {
		return
	}
	cache.Add(url)
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawl(u, depth-1, fetcher, cache, wg)
	}
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@apo1967

apo1967 commented Nov 30, 2023

@Aternus

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

Is there a reason why you don't just set the 'A' directly in b? Why create an internal slice and then copy it to b? Just curious...

@dkowsley

dkowsley commented Sep 15, 2024

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

In the quoted code, we see a call to io.Copy

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

The relevant call chain is io.Copy(dst, src) -> io.copyBuffer(dst, src, nil) -> src.Read(buf) (src == &r in this example).

The function definition for io.Copy and the io.Reader interface it uses are:

type Reader interface {
	Read(p []byte) (n int, err error)
}

func Copy(dst Writer, src Reader) (written int64, err error) {
	return copyBuffer(dst, src, nil)
}

io.copyBuffer has the following definition (I have replaced most of the code with comments to make it easier to read). Take note of the fourth line:

func copyBuffer(dst Writer, src Reader, buf []byte) (written int64, err error) {
        // if buf is nil we create a temp 32KB buffer for it
	for {
		nr, er := src.Read(buf) // our call to the Read function we created in our example
		// perform writes to dst with error handling and returning if we have finished reading
	}
	return written, err
}

@linhns

linhns commented Jan 15, 2025

My solution for Web Crawler, heavily inspired by the official solution:

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type URLCache struct {
	mu      sync.Mutex
	fetched map[string]struct{}
}

func (c *URLCache) Add(url string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.fetched[url] = struct{}{}
}

func (c *URLCache) Has(url string) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	_, ok := c.fetched[url]
	return ok
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func crawl(url string, depth int, fetcher Fetcher, urlCache *URLCache) {
	// TODO: Fetch URLs in parallel.
	// TODO: Don't fetch the same URL twice.
	// This implementation doesn't do either:
	if depth <= 0 {
		fmt.Printf("<- Done with %v, maximum depth reached\n", url)
		return
	}

	if urlCache.Has(url) {
		fmt.Printf("<- Done with %v, already fetched\n", url)
		return
	}

	body, urls, err := fetcher.Fetch(url)

	urlCache.Add(url)

	if err != nil {
		fmt.Printf("<- Error on %v: %v\n", url, err)
		return
	}

	fmt.Printf("Found: %s %q\n", url, body)

	var wg sync.WaitGroup
	wg.Add(len(urls))
	for i, u := range urls {
		fmt.Printf("-> Crawling child %v/%v of %v : %v.\n", i, len(urls), url, u)
		go func(url string) {
			defer wg.Done()
			crawl(url, depth-1, fetcher, urlCache)
		}(u)
	}
	wg.Wait()
	fmt.Printf("<- Done with %v\n", url)
}

func Crawl(url string, depth int, fetcher Fetcher) {
	urlCache := URLCache{fetched: make(map[string]struct{})}
	crawl(url, depth, fetcher, &urlCache)
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

To avoid creating a global variable, I used an unexported function that takes in a concurrency-safe cache, which is initialized in Crawl.

@fenix-hub

fenix-hub commented Apr 17, 2026

@apo1967

@Aternus

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

Is there a reason why you just don't set the 'A' directly in b? Why creating an internal slice and then copying to b? Just curious...

Just did the same as you...

func (r MyReader) Read(b []byte) (int, error) {
	l := len(b)
	for i := range l {
		b[i] = byte('A')
	}
	return l, nil
}

I can't find a specific reason either, other than an explicit exercise constraint, which is not stated here. Note that b is a slice: the function receives a copy of the slice header, but it shares the backing array with the caller (the Validate function), so writing into b directly works.
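
A small illustration of that point (my sketch, hypothetical names): passing a slice copies only the slice header; the backing array is shared, so writes through the parameter are visible to the caller.

package main

import "fmt"

func fill(b []byte) {
	for i := range b {
		b[i] = 'A' // lands in the caller's backing array
	}
}

func main() {
	buf := make([]byte, 4)
	fill(buf)
	fmt.Println(string(buf)) // prints AAAA
}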

@fenix-hub

My solution for rot13:

package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (reader *rot13Reader) Read(b []byte) (int, error) {
	n, err := reader.r.Read(b)
	for i := range n {
		if (b[i] >= 65 && b[i] <= 77 || b[i] >= 97 && b[i] <= 109) {
			b[i] += 13
		} else if (b[i] >= 78 && b[i] <= 90 || b[i] >= 110 && b[i] <= 122) {
			b[i] -= 13
		}
	}
	return n, err
}

func main() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

I've not seen this solution posted already, so I propose it as an efficient/smart alternative: instead of relying on explicit character literals, ASCII characters can be handled by their integer values (per the standard ASCII table).

@fenix-hub

My solution for the Crawler.
Honestly this seemed the hardest so far: why doesn't the Go tour introduce sync.WaitGroup before this exercise? Without it, this exercise is almost impossible without changing most of the function definitions.

package main

import (
	"fmt"
	"sync"
)


type Fetcher interface {
	Fetch(url string) (string, []string, error)
}

type FetchResult struct { 
	body string
	urls []string
	err error 
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	defer wg.Done() // gets executed at the end of Crawl
	
	if depth <= 0 {
		return
	}
	if cache.Check(url) {
		fmt.Printf("Hit! %s\n", url)
		return
	}
	
	body, urls, err := fetcher.Fetch(url)
	cache.Set(url)
	
	if err != nil {
		fmt.Println(err)
		return
	}
	
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go Crawl(u, depth-1, fetcher)
	}
	return
}


type Cache struct {
	v map[string]bool // URL -> visited
	m sync.Mutex
}

func (c *Cache) Check(key string) bool {
	c.m.Lock()
	defer c.m.Unlock()
	return c.v[key]  
}

func (c *Cache) Set(key string) {
	c.m.Lock()
	c.v[key] = true
	c.m.Unlock()
}

// fakeFetcher is Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

var cache Cache = Cache{ v: make(map[string]bool) }
var wg sync.WaitGroup     // required to let all launched goroutines finish before main() exits

func main() {
	wg.Add(1)
	go Crawl("https://golang.org/", 4, fetcher)
	wg.Wait()
}
