@zyxar
Last active October 31, 2024 22:30
tour.golang exercise solutions
/* Exercise: Loops and Functions #43 */
package main

import (
	"fmt"
	"math"
)

// Sqrt approximates the square root of x with Newton's method, stopping
// once successive guesses differ by less than 1e-15.
func Sqrt(x float64) float64 {
	z := 2.0
	s := 0.0
	for {
		z = z - (z*z-x)/(2*z)
		if math.Abs(s-z) < 1e-15 {
			break
		}
		s = z
	}
	return s
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(math.Sqrt(2))
}
/******************************************************************************************************/
/* Exercise: Maps #44 */
package main

import (
	"strings"
	"tour/wc"
)

// WordCount counts how many times each word appears in s.
func WordCount(s string) map[string]int {
	ret := make(map[string]int)
	for _, word := range strings.Fields(s) {
		ret[word]++
	}
	return ret
}

func main() {
	wc.Test(WordCount)
}
/******************************************************************************************************/
/* Exercise: Slices #45 */
package main

import "tour/pic"

// Pic returns a dy-by-dx image: dy rows (the y axis),
// each holding dx pixels (the x axis).
func Pic(dx, dy int) [][]uint8 {
	ret := make([][]uint8, dy)
	for i := 0; i < dy; i++ {
		ret[i] = make([]uint8, dx)
		for j := 0; j < dx; j++ {
			ret[i][j] = uint8(i^j + (i+j)/2)
		}
	}
	return ret
}

func main() {
	pic.Show(Pic)
}
/******************************************************************************************************/
/* Exercise: Fibonacci closure #46 */
package main

import "fmt"

// fibonacci returns a closure that yields
// successive Fibonacci numbers on each call.
func fibonacci() func() int {
	x, y := 0, 1
	return func() (z int) {
		z, x, y = x, y, x+y
		return
	}
}

func main() {
	f := fibonacci()
	for i := 0; i < 10; i++ {
		fmt.Println(f())
	}
}
/******************************************************************************************************/
/* Advanced Exercise: Complex cube roots #47 */
package main

import (
	"fmt"
	"math/cmplx"
)

// Cbrt approximates the cube root of x with Newton's method, stopping
// once successive guesses differ by less than 1e-17.
func Cbrt(x complex128) complex128 {
	z := complex128(2)
	s := complex128(0)
	for {
		z = z - (cmplx.Pow(z, 3)-x)/(3*(z*z))
		if cmplx.Abs(s-z) < 1e-17 {
			break
		}
		s = z
	}
	return z
}

func main() {
	fmt.Println(Cbrt(2))
}
/******************************************************************************************************/
/* Exercise: Errors #57 */
package main

import (
	"fmt"
	"math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
	return fmt.Sprintf("cannot Sqrt negative number: %g", float64(e))
}

// Sqrt is the Newton's-method Sqrt from the earlier exercise, extended to
// return an ErrNegativeSqrt for negative input.
func Sqrt(f float64) (float64, error) {
	if f < 0 {
		return 0, ErrNegativeSqrt(f)
	}
	z, s := 2.0, 0.0
	for {
		z = z - (z*z-f)/(2*z)
		if math.Abs(s-z) < 1e-15 {
			break
		}
		s = z
	}
	return s, nil
}

func main() {
	fmt.Println(Sqrt(2))
	fmt.Println(Sqrt(-2))
}
/******************************************************************************************************/
/* Exercise: Images #58 */
package main

import (
	"image"
	"image/color"
	"tour/pic"
)

type Image struct {
	Width, Height int
	colr          uint8
}

func (r *Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, r.Width, r.Height)
}

func (r *Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (r *Image) At(x, y int) color.Color {
	return color.RGBA{r.colr + uint8(x), r.colr + uint8(y), 255, 255}
}

func main() {
	m := Image{100, 100, 128}
	pic.ShowImage(&m)
}
/******************************************************************************************************/
/* Exercise: Rot13 Reader #59: 'You cracked the code!' */
package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

// Read reads from the wrapped reader and ROT13-transforms the n bytes
// actually read (not the whole buffer).
func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}
/******************************************************************************************************/
/* Exercise: Equivalent Binary Trees #67 */
package main

import (
	"fmt"
	"tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	_walk(t, ch)
	close(ch)
}

// _walk sends the values of t to ch in sorted (in-order) order.
func _walk(t *tree.Tree, ch chan int) {
	if t != nil {
		_walk(t.Left, ch)
		ch <- t.Value
		_walk(t.Right, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	ch2 := make(chan int)
	go Walk(t1, ch1)
	go Walk(t2, ch2)
	for i := range ch1 {
		if i != <-ch2 {
			return false
		}
	}
	return true
}

func main() {
	ch := make(chan int)
	go Walk(tree.New(1), ch)
	for v := range ch {
		fmt.Print(v)
	}
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}
/******************************************************************************************************/
/* Exercise: Web Crawler #69 */
package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

var store map[string]bool

// Krawl fetches one url and sends the urls found on that page to Urls.
func Krawl(url string, fetcher Fetcher, Urls chan []string) {
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("found: %s %q\n", url, body)
	}
	Urls <- urls
}

// Crawl launches one Krawl goroutine per undiscovered url, tracking the
// number of outstanding fetches in band.
func Crawl(url string, depth int, fetcher Fetcher) {
	Urls := make(chan []string)
	go Krawl(url, fetcher, Urls)
	band := 1
	store[url] = true // init for level 0 done
	for i := 0; i < depth; i++ {
		for band > 0 {
			band--
			next := <-Urls
			for _, url := range next {
				if _, done := store[url]; !done {
					store[url] = true
					band++
					go Krawl(url, fetcher, Urls)
				}
			}
		}
	}
}

func main() {
	store = make(map[string]bool)
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f *fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := (*f)[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = &fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
@BnSmth commented Jan 11, 2018

Here is my solution to the crawler. It uses a mutex within the URL cache and a WaitGroup to terminate once all goroutines have finished. I'm not sure whether this is idiomatic (especially the UrlCache and Crawler structs/methods); if anyone has any feedback I would appreciate it.

package main

import (
	"fmt"
	"sync"
)

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

type UrlCache struct {
	urls map[string]bool
	mux  sync.Mutex
}

func (cache *UrlCache) Add(url string) {
	cache.mux.Lock()
	cache.urls[url] = true
	cache.mux.Unlock()
}

func (cache *UrlCache) Contains(url string) bool {
	cache.mux.Lock()
	defer cache.mux.Unlock()
	return cache.urls[url]
}

type Crawler struct {
	urlCache  UrlCache
	fetcher   Fetcher
	waitGroup sync.WaitGroup
}

func (crawler *Crawler) Crawl(url string, depth int) {
	defer crawler.waitGroup.Done()

	if depth <= 0 {
		return
	}

	if crawler.urlCache.Contains(url) {
		return
	}

	_, urls, err := crawler.fetcher.Fetch(url)
	if err != nil {
		fmt.Printf("crawl: error when fetching url %s: %v\n", url, err)
		return
	}

	fmt.Printf("visited %s - %d urls found\n", url, len(urls))
	crawler.urlCache.Add(url)

	for _, u := range urls {
		crawler.waitGroup.Add(1)
		go crawler.Crawl(u, depth-1)
	}
}

func main() {
	crawler := Crawler{
		urlCache: UrlCache{urls: make(map[string]bool)},
		fetcher:  fetcher,
	}

	// Add on the crawler's own WaitGroup: a WaitGroup must not be
	// copied after first use, so we avoid Add-ing on a local copy.
	crawler.waitGroup.Add(1)
	go crawler.Crawl("http://golang.org/", 4)
	crawler.waitGroup.Wait()
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@findshrey

At line 61, the code is:
ret[i] = make([]uint8, dx)
Can you tell me the reasoning behind using dx as the length of the slice? The exercise doesn't state that the length is dx, yet the slice holds dx elements. Kinda confused :/
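A minimal sketch of what the two make calls allocate may help (alloc is a hypothetical helper; the image is dy rows, each holding dx pixels, so every inner slice is one row):

package main

import "fmt"

// alloc mirrors the allocation in Pic: dy rows along y,
// each row holding dx pixels along x.
func alloc(dx, dy int) [][]uint8 {
	rows := make([][]uint8, dy) // one slice header per row
	for y := range rows {
		rows[y] = make([]uint8, dx) // each row is dx pixels wide
	}
	return rows
}

func main() {
	p := alloc(3, 2)
	fmt.Println(len(p), len(p[0])) // prints "2 3": dy rows of dx pixels
}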

@shipa988

If we know that the trees have 10 nodes, then this variant has a chance of working :)
package main

import (
	"fmt"
	"sort"

	"golang.org/x/tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	if t != nil {
		ch <- t.Value
		count := <-ch
		if count == 10 {
			close(ch)
			return
		}
		Walk(t.Right, ch)
		Walk(t.Left, ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	ch1 := make(chan int)
	go Walk(t1, ch1)
	ch2 := make(chan int)
	go Walk(t2, ch2)
	var n1, n2 [10]int
	i1, i2 := 0, 0
	for i := range ch1 {
		n1[i1] = i
		i1++
		ch1 <- i1
	}
	for i := range ch2 {
		n2[i2] = i
		i2++
		ch2 <- i2
	}
	sort.Ints(n1[:])
	sort.Ints(n2[:])
	return n1 == n2
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}

@shipa988

For the Crawler, I only used knowledge from previous lessons, such as a SafeCounter (without sync.WaitGroup):
package main

import (
	"fmt"
	"sync"
	"time"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type SafeHashset struct {
	mux sync.Mutex
	h   map[string]bool
}

// Add records url and reports whether it was newly added.
func (hashset *SafeHashset) Add(url string) bool {
	hashset.mux.Lock()
	defer hashset.mux.Unlock()
	if _, ok := hashset.h[url]; !ok {
		hashset.h[url] = true
		return true
	}
	return false
}

type SafeCounter struct {
	mux sync.Mutex
	c   int
}

func (counter *SafeCounter) Inc(inc int) {
	counter.mux.Lock()
	defer counter.mux.Unlock()
	counter.c = counter.c + inc
}

func (counter *SafeCounter) Value() int {
	counter.mux.Lock()
	defer counter.mux.Unlock()
	return counter.c
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
var hashset = SafeHashset{h: make(map[string]bool)}
var count = SafeCounter{}

func Crawl(url string, depth int, fetcher Fetcher) {
	defer count.Inc(-1)
	if depth <= 0 {
		return
	}
	if hashset.Add(url) {
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range urls {
			count.Inc(1)
			go Crawl(u, depth-1, fetcher)
		}
	}
}

func main() {
	count.Inc(1)
	Crawl("http://golang.org/", 4, fetcher)
	// poll the counter until every Crawl goroutine has finished
	for count.Value() != 0 {
		time.Sleep(time.Second)
	}
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}

@eugeneYWang

My binary tree #67 solution:

package main

import (
	"golang.org/x/tour/tree"
	"fmt"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int, layer int) {
	if t == nil { 
		return
	}
	
	Walk(t.Left, ch, layer + 1)
	ch <- t.Value
	Walk(t.Right, ch, layer + 1)
	
	if layer == 0 {
		close(ch)
	}
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	
	ch1 := make(chan int, 10)
	ch2 := make(chan int, 10)
	
	go Walk(t1, ch1, 0)
	go Walk(t2, ch2, 0)
	
	for {
		n1, ok1 := <-ch1
		n2, ok2 := <-ch2

		if n1 != n2 || ok1 != ok2 {
			return false
		} else if !ok1 && !ok2 {
			return true
		}
	}
}

func main() {
	fmt.Println(Same(tree.New(1), tree.New(2)))
	fmt.Println(Same(tree.New(1), tree.New(1)))
}

@Aternus commented Aug 14, 2022

IP Address Solution:

package tour

import "fmt"

type IPAddr [4]byte

func (ipAddr IPAddr) String() string {
	return fmt.Sprintf("%d.%d.%d.%d", ipAddr[0], ipAddr[1], ipAddr[2], ipAddr[3])
}

func IPAddress() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for name, ip := range hosts {
		fmt.Printf("%v: %v\n", name, ip)
	}
}

@Aternus commented Aug 14, 2022

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

@Aternus commented Aug 16, 2022

rot13Reader Solution:
❓ This solution saves an extra loop over the buffer by returning io.EOF as early as possible.

package tour

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (r rot13Reader) Read(b []byte) (int, error) {
	n, err := r.r.Read(b)
	// short circuit: EOF
	// (strings.Reader returns 0, io.EOF on a separate final call,
	// so no read bytes are discarded here)
	if err == io.EOF {
		return 0, io.EOF
	}
	// transform only the n bytes actually read
	for k, v := range b[:n] {
		b[k] = rot13(v)
	}
	return n, err
}

func Rot13ReaderMain() {
	s := strings.NewReader("Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

func rot13(b byte) byte {
	if b >= 'A' && b <= 'Z' {
		b = 'A' + (b-'A'+13)%26
	} else if b >= 'a' && b <= 'z' {
		b = 'a' + (b-'a'+13)%26
	}
	return b
}

@Aternus commented Aug 22, 2022

Images exercise solution:

package tour

import (
	"image"
	"image/color"
)

type Image struct {
	Width  int
	Height int
}

func (i Image) ColorModel() color.Model {
	return color.RGBAModel
}

func (i Image) Bounds() image.Rectangle {
	return image.Rect(0, 0, i.Width, i.Height)
}

func (i Image) At(x, y int) color.Color {
	r := uint8(x + y)
	g := uint8(x ^ y)
	b := uint8(x * y)
	//fmt.Println("R:", r, "G:", g, "B:", b)
	return color.RGBA{r, g, b, 255}
}

@AlbertoCoder commented Sep 12, 2022

This is my solution to the Exercise "Stringers":

package main

import "fmt"

type IPAddr [4]byte

// TODO: Add a "String() string" method to IPAddr.

// I return the call to Sprintf (in the fmt package), formatting each of the
// four byte values in the array, separated by dots:

func (ip IPAddr) String() string {
	// Sprintf formats according to a format specifier and returns the
	// resulting string; String() then returns that string to the caller.
	return fmt.Sprintf("%v.%v.%v.%v", ip[0], ip[1], ip[2], ip[3])
}

func main() {
	hosts := map[string]IPAddr{
		"loopback":  {127, 0, 0, 1},
		"googleDNS": {8, 8, 8, 8},
	}
	for nombre, ip := range hosts {
		fmt.Printf("%v: %v\n", nombre, ip)
	}
}

I hope it helps!

@AlbertoCoder

Excuse me:

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

package main

import (
	"io"
	"os"
	"strings"
)

type rot13Reader struct {
	r io.Reader
}

func (rot *rot13Reader) Read(p []byte) (n int, err error) {
	n, err = rot.r.Read(p)
	for i := 0; i < n; i++ {
		if (p[i] >= 'A' && p[i] < 'N') || (p[i] >= 'a' && p[i] < 'n') {
			p[i] += 13
		} else if (p[i] > 'M' && p[i] <= 'Z') || (p[i] > 'm' && p[i] <= 'z') {
			p[i] -= 13
		}
	}
	return
}

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

@americanstone

MIT PhD crawl solution:

package main

import (
	"fmt"
	"sync"
)

type Cache struct {
	m sync.Mutex
	v map[string]uint
}

func (s *Cache) Exist(key string) bool {
	s.m.Lock()
	defer s.m.Unlock()
	if _, ok := s.v[key]; ok {
		s.v[key]++
		return true
	} else {
		s.v[key] = 1
		return false
	}
}

func (s *Cache) String() string {
	s.m.Lock()
	defer s.m.Unlock()
	var sb string
	for k, v := range s.v {
		sb += fmt.Sprintf("key: %s => value: %d", k, v)
	}
	return fmt.Sprintln(sb)
}

type Fetcher interface {
	Fetch(url string) (body string, urls []string, err error)
}

//
// Serial crawler
//
func SerialCrawl(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	fmt.Println("depth ", depth)
	if depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	for _, u := range urls {
		//fmt.Printf("recursive crawl url %s\n", u)
		SerialCrawl(u, depth-1, fetcher, cache)
	}
}

//
// Concurrent crawler with shared state and Mutex
//

func ConcurrentMutex(url string, depth int, fetcher Fetcher, cache *Cache) {
	if cache.Exist(url) {
		return
	}
	//fmt.Printf("cached values %q\n", cache)

	if depth <= 0 {
		return
	}

	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Found: %s %q\n", url, body)
	//fmt.Printf("fetch children %q\n", urls)
	var done sync.WaitGroup
	for _, u := range urls {
		done.Add(1)
		//fmt.Printf("recursive crawl url %s\n", u)
		u2 := u
		go func() {
			defer done.Done()
			ConcurrentMutex(u2, depth-1, fetcher, cache)
		}()
		//go func(u string) {
		//	defer done.Done()
		//	ConcurrentMutex(u, depth -1,fetcher, cache)
		//}(u)
	}
	done.Wait()
}

//
// Concurrent crawler with channels
//

func worker(url string, depth uint, ch chan []string, fetcher Fetcher, cache *Cache) {
	//fmt.Printf("working on url %s depth %d\n", url, depth)
	body, urls, err := fetcher.Fetch(url)

	if err != nil {
		ch <- []string{}
	} else {
		fmt.Printf("Found: %s %q\n", url, body)
		ch <- urls
	}
}

func master(url string, ch chan []string, depth uint, fetcher Fetcher, cache *Cache) {
	n := 1
	copyDepth := depth
	for urls := range ch {
		// fmt.Printf("dep :%s\n", fmt.Sprintf("%q", copyDepth))
		if copyDepth == 0 {
			break
		}
		for _, url := range urls {
			if !cache.Exist(url) {
				n += 1
				go worker(url, depth, ch, fetcher, cache)
			}
		}
		depth--
		n -= 1
		if n == 0 {
			break
		}
	}
}

func ConcurrentChan(url string, depth uint, fetcher Fetcher, cache *Cache) {
	ch := make(chan []string)
	go func() {
		ch <- []string{url}
	}()
	master(url, ch, depth, fetcher, cache)
}

func main() {
	cache := Cache{v: make(map[string]uint)}
	SerialCrawl("https://golang.org/", 4, fetcher, &cache)

	ConcurrentMutex("https://golang.org/", 4, fetcher, &cache)

	ConcurrentChan("https://golang.org/", 4, fetcher, &cache)

	fmt.Printf("\nCached urls %q\n", cache.v)
}

type fakeFetcher map[string]*fakeResult // pointer values avoid copying fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@ViktorVoloshko commented Oct 13, 2023

My relatively simple solution for the crawler, without global variables or channels, using a mutex and a WaitGroup:

package main

import (
	"fmt"
	"sync"
)

type UrlCache struct {
	urls map[string]bool
	mu   sync.Mutex
}

func (uc *UrlCache) Add(url string) {
	uc.mu.Lock()
	uc.urls[url] = true
	uc.mu.Unlock()
}

func (uc *UrlCache) Get(url string) bool {
	uc.mu.Lock()
	defer uc.mu.Unlock()
	return uc.urls[url]
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	cache := UrlCache{urls: make(map[string]bool)}
	wg.Add(1)
	go crawl(url, depth, fetcher, &cache, &wg)
	wg.Wait()
}

func crawl(url string, depth int, fetcher Fetcher, cache *UrlCache, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || cache.Get(url) {
		return
	}
	cache.Add(url)
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawl(u, depth-1, fetcher, cache, wg)
	}
}

func main() {
	Crawl("https://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"https://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"https://golang.org/pkg/",
			"https://golang.org/cmd/",
		},
	},
	"https://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"https://golang.org/",
			"https://golang.org/cmd/",
			"https://golang.org/pkg/fmt/",
			"https://golang.org/pkg/os/",
		},
	},
	"https://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
	"https://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"https://golang.org/",
			"https://golang.org/pkg/",
		},
	},
}

@apo1967 commented Nov 30, 2023

@Aternus

Reader Solution:

package tour

import (
	"fmt"
)

type MyReader struct{}

func (r MyReader) Read(b []byte) (int, error) {
	bufferLength := len(b)
	if bufferLength <= 0 {
		return 0, fmt.Errorf("buffer length must be greater than 0")
	}
	buffer := make([]byte, bufferLength)
	for k := range buffer {
		buffer[k] = uint8('A')
	}
	n := copy(b, buffer)
	return n, nil
}

Is there a reason why you don't just set the 'A' directly in b? Why create an internal slice and then copy it to b? Just curious...
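For comparison, a minimal sketch of the direct-write variant the question alludes to (same observable behavior for the 'A'-stream exercise; no temporary slice or copy):

package main

import (
	"fmt"
	"io"
)

type MyReader struct{}

// Read fills b with 'A' bytes in place, so no intermediate buffer is needed.
func (r MyReader) Read(b []byte) (int, error) {
	for i := range b {
		b[i] = 'A'
	}
	return len(b), nil
}

func main() {
	buf := make([]byte, 8)
	n, err := io.ReadFull(MyReader{}, buf)
	fmt.Println(n, err, string(buf)) // 8 <nil> AAAAAAAA
}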

@dkowsley commented Sep 15, 2024

In the rot13Reader exercise, how is Read(p []byte) called? I'm a bit confused, since I don't see it being invoked in main()...

In the quoted code, we see a call to io.Copy

func main() {
	s := strings.NewReader(
		"Lbh penpxrq gur pbqr!")
	r := rot13Reader{s}
	io.Copy(os.Stdout, &r)
}

The relevant call chain is io.Copy(dst, src) -> io.copyBuffer(dst, src, nil) -> src.Read(buf) (src == &r in this example).

The function definition for io.Copy and the io.Reader interface it uses are:

type Reader interface {
	Read(p []byte) (n int, err error)
}

func Copy(dst Writer, src Reader) (written int64, err error) {
	return copyBuffer(dst, src, nil)
}

io.copyBuffer has the following definition (I have replaced most of the body with comments to make it easier to read). Take note of the src.Read(buf) line:

func copyBuffer(dst Writer, src Reader, buf []byte) (written int64, err error) {
	// if buf is nil we create a temporary 32KB buffer for it
	for {
		nr, er := src.Read(buf) // our call to the Read method we created in our example
		// perform writes to dst with error handling, returning once we have finished reading
	}
	return written, err
}
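To make that implicit call visible, here is a minimal runnable sketch (loudReader is a hypothetical wrapper; the Fprintf to stderr is only for illustration):

package main

import (
	"fmt"
	"io"
	"os"
	"strings"
)

type loudReader struct {
	r io.Reader
}

// Read delegates to the wrapped reader and reports each time it is called.
func (l *loudReader) Read(p []byte) (n int, err error) {
	n, err = l.r.Read(p)
	fmt.Fprintf(os.Stderr, "Read called: n=%d err=%v\n", n, err)
	return
}

func main() {
	r := &loudReader{strings.NewReader("You cracked the code!\n")}
	io.Copy(os.Stdout, r) // io.Copy drives Read; we never call it ourselves
}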
