@tranphuoctien
Created February 26, 2018 07:46
package main

import (
	"fmt"
	"sync"
)

const (
	maxWorkers      = 10 // maximum number of worker goroutines
	holdingCapacity = 30 // buffer size of the jobs channel
)

type Scraper struct {
	Url string
}

// Scrape does the heavy lifting of fetching and processing a single URL.
// Here it is a stub that just reports the URL it was handed.
func (s *Scraper) Scrape() {
	fmt.Printf("Scraped %v\n", s.Url)
}

// list stands in for the endless stream of incoming URLs from the internet.
var list = []string{"google.com", "yahoo.com", "reddit.com", "golang.org", "js4.red"}

func main() {
	urls := make(chan *Scraper, holdingCapacity)

	// Start a fixed pool of workers; each drains the channel until it is closed.
	var wg sync.WaitGroup
	for i := 0; i < maxWorkers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for url := range urls {
				url.Scrape()
			}
		}()
	}

	// Feed the jobs, then close the channel so the workers' range loops end.
	for _, u := range list {
		urls <- &Scraper{Url: u}
	}
	close(urls)
	wg.Wait()
}
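
To try it, save the file as main.go and run it with the Go toolchain. One possible run is sketched below; since the ten workers pull from the channel concurrently, the order of the lines varies from run to run:

$ go run main.go
Scraped google.com
Scraped reddit.com
Scraped yahoo.com
Scraped golang.org
Scraped js4.red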
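
Scrape above is only a stub. As a rough illustration of what the "heavy lifting" could look like, here is a minimal standalone sketch that fetches each page with the standard net/http package; the https:// prefix, the error handling, and reporting only the byte count are assumptions for illustration, not part of the original gist:

package main

import (
	"fmt"
	"io"
	"net/http"
)

type Scraper struct {
	Url string
}

// Scrape fetches the URL and reports how many bytes the body contained.
// Hypothetical replacement for the stub above; a real scraper would parse the body.
func (s *Scraper) Scrape() {
	resp, err := http.Get("https://" + s.Url) // assumes the hosts speak HTTPS
	if err != nil {
		fmt.Printf("Scrape failed for %v: %v\n", s.Url, err)
		return
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Printf("Read failed for %v: %v\n", s.Url, err)
		return
	}
	fmt.Printf("Scraped %v (%d bytes)\n", s.Url, len(body))
}

func main() {
	(&Scraper{Url: "golang.org"}).Scrape()
}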