
Commit

Fix typos in solutions
Matthias Fasching authored and fasmat committed Mar 1, 2024
1 parent 48cc814 commit 577bcdc
Showing 2 changed files with 22 additions and 41 deletions.
30 changes: 11 additions & 19 deletions challenge01/challenge01_solution_02.go
@@ -8,17 +8,12 @@ import (
 )
 
 type Crawler02 struct {
-	fetched map[string]bool
-	wg      *sync.WaitGroup
-	mu      sync.Mutex
+	fetched sync.Map
+	wg      sync.WaitGroup
 }
 
 func New02() *Crawler02 {
-	c := &Crawler02{
-		fetched: make(map[string]bool),
-		wg:      new(sync.WaitGroup),
-	}
-	c.wg.Add(1)
+	c := &Crawler02{}
 	return c
 }
 
@@ -30,18 +25,18 @@ func NewWithRateLimit02(...interface{}) Crawler {
 // Crawl uses fetcher to recursively crawl
 // pages starting with url, to a maximum of depth.
 func (c *Crawler02) Crawl(url string, depth int, fetcher fetcher.Fetcher) {
-	go c.crawHandler(url, depth, fetcher)
-
+	c.wg.Add(1)
+	go c.crawlHandler(url, depth, fetcher)
 	c.wg.Wait()
 }
 
-func (c *Crawler02) crawHandler(url string, depth int, fetcher fetcher.Fetcher) {
+func (c *Crawler02) crawlHandler(url string, depth int, fetcher fetcher.Fetcher) {
 	defer c.wg.Done()
 	if depth <= 0 {
 		return
 	}
 
-	if c.checkFetched(url) {
+	if c.fetchedBefore(url) {
 		fmt.Printf("skipping: %s\n", url)
 		return
 	}
@@ -55,17 +50,14 @@ func (c *Crawler02) crawHandler(url string, depth int, fetcher fetcher.Fetcher)
 
 	c.wg.Add(len(urls))
 	for _, u := range urls {
-		go c.Crawl(u, depth-1, fetcher)
+		go c.crawlHandler(u, depth-1, fetcher)
 	}
 }
 
-func (c *Crawler02) checkFetched(url string) bool {
-	c.mu.Lock()
-	defer c.mu.Unlock()
-
-	if c.fetched[url] {
+func (c *Crawler02) fetchedBefore(url string) bool {
+	if _, ok := c.fetched.Load(url); ok {
		return true
	}
-	c.fetched[url] = true
+	c.fetched.Store(url, true)
	return false
 }
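
A side note on the new fetchedBefore: the separate Load and Store calls are not atomic, so two goroutines racing on the same URL can both see a miss and fetch the page twice. sync.Map.LoadOrStore performs the check and the insert in one call. A minimal standalone sketch of that alternative (not part of this commit; function and variable names are illustrative):

package main

import (
	"fmt"
	"sync"
)

// markFetched reports whether url was already recorded, recording it if
// not. LoadOrStore checks and inserts atomically, so two goroutines
// racing on the same url cannot both observe a miss.
func markFetched(fetched *sync.Map, url string) bool {
	_, loaded := fetched.LoadOrStore(url, true)
	return loaded
}

func main() {
	var fetched sync.Map
	fmt.Println(markFetched(&fetched, "https://golang.org/")) // false: first visit
	fmt.Println(markFetched(&fetched, "https://golang.org/")) // true: already seen
}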
33 changes: 11 additions & 22 deletions challenge01/challenge01_solution_03.go
@@ -9,46 +9,38 @@ import (
 )
 
 type Crawler03 struct {
-	fetched map[string]bool
-	wg      *sync.WaitGroup
-	mu      sync.Mutex
+	fetched sync.Map
+	wg      sync.WaitGroup
 	ticker  *time.Ticker
 }
 
 func New03() *Crawler03 {
-	c := &Crawler03{
-		fetched: make(map[string]bool),
-		wg:      new(sync.WaitGroup),
-	}
-	c.wg.Add(1)
+	c := &Crawler03{}
 	return c
 }
 
 func NewWithRateLimit03(rate time.Duration) *Crawler03 {
 	c := &Crawler03{
-		fetched: make(map[string]bool),
-		wg:      new(sync.WaitGroup),
 		ticker:  time.NewTicker(rate),
 	}
-	c.wg.Add(1)
 	return c
 }
 
 // Crawl uses fetcher to recursively crawl
 // pages starting with url, to a maximum of depth.
 func (c *Crawler03) Crawl(url string, depth int, fetcher fetcher.Fetcher) {
-	go c.crawHandler(url, depth, fetcher)
-
+	c.wg.Add(1)
+	go c.crawlHandler(url, depth, fetcher)
 	c.wg.Wait()
 }
 
-func (c *Crawler03) crawHandler(url string, depth int, fetcher fetcher.Fetcher) {
+func (c *Crawler03) crawlHandler(url string, depth int, fetcher fetcher.Fetcher) {
 	defer c.wg.Done()
 	if depth <= 0 {
 		return
 	}
 
-	if c.checkFetched(url) {
+	if c.fetchedBefore(url) {
 		fmt.Printf("skipping: %s\n", url)
 		return
 	}
@@ -66,17 +58,14 @@ func (c *Crawler03) crawHandler(url string, depth int, fetcher fetcher.Fetcher)
 
 	c.wg.Add(len(urls))
 	for _, u := range urls {
-		go c.Crawl(u, depth-1, fetcher)
+		go c.crawlHandler(u, depth-1, fetcher)
 	}
 }
 
-func (c *Crawler03) checkFetched(url string) bool {
-	c.mu.Lock()
-	defer c.mu.Unlock()
-
-	if c.fetched[url] {
+func (c *Crawler03) fetchedBefore(url string) bool {
+	if _, ok := c.fetched.Load(url); ok {
		return true
	}
-	c.fetched[url] = true
+	c.fetched.Store(url, true)
	return false
 }
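
Beyond the rename from crawHandler to crawlHandler, both files move the WaitGroup accounting out of the constructors: Crawl now calls Add(1) immediately before starting the root goroutine, and the handler calls Add(len(urls)) before spawning children. The point of that ordering is that Add must run before the goroutine that will call Done is started, or Wait can return too early. A standalone sketch of the pattern (names are illustrative, not from the repository):

package main

import (
	"fmt"
	"sync"
)

// visit recursively spawns child goroutines, calling Add for each child
// before starting it. Calling Add only from inside the child would race
// with Wait, which could observe a zero counter and return before the
// child has run at all.
func visit(wg *sync.WaitGroup, depth int) {
	defer wg.Done()
	if depth <= 0 {
		return
	}
	fmt.Println("visiting at depth", depth)
	wg.Add(1) // account for the child before it starts
	go visit(wg, depth-1)
}

func main() {
	var wg sync.WaitGroup
	wg.Add(1)
	go visit(&wg, 3)
	wg.Wait() // returns only after the whole tree of goroutines finishes
}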
