CHSHJ 发表于 2018-9-21 08:26:28

Go Playground exercise

package main  

  
import (
  
   "fmt"
  
   "sync"
  
)
  

  
// Fetcher is the page-retrieval abstraction used by Crawl: anything that
// can return a page's body and the outgoing links found on it. Crawl takes
// this as an interface so tests can substitute a fake in place of real
// network fetching (standard for this Tour of Go exercise).
type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	// A non-nil err indicates the URL could not be fetched; body and urls
	// are not meaningful in that case.
	Fetch(url string) (body string, urls []string, err error)
}
  

  
// Crawl uses fetcher to recursively crawl
  
// pages starting with url, to a maximum of depth.
  
func Crawl(url string, depth int, fetcher Fetcher, c *urlSync, wg *sync.WaitGroup) {
  
   // TODO: Fetch URLs in parallel.
  
   // TODO: Don't fetch the same URL twice.
  
   // This implementation doesn't do either:
  
   defer wg.Done()
  
   if depth
页: [1]
查看完整版本: Go Playground exercise