Is there a better way to track goroutine responses?

Time: 2013-02-23 09:00:27

Tags: go goroutine

I'm trying to get my head around goroutines. I've written a simple program that runs the same search against several search engines in parallel. At the moment, to know when all the replies have arrived, I count how many I've received, but that feels a bit amateurish.

Is there a better way to know when I've received replies from all of the goroutines in the code below?

package main

import (
    "fmt"
    "log"
    "net/http"
)

type Query struct {
    url string
    status string
}

func search(url string, out chan Query) {
    fmt.Printf("Fetching URL %s\n", url)
    resp, err := http.Get(url)

    if err != nil {
        log.Fatal(err)
    }

    defer resp.Body.Close()

    out <- Query{url, resp.Status}
}

func main() {
    searchTerm := "carrot"

    fmt.Println("Hello world! Searching for ", searchTerm)

    searchEngines := []string{
        "http://www.bing.co.uk/?q=",
        "http://www.google.co.uk/?q=",
        "http://www.yahoo.co.uk/?q="}

    out := make(chan Query)

    for i := 0; i < len(searchEngines); i++ {
        go search(searchEngines[i] + searchTerm, out)
    }

    progress := 0

    for {
        // is there a better way of doing this step?
        if progress >= len(searchEngines) {
            break
        }
        fmt.Println("Polling...")
        query := <-out
        fmt.Printf("Status from %s was %s\n", query.url, query.status)
        progress++
    }
}

1 Answer:

Answer 0 (score: 11)

Use sync.WaitGroup; there is an example of it in the package documentation.

searchEngines := []string{
    "http://www.bing.co.uk/?q=",
    "http://www.google.co.uk/?q=",
    "http://www.yahoo.co.uk/?q="}

var wg sync.WaitGroup
out := make(chan Query)

for i := 0; i < len(searchEngines); i++ {
    wg.Add(1) // one unit of work per search engine
    go func(url string) {
        defer wg.Done() // decrement the counter when this goroutine returns
        fmt.Printf("Fetching URL %s\n", url)
        resp, err := http.Get(url)
        if err != nil {
            log.Fatal(err)
        }
        defer resp.Body.Close()

        out <- Query{url, resp.Status}
    }(searchEngines[i] + searchTerm)
}
wg.Wait() // blocks until every goroutine has called Done
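
Note that out here is unbuffered, so if nothing receives from it, the goroutines block on the send and wg.Wait() never returns. Below is a minimal sketch of one way to complete the program (assuming the same Query type and search engines as in the question): run wg.Wait() in its own goroutine, close the channel once every worker is done, and range over the channel in main. Using log.Println instead of log.Fatal inside the workers is my own change, so a single failed request doesn't kill the whole program.

package main

import (
    "fmt"
    "log"
    "net/http"
    "sync"
)

type Query struct {
    url    string
    status string
}

func main() {
    searchTerm := "carrot"

    searchEngines := []string{
        "http://www.bing.co.uk/?q=",
        "http://www.google.co.uk/?q=",
        "http://www.yahoo.co.uk/?q="}

    var wg sync.WaitGroup
    out := make(chan Query)

    for _, engine := range searchEngines {
        wg.Add(1)
        go func(url string) {
            defer wg.Done()
            resp, err := http.Get(url)
            if err != nil {
                // Log and return instead of log.Fatal so one failure
                // doesn't terminate the other searches.
                log.Println(err)
                return
            }
            defer resp.Body.Close()
            out <- Query{url, resp.Status}
        }(engine + searchTerm)
    }

    // Close the channel once every worker has finished, so the
    // range loop below knows when to stop.
    go func() {
        wg.Wait()
        close(out)
    }()

    for query := range out {
        fmt.Printf("Status from %s was %s\n", query.url, query.status)
    }
}

The range loop ends as soon as close(out) runs, so there is no need to count the responses by hand.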