I'm trying to get my head around goroutines. I've created a simple program that performs the same search in parallel across multiple search engines. At the moment, to keep track of the responses, I count how many I've received. It seems a bit amateur though.
Is there a better way of knowing when I've received a response from all of the goroutines in the following code?
package main

import (
    "fmt"
    "log"
    "net/http"
)

type Query struct {
    url    string
    status string
}

// search fetches url and reports its HTTP status on out.
func search(url string, out chan Query) {
    fmt.Printf("Fetching URL %s\n", url)
    resp, err := http.Get(url)
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    out <- Query{url, resp.Status}
}
func main() {
    searchTerm := "carrot"
    fmt.Println("Hello world! Searching for ", searchTerm)
    searchEngines := []string{
        "http://www.bing.co.uk/?q=",
        "http://www.google.co.uk/?q=",
        "http://www.yahoo.co.uk/?q="}
    out := make(chan Query)
    for i := 0; i < len(searchEngines); i++ {
        go search(searchEngines[i]+searchTerm, out)
    }
    progress := 0
    for {
        // is there a better way of doing this step?
        if progress >= len(searchEngines) {
            break
        }
        fmt.Println("Polling...")
        query := <-out
        fmt.Printf("Status from %s was %s\n", query.url, query.status)
        progress++
    }
}
Please use sync.WaitGroup; there is an example in the package documentation:
searchEngines := []string{
    "http://www.bing.co.uk/?q=",
    "http://www.google.co.uk/?q=",
    "http://www.yahoo.co.uk/?q="}
var wg sync.WaitGroup
out := make(chan Query)
for i := 0; i < len(searchEngines); i++ {
    wg.Add(1)
    go func(url string) {
        defer wg.Done()
        fmt.Printf("Fetching URL %s\n", url)
        resp, err := http.Get(url)
        if err != nil {
            log.Fatal(err)
        }
        defer resp.Body.Close()
        out <- Query{url, resp.Status}
    }(searchEngines[i] + searchTerm)
}
// out is unbuffered, so the sends above block until someone receives.
// Close the channel from a separate goroutine once every worker is done;
// main can then simply range over out until it is closed.
go func() {
    wg.Wait()
    close(out)
}()
for query := range out {
    fmt.Printf("Status from %s was %s\n", query.url, query.status)
}
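Since the number of results is known up front, one variation (an editor's sketch, not part of the original answer) is to give the channel enough buffer capacity that no send can ever block; then the extra closing goroutine is unnecessary. The complete program below reuses the question's Query type and URLs:

package main

import (
    "fmt"
    "log"
    "net/http"
    "sync"
)

type Query struct {
    url    string
    status string
}

func main() {
    searchTerm := "carrot"
    searchEngines := []string{
        "http://www.bing.co.uk/?q=",
        "http://www.google.co.uk/?q=",
        "http://www.yahoo.co.uk/?q="}
    // Buffered: capacity equals the number of sends, so no send can block.
    out := make(chan Query, len(searchEngines))
    var wg sync.WaitGroup
    for i := 0; i < len(searchEngines); i++ {
        wg.Add(1)
        go func(url string) {
            defer wg.Done()
            resp, err := http.Get(url)
            if err != nil {
                log.Fatal(err)
            }
            defer resp.Body.Close()
            out <- Query{url, resp.Status}
        }(searchEngines[i] + searchTerm)
    }
    wg.Wait()  // every goroutine has finished, so every result is already buffered
    close(out) // safe: no further sends can occur
    for query := range out {
        fmt.Printf("Status from %s was %s\n", query.url, query.status)
    }
}

Which form you prefer is largely taste: the buffered version trades a little memory for one fewer goroutine, while the unbuffered version starts printing results as they arrive rather than after wg.Wait() returns.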