How to close a channel

I am trying to adapt this example: https://gobyexample.com/worker-pools

But I don't know how to stop the channel, because the program doesn't exit at the end of the channel loop.

Can you explain how to exit the program?

package main

import (
    "github.com/SlyMarbo/rss"
    "bufio"
    "fmt"
    "log"
    "os"
)

func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}


func worker(id int, jobs <-chan string, results chan<- string) {
    for url := range jobs {
        fmt.Println("worker", id, "processing job", url)
        feed, err := rss.Fetch(url)
        if err != nil {
            fmt.Println("Error on: ", url)
            continue
        }
        borne := 0
        for _, value := range feed.Items {
            if borne < 5 {
                results <- value.Link
                borne = borne +1
            } else {
                continue
            }
        }
    }
}


func main() {
    jobs := make(chan string)
    results := make(chan string)

    for w := 1; w <= 16; w++ {
        go worker(w, jobs, results)
    }


    urls, err := readLines("flux.txt")
    if err != nil { 
        log.Fatalf("readLines: %s", err) 
    }

    for _, url := range urls {
        jobs <- url
    }

    close(jobs)

    // it seems program runs over...
    for msg := range results {
        fmt.Println(msg)
    }
}

flux.txt is a flat text file with one feed URL per line.

The problem is that, in the example you are referring to, main reads from results a fixed number of times (9):

for a := 1; a <= 9; a++ {
    <-results
}

Your program, on the other hand, ranges over results, which has different semantics in Go: a range loop over a channel does not stop until the channel is closed.

for msg := range results {
    fmt.Println(msg)
}
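
As a minimal standalone illustration (not taken from your program), a range loop over a channel only ends once the channel has been closed:

package main

import "fmt"

func main() {
    ch := make(chan int)

    go func() {
        for i := 0; i < 3; i++ {
            ch <- i
        }
        close(ch) // without this close, the range loop below would block forever
    }()

    for v := range ch {
        fmt.Println(v) // prints 0, 1, 2, then the loop ends because ch is closed
    }
}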

To fix your problem, you need to close the results channel. However, if you simply call close(results) before the for loop, you will most likely get a panic, because the workers may still be sending to results.
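
That panic is the runtime error "send on closed channel"; a tiny sketch, unrelated to your program, that reproduces it:

package main

func main() {
    results := make(chan string, 1)
    close(results)
    results <- "too late" // panic: send on closed channel
}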

To fix this, you need a way to know when all the workers are done, so that results can be closed safely. You can do that either with a sync.WaitGroup (readLines and worker stay exactly as in your code; "sync" has to be added to the imports):

const (
    workers = 16
)

func main() {
    jobs := make(chan string, 100)
    results := make(chan string, 100)
    var wg sync.WaitGroup

    for w := 0; w < workers; w++ {
        wg.Add(1) // register the worker *before* starting it, otherwise wg.Wait() may return too early
        go func(id int) {
            defer wg.Done()
            worker(id, jobs, results)
        }(w) // pass w as an argument so each goroutine gets its own copy of the loop variable
    }

    urls, err := readLines("flux.txt")
    if err != nil {
        log.Fatalf("readLines: %s", err)
    }

    for _, url := range urls {
        jobs <- url
    }

    close(jobs)

    wg.Wait()

    // all workers have returned, so nothing can send on results any more
    close(results)

    // results has been closed, so this range loop ends once the buffer is drained
    for msg := range results {
        fmt.Println(msg)
    }
}
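
Note that this version relies on the jobs and results buffers (100 each) being large enough: main only starts reading from results after wg.Wait() returns, so if the workers produce more results than fit in the buffer, they block and the program deadlocks. One common pattern, sketched below for this same main, is to keep only the draining loop in main and move the feeding and the wait-then-close into goroutines:

    // feed the jobs from a goroutine so main is free to drain results
    go func() {
        for _, url := range urls {
            jobs <- url
        }
        close(jobs)
    }()

    // close results only after every worker has returned
    go func() {
        wg.Wait()
        close(results)
    }()

    for msg := range results {
        fmt.Println(msg)
    }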

Or with done channels, one per worker:

package main

import (
    "bufio"
    "fmt"
    "github.com/SlyMarbo/rss"
    "log"
    "os"
)

func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}

func worker(id int, jobs <-chan string, results chan<- string, done chan struct{}) {
    for url := range jobs {
        fmt.Println("worker", id, "processing job", url)
        feed, err := rss.Fetch(url)
        if err != nil {
            fmt.Println("Error on: ", url)
            continue
        }
        borne := 0
        for _, value := range feed.Items {
            if borne < 5 {
                results <- value.Link
                borne = borne + 1
            } else {
                continue
            }
        }
    }
    close(done) // signal that this worker has processed all its jobs
}

const (
    workers = 16
)

func main() {
    jobs := make(chan string, 100)
    results := make(chan string, 100)
    dones := make([]chan struct{}, workers)

    for w := 0; w < workers; w++ {
        dones[w] = make(chan struct{})
        go worker(w, jobs, results, dones[w])
    }

    urls, err := readLines("flux.txt")
    if err != nil {
        log.Fatalf("readLines: %s", err)
    }

    for _, url := range urls {
        jobs <- url
    }


    close(jobs)

    // wait until every worker has closed its done channel
    for _, done := range dones {
        <-done
    }

    close(results) // safe now: no worker can send on results any more

    // results has been closed, so this range loop ends once the buffer is drained
    for msg := range results {
        fmt.Println(msg)
    }
}
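
The same buffering caveat applies to this variant, since main only drains results after every done channel has been closed. The wait can be moved into a goroutine here as well (and, as above, the job feeding too), for example:

    go func() {
        for _, done := range dones {
            <-done
        }
        close(results)
    }()

    for msg := range results {
        fmt.Println(msg)
    }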