Why does each transaction count as a client?

I am processing a bunch of files and then dumping the results in PostgreSQL. I would like to process many workers at the same time but keep getting the error "pq: sorry, too many clients already". This seems to happen whenever workers is > 100 or so. (For simplicity, the code below demonstrates the process but instead of processing a file I am simply inserting 1M rows in each table).

Since I am reusing the same *db why am I getting this error? Does each transaction count as a client or am I doing something wrong?

package main

import (
    "database/sql"
    "flag"
    "fmt"
    "log"
    "sync"

    "github.com/lib/pq"
)

// process drops and recreates the given table, then bulk-loads 1e6 rows
// into it via PostgreSQL's COPY protocol inside a single transaction.
//
// The transaction is committed (or rolled back on error) by a deferred
// closure, so every return path below funnels through it.
func process(db *sql.DB, table string) error {
	// Table names cannot be bound as query parameters, so quote the
	// identifier before interpolating it into DDL. (pq.CopyIn quotes
	// its arguments itself, so it receives the raw name.)
	quoted := pq.QuoteIdentifier(table)

	if _, err := db.Exec(fmt.Sprintf(`DROP TABLE IF EXISTS %s;`, quoted)); err != nil {
		return err
	}

	const col = "age"
	ddl := fmt.Sprintf(`
        CREATE TABLE %s (
            pk serial PRIMARY KEY,
            %s int NOT NULL
    )`, quoted, col)

	if _, err := db.Exec(ddl); err != nil {
		return err
	}

	tx, err := db.Begin()
	if err != nil {
		return err
	}

	defer func() {
		if err != nil {
			// Best-effort rollback; the original error is what we report.
			tx.Rollback()
			return
		}
		err = tx.Commit()
	}()

	stmt, err := tx.Prepare(pq.CopyIn(table, col))
	if err != nil {
		return err
	}

	defer func() {
		// Don't let a nil Close result clobber an earlier error —
		// otherwise the commit defer above would commit a failed COPY.
		if cerr := stmt.Close(); err == nil {
			err = cerr
		}
	}()

	for i := 0; i < 1e6; i++ {
		if _, err = stmt.Exec(i); err != nil {
			return err
		}
	}

	// lib/pq requires a final Exec with no arguments to flush the
	// buffered COPY data before the statement is closed.
	_, err = stmt.Exec()
	return err
}

// main parses connection flags, opens a bounded connection pool, and
// fans 300 table-load jobs out over a fixed set of worker goroutines.
func main() {
	var u string
	flag.StringVar(&u, "user", "", "user")

	var pass string
	flag.StringVar(&pass, "pass", "", "pass")

	var host string
	flag.StringVar(&host, "host", "", "host")

	var database string
	flag.StringVar(&database, "database", "", "database")

	var workers int
	flag.IntVar(&workers, "workers", 10, "workers")

	flag.Parse()

	db, err := sql.Open("postgres",
		fmt.Sprintf(
			"user=%s password=%s host=%s database=%s sslmode=require",
			u, pass, host, database,
		),
	)
	if err != nil {
		log.Fatalln(err)
	}
	defer db.Close()

	// Each open transaction pins a dedicated connection for its whole
	// lifetime, so an unbounded pool grows toward one connection per
	// in-flight job and eventually hits the server's client limit
	// ("pq: sorry, too many clients already"). Cap the pool at one
	// connection per worker, and keep that many idle so connections
	// are reused between jobs instead of being torn down and redialed.
	db.SetMaxOpenConns(workers)
	db.SetMaxIdleConns(workers)

	var wg sync.WaitGroup
	ch := make(chan int) // unbuffered: producers block until a worker is free

	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for i := range ch {
				table := fmt.Sprintf("_table%d", i)
				log.Println(table)
				if err := process(db, table); err != nil {
					// Note: Fatalln skips deferred cleanup; acceptable
					// for this demo, where the process exits anyway.
					log.Fatalln(err)
				}
			}
		}()
	}

	// Feed 300 jobs, then signal completion and wait for the workers.
	for i := 0; i < 300; i++ {
		ch <- i
	}

	close(ch)
	wg.Wait()
}

I realize I can simply increase the PostgreSQL settings (see How to increase the max connections in postgres?), but I would like to understand the underlying behavior.

Since I am reusing the same *db why am I getting this error?

I suspect the Postgres driver is using a separate connection for each of your workers, which is a smart decision for most cases.

Does each transaction count as a client or am I doing something wrong?

In your case, yes — each transaction counts as a client, because you are calling process() in a goroutine. You are creating as many concurrent transactions as there are workers. Since each of your transactions is long-running, all of them are probably holding an individual connection to the database at the same time, and hence you hit the limit.

go func() {
        defer wg.Done()
        for i := range ch {
            table := fmt.Sprintf("_table%d", i)
            log.Println(table)
            if err := process(db, table); err != nil {
                log.Fatalln(err)
            }
        }
    }()