How can I import rows into PostgreSQL from STDIN?

In Python I have the following that will bulk-load rows to Postgresql without using a file:

import csv
import io
import subprocess

mylist, keys = [{'name': 'fred'}, {'name': 'mary'}], ['name']
p = subprocess.Popen(['psql', 'mydb', '-U', 'openupitsme', '-h', 'my.ip.address', '--no-password', '-c',
    '\COPY tester(%s) FROM STDIN (FORMAT CSV)' % ', '.join(keys),
    '--set=ON_ERROR_STOP=false'
    ], stdin=subprocess.PIPE
)
# On Python 3, p.stdin is a binary stream; csv writes text, so wrap it.
# Closing the wrapper also closes p.stdin, signalling EOF to psql.
with io.TextIOWrapper(p.stdin, encoding='utf-8', newline='') as text_stdin:
    # Build the writer once, not once per row.
    dict_writer = csv.DictWriter(text_stdin, keys, quoting=csv.QUOTE_MINIMAL)
    dict_writer.writerows(mylist)
p.wait()  # let psql finish so any COPY error surfaces before we move on

I am trying to accomplish the same in Go. I am currently writing the rows to a file then importing them and then deleting that file. I'd like to import the rows from STDIN like I do in Python. I have:

package main

import (
    "database/sql"
    "log"
    "os"
    "os/exec"

    _ "github.com/lib/pq"
)

var (
    err error
    db  *sql.DB
)

func main() {
    // Write the rows to a temporary file, then bulk-load it with psql's
    // client-side \COPY meta-command. The file is removed on exit.
    fh := "/path/to/my/file.txt"
    f, err := os.Create(fh)
    if err != nil {
        panic(err)
    }
    defer f.Close()
    defer os.Remove(fh)
    rows := []string{"fred", "mary"}
    for _, n := range rows {
        // Note: the newline must be the escape sequence "\n"; a string
        // literal split across physical lines does not compile in Go.
        if _, err = f.WriteString(n + "\n"); err != nil {
            panic(err)
        }
    }
    // dump to postgresql
    c := exec.Command("psql", "mydb", "-U", "openupitsme", "-h", "my.ip.address", "--no-password",
        "-c", `\COPY tester(customer) FROM `+fh)
    if out, err := c.CombinedOutput(); err != nil {
        log.Println(string(out), err)
    }
}

EDIT: A bit further along but this is not inserting records:

    keys := []string{"link", "domain"}
    // NOTE(review): the first record is a header row ("first_name", "last_name").
    // COPY ... (FORMAT CSV) is used below without the HEADER option, so psql
    // would presumably load this header as a data row — confirm intent.
    // Also, keys names link/domain while the data holds first/last names;
    // verify which schema is actually meant.
    records := [][]string{
        {"first_name", "last_name"},
        {"Rob", "Pike"},
        {"Ken", "Thompson"},
        {"Robert", "Griesemer"},
    }

    // psql is started with NO arguments; everything written below goes to
    // its standard input, not its command line.
    cmd := exec.Command("psql")
    stdin, err := cmd.StdinPipe()
    if err != nil {
        log.Println(err)
    }
    stdout, err := cmd.StdoutPipe()
    if err != nil {
        log.Println(err)
    }
    if err := cmd.Start(); err != nil {
        log.Println(err)
    }
    go func() {
        // BUG(review): these are command-line flags, but they are written to
        // psql's stdin, where psql reads them as SQL text. They must instead
        // be passed as arguments to exec.Command (e.g. exec.Command("psql",
        // "search", "-U", "meyo", ...)). This is why psql exits with status 2.
        _, err = io.WriteString(stdin, "search -U meyo -h 1.2.3.4 -p 1111 --no-password -c ")
        if err != nil {
            log.Println(err)
        }
        // NOTE(review): no newline or statement terminator is written after
        // this COPY statement, so the first CSV record ends up appended to
        // the same input line as the statement itself.
        _, err := io.WriteString(stdin, fmt.Sprintf("COPY links(%s) FROM STDIN (FORMAT CSV)", strings.Join(keys, ",")))
        if err != nil {
            log.Println(err)
        }
        // Stream the records as CSV into the same stdin pipe.
        w := csv.NewWriter(stdin)
        if err := w.WriteAll(records); err != nil {
            log.Fatalln("error writing record to csv:", err)
        }
        w.Flush()
        if err := w.Error(); err != nil {
            log.Fatal(err)
        }
        if err != nil {
            log.Println(err)
        }
        // Close stdin so psql sees EOF and can finish the COPY.
        stdin.Close()
    }()

    // Mirror psql's stdout to this process's stdout until the pipe closes.
    done := make(chan bool)
    go func() {
        _, err := io.Copy(os.Stdout, stdout)
        if err != nil {
            log.Fatal(err)
        }
        stdout.Close()
        done <- true
    }()
    <-done

    if err := cmd.Wait(); err != nil {
        log.Println(err, cmd.Args, stdout)
    }

No records are inserted and I get a non-helpful error:

exit status 2

The following code should point you in the direction you want to go:

package main

import (
    "fmt"
    "log"
    "os"
    "os/exec"
    "strings"
)

func main() {
    // Run COPY ... FROM STDIN through psql -c and feed it CSV rows from this
    // process's own standard input.
    keys := []string{"customer"}
    sqlCmd := fmt.Sprintf("COPY tester(%s) FROM STDIN (FORMAT CSV)", strings.Join(keys, ","))
    cmd := exec.Command("psql", "<dbname>", "-U", "<username>", "-h", "<host_ip>", "--no-password", "-c", sqlCmd)
    cmd.Stdin = os.Stdin // psql reads the CSV data directly from our stdin
    output, err := cmd.CombinedOutput()
    if err != nil {
        // Previously this error was discarded; surface it with psql's output.
        log.Fatalf("psql: %v\n%s", err, output)
    }
    log.Println(string(output))
}

If the keys need to be dynamic you can harvest them from os.Args.

Please note that if you plan to use the psql command then you don't need to import database/sql or lib/pq. If you are interested in using lib/pq then have a look at Bulk Imports in the lib/pq documentation.

The github.com/lib/pq package docs actually have an example of how to do what you want. Here is the adapted text of the whole program:

package main

import (
    "database/sql"
    "log"

    "github.com/lib/pq"
)

func main() {
    // Rows to bulk-load via the PostgreSQL COPY protocol.
    rows := [][]string{
        {"Rob", "Pike"},
        {"Ken", "Thompson"},
        {"Robert", "Griesemer"},
    }

    // Open a connection pool and verify connectivity up front.
    db, err := sql.Open("postgres", "dbname=postgres user=postgres password=postgres")
    if err != nil {
        log.Fatalf("open: %v", err)
    }
    if err = db.Ping(); err != nil {
        log.Fatalf("open ping: %v", err)
    }
    defer db.Close()

    // COPY must run inside a transaction.
    tx, err := db.Begin()
    if err != nil {
        log.Fatalf("begin: %v", err)
    }

    // pq.CopyIn produces the COPY ... FROM STDIN statement for the table.
    copyStmt, err := tx.Prepare(pq.CopyIn("test", "first_name", "last_name"))
    if err != nil {
        log.Fatalf("prepare: %v", err)
    }

    // Each Exec with arguments buffers one row for the COPY stream.
    for _, rec := range rows {
        if _, err = copyStmt.Exec(rec[0], rec[1]); err != nil {
            log.Fatalf("exec: %v", err)
        }
    }

    // An Exec with no arguments flushes the buffered rows to the server.
    if _, err = copyStmt.Exec(); err != nil {
        log.Fatalf("exec: %v", err)
    }

    if err = copyStmt.Close(); err != nil {
        log.Fatalf("stmt close: %v", err)
    }

    if err = tx.Commit(); err != nil {
        log.Fatalf("commit: %v", err)
    }
}

On my machine this imports 1 000 000 records in about 2 seconds.