Golang io.Copy twice on the request body

I am building a blob storage system, and I picked Go as the programming language. I create a stream to do a multipart file upload from the client to the blob server.

The stream works fine, but I also want to compute a sha1 hash of the request body, which means I need to io.Copy the body twice:

  1. For creating the hash
  2. For streaming the body as multipart

The sha1 hash gets created, but the multipart part streams 0 bytes after that. Any idea how I can do this?
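To illustrate, here is a minimal sketch of the behavior I am seeing, with a strings.Reader standing in for my actual body:

package main

import (
    "fmt"
    "io"
    "strings"
)

func main() {
    r := strings.NewReader("blob contents")

    // The first copy drains the reader completely...
    n1, _ := io.Copy(io.Discard, r)
    // ...so the second copy finds nothing left to read.
    n2, _ := io.Copy(io.Discard, r)

    fmt.Println(n1, n2) // prints: 13 0
}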

The client upload:

func (c *Client) Upload(h *UploadHandle) (*PutResult, error) {
    body, bodySize, err := h.Read()
    if err != nil {
        return nil, err
    }

    // Create a sha1 hash from the bytes of body.
    dropRef, err := drop.Sha1FromReader(body)
    if err != nil {
        return nil, err
    }

    bodyReader, bodyWriter := io.Pipe()
    writer := multipart.NewWriter(bodyWriter)

    errChan := make(chan error, 1)
    go func() {
        defer bodyWriter.Close()
        part, err := writer.CreateFormFile(dropRef, dropRef)
        if err != nil {
            errChan <- err
            return
        }
        if _, err := io.Copy(part, body); err != nil {
            errChan <- err
            return
        }
        if err = writer.Close(); err != nil {
            errChan <- err
        }
    }()

    req, err := http.NewRequest("POST", c.Server+"/drops/upload", bodyReader)
    if err != nil {
        return nil, err
    }
    req.Header.Add("Content-Type", writer.FormDataContentType())
    resp, err := c.Do(req)
    if err != nil {
        return nil, err
    }
    .....
}

The sha1 func:

func Sha1FromReader(src io.Reader) (string, error) {
    hash := sha1.New()
    if _, err := io.Copy(hash, src); err != nil {
        return "", err
    }
    return hex.EncodeToString(hash.Sum(nil)), nil
}

The upload handle:

func (h *UploadHandle) Read() (io.Reader, int64, error) {
    var b bytes.Buffer

    hw := &Hasher{&b, sha1.New()}
    n, err := io.Copy(hw, h.Contents)
    if err != nil {
        return nil, 0, err
    }

    return &b, n, nil
}

You can't do that directly, but you can write a wrapper that does the hashing during io.Copy:

// This works as either a reader or a writer,
// but if you use both at the same time the hash will be wrong.
type Hasher struct {
    io.Writer
    io.Reader
    hash.Hash
    Size uint64
}

func (h *Hasher) Write(p []byte) (n int, err error) {
    n, err = h.Writer.Write(p)
    h.Hash.Write(p)
    h.Size += uint64(n)
    return
}

func (h *Hasher) Read(p []byte) (n int, err error) {
    n, err = h.Reader.Read(p)
    h.Hash.Write(p[:n]) // hash only the bytes actually read; safe even when Read returns an error
    return
}

func (h *Hasher) Sum() string {
    return hex.EncodeToString(h.Hash.Sum(nil))
}

func (h *UploadHandle) Read() (io.Reader, string, int64, error) {
    var b bytes.Buffer

    hashedReader := &Hasher{Reader: h.Contents, Hash: sha1.New()}
    n, err := io.Copy(&b, hashedReader)

    if err != nil {
        return nil, "", 0, err
    }

    return &b, hashedReader.Sum(), n, nil
}

// Updated version based on @Dustin's comment, since I completely forgot io.TeeReader existed.

func (h *UploadHandle) Read() (io.Reader, string, int64, error) {
    var b bytes.Buffer

    hash := sha1.New()
    n, err := io.Copy(&b, io.TeeReader(h.Contents, hash))

    if err != nil {
        return nil, "", 0, err
    }

    return &b, hex.EncodeToString(hash.Sum(nil)), n, nil
}
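With either version the hash comes back from Read itself, so the separate Sha1FromReader pass in your client can go away. A rough sketch of the call site, reusing the names from your question:

func (c *Client) Upload(h *UploadHandle) (*PutResult, error) {
    // One pass over h.Contents buffers the body and produces its hash.
    body, dropRef, bodySize, err := h.Read()
    if err != nil {
        return nil, err
    }
    // dropRef is known before the multipart part is created, and body
    // (a *bytes.Buffer) still holds all bodySize bytes for the upload.
    .....
}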

You have two options.

The most direct way is to use io.MultiWriter.
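For example, a minimal sketch that fills a destination and the hash in one pass (dst standing in for whichever writer you are targeting):

hash := sha1.New()
// Every byte copied to dst is also written to the hash.
if _, err := io.Copy(io.MultiWriter(dst, hash), body); err != nil {
    return nil, err
}
sum := hex.EncodeToString(hash.Sum(nil))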

But if you need the hash before you can produce the multipart output (in your case the hash names the form file), then you will have to copy into a bytes.Buffer first and then stream the buffer out to each writer.
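A sketch of that buffering variant, reusing body and writer from your question:

var buf bytes.Buffer
hash := sha1.New()

// First pass: fill the buffer and the hash together.
if _, err := io.Copy(io.MultiWriter(&buf, hash), body); err != nil {
    return nil, err
}
dropRef := hex.EncodeToString(hash.Sum(nil))

// dropRef is now available to name the form file,
// and buf replays the body into the multipart writer.
part, err := writer.CreateFormFile(dropRef, dropRef)
if err != nil {
    return nil, err
}
if _, err := io.Copy(part, &buf); err != nil {
    return nil, err
}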

I would suggest an io.TeeReader if you want to push all reads from the blob through the sha1 in the same pass:

bodyReader := io.TeeReader(body, hash)

Now, as bodyReader is consumed during the upload, the hash is updated automatically.
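To put that together with the goroutine from your question (reusing part, errChan, and hash; note the caveat above: the sum is only complete once the body has been read in full, so this variant cannot give you the form-file name up front):

hash := sha1.New()
bodyReader := io.TeeReader(body, hash)

// Every read from bodyReader feeds the hash as a side effect.
if _, err := io.Copy(part, bodyReader); err != nil {
    errChan <- err
    return
}

// Only now does the sum cover the entire body.
sum := hex.EncodeToString(hash.Sum(nil))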