How do I optimize code that reads a file once to hash it and then seeks back to the start and reads it again?

How do I make this work without the file seek? Basically, I compute a hash and then re-read the file, which is not optimal. How can I optimize this, either with io.TeeReader or some other way of reading in chunks, so that hashing and writing the content happen in a single pass, without reading the file twice?

Also, do I need to specify content length myself?

// PUT method
func (c *Client) PutFileOld(filename string, noLen bool) error {
    file, err := os.Open(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    hasher := md5.New()

    if _, err := io.Copy(hasher, file); err != nil {
        return fmt.Errorf("could not compute MD5: %w", err)
    }

    // Lazy way to go back to the beginning, since hashing has consumed
    // the reader's bytes
    if _, err := file.Seek(0, io.SeekStart); err != nil {
        return err
    }

    c.MD5 = hex.EncodeToString(hasher.Sum(nil))
    log.Printf("Uploading to: %s", fmt.Sprintf("%s/%s", c.baseURL, filename))
    baseURL, err := url.Parse(fmt.Sprintf("%s/%s", c.baseURL, filename))
    if err != nil {
        return err
    }

    log.Printf("MD5: %s - file: %s
", c.MD5, filename)
    req, err := http.NewRequest(http.MethodPut, baseURL.String(), bufio.NewReader(file))
    if err != nil {
        return err
    }
    req.Header.Set("Content-Type", "application/octet-stream")
    req.Header.Set("Content-Md5", c.MD5)
    fi, err := file.Stat()
    if err != nil {
        return err
    }

    // Not sure if this is needed, or if Go sets it automatically
    req.ContentLength = fi.Size()
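    // (From the net/http docs: NewRequest sets ContentLength automatically
    // only for *bytes.Buffer, *bytes.Reader, and *strings.Reader bodies;
    // for anything else, including a *os.File or a bufio.Reader, it must be
    // set manually, otherwise the client falls back to chunked encoding.)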

    res, err := c.httpClient.Do(req)
    if err != nil {
        return err
    }
    defer res.Body.Close()

    dump, err := httputil.DumpResponse(res, true)
    if err != nil {
        return err
    }
    fmt.Printf("%q\n", dump)
    c.StatusCode = res.StatusCode

    return nil
}
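
To illustrate the single-pass approach I have in mind, here is a minimal, untested sketch: io.TeeReader mirrors everything read from the file into the hasher, and io.Discard stands in for whatever would consume the upload body (the filename is a placeholder):

package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
    "io"
    "log"
    "os"
)

func main() {
    file, err := os.Open("example.bin") // placeholder filename
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    hasher := md5.New()
    // Everything the consumer reads through tee is also written to the
    // hasher, so the digest is computed as a side effect of one read.
    tee := io.TeeReader(file, hasher)

    // io.Discard stands in for the HTTP request body consumer.
    n, err := io.Copy(io.Discard, tee)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Printf("read %d bytes, MD5 %s\n", n, hex.EncodeToString(hasher.Sum(nil)))
}

The catch I can see: with a single pass the digest is only complete after the body has been fully read, so it cannot go into a header that is sent before the upload. It would presumably have to be sent as a trailer (req.Trailer) or computed up front the way my current code does.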