Reading an HTTP client response twice

If I were asked to implement this feature, I would do something like this:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"net/http/httputil"
	"os"

	"github.com/google/uuid" // assumption: uuid.NewString comes from github.com/google/uuid
)

func Get(w http.ResponseWriter, r *http.Request) {
	req, err := http.NewRequestWithContext(r.Context(), http.MethodGet, "https://api.fake.io/id-1", nil)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}

	res, err := http.DefaultTransport.RoundTrip(req)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	defer res.Body.Close()

	// Tee the upstream body into both the client response and a buffer,
	// so the body can effectively be read a second time for logging.
	bs := new(bytes.Buffer)
	writer := io.MultiWriter(w, bs)
	_, _ = io.Copy(writer, res.Body) // the first write to w implicitly sends a 200 status

	// Plans A and B below are alternatives; keep only one in real code.

	// Plan A (recommended): flush the response to the client, then log synchronously.
	// Note the assertion is on w, not on the io.MultiWriter, which never implements http.Flusher.
	if flusher, ok := w.(http.Flusher); ok {
		flusher.Flush()
	}
	_ = Log(req, res, bs)

	// Plan B: log asynchronously in a goroutine (or a goroutine pool; see the
	// bounded-worker sketch after the listing). One goroutine per request is fine
	// for light traffic, but a burst of requests can incur a large memory overhead.
	go func() {
		_ = Log(req, res, bs)
	}()
}

// Log writes the upstream request line, the dumped response status line and
// headers, and the buffered response body to a uniquely named .log file.
func Log(req *http.Request, res *http.Response, bs *bytes.Buffer) error {
	dump, err := httputil.DumpResponse(res, false)
	if err != nil {
		return err
	}
	file, err := os.OpenFile(uuid.NewString()+".log", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.FileMode(0600))
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = file.WriteString(fmt.Sprintf("%s %s HTTP/%d.%d\n", req.Method, req.URL.RequestURI(), req.ProtoMajor, req.ProtoMinor))
	if err != nil {
		return err
	}
	_, err = file.Write(dump)
	if err != nil {
		return err
	}
	_, err = file.WriteString("\r\n\r\n")
	if err != nil {
		return err
	}
	_, err = io.Copy(file, bs)
	if err != nil {
		return err
	}
	return nil
}
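
If Plan B is kept, the per-request goroutines can be bounded with a small semaphore rather than a full goroutine pool. The sketch below is only an illustration under a couple of assumptions: it lives in the same package as the listing above, and the logSem channel, its limit of 100, and the logAsync helper are made-up names and numbers, not part of the original code.

// logSem is a hypothetical semaphore bounding the number of concurrent log writers.
var logSem = make(chan struct{}, 100)

// logAsync runs Log in a goroutine only while a semaphore slot is free, so a
// burst of requests cannot spawn an unbounded number of goroutines.
func logAsync(req *http.Request, res *http.Response, bs *bytes.Buffer) {
	select {
	case logSem <- struct{}{}:
		go func() {
			defer func() { <-logSem }()
			_ = Log(req, res, bs)
		}()
	default:
		// Semaphore full: drop the entry here; blocking or queueing are other options.
	}
}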
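
An equivalent way to read the body "twice" is io.TeeReader, which copies everything read from the body into the buffer as a side effect of streaming it to the client. This is just a sketch of the middle of Get with io.MultiWriter swapped out; the behaviour is the same.

	// Inside Get, instead of io.MultiWriter:
	bs := new(bytes.Buffer)
	tee := io.TeeReader(res.Body, bs)
	_, _ = io.Copy(w, tee) // streams to the client while filling bs
	// bs now holds a full copy of the response body for Log.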
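
For completeness, wiring the handler into a server might look like this; the route path and port are arbitrary examples, not taken from the original.

func main() {
	http.HandleFunc("/id-1", Get)         // arbitrary example route
	_ = http.ListenAndServe(":8080", nil) // arbitrary example port
}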