Go / Golang - OpenAI Streaming

Hi, please help.
I am building a chatbot with OpenAI and want to use streaming. The problem is that every time OpenAI streams one more word, Go prints one more line. As a result, in my terminal I get this:

Stream response: 
Hello
Hello!
Hello! How
Hello! How can
Hello! How can I
Hello! How can I assist
Hello! How can I assist you
Hello! How can I assist you today
Hello! How can I assist you today

Stream finished

This is my code

// With Streaming Response --- SSE
func ChatWithBot() gin.HandlerFunc {

	return func(c *gin.Context) {

		var message MessageRequest

		// Getting the response from user
		err := c.BindJSON(&message)

		if err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return

		}

		userMessage := message.Message

		// Setting up the model

		OpenAI_API_Key := os.Getenv("OPENAI_API_KEY")

		openAiClient := openai.NewClient(OpenAI_API_Key)

		ctx := context.Background()

		reqToOpenAI := openai.ChatCompletionRequest{
			Model:     openai.GPT3Dot5Turbo,
			MaxTokens: 8,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: userMessage,
				},
			},
			Stream: true,
		}

		stream, err := openAiClient.CreateChatCompletionStream(ctx, reqToOpenAI)

		if err != nil {
			fmt.Printf("Error creating completion")
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		defer stream.Close()

		// w := c.Writer
		// w.Header().Set("Content-Type", "text/plain; charset=utf-8")
		// c.Header("Cache-Control", "no-cache")
		// c.Header("Connection", "keep-alive")

		fmt.Printf("Stream response: ")

		full := ""

		for {
			response, err := stream.Recv()
			if errors.Is(err, io.EOF) {
				fmt.Println("\nStream finished")
				break

			}

			if err != nil {
				fmt.Printf("\nStream error: %v\n", err)
				return
			}

			openAIReply := response.Choices[0].Delta.Content

			// Send each new reply as a separate SSE message

			// Remove any newline characters from the response
			openAIReply = strings.ReplaceAll(openAIReply, "\n", "")

			full += openAIReply

			fmt.Println(full)

			c.Writer.Write([]byte(full))
			c.Writer.(http.Flusher).Flush()
		}
	}
}

And in the frontend I get the same duplicated output.

And for reference, this is the JS code I am trying to recreate:

async function main() {
    let full = "";

    const completion = await openai.chat.completions.create({
        messages: [{ role: "user", content: "Recipes for chicken fry" }],
        model: "gpt-3.5-turbo",
        stream: true,
    });

    for await (const part of completion) {
        let text = part.choices[0].delta.content ?? "";
        full += text;

        console.clear();

        console.log(full);
    }
}
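
Roughly, the terminal-only Go equivalent of that accumulate / console.clear() / console.log pattern that I had in mind is something like this. It is just a sketch that reuses the stream variable from my handler above, and the ANSI escape is only one way to mimic console.clear():

full := ""
for {
	response, err := stream.Recv()
	if errors.Is(err, io.EOF) {
		break
	}
	if err != nil {
		fmt.Printf("\nStream error: %v\n", err)
		break
	}

	full += response.Choices[0].Delta.Content

	fmt.Print("\033[H\033[2J") // clear the terminal, like console.clear()
	fmt.Println(full)          // reprint the accumulated text
}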

Thank you

After getting help from Drew and Tim (in Slack), I was finally able to solve the problem. Thank you.

This is the latest updated code:

for {
	response, err := stream.Recv()
	if errors.Is(err, io.EOF) {
		fmt.Println("\nStream finished")
		return
	}

	if err != nil {
		fmt.Printf("\nStream error: %v\n", err)
		return
	}

	openAIReply := response.Choices[0].Delta.Content

	// Print and send only the new delta, not the accumulated text
	fmt.Print(openAIReply)

	c.Writer.WriteString(openAIReply)
	c.Writer.Flush()
	// c.JSON(http.StatusOK, gin.H{"success": true, "message": openAIReply})
}
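
For anyone who finds this later: below is a minimal sketch of how I would put the whole handler together with that change. It assumes the go-openai package (github.com/sashabaranov/go-openai) and gin as in the code above, plus my own MessageRequest struct from the original handler, so treat it as a sketch rather than a drop-in replacement.

import (
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"

	"github.com/gin-gonic/gin"
	openai "github.com/sashabaranov/go-openai"
)

// ChatWithBot streams the model's reply token by token, writing only the new
// delta to the terminal and to the client instead of the accumulated text.
func ChatWithBot() gin.HandlerFunc {
	return func(c *gin.Context) {
		var message MessageRequest
		if err := c.BindJSON(&message); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}

		openAiClient := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

		reqToOpenAI := openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: message.Message},
			},
			Stream: true,
		}

		stream, err := openAiClient.CreateChatCompletionStream(c.Request.Context(), reqToOpenAI)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		defer stream.Close()

		// Headers for a streamed plain-text response; for real SSE you would
		// use "text/event-stream" and SSE message framing instead.
		c.Header("Content-Type", "text/plain; charset=utf-8")
		c.Header("Cache-Control", "no-cache")
		c.Header("Connection", "keep-alive")

		for {
			response, err := stream.Recv()
			if errors.Is(err, io.EOF) {
				return
			}
			if err != nil {
				fmt.Printf("\nStream error: %v\n", err)
				return
			}

			delta := response.Choices[0].Delta.Content

			fmt.Print(delta)            // terminal: append only the new token
			c.Writer.WriteString(delta) // client: send only the new token
			c.Writer.Flush()            // push it out to the client immediately
		}
	}
}

The key difference from my first version is that only response.Choices[0].Delta.Content is printed and written on each iteration, so neither the terminal nor the client ever sees the accumulated text repeated.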