How to cancel all goroutines that are processing HTTP requests?
package main
import (
"bytes"
"errors"
"fmt"
"github.com/levigross/grequests"
"golang.org/x/net/html/charset"
"io/ioutil"
)
// workerComplete carries one result per downloaded page as a two-element
// slice: index 0 is the page HTML, index 1 is an error message ("" on success).
var workerComplete = make(chan []string)
// fetch downloads url and returns the response body converted to UTF-8,
// using the charset declared in the Content-Type response header.
// The returned error wraps the underlying failure (request, decoding, or read).
func fetch(url string) (string, error) {
	response, err := grequests.Get(url, nil)
	if err != nil {
		return "", fmt.Errorf("request: %w", err)
	}
	defer response.ClearInternalBuffer()
	// Wrap the raw body in a charset-aware reader so non-UTF-8 pages
	// (e.g. windows-1251) are transcoded to UTF-8.
	rawPage, err := charset.NewReader(bytes.NewReader(response.Bytes()), response.Header.Get("Content-Type"))
	if err != nil {
		return "", fmt.Errorf("encoding: %w", err)
	}
	unicodePage, err := ioutil.ReadAll(rawPage)
	if err != nil {
		return "", fmt.Errorf("response read: %w", err)
	}
	return string(unicodePage), nil
}
// downloadWebPage fetches url_ and delivers exactly one result on
// pageDownloaded: {html, ""} on success, or {"", errMsg} on failure.
func downloadWebPage(url_ string, pageDownloaded chan []string) {
	html, err := fetch(url_)
	if err != nil {
		pageDownloaded <- []string{"", "Fetch error " + url_ + " : " + err.Error()}
		// BUG FIX: the original fell through after sending the error and
		// attempted a second send, blocking forever on the unbuffered
		// channel (goroutine leak) and skewing main's result count.
		return
	}
	pageDownloaded <- []string{html, ""}
}
func main() {
urls := []string{"http://golang-book.ru/chapter-10-concurrency.html",
"http://golang-book.ru/chapter-10-concurrency.html", "https://gobyexample.com/json"}
urlsCount := 0
for _, url_ := range urls {
urlsCount += 1
go downloadWebPage(url_, workerComplete)
}
for i := 0; i < urlsCount; {
result := <-workerComplete
html, err := result[0], result[1]
if err != "" {
fmt.Println(err)
}
fmt.Println(html[:10])
i += 1
}