The program spawns many getStock goroutines simultaneously, which, I believe, causes the remote server to immediately drop the connections. I am not trying to create a DoS; I just want to fetch the data aggressively without getting 'connection reset' errors.
What are some strategies to have at most N (e.g. 20) simultaneous connections? Is there a built-in queue for GET requests in the Go HTTP client? I'm still learning Go, so it would also be great to understand whether there are better design patterns for this type of code.
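To make the question concrete, here is a rough sketch of the kind of limiting I have in mind: a buffered channel used as a counting semaphore, so that at most 20 requests are in flight at once. The limit of 20, the sem variable, and the trimmed-down main are only illustrative, not something I've settled on:

package main

import (
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
    "time"
)

// Buffered channel used as a counting semaphore: a goroutine must put a
// token into the channel before issuing a request, so at most cap(sem)
// requests are in flight at any time.
var sem = make(chan struct{}, 20) // limit chosen arbitrarily for illustration

func getStock(s string, c chan string) {
    sem <- struct{}{}        // acquire a slot (blocks while 20 requests are in flight)
    defer func() { <-sem }() // release the slot when this request finishes

    resp, err := http.Get("http://goanuj.freeshell.org/go/" + s + ".txt")
    if err != nil {
        log.Printf("%s: %s", s, err)
        c <- err.Error()
        return
    }
    body, _ := ioutil.ReadAll(resp.Body)
    resp.Body.Close()
    c <- string(body)
}

func main() {
    start := time.Now()
    sl := []string{"AAPL", "AMZN", "GOOG", "FB", "NFLX"}
    c := make(chan string)
    for _, s := range sl {
        go getStock(s, c)
    }
    for range sl {
        fmt.Printf("%s", <-c)
    }
    fmt.Printf("main: %.2fs elapsed.\n", time.Since(start).Seconds())
}

I'm not sure whether this is idiomatic, or whether the http package already provides a better way to bound concurrency, which is what I'm hoping to learn.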
Output
$ go run s1w.go
sl(size): 1280
body: "AAPL",17.92
body: "GOOG",32.13
body: "FB",42.02
body: "AMZN",195.83
body: "GOOG",32.13
body: "AMZN",195.83
body: "GOOG",32.13
body: "FB",42.02
body: "AAPL",17.92
2017/07/26 00:01:23 NFLX: Get http://goanuj.freeshell.org/go/NFLX.txt: read tcp 192.168.86.28:56674->205.166.94.30:80: read: connection reset by peer
2017/07/26 00:01:23 AAPL: Get http://goanuj.freeshell.org/go/AAPL.txt: read tcp 192.168.86.28:56574->205.166.94.30:80: read: connection reset by peer
2017/07/26 00:01:23 NFLX: Get http://goanuj.freeshell.org/go/NFLX.txt: read tcp 192.168.86.28:56760->205.166.94.30:80: read: connection reset by peer
2017/07/26 00:01:23 FB: Get http://goanuj.freeshell.org/go/FB.txt: read tcp 192.168.86.28:56688->205.166.94.30:80: read: connection reset by peer
2017/07/26 00:01:23 AMZN: Get http://goanuj.freeshell.org/go/AMZN.txt: read tcp 192.168.86.28:56689->205.166.94.30:80: read: connection reset by peer
2017/07/26 00:01:23 AAPL: Get http://goanuj.freeshell.org/go/AAPL.txt: read tcp 192.168.86.28:56702->205.166.94.30:80: read: connection reset by peer
s1.go
package main

import (
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
    "time"
)

// https://www.youtube.com/watch?v=f6kdp27TYZs (15m)

// Generator: function that returns a channel
func getStocks(sl []string) <-chan string {
    c := make(chan string)
    for _, s := range sl {
        go getStock(s, c)
    }
    return c
}

func getStock(s string, c chan string) {
    resp, err := http.Get("http://goanuj.freeshell.org/go/" + s + ".txt")
    if err != nil {
        log.Printf("%s: %s", s, err)
        c <- err.Error() // channel send
        return
    }
    body, _ := ioutil.ReadAll(resp.Body)
    resp.Body.Close() // close ASAP to prevent too many open file descriptors
    val := string(body)
    //fmt.Printf("body: %s", val)
    c <- val // channel send
}

func main() {
    start := time.Now()
    var sl = []string{"AAPL", "AMZN", "GOOG", "FB", "NFLX"}
    // doubling the slice 8 times yields 5*2^8 = 1280 elements
    for i := 0; i < 8; i++ {
        sl = append(sl, sl...)
    }
    fmt.Printf("sl(size): %d\n", len(sl))

    // get channel that returns only strings
    c := getStocks(sl)
    for i := 0; i < len(sl); i++ {
        fmt.Printf("%s", <-c) // channel recv
    }
    fmt.Printf("main: %.2fs elapsed.\n", time.Since(start).Seconds())
}