too many open files when making http requests
Question
I am trying to create an HTTP client program in Go that makes many HTTP GET requests. I am using a buffered channel to limit the number of concurrent requests.
When I run the program I get:
Get http://198.18.96.213/: dial tcp 198.18.96.213:80: too many open files
Here is my program:
package main

import (
	"fmt"
	"net/http"
	"time"
)

func HttpGets(numRequests int, numConcurrent int, url string) map[int]int {
	// I want the number of requests with each status code
	responseStatuses := map[int]int{
		100: 0, 101: 0, 102: 0, 200: 0, 201: 0, 202: 0, 203: 0, 204: 0, 205: 0,
		206: 0, 207: 0, 208: 0, 226: 0, 300: 0, 301: 0, 302: 0, 303: 0, 304: 0,
		305: 0, 306: 0, 307: 0, 308: 0, 400: 0, 401: 0, 402: 0, 403: 0, 404: 0,
		405: 0, 406: 0, 407: 0, 408: 0, 409: 0, 410: 0, 411: 0, 412: 0, 413: 0,
		414: 0, 415: 0, 416: 0, 417: 0, 421: 0, 422: 0, 423: 0, 424: 0, 425: 0,
		426: 0, 427: 0, 428: 0, 429: 0, 430: 0, 431: 0, 500: 0, 501: 0, 502: 0,
		503: 0, 504: 0, 505: 0, 506: 0, 507: 0, 508: 0, 509: 0, 510: 0, 511: 0,
	}

	reqsDone := 0
	ch := make(chan *http.Response, numConcurrent)
	for i := 0; i < numRequests; i++ {
		go func(url string) {
			client := &http.Client{}
			req, reqErr := http.NewRequest("GET", url, nil)
			if reqErr != nil {
				fmt.Println(reqErr)
			}
			// adding Connection: close header hoping to get rid
			// of the too many open files error. Found this in
			// http://craigwickesser.com/2015/01/golang-http-to-many-open-files/
			req.Header.Add("Connection", "close")
			resp, err := client.Do(req)
			if err != nil {
				fmt.Println(err)
			}
			ch <- resp
		}(url)
	}

	for {
		select {
		case r := <-ch:
			reqsDone += 1 // probably needs a lock?
			status := r.StatusCode
			if _, ok := responseStatuses[status]; ok {
				responseStatuses[status] += 1
			} else {
				responseStatuses[status] = 1
			}
			r.Body.Close() // trying to close the body hoping to get rid of the too many open files error
			if reqsDone == numRequests {
				return responseStatuses
			}
		}
	}
	return responseStatuses
}

func main() {
	var numRequests, numConcurrent = 500, 10
	url := "http://198.18.96.213/"
	beginTime := time.Now()
	results := HttpGets(numRequests, numConcurrent, url)
	endTime := time.Since(beginTime)
	fmt.Printf("Total time elapsed: %s\n", endTime)
	for k, v := range results {
		if v != 0 {
			fmt.Printf("%d : %d\n", k, v)
		}
	}
}
How do I ensure files/sockets are closed so that I don't get this error when making multiple requests?
Answer 1
Score: 1
Basically, you were spawning hundreds of goroutines that each open a connection and then block until it is closed.
Here's a quick (and very ugly) working snippet:
var (
	responseStatuses = make(map[int]int, 63)
	reqsDone         = 0
	urlCh            = make(chan string, numConcurrent)
	ch               = make(chan *http.Response, numConcurrent)
)
log.Println(numConcurrent, numRequests, len(responseStatuses))

for i := 0; i < numConcurrent; i++ {
	go func() {
		for url := range urlCh {
			client := &http.Client{}
			req, reqErr := http.NewRequest("GET", url, nil)
			if reqErr != nil {
				fmt.Println(reqErr)
			}
			// adding Connection: close header hoping to get rid
			// of the too many open files error. Found this in
			// http://craigwickesser.com/2015/01/golang-http-to-many-open-files/
			req.Header.Add("Connection", "close")
			resp, err := client.Do(req)
			if err != nil {
				fmt.Println(err)
			}
			ch <- resp
		}
	}()
}

go func() {
	for i := 0; i < numRequests; i++ {
		urlCh <- url
	}
	close(urlCh)
}()
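For reference, here is a minimal self-contained sketch of the same worker-pool pattern that also drains and closes every response body, which is what actually releases the file descriptors. The shared http.Client, the 10-second timeout, and counting failed requests under a sentinel status 0 are my assumptions, not part of the original answer:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func httpGets(numRequests, numConcurrent int, url string) map[int]int {
	statuses := make(map[int]int)
	urlCh := make(chan string)
	resCh := make(chan int)

	// One shared client (with a timeout) instead of a new client per request.
	client := &http.Client{Timeout: 10 * time.Second}

	// Fixed pool of workers: at most numConcurrent requests are in flight,
	// so at most numConcurrent sockets are open at any moment.
	for i := 0; i < numConcurrent; i++ {
		go func() {
			for u := range urlCh {
				resp, err := client.Get(u)
				if err != nil {
					fmt.Println(err)
					resCh <- 0 // sentinel status for failed requests
					continue
				}
				// Drain and close the body; this releases the file
				// descriptor (or returns the connection to the pool).
				io.Copy(io.Discard, resp.Body) // io.Discard needs Go 1.16+
				resp.Body.Close()
				resCh <- resp.StatusCode
			}
		}()
	}

	// Feed the workers, then close the channel so they exit.
	go func() {
		for i := 0; i < numRequests; i++ {
			urlCh <- url
		}
		close(urlCh)
	}()

	// Collect exactly numRequests results on this goroutine,
	// so the map needs no locking.
	for i := 0; i < numRequests; i++ {
		statuses[<-resCh]++
	}
	return statuses
}

func main() {
	begin := time.Now()
	results := httpGets(500, 10, "http://198.18.96.213/")
	fmt.Printf("Total time elapsed: %s\n", time.Since(begin))
	for code, count := range results {
		fmt.Printf("%d : %d\n", code, count)
	}
}

The key design points: a fixed number of workers bounds how many sockets are open at once, a single shared client lets Go reuse connections, and draining the body before closing lets the transport return the connection to its pool instead of leaking a descriptor.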
Answer 2
Score: 0
You can use the following library:
Requests: A Go library for reducing the headache of making HTTP requests (20k req/s)
https://github.com/alessiosavi/Requests
The idea is to allocate a list of requests, then send them with a configurable "parallel" factor that allows only "N" requests to run at a time.
// This slice will contain the list of requests
var reqs []requests.Request
// N is the number of requests to run in parallel; to avoid the
// "too many open files" error, N has to be lower than the ulimit threshold
var N int = 12

// Create the list of requests
for i := 0; i < 1000; i++ {
	// In this case, we init 1000 requests with the same URL, method, body and headers
	req, err := requests.InitRequest("https://127.0.0.1:5000", "GET", nil, nil, true)
	if err != nil {
		// Request is not compliant and will not be added to the list
		log.Println("Skipping request [", i, "]. Error: ", err)
	} else {
		// If no error occurs, append the request to the list of requests that we need to send
		reqs = append(reqs, *req)
	}
}
At this point, we have a list that contains the requests that have to be sent.
Let's send them in parallel!
// This slice will contain the responses to the given requests
var response []datastructure.Response
// Send the requests, with N of them running in parallel
response = requests.ParallelRequest(reqs, N)

// Print the responses
for i := range response {
	// Dump is a method that prints all the information related to the response
	log.Println("Request [", i, "] -> ", response[i].Dump())
	// Or use the data present in the response
	log.Println("Headers: ", response[i].Headers)
	log.Println("Status code: ", response[i].StatusCode)
	log.Println("Time elapsed: ", response[i].Time)
	log.Println("Error: ", response[i].Error)
	log.Println("Body: ", string(response[i].Body))
}
You can find example usage in the example folder of the repository.
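As an aside, the "ulimit threshold" mentioned in the comments above can be checked programmatically. Here is a Unix-only sketch (my addition, not part of the library) that uses Go's syscall package to read the current file-descriptor limit so you can pick N safely below it:

package main

import (
	"fmt"
	"syscall"
)

func main() {
	// RLIMIT_NOFILE is the per-process limit on open file descriptors;
	// every in-flight HTTP request holds at least one descriptor.
	var rl syscall.Rlimit
	if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rl); err != nil {
		fmt.Println("getrlimit:", err)
		return
	}
	fmt.Printf("soft limit: %d, hard limit: %d\n", rl.Cur, rl.Max)
	// Keep the parallel factor N comfortably below rl.Cur.
}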
SPOILER: I'm the author of this little library