// Upstream test server source code: https://gist.github.com/madawei2699/760a896c442408bf6a4475bd98eff2c8
package main
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"sync"
	"time"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)
// main wires up the Echo server: request logging, panic recovery,
// the two routes, and then blocks serving on :3333.
func main() {
	srv := echo.New()

	// Middleware: access logging plus recovery so a panicking handler
	// returns a 500 instead of killing the process.
	srv.Use(middleware.Logger(), middleware.Recover())

	// Routes.
	srv.GET("/", hello)
	srv.GET("/proxy", sendJson)

	// Start blocks; Fatal logs and exits if the listener fails.
	srv.Logger.Fatal(srv.Start(":3333"))
}
// Handler
// hello handles GET / with a fixed plain-text greeting.
func hello(c echo.Context) error {
	const greeting = "Hello, World!"
	return c.String(http.StatusOK, greeting)
}
// TestJson mirrors one record served by the upstream test endpoint
// (http://localhost:1323/json — see the gist linked at the top of this
// file). All fields are strings; presumably the upstream encodes every
// value as a string — TODO confirm against the upstream server.
type TestJson struct {
BytesOut string `json:"bytes_out"`
Developer string `json:"developer"`
IP string `json:"ip"`
Name string `json:"name"`
Method string `json:"method"`
Protocol string `json:"protocol"`
// StatusCode is a string, not an int, matching the upstream payload.
StatusCode string `json:"status_code"`
Time string `json:"time"`
}
// myClient is the single shared HTTP client for upstream calls; reusing
// one client enables connection pooling, and the timeout bounds each
// request end-to-end.
var myClient = &http.Client{Timeout: 10 * time.Second}

// getJson fetches url and JSON-decodes the response body into target.
// It returns an error when the request fails, when the server answers
// with a non-2xx status, or when the body is not valid JSON.
func getJson(url string, target interface{}) error {
	r, err := myClient.Get(url)
	if err != nil {
		return fmt.Errorf("fetching %q: %w", url, err)
	}
	defer r.Body.Close()

	// Fail fast on error statuses rather than decoding an error page.
	if r.StatusCode < 200 || r.StatusCode > 299 {
		// Best-effort drain so the transport can reuse the connection.
		_, _ = io.Copy(io.Discard, r.Body)
		return fmt.Errorf("fetching %q: unexpected status %s", url, r.Status)
	}

	if err := json.NewDecoder(r.Body).Decode(target); err != nil {
		return fmt.Errorf("decoding response from %q: %w", url, err)
	}
	return nil
}
// sendJson handles GET /proxy: it fans out a fixed number of concurrent
// requests to the upstream JSON endpoint, waits for all of them, and
// returns the collected payloads as a single JSON response.
//
// Fetches are best-effort: a failed fetch is logged and its slot is
// returned as a zero-valued TestJson, so the response shape is stable.
func sendJson(c echo.Context) error {
	const workers = 5
	const upstream = "http://localhost:1323/json" // NOTE(review): hard-coded upstream; consider making configurable.

	dataSlice := make([]*TestJson, workers)
	var wg sync.WaitGroup
	wg.Add(workers)
	for g := 0; g < workers; g++ {
		go func(g int) {
			defer wg.Done()
			r := new(TestJson)
			// Previously this error was silently discarded; log it so
			// zero-valued entries in the response are explainable.
			if err := getJson(upstream, r); err != nil {
				c.Logger().Errorf("proxy fetch %d: %v", g, err)
			}
			// Each goroutine writes only its own index, so no lock is needed.
			dataSlice[g] = r
		}(g)
	}
	wg.Wait()

	// NOTE(review): wrapping dataSlice in another slice yields a nested
	// array ("data": [[...]]); preserved as-is for compatibility — confirm
	// whether "data": dataSlice was intended.
	return c.JSON(http.StatusOK, map[string]interface{}{
		"data": []interface{}{dataSlice},
	})
}
wrk -t12 -c12 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 12 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 49.23ms 35.61ms 222.78ms 62.95%
Req/Sec 21.97 16.59 80.00 77.30%
7778 requests in 30.11s, 7.26MB read
Requests/sec: 258.35
Transfer/sec: 247.08KB
wrk -t12 -c20 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 20 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 49.98ms 109.09ms 1.12s 96.10%
Req/Sec 31.95 32.41 161.00 78.31%
10735 requests in 30.10s, 9.99MB read
Requests/sec: 356.59
Transfer/sec: 339.80KB
wrk -t12 -c200 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 200 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 191.38ms 199.94ms 1.93s 93.47%
Req/Sec 89.23 38.76 240.00 65.14%
29573 requests in 30.10s, 27.28MB read
Socket errors: connect 0, read 40, write 0, timeout 93
Requests/sec: 982.56
Transfer/sec: 0.91MB
wrk -t12 -c20 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 20 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 20.18ms 26.47ms 387.13ms 88.58%
Req/Sec 60.49 42.33 202.00 69.79%
21414 requests in 30.11s, 19.76MB read
Requests/sec: 711.30
Transfer/sec: 672.20KB
wrk -t12 -c12 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 12 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 17.37ms 22.32ms 340.98ms 90.06%
Req/Sec 78.97 37.90 190.00 59.28%
28099 requests in 30.08s, 25.87MB read
Requests/sec: 934.03
Transfer/sec: 0.86MB
wrk -t12 -c200 -d30s http://localhost:3333/proxy
Running 30s test @ http://localhost:3333/proxy
12 threads and 200 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 472.61ms 367.91ms 1.68s 72.00%
Req/Sec 42.80 41.71 171.00 79.72%
12816 requests in 30.09s, 11.85MB read
Socket errors: connect 0, read 70, write 0, timeout 0
Requests/sec: 425.91
Transfer/sec: 403.41KB