Skip to content

Instantly share code, notes, and snippets.

@itang
Created July 23, 2011 18:11
Show Gist options
  • Save itang/1101702 to your computer and use it in GitHub Desktop.
simple benchmark for Blueeyes, Servlet, Play!
package demo
import blueeyes.BlueEyesServer
import blueeyes.BlueEyesServiceBuilder
import blueeyes.concurrent.Future
import blueeyes.concurrent.Future._
import blueeyes.core.data.{ ByteChunk, BijectionsChunkJson }
import blueeyes.core.http.HttpStatusCodes._
import blueeyes.core.http.combinators.HttpRequestCombinators
import blueeyes.core.http.MimeTypes._
import blueeyes.json.JsonAST._
import blueeyes.core.http.{ HttpStatus, HttpRequest, HttpResponse }
/** Placeholder configuration for the demo service — no settings yet. */
case class BlueEyesDemoConfig()
/**
 * BlueEyes HTTP service used in the benchmark.
 *
 * Defines a single service "hello" v1.0 whose lifecycle is wired as
 * startup -> request -> shutdown. GET /hello responds with the JSON
 * array ["Hello","World"] (note: no trailing "!", unlike the Servlet
 * and Play variants — so the response body is 17 bytes, matching the
 * ab "Document Length: 17" result below).
 */
trait BlueEyesDemoService extends BlueEyesServiceBuilder with BijectionsChunkJson {
val helloService = service("hello", "1.0") { context =>
// Startup phase: build the (empty) config and lift it into a Future.
startup {
BlueEyesDemoConfig().future
} ->
// Request phase: route table for this service.
request { it =>
path("/hello") {
// Sets Content-Type: application/json on matching responses.
produce(application / json) {
get { request: HttpRequest[ByteChunk] =>
// Future.sync: the response is computed eagerly, no async work.
Future.sync {
HttpResponse[JValue](content = Some(JArray(List("Hello", "World"))))
}
}
}
}
} ->
// Shutdown phase: nothing to release; complete with unit.
shutdown { demoConfig: BlueEyesDemoConfig =>
().future
}
}
}
/**
 * Entry point: boots the BlueEyes demo server.
 *
 * Command-line arguments are ignored; the config file is pinned to
 * /etc/default/appserver.conf (same behavior as the original).
 */
object BlueEyesDemo extends BlueEyesServer with BlueEyesDemoService {
  override def main(args: Array[String]) = {
    val bootArgs = Array("--configFile", "/etc/default/appserver.conf")
    super.main(bootArgs)
  }
}
package test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.alibaba.fastjson.JSON;
/**
 * Servlet benchmark endpoint.
 *
 * <p>GET /Test responds with Content-Type application/json and the body
 * {@code ["Hello","World!"]}, serialized via fastjson.</p>
 */
@WebServlet("/Test")
public class ServletTest extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/** Builds the two-element payload serialized in every response. */
	private static List<String> payload() {
		List<String> words = new ArrayList<String>(2);
		words.add("Hello");
		words.add("World!");
		return words;
	}

	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		response.setContentType("application/json");
		String body = JSON.toJSONString(payload());
		response.getWriter().write(body);
	}
}
package controllers;
import play.*;
import play.mvc.*;
import java.util.*;
import com.alibaba.fastjson.JSON;
/**
 * Play! benchmark controller.
 *
 * <p>index() renders {@code ["Hello","World!"]} as JSON; serialization is
 * done explicitly with fastjson rather than Play's built-in renderJSON(Object)
 * (the commented call below), so all three stacks use the same serializer.</p>
 */
public class Application extends Controller {

    public static void index() {
        List<String> words = new ArrayList<String>(2);
        words.add("Hello");
        words.add("World!");
        // renderJSON(list);
        String body = JSON.toJSONString(words);
        renderJSON(body);
    }
}
/////////////////////////////////////////////////////
# Application dependencies
# Play dependencies.yml for the benchmark app: requires the Play framework
# plus Alibaba fastjson 1.1.1 (used by the controller to serialize JSON).
# NOTE(review): indentation appears to have been lost in this paste — YAML
# nesting under require:/repositories: must be restored before use.
require:
- play
- com.alibaba -> fastjson 1.1.1:
# transitive: false — do not pull in fastjson's own dependencies.
transitive: false
# fastjson is not on Maven Central (2011); resolve it from Alibaba's repo.
repositories:
- alibaba:
type: iBiblio
root: "http://code.alibabatech.com/mvn/releases/"
# Route every com.alibaba artifact to this repository.
contains:
- com.alibaba -> *
//////////////////
modified application -> prod
$ play dependencies
$ play run
$ ab -n 10000 -c 10 http://localhost:8080/testservlet/Test
This is ApacheBench, Version 2.3 <$Revision: 655654 $>
Benchmarking localhost (be patient)
Completed 1000 requests
Completed 2000 requests
Completed 3000 requests
Completed 4000 requests
Completed 5000 requests
Completed 6000 requests
Completed 7000 requests
Completed 8000 requests
Completed 9000 requests
Completed 10000 requests
Finished 10000 requests
Server Software: Apache-Coyote/1.1
Server Hostname: localhost
Server Port: 8080
Document Path: /testservlet/Test
Document Length: 18 bytes
Concurrency Level: 10
Time taken for tests: 1.117 seconds
Complete requests: 10000
Failed requests: 0
Write errors: 0
Total transferred: 1910764 bytes
HTML transferred: 180072 bytes
Requests per second: 8954.15 [#/sec] (mean)
Time per request: 1.117 [ms] (mean)
Time per request: 0.112 [ms] (mean, across all concurrent requests)
Transfer rate: 1670.83 [Kbytes/sec] received
Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 0.2 0 2
Processing: 0 1 0.3 1 10
Waiting: 0 0 0.3 0 10
Total: 0 1 0.3 1 11
Percentage of the requests served within a certain time (ms)
50% 1
66% 1
75% 1
80% 1
90% 1
95% 1
98% 2
99% 2
100% 11 (longest request)
$ ab -n 10000 -c 10 http://localhost:8585/hello
This is ApacheBench, Version 2.3 <$Revision: 655654 $>
Benchmarking localhost (be patient)
Completed 1000 requests
Completed 2000 requests
Completed 3000 requests
Completed 4000 requests
Completed 5000 requests
Completed 6000 requests
Completed 7000 requests
Completed 8000 requests
Completed 9000 requests
Completed 10000 requests
Finished 10000 requests
Server Software:
Server Hostname: localhost
Server Port: 8585
Document Path: /hello
Document Length: 17 bytes
Concurrency Level: 10
Time taken for tests: 2.846 seconds
Complete requests: 10000
Failed requests: 0
Write errors: 0
Total transferred: 880000 bytes
HTML transferred: 170000 bytes
Requests per second: 3513.72 [#/sec] (mean)
Time per request: 2.846 [ms] (mean)
Time per request: 0.285 [ms] (mean, across all concurrent requests)
Transfer rate: 301.96 [Kbytes/sec] received
Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 0.0 0 2
Processing: 0 3 1.5 2 19
Waiting: 0 3 1.5 2 19
Total: 1 3 1.5 2 19
Percentage of the requests served within a certain time (ms)
50% 2
66% 3
75% 3
80% 4
90% 5
95% 6
98% 7
99% 8
100% 19 (longest request)
$ ab -n 10000 -c 10 http://localhost:9000/
Benchmarking localhost (be patient)
Completed 1000 requests
Completed 2000 requests
Completed 3000 requests
Completed 4000 requests
Completed 5000 requests
Completed 6000 requests
Completed 7000 requests
Completed 8000 requests
Completed 9000 requests
Completed 10000 requests
Finished 10000 requests
Server Software: Play!
Server Hostname: localhost
Server Port: 9000
Document Path: /
Document Length: 18 bytes
Concurrency Level: 10
Time taken for tests: 1.194 seconds
Complete requests: 10000
Failed requests: 0
Write errors: 0
Total transferred: 3720000 bytes
HTML transferred: 180000 bytes
Requests per second: 8375.71 [#/sec] (mean)
Time per request: 1.194 [ms] (mean)
Time per request: 0.119 [ms] (mean, across all concurrent requests)
Transfer rate: 3042.74 [Kbytes/sec] received
Connection Times (ms)
min mean[+/-sd] median max
Connect: 0 0 0.1 0 1
Processing: 0 1 0.9 1 32
Waiting: 0 1 0.9 1 32
Total: 0 1 0.9 1 32
Percentage of the requests served within a certain time (ms)
50% 1
66% 1
75% 1
80% 1
90% 2
95% 2
98% 3
99% 5
100% 32 (longest request)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment