@fundon
Created April 29, 2020 07:17
smol spawn vs tokio spawn
//! An HTTP server based on `hyper`.
//!
//! Run with:
//!
//! ```
//! cargo run --example hyper-server
//! ```
//!
//! Benchmark with https://github.com/mcollina/autocannon:
//! ```
//! npm i autocannon -g
//! autocannon 127.0.0.1:8000
//! ```
//!
//! Open in the browser any of these addresses:
//!
//! - http://localhost:8000/

use std::convert::Infallible;
use std::net::SocketAddr;
use std::thread;

use anyhow::Result;
use futures::prelude::*;
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server};
use smol::Task;

/// Serves a request and returns a response.
async fn serve(_req: Request<Body>) -> Result<Response<Body>> {
    Ok(Response::new(Body::from("Hello world!")))
}

/// Listens for incoming connections and serves them.
async fn listen() -> Result<()> {
    Server::bind(&"127.0.0.1:8000".parse::<SocketAddr>().unwrap())
        .executor(SmolExecutor)
        .serve(make_service_fn(|_| async {
            Ok::<_, Infallible>(service_fn(serve))
        }))
        .await?;
    Ok(())
}

fn main() -> Result<()> {
    // Create an executor thread pool.
    for _ in 0..num_cpus::get().max(1) {
        thread::spawn(|| smol::run(future::pending::<()>()));
    }

    smol::run(Task::spawn(async { listen().await }))
}

/// Spawns futures.
#[derive(Clone)]
struct SmolExecutor;

impl<F: Future + Send + 'static> hyper::rt::Executor<F> for SmolExecutor {
    fn execute(&self, fut: F) {
        // smol spawn
        Task::spawn(async { drop(fut.await) }).detach();

        // tokio spawn
        //tokio::spawn(async { drop(fut.await) });
    }
}
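
The tokio side of the comparison isn't shown above. `tokio::spawn` panics when called outside a tokio runtime context, so one way to run the commented-out variant is to drive the server from a tokio runtime instead of `smol::run`. A minimal sketch, assuming the hyper 0.13 / tokio 0.2 APIs current at the time (this `main` is not part of the gist):

fn main() -> Result<()> {
    // Assumed variant: a tokio 0.2 runtime provides the context that
    // `tokio::spawn` inside `SmolExecutor::execute` requires.
    let mut rt = tokio::runtime::Runtime::new()?;
    rt.block_on(listen())
}

Alternatively, if the build enables smol 0.1's `tokio02` compatibility feature (hyper 0.13's `Server::bind` expects a tokio reactor for its TCP types), a tokio runtime context may already be available on smol's executor threads, in which case only the body of `execute` needs to be swapped between runs.
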
λ autocannon 127.0.0.1:8000
Running 10s test @ http://127.0.0.1:8000
10 connections
┌─────────┬──────┬──────┬───────┬──────┬─────────┬─────────┬─────────┐
│ Stat    │ 2.5% │ 50%  │ 97.5% │ 99%  │ Avg     │ Stdev   │ Max     │
├─────────┼──────┼──────┼───────┼──────┼─────────┼─────────┼─────────┤
│ Latency │ 0 ms │ 0 ms │ 0 ms  │ 0 ms │ 0.01 ms │ 0.07 ms │ 6.13 ms │
└─────────┴──────┴──────┴───────┴──────┴─────────┴─────────┴─────────┘
┌───────────┬────────┬────────┬─────────┬─────────┬─────────┬─────────┬────────┐
│ Stat      │ 1%     │ 2.5%   │ 50%     │ 97.5%   │ Avg     │ Stdev   │ Min    │
├───────────┼────────┼────────┼─────────┼─────────┼─────────┼─────────┼────────┤
│ Req/Sec   │ 32991  │ 32991  │ 33951   │ 35871   │ 34288   │ 974.35  │ 32987  │
├───────────┼────────┼────────┼─────────┼─────────┼─────────┼─────────┼────────┤
│ Bytes/Sec │ 2.9 MB │ 2.9 MB │ 2.99 MB │ 3.16 MB │ 3.02 MB │ 85.4 kB │ 2.9 MB │
└───────────┴────────┴────────┴─────────┴─────────┴─────────┴─────────┴────────┘
Req/Bytes counts sampled once per second.
377k requests in 11.05s, 33.2 MB read
λ autocannon 127.0.0.1:8000
Running 10s test @ http://127.0.0.1:8000
10 connections
┌─────────┬──────┬──────┬───────┬──────┬─────────┬─────────┬─────────┐
│ Stat    │ 2.5% │ 50%  │ 97.5% │ 99%  │ Avg     │ Stdev   │ Max     │
├─────────┼──────┼──────┼───────┼──────┼─────────┼─────────┼─────────┤
│ Latency │ 0 ms │ 0 ms │ 0 ms  │ 0 ms │ 0.01 ms │ 0.06 ms │ 6.18 ms │
└─────────┴──────┴──────┴───────┴──────┴─────────┴─────────┴─────────┘
┌───────────┬─────────┬─────────┬─────────┬─────────┬──────────┬─────────┬─────────┐
│ Stat      │ 1%      │ 2.5%    │ 50%     │ 97.5%   │ Avg      │ Stdev   │ Min     │
├───────────┼─────────┼─────────┼─────────┼─────────┼──────────┼─────────┼─────────┤
│ Req/Sec   │ 34879   │ 34879   │ 38719   │ 39071   │ 37714.91 │ 1446.97 │ 34859   │
├───────────┼─────────┼─────────┼─────────┼─────────┼──────────┼─────────┼─────────┤
│ Bytes/Sec │ 3.07 MB │ 3.07 MB │ 3.41 MB │ 3.44 MB │ 3.32 MB  │ 127 kB  │ 3.07 MB │
└───────────┴─────────┴─────────┴─────────┴─────────┴──────────┴─────────┴─────────┘
Req/Bytes counts sampled once per second.
415k requests in 11.04s, 36.5 MB read