TechLead
Lesson 14 of 28
5 min read
Rust

Async/Await in Rust

Master Rust async programming with futures, async fn, .await, the tokio runtime, async traits, select!, join!, and structured concurrency.

Async/Await in Rust

Rust's async/await enables efficient concurrent I/O without the overhead of OS threads. Async functions return futures — lazy computations that must be driven by a runtime (like Tokio or async-std). This gives you fine-grained control over concurrency with zero-cost abstractions.

Async vs Threads

  • Threads: Good for CPU-bound work. Each thread = ~2MB stack (Rust's default). OS-managed, preemptive scheduling.
  • Async: Good for I/O-bound work. Each task = ~few hundred bytes. Cooperative scheduling.
  • Rule of thumb: Use async for servers handling thousands of connections; use threads for parallel computation.

Futures and async fn

// An async fn returns a Future. Futures are lazy: calling this
// function does nothing until the returned future is awaited.
async fn fetch_data(url: &str) -> String {
    // Stand-in for network latency (non-blocking async sleep)
    let latency = std::time::Duration::from_millis(100);
    tokio::time::sleep(latency).await;
    format!("Data from {url}")
}

async fn process() -> Vec {
    // .await suspends this task until the future completes
    let data1 = fetch_data("https://api.example.com/a").await;
    let data2 = fetch_data("https://api.example.com/b").await;
    // These run SEQUENTIALLY — each await blocks

    vec![data1, data2]
}

// Futures are lazy — this does nothing:
// let future = fetch_data("url"); // No work happens!
// future.await; // NOW it runs

Tokio Runtime

// Cargo.toml:
// [dependencies]
// tokio = { version = "1", features = ["full"] }

/// Entry point. `#[tokio::main]` expands to a plain `fn main` that
/// builds a Tokio runtime and drives this async body to completion.
#[tokio::main]
async fn main() {
    println!("Starting async work...");
    // Await the simulated request and print its payload.
    println!("{}", fetch_data("https://api.example.com").await);
}

// Equivalent to:
// fn main() {
//     tokio::runtime::Runtime::new()
//         .unwrap()
//         .block_on(async {
//             // your async code here
//         });
// }

/// Stand-in for a real network request: builds a canned response string.
async fn fetch_data(url: &str) -> String {
    // A real implementation would use an HTTP client, e.g. reqwest:
    // let body = reqwest::get(url).await?.text().await?;
    let mut response = String::from("Response from ");
    response.push_str(url);
    response
}

Concurrent Execution with join! and select!

use tokio::time::{sleep, Duration};

/// Completes after ~200 ms with "Result A".
async fn task_a() -> String {
    let pause = Duration::from_millis(200);
    sleep(pause).await;
    "Result A".to_string()
}

/// Completes after ~100 ms with "Result B" — always beats task_a.
async fn task_b() -> String {
    let pause = Duration::from_millis(100);
    sleep(pause).await;
    "Result B".to_string()
}

#[tokio::main]
async fn main() {
    // join! — run concurrently, wait for ALL to complete
    let (a, b) = tokio::join!(task_a(), task_b());
    println!("Both done: {a}, {b}");
    // Total time: ~200ms (not 300ms)

    // select! — run concurrently, return when FIRST completes
    tokio::select! {
        result = task_a() => println!("A finished first: {result}"),
        result = task_b() => println!("B finished first: {result}"),
    }
    // B finishes first (100ms < 200ms)
}

Spawning Tasks

use tokio::task;

#[tokio::main]
async fn main() {
    // Spawn independent tasks on the runtime
    let handle1 = task::spawn(async {
        // This runs concurrently
        tokio::time::sleep(std::time::Duration::from_millis(100)).await;
        42
    });

    let handle2 = task::spawn(async {
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
        "hello"
    });

    // Await the results
    let result1 = handle1.await.unwrap();
    let result2 = handle2.await.unwrap();
    println!("{result1}, {result2}");

    // Spawn CPU-bound work on a blocking thread
    let blocking_result = task::spawn_blocking(|| {
        // This runs on a dedicated thread pool
        // Safe for CPU-intensive or blocking I/O
        let mut sum: u64 = 0;
        for i in 0..1_000_000 {
            sum += i;
        }
        sum
    }).await.unwrap();

    println!("Blocking result: {blocking_result}");

    // Spawn many concurrent tasks
    let mut handles = vec![];
    for i in 0..10 {
        handles.push(task::spawn(async move {
            tokio::time::sleep(std::time::Duration::from_millis(10)).await;
            i * i
        }));
    }

    let results: Vec = futures::future::join_all(handles)
        .await
        .into_iter()
        .map(|r| r.unwrap())
        .collect();
    println!("Results: {:?}", results);
}

Async Channels and Streams

use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    // Async channel (bounded)
    let (tx, mut rx) = mpsc::channel::(32);

    let tx2 = tx.clone();

    tokio::spawn(async move {
        tx.send("Hello from task 1".into()).await.unwrap();
    });

    tokio::spawn(async move {
        tx2.send("Hello from task 2".into()).await.unwrap();
    });

    // Receive messages
    while let Some(msg) = rx.recv().await {
        println!("Received: {msg}");
    }

    // oneshot channel — single message
    let (tx, rx) = tokio::sync::oneshot::channel();
    tokio::spawn(async move {
        tx.send(42).unwrap();
    });
    let value = rx.await.unwrap();
    println!("Oneshot: {value}");
}

Key Takeaways

  • ✅ Async functions return futures that are lazy — nothing runs until awaited
  • ✅ Tokio is the most popular async runtime; add it with tokio = { version = "1", features = ["full"] }
  • ✅ Use join! to run futures concurrently; select! to race them
  • ✅ tokio::spawn creates independent concurrent tasks
  • ✅ Use async channels (tokio::sync::mpsc) for communication between async tasks

Continue Learning