Rust SDK
browsr-client is the official Rust client for the Browsr API.
It covers:
- browser sessions
- browser commands
- observe and browser step flows
- scrape, crawl, and search
- relay CDP and relay events
- shell session lifecycle and shell execution
Installation
[dependencies]
browsr-client = "0.3.5"
browsr-types = "0.3.5"
serde_json = "1"
Setup
use browsr_client::BrowsrClient;
let client = BrowsrClient::from_env();
Environment variables:
export BROWSR_BASE_URL="https://api.browsr.dev"
export BROWSR_API_KEY="bak_..."
Or configure explicitly:
use browsr_client::{BrowsrClient, BrowsrClientConfig};
let config = BrowsrClientConfig::new("https://api.browsr.dev")
.with_api_key(std::env::var("BROWSR_API_KEY").unwrap())
.with_timeout(60);
let client = BrowsrClient::from_client_config(config);
Scrape
let result = client.scrape_url("https://example.com").await?;
println!("{}", result.data.markdown.unwrap_or_default());
Full request:
use browsr_types::{ScrapeApiRequest, ScrapeFormat};
let request = ScrapeApiRequest::new("https://example.com")
.with_formats(vec![ScrapeFormat::Markdown, ScrapeFormat::Html])
.with_wait(1500);
let result = client.scrape_v1(request).await?;
Crawl
use browsr_types::CrawlApiRequest;
let request = CrawlApiRequest {
limit: 10,
max_depth: 2,
..CrawlApiRequest::new("https://example.com")
};
let result = client.crawl(request).await?;
println!("pages: {}", result.completed);
Search
let result = client.search_query("browsr relay debugging").await?;
println!("success: {}", result.success);
Sessions and commands
use browsr_types::Commands;
let session = client.create_session().await?;
let result = client.execute_commands(
vec![
Commands::NavigateTo {
url: "https://example.com".into(),
},
Commands::GetTitle,
],
Some(session.session_id.clone()),
Some(true),
None,
).await?;
println!("ok: {}", result.success);
Convenience helpers:
client.navigate("https://example.com", Some(session.session_id.clone())).await?;
client.click("button.submit", Some(session.session_id.clone())).await?;
client.type_text("#email", "user@example.com", Some(true), Some(session.session_id.clone())).await?;
client.get_title(Some(session.session_id.clone())).await?;
client.evaluate("document.title", Some(session.session_id.clone())).await?;
Observe
use browsr_types::ObserveOptions;
let observation = client.observe(
Some(session.session_id.clone()),
Some(true),
ObserveOptions {
use_image: Some(true),
full_page: None,
wait_ms: Some(250),
include_content: Some(true),
},
).await?;
println!("url: {}", observation.dom_snapshot.url);
Browser step
use browsr_client::{BrowserStepInput, BrowserStepRequest};
use browsr_types::Commands;
let request = BrowserStepRequest::new(BrowserStepInput::new(vec![
Commands::Click {
selector: "button.next".into(),
}
]))
.with_session_id(session.session_id.clone())
.with_thread_id("thread-123");
let result = client.step(request).await?;
println!("success: {}", result.success);
Relay CDP and events
use serde_json::json;
let value = client.cdp(
"relay-session-id",
"Runtime.evaluate",
Some(json!({"expression":"1+1","returnByValue":true})),
).await?;
println!("{}", value);
let events = client.relay_events("relay-session-id", Some(20)).await?;
println!("events: {}", events.count);
Shell sessions
use browsr_types::{ShellCreateSessionRequest, ShellExecRequest};
let shell = client.create_shell_session(ShellCreateSessionRequest {
language: Some("python".into()),
timeout_secs: Some(300),
..Default::default()
}).await?;
let result = client.shell_exec(ShellExecRequest {
session_id: shell.session_id.clone(),
command: "python3 -c 'print(1+1)'".into(),
timeout_secs: Some(30),
working_dir: None,
}).await?;
println!("{}", result.result.stdout);
Authentication modes
API key:
let client = BrowsrClient::new("https://api.browsr.dev")
.with_api_key("bak_...");
Bearer token:
let config = BrowsrClientConfig::new("https://api.browsr.dev")
.with_bearer_token("jwt_token");
let client = BrowsrClient::from_client_config(config);
CLI parity
The Rust client and browsr CLI expose the same major remote capabilities:
- scrape, crawl, search
- sessions and commands
- observe and browser step
- relay CDP and relay events
- shell sessions