Kill reqwest — minimal HTTP client on raw hyper + tokio-rustls

New src/agent/api/http.rs: ~240 lines, supports GET/POST, JSON/form
bodies, SSE streaming via chunk(), TLS via rustls. No tracing dep.

Removes reqwest from the main crate and telegram channel crate.
Cargo.lock drops ~900 lines of transitive dependencies.

tracing now only pulled in by tui-markdown.

Co-Authored-By: Proof of Concept <poc@bcachefs.org>
This commit is contained in:
Kent Overstreet 2026-04-07 12:50:40 -04:00
parent a421c3c9f3
commit 1cf4f504c0
9 changed files with 360 additions and 915 deletions

View file

@ -27,11 +27,10 @@ async fn web_fetch(args: &serde_json::Value) -> Result<String> {
let a: FetchArgs = serde_json::from_value(args.clone())
.context("invalid web_fetch arguments")?;
let client = http_client()?;
let response = client.get(&a.url)
.header("User-Agent", "consciousness/0.3")
.send()
.await
let client = http_client();
let response = client.get_with_headers(&a.url, &[
("user-agent", "consciousness/0.3"),
]).await
.with_context(|| format!("failed to fetch {}", a.url))?;
let status = response.status();
@ -61,7 +60,7 @@ async fn web_search(args: &serde_json::Value) -> Result<String> {
.context("invalid web_search arguments")?;
// Use DuckDuckGo HTML search — no API key needed
let client = http_client()?;
let client = http_client();
let encoded: String = a.query.chars().map(|c| {
if c.is_ascii_alphanumeric() || c == '-' || c == '_' || c == '.' {
c.to_string()
@ -72,10 +71,9 @@ async fn web_search(args: &serde_json::Value) -> Result<String> {
}
}).collect();
let url = format!("https://html.duckduckgo.com/html/?q={}", encoded);
let response = client.get(&url)
.header("User-Agent", "consciousness/0.3")
.send()
.await
let response = client.get_with_headers(&url, &[
("user-agent", "consciousness/0.3"),
]).await
.context("search request failed")?;
let body = response.text().await
@ -86,20 +84,16 @@ async fn web_search(args: &serde_json::Value) -> Result<String> {
for chunk in body.split("class=\"result__body\"") {
if results.len() >= a.num_results { break; }
if results.is_empty() && !chunk.contains("result__title") {
// Skip the first split (before any results)
continue;
}
// Extract title
let title = extract_between(chunk, "class=\"result__a\"", "</a>")
.map(strip_tags)
.unwrap_or_default();
// Extract URL
let href = extract_between(chunk, "href=\"", "\"")
.unwrap_or_default();
// Extract snippet
let snippet = extract_between(chunk, "class=\"result__snippet\"", "</a>")
.map(strip_tags)
.unwrap_or_default();
@ -118,30 +112,37 @@ async fn web_search(args: &serde_json::Value) -> Result<String> {
// ── Helpers ─────────────────────────────────────────────────────
fn http_client() -> Result<reqwest::Client> {
reqwest::Client::builder()
fn http_client() -> crate::agent::api::http::HttpClient {
crate::agent::api::http::HttpClient::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.context("failed to build HTTP client")
}
/// Return the text between the HTML tag opened by `start` and the marker `end`.
///
/// `start` is matched as a raw substring (e.g. `class="result__a"`); the scan
/// then skips past the closing `>` of that start tag, so the returned slice
/// begins with the tag's inner content and runs up to (not including) `end`.
///
/// Returns `None` if `start`, the tag's closing `>`, or `end` is not found.
fn extract_between<'a>(text: &'a str, start: &str, end: &str) -> Option<&'a str> {
    let start_idx = text.find(start)? + start.len();
    // Skip past the closing > of the start tag so attributes after the
    // matched substring are not included in the result.
    let rest = &text[start_idx..];
    let gt = rest.find('>')?;
    let content_start = start_idx + gt + 1;
    let content = &text[content_start..];
    let end_idx = content.find(end)?;
    Some(&content[..end_idx])
}
/// Strip HTML tags from `html` and decode the handful of entities that
/// appear in DuckDuckGo's result markup.
///
/// Everything between `<` and `>` is dropped; text outside tags is kept.
/// Entity decoding runs after tag stripping. `&amp;` is decoded LAST so
/// that escaped entities such as `&amp;lt;` decode to the literal text
/// `&lt;` instead of being double-decoded into `<`.
fn strip_tags(html: &str) -> String {
    let mut out = String::new();
    let mut in_tag = false;
    for ch in html.chars() {
        match ch {
            '<' => in_tag = true,
            '>' => in_tag = false,
            _ if !in_tag => out.push(ch),
            _ => {} // character inside a tag: discard
        }
    }
    out.replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", "\"")
        .replace("&#x27;", "'")
        .replace("&amp;", "&")
}