Skip to content

Commit fd85bbd

Browse files
committed
chore: integration test modified to use simple streaming example component
Signed-off-by: Aditya <aditya.salunkh919@gmail.com>
1 parent 8692b9c commit fd85bbd

File tree

19 files changed

+1949
-330
lines changed

19 files changed

+1949
-330
lines changed
-491 KB
Binary file not shown.
131 KB
Binary file not shown.

crates/wash-runtime/tests/integration_http_streaming_gemini.rs renamed to crates/wash-runtime/tests/integration_http_streaming.rs

Lines changed: 52 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,11 @@
1-
//! Integration test for streaming Gemini proxy component
1+
//! Integration test for HTTP streaming component
2+
//!
3+
//! This test demonstrates:
4+
//! 1. Starting a host with HTTP and logging plugins
5+
//! 2. Creating and starting a workload that streams data at fixed intervals
6+
//! 3. Verifying that exactly 20 chunks are received
7+
//! 4. Validating total streaming duration is approximately 10 seconds
8+
//! 5. Asserting each chunk arrives at ~0.5 second intervals
29
310
use anyhow::Result;
411
use futures::stream;
@@ -24,12 +31,12 @@ use wash_runtime::{
2431
types::{Component, LocalResources, Workload, WorkloadStartRequest},
2532
wit::WitInterface,
2633
};
27-
28-
const HTTP_STREAMING_GEMINI_WASM: &[u8] = include_bytes!("./fixtures/http_ai_proxy.wasm");
34+
// Fixture: the simple streaming example component (streams 20 chunks at ~0.5s intervals)
35+
const HTTP_STREAMING_WASM: &[u8] = include_bytes!("./fixtures/http_streaming.wasm");
2936

3037
#[test_log::test(tokio::test)]
31-
async fn wasi_http_gemini_proxy() -> Result<()> {
32-
println!("\n🚀 STREAMING GEMINI PROXY TEST\n");
38+
async fn wasi_http_stream_random_chunks() -> Result<()> {
39+
println!("\n🚀 STREAMING RANDOM CHUNKS TEST\n");
3340

3441
let engine = Engine::builder().build()?;
3542
let port = find_available_port().await?;
@@ -51,15 +58,12 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
5158
workload_id: uuid::Uuid::new_v4().to_string(),
5259
workload: Workload {
5360
namespace: "test".to_string(),
54-
name: "gemini-proxy".to_string(),
61+
name: "random-streamer".to_string(),
5562
annotations: HashMap::new(),
5663
service: None,
5764
components: vec![Component {
58-
bytes: bytes::Bytes::from_static(HTTP_STREAMING_GEMINI_WASM),
59-
local_resources: LocalResources {
60-
allowed_hosts: vec!["generativelanguage.googleapis.com".to_string()],
61-
..Default::default()
62-
},
65+
bytes: bytes::Bytes::from_static(HTTP_STREAMING_WASM),
66+
local_resources: LocalResources::default(),
6367
pool_size: 1,
6468
max_invocations: 100,
6569
}],
@@ -71,7 +75,7 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
7175
version: Some(semver::Version::parse("0.2.2").unwrap()),
7276
config: {
7377
let mut config = HashMap::new();
74-
config.insert("host".to_string(), "gemini-proxy".to_string());
78+
config.insert("host".to_string(), "random-streamer".to_string());
7579
config
7680
},
7781
},
@@ -90,8 +94,7 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
9094
host.workload_start(req).await?;
9195
println!("✓ Workload deployed\n");
9296

93-
let prompt = "Explain how AI works";
94-
let body_stream = stream::iter([Ok::<_, hyper::Error>(Frame::data(Bytes::from(prompt)))]);
97+
let body_stream = stream::iter([Ok::<_, hyper::Error>(Frame::data(Bytes::from("")))]);
9598

9699
// Create HTTP client first
97100
let stream = tokio::net::TcpStream::connect(addr).await?;
@@ -111,13 +114,13 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
111114
// Build request with relative URI but explicit Host header
112115
let mut request = hyper::Request::builder()
113116
.method(hyper::Method::POST)
114-
.uri("/gemini-proxy")
117+
.uri("/stream-random")
115118
.header("content-type", "text/plain")
116119
.body(BoxBody::new(StreamBody::new(body_stream)))?;
117120

118121
request.headers_mut().insert(
119122
hyper::header::HOST,
120-
hyper::header::HeaderValue::from_static("gemini-proxy"),
123+
hyper::header::HeaderValue::from_static("random-streamer"),
121124
);
122125

123126
let response = sender.send_request(request).await?;
@@ -135,15 +138,17 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
135138
let mut chunk_count = 0;
136139
let mut total_bytes = 0;
137140
let mut response_text = String::new();
141+
let mut chunk_timestamps = Vec::new();
138142

139143
while let Some(frame) = body_stream.frame().await {
140144
match frame {
141145
Ok(frame) => {
142146
if let Some(chunk) = frame.data_ref() {
147+
let elapsed = start_time.elapsed();
148+
chunk_timestamps.push(elapsed);
143149
chunk_count += 1;
144150
total_bytes += chunk.len();
145151

146-
let elapsed = start_time.elapsed();
147152
println!(
148153
"[{:.7}s] Chunk #{} received ({} bytes)",
149154
elapsed.as_secs_f64(),
@@ -174,32 +179,48 @@ async fn wasi_http_gemini_proxy() -> Result<()> {
174179
total_bytes
175180
);
176181

177-
// Assertions to verify streaming behavior
178-
assert!(
179-
chunk_count >= 10,
180-
"Expected at least 10 chunks for streaming response, got {}",
182+
// Assert exactly 20 chunks
183+
assert_eq!(
184+
chunk_count, 20,
185+
"Expected exactly 20 chunks, got {}",
181186
chunk_count
182187
);
183188

189+
// Assert total time is approximately 10 seconds (20 chunks * 0.5s = 10s)
190+
// Allow some variance for timing precision (9-11 seconds)
184191
assert!(
185-
total_bytes > 100,
186-
"Expected substantial response content, got {} bytes",
187-
total_bytes
192+
total_time.as_secs_f64() >= 9.0 && total_time.as_secs_f64() <= 11.0,
193+
"Expected total time ~10 seconds (9-11s range), got {:.2}s",
194+
total_time.as_secs_f64()
188195
);
189196

197+
// Assert chunks arrived at approximately 0.5 second intervals
198+
for i in 1..chunk_timestamps.len() {
199+
let interval = (chunk_timestamps[i] - chunk_timestamps[i - 1]).as_secs_f64();
200+
assert!(
201+
interval >= 0.4 && interval <= 0.6,
202+
"Chunk {} interval was {:.3}s, expected ~0.5s (0.4-0.6s range)",
203+
i + 1,
204+
interval
205+
);
206+
}
207+
208+
// Verify response is not empty
190209
assert!(
191210
!response_text.is_empty(),
192211
"Expected non-empty response text"
193212
);
194213

195-
// Verify that streaming actually took time (not instant)
196-
assert!(
197-
total_time.as_millis() > 100,
198-
"Response came too fast ({:?}ms), may not be streaming",
199-
total_time.as_millis()
200-
);
201-
202214
println!("✓ All streaming assertions passed!");
215+
println!(" - Received exactly 20 chunks");
216+
println!(
217+
" - Total time: {:.2}s (~10s expected)",
218+
total_time.as_secs_f64()
219+
);
220+
println!(
221+
" - Average interval: {:.3}s (~0.5s expected)",
222+
total_time.as_secs_f64() / 20.0
223+
);
203224

204225
Ok(())
205226
}

0 commit comments

Comments
 (0)