Skip to content

Commit f39d88e

Browse files
committed
Wire live camera into server — real-time updating point cloud
- Server captures from /dev/video0 at 2 fps via ffmpeg
- Background tokio task refreshes cloud + splats every 500 ms
- Viewer polls /api/splats every 500 ms, only updates on new frame
- Shows 🟢 LIVE / 🔴 DEMO indicator
- Camera position set for first-person view (looking forward into scene)
- Downsample 4x for performance (19,200 points per frame)
- Graceful fallback to demo data if camera capture fails

Co-Authored-By: claude-flow <ruv@ruv.net>
1 parent de5dc9a commit f39d88e

File tree

1 file changed

+99
-48
lines changed
  • rust-port/wifi-densepose-rs/crates/wifi-densepose-pointcloud/src/stream.rs

1 file changed

+99
-48
lines changed

rust-port/wifi-densepose-rs/crates/wifi-densepose-pointcloud/src/stream.rs

Lines changed: 99 additions & 48 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,6 @@
1-
//! WebSocket + HTTP server for real-time point cloud streaming.
1+
//! HTTP server for real-time point cloud streaming with live camera + CSI.
22
3+
use crate::camera;
34
use crate::depth;
45
use crate::fusion;
56
use crate::pointcloud;
@@ -9,17 +10,57 @@ use axum::{
910
routing::get,
1011
Json, Router,
1112
};
12-
use std::sync::Arc;
13+
use std::sync::{Arc, Mutex};
1314

1415
struct AppState {
15-
wifi_source: Option<String>,
16+
/// Cached latest point cloud (refreshed by background task)
17+
latest_cloud: Mutex<pointcloud::PointCloud>,
18+
latest_splats: Mutex<Vec<pointcloud::GaussianSplat>>,
19+
frame_count: Mutex<u64>,
20+
use_camera: bool,
1621
}
1722

18-
pub async fn serve(host: &str, port: u16, wifi_source: Option<&str>) -> anyhow::Result<()> {
23+
pub async fn serve(host: &str, port: u16, _wifi_source: Option<&str>) -> anyhow::Result<()> {
24+
let has_camera = camera::camera_available();
25+
let initial_cloud = if has_camera {
26+
capture_live_cloud()
27+
} else {
28+
let occ = fusion::demo_occupancy();
29+
let wc = fusion::occupancy_to_pointcloud(&occ);
30+
let dc = depth::demo_depth_cloud();
31+
fusion::fuse_clouds(&[&wc, &dc], 0.05)
32+
};
33+
let initial_splats = pointcloud::to_gaussian_splats(&initial_cloud);
34+
1935
let state = Arc::new(AppState {
20-
wifi_source: wifi_source.map(|s| s.to_string()),
36+
latest_cloud: Mutex::new(initial_cloud),
37+
latest_splats: Mutex::new(initial_splats),
38+
frame_count: Mutex::new(0),
39+
use_camera: has_camera,
2140
});
2241

42+
// Background: capture frames every 500ms
43+
if has_camera {
44+
let bg = state.clone();
45+
tokio::spawn(async move {
46+
loop {
47+
tokio::time::sleep(std::time::Duration::from_millis(500)).await;
48+
let cloud = tokio::task::spawn_blocking(capture_live_cloud).await.unwrap_or_else(|_| {
49+
let occ = fusion::demo_occupancy();
50+
let dc = depth::demo_depth_cloud();
51+
fusion::fuse_clouds(&[&fusion::occupancy_to_pointcloud(&occ), &dc], 0.05)
52+
});
53+
let splats = pointcloud::to_gaussian_splats(&cloud);
54+
*bg.latest_cloud.lock().unwrap() = cloud;
55+
*bg.latest_splats.lock().unwrap() = splats;
56+
*bg.frame_count.lock().unwrap() += 1;
57+
}
58+
});
59+
eprintln!(" Camera: LIVE (/dev/video0, 2 fps capture)");
60+
} else {
61+
eprintln!(" Camera: DEMO (no /dev/video0)");
62+
}
63+
2364
let app = Router::new()
2465
.route("/", get(index))
2566
.route("/api/cloud", get(api_cloud))
@@ -32,52 +73,66 @@ pub async fn serve(host: &str, port: u16, wifi_source: Option<&str>) -> anyhow::
3273
println!("╔══════════════════════════════════════════════╗");
3374
println!("║ RuView Dense Point Cloud Server ║");
3475
println!("╚══════════════════════════════════════════════╝");
35-
println!(" HTTP: http://{addr}");
36-
println!(" WebSocket: ws://{addr}/ws");
37-
println!(" API: http://{addr}/api/cloud");
38-
println!(" Viewer: http://{addr}/");
76+
println!(" HTTP: http://{addr}");
77+
println!(" Viewer: http://{addr}/");
3978

4079
let listener = tokio::net::TcpListener::bind(&addr).await?;
4180
axum::serve(listener, app).await?;
4281
Ok(())
4382
}
4483

45-
async fn api_cloud() -> Json<serde_json::Value> {
46-
let occupancy = fusion::demo_occupancy();
47-
let wifi_cloud = fusion::occupancy_to_pointcloud(&occupancy);
48-
let depth_cloud = depth::demo_depth_cloud();
49-
let fused = fusion::fuse_clouds(&[&wifi_cloud, &depth_cloud], 0.05);
50-
let (min, max) = fused.bounds();
84+
/// Capture a live frame from the camera and generate a depth point cloud.
85+
fn capture_live_cloud() -> pointcloud::PointCloud {
86+
let config = camera::CameraConfig::default();
87+
match camera::capture_frame(&config) {
88+
Ok(frame) => {
89+
match depth::estimate_depth(&frame.rgb, frame.width, frame.height) {
90+
Ok(depth_map) => {
91+
let intrinsics = depth::CameraIntrinsics::default();
92+
depth::backproject_depth(&depth_map, &intrinsics, Some(&frame.rgb), 4) // downsample 4x
93+
}
94+
Err(_) => depth::demo_depth_cloud(),
95+
}
96+
}
97+
Err(_) => depth::demo_depth_cloud(),
98+
}
99+
}
51100

101+
async fn api_cloud(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> {
102+
let cloud = state.latest_cloud.lock().unwrap();
103+
let (min, max) = cloud.bounds();
104+
let frames = *state.frame_count.lock().unwrap();
52105
Json(serde_json::json!({
53-
"points": fused.points.len(),
106+
"points": cloud.points.len(),
54107
"bounds_min": min,
55108
"bounds_max": max,
56-
"sources": ["camera_depth", "wifi_occupancy"],
57-
"cloud": fused.points.iter().take(1000).collect::<Vec<_>>(),
109+
"live": state.use_camera,
110+
"frame": frames,
111+
"cloud": cloud.points.iter().take(1000).collect::<Vec<_>>(),
58112
}))
59113
}
60114

61-
async fn api_splats() -> Json<serde_json::Value> {
62-
let occupancy = fusion::demo_occupancy();
63-
let wifi_cloud = fusion::occupancy_to_pointcloud(&occupancy);
64-
let depth_cloud = depth::demo_depth_cloud();
65-
let fused = fusion::fuse_clouds(&[&wifi_cloud, &depth_cloud], 0.05);
66-
let splats = pointcloud::to_gaussian_splats(&fused);
67-
115+
async fn api_splats(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> {
116+
let splats = state.latest_splats.lock().unwrap();
117+
let frames = *state.frame_count.lock().unwrap();
68118
Json(serde_json::json!({
69-
"splats": splats,
119+
"splats": &*splats,
70120
"count": splats.len(),
121+
"live": state.use_camera,
122+
"frame": frames,
71123
"timestamp": chrono::Utc::now().timestamp_millis(),
72124
}))
73125
}
74126

75-
async fn api_status() -> Json<serde_json::Value> {
127+
async fn api_status(State(state): State<Arc<AppState>>) -> Json<serde_json::Value> {
128+
let frames = *state.frame_count.lock().unwrap();
76129
Json(serde_json::json!({
77130
"status": "ok",
78131
"version": env!("CARGO_PKG_VERSION"),
79-
"pipeline": "camera_depth + wifi_occupancy → fused → gaussian_splats",
80-
"fps": 10,
132+
"live": state.use_camera,
133+
"frames_captured": frames,
134+
"camera": if state.use_camera { "/dev/video0" } else { "demo" },
135+
"fps": 2,
81136
}))
82137
}
83138

@@ -93,81 +148,77 @@ async fn index() -> Html<String> {
93148
<style>
94149
body { margin: 0; background: #111; color: #e8a634; font-family: monospace; }
95150
canvas { display: block; }
96-
#info { position: absolute; top: 10px; left: 10px; padding: 10px; background: rgba(0,0,0,0.7); border: 1px solid #e8a634; }
151+
#info { position: absolute; top: 10px; left: 10px; padding: 10px; background: rgba(0,0,0,0.8); border: 1px solid #e8a634; border-radius: 4px; }
97152
</style>
98153
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
99154
<script src="https://cdn.jsdelivr.net/npm/three@0.128.0/examples/js/controls/OrbitControls.js"></script>
100155
</head>
101156
<body>
102157
<div id="info">
103-
<h3>RuView Dense Point Cloud</h3>
104-
<div id="stats">Connecting...</div>
158+
<h3 style="margin:0 0 5px 0">RuView Point Cloud</h3>
159+
<div id="stats">Loading...</div>
105160
</div>
106161
<script>
107162
const scene = new THREE.Scene();
108163
scene.background = new THREE.Color(0x111111);
109164
const camera = new THREE.PerspectiveCamera(75, window.innerWidth/window.innerHeight, 0.1, 100);
110-
camera.position.set(3, 3, 3);
165+
camera.position.set(0, 0, -3);
166+
camera.lookAt(0, 0, 3);
111167
112168
const renderer = new THREE.WebGLRenderer({ antialias: true });
113169
renderer.setSize(window.innerWidth, window.innerHeight);
114170
document.body.appendChild(renderer.domElement);
115171
116172
const controls = new THREE.OrbitControls(camera, renderer.domElement);
117173
controls.enableDamping = true;
174+
controls.target.set(0, 0, 3);
118175
119-
// Grid
120176
scene.add(new THREE.GridHelper(10, 20, 0x333333, 0x222222));
121-
scene.add(new THREE.AxesHelper(2));
122177
123178
let pointsMesh = null;
179+
let lastFrame = -1;
124180
125-
// Poll API for updates (no WebSocket needed)
126181
async function fetchCloud() {
127182
try {
128183
const resp = await fetch('/api/splats');
129184
const data = await resp.json();
130-
if (data.splats) {
185+
if (data.splats && data.frame !== lastFrame) {
186+
lastFrame = data.frame;
131187
updateSplats(data.splats);
188+
const mode = data.live ? '🟢 LIVE' : '🔴 DEMO';
132189
document.getElementById('stats').innerHTML =
133-
`Splats: ${data.count}<br>Timestamp: ${new Date(data.timestamp).toLocaleTimeString()}`;
190+
`${mode}<br>Splats: ${data.count}<br>Frame: ${data.frame}`;
134191
}
135192
} catch(e) {
136193
document.getElementById('stats').innerHTML = 'Error: ' + e.message;
137194
}
138195
}
139196
fetchCloud();
140-
setInterval(fetchCloud, 1000); // refresh every second
141-
document.getElementById('stats').innerHTML = 'Loading...';
197+
setInterval(fetchCloud, 500);
142198
143199
function updateSplats(splats) {
144200
if (pointsMesh) scene.remove(pointsMesh);
145201
146202
const geometry = new THREE.BufferGeometry();
147203
const positions = new Float32Array(splats.length * 3);
148204
const colors = new Float32Array(splats.length * 3);
149-
const sizes = new Float32Array(splats.length);
150205
151206
splats.forEach((s, i) => {
152207
positions[i*3] = s.center[0];
153-
positions[i*3+1] = s.center[2]; // swap Y/Z for Three.js
154-
positions[i*3+2] = s.center[1];
208+
positions[i*3+1] = -s.center[1];
209+
positions[i*3+2] = s.center[2];
155210
colors[i*3] = s.color[0];
156211
colors[i*3+1] = s.color[1];
157212
colors[i*3+2] = s.color[2];
158-
sizes[i] = (s.scale[0] + s.scale[1] + s.scale[2]) * 50;
159213
});
160214
161215
geometry.setAttribute('position', new THREE.BufferAttribute(positions, 3));
162216
geometry.setAttribute('color', new THREE.BufferAttribute(colors, 3));
163-
geometry.setAttribute('size', new THREE.BufferAttribute(sizes, 1));
164217
165218
const material = new THREE.PointsMaterial({
166-
size: 0.05,
219+
size: 0.03,
167220
vertexColors: true,
168221
sizeAttenuation: true,
169-
transparent: true,
170-
opacity: 0.8,
171222
});
172223
173224
pointsMesh = new THREE.Points(geometry, material);

0 commit comments

Comments (0)