
Adding documentation and better support for pointcloud in dora-rerun

tags/v0.3.11-rc1
haixuanTao (haixuantao) committed 9 months ago
commit 8a57b9c79d
4 changed files with 114 additions and 47 deletions

1. binaries/daemon/src/lib.rs (+2, -0)
2. examples/videostream-encoding/dataflow.yml (+3, -28)
3. examples/videostream-encoding/dataflow_reachy.yml (+68, -0)
4. node-hub/dora-rerun/src/lib.rs (+41, -19)

binaries/daemon/src/lib.rs (+2, -0)

@@ -277,6 +277,8 @@ impl Daemon {
                r#"{ router: ["tcp/[::]:7447"], peer: ["tcp/[::]:5456"] }"#,
            )
            .unwrap();

        // Linkstate makes it possible to connect two daemons on different networks through a public daemon
        zenoh_config
            .insert_json5("routing/peer", r#"{ mode: "linkstate" }"#)
            .unwrap();
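
For context, linkstate routing is a plain zenoh configuration switch, so the same setting can be reproduced outside the daemon. A minimal standalone sketch, assuming a zenoh version that exposes `Config::default()` and the `insert_json5` API used above; the connect endpoint is a placeholder address:

```rust
fn main() {
    // Start from a default zenoh config and opt into linkstate peer routing,
    // so peers that cannot reach each other directly can still exchange data
    // through a publicly reachable router.
    let mut zenoh_config = zenoh::Config::default();
    zenoh_config
        .insert_json5("routing/peer", r#"{ mode: "linkstate" }"#)
        .unwrap();
    // Placeholder endpoint: a public router that both daemons can reach.
    zenoh_config
        .insert_json5("connect/endpoints", r#"["tcp/203.0.113.1:7447"]"#)
        .unwrap();
    println!("{zenoh_config:?}");
}
```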


node-hub/dora-dav1d/dataflow.yml → examples/videostream-encoding/dataflow.yml (+3, -28)

@@ -9,19 +9,9 @@ nodes:
     outputs:
       - image
     env:
-      CAPTURE_PATH: 1
-      IMAGE_WIDTH: 640
-      IMAGE_HEIGHT: 480
-
-  - id: echo
-    build: pip install -e ../../node-hub/dora-echo
-    path: dora-echo
-    _unstable_deploy:
-      machine: decoder
-    #inputs:
-    # image: camera/image
-    outputs:
-      - image
+      CAPTURE_PATH: 0
+      IMAGE_WIDTH: 1280
+      IMAGE_HEIGHT: 720

   - id: rav1e-local
     path: dora-rav1e
@@ -32,9 +22,6 @@ nodes:
       image: camera/image
     outputs:
       - image
-    env:
-      IMAGE_WIDTH: 640
-      IMAGE_HEIGHT: 480

   - id: dav1d-remote
     path: dora-dav1d
@@ -45,9 +32,6 @@ nodes:
       image: rav1e-local/image
     outputs:
       - image
-    env:
-      IMAGE_WIDTH: 640
-      IMAGE_HEIGHT: 480

   - id: rav1e-remote
     path: dora-rav1e
@@ -83,12 +67,3 @@ nodes:
     inputs:
       image_decode: dav1d-local/image
-      image_echo: echo/image
       #image_2: camera6/image
-
-# - id: plot2
-#   build: pip install -e ../../node-hub/opencv-plot
-#   _unstable_deploy:
-#     machine: encoder
-#   path: opencv-plot
-#   inputs:
-#     image: echo/image
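
The `env:` block above is handed to the node process as ordinary environment variables, so the new capture settings can be read with plain `std::env` calls. A hedged sketch (the variable names come from the dataflow; the fallback values are made up for illustration):

```rust
use std::env;

fn main() {
    // Read the capture settings that the dataflow's `env:` block sets for the
    // camera node; the fallbacks below are illustrative only.
    let width: u32 = env::var("IMAGE_WIDTH")
        .ok()
        .and_then(|v| v.parse().ok())
        .unwrap_or(1280);
    let height: u32 = env::var("IMAGE_HEIGHT")
        .ok()
        .and_then(|v| v.parse().ok())
        .unwrap_or(720);
    println!("capturing at {width}x{height}");
}
```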

examples/videostream-encoding/dataflow_reachy.yml (+68, -0)

@@ -0,0 +1,68 @@
nodes:
  - id: camera
    path: dora-reachy2-camera
    _unstable_deploy:
      machine: encoder
    inputs:
      tick: dora/timer/millis/50
    outputs:
      - image_right
      - image_left
      - image_depth
      - depth
    env:
      CAPTURE_PATH: 0
      IMAGE_WIDTH: 640
      IMAGE_HEIGHT: 480
      ROBOT_IP: 127.0.0.1

  - id: rav1e-local
    path: dora-rav1e
    build: cargo build -p dora-rav1e --release
    _unstable_deploy:
      machine: encoder
    inputs:
      depth: camera/depth
    outputs:
      - depth
    env:
      RAV1E_SPEED: 7

  - id: rav1e-local-image
    path: dora-rav1e
    build: cargo build -p dora-rav1e --release
    _unstable_deploy:
      machine: encoder
    inputs:
      image_depth: camera/image_depth
      image_left: camera/image_left
    outputs:
      - image_left
      - image_depth
      - depth
    env:
      RAV1E_SPEED: 10

  - id: dav1d-remote
    path: dora-dav1d
    build: cargo build -p dora-dav1d --release
    _unstable_deploy:
      machine: plot
    inputs:
      image_depth: rav1e-local-image/image_depth
      image_left: rav1e-local-image/image_left
      depth: rav1e-local/depth
    outputs:
      - image_left
      - image_depth
      - depth

  - id: plot
    build: pip install -e ../../node-hub/dora-rerun
    _unstable_deploy:
      machine: plot
    path: dora-rerun
    inputs:
      image: dav1d-remote/image_depth
      depth: dav1d-remote/depth
      image_left: dav1d-remote/image_left
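
Each `id` above maps to an executable node, and the `inputs:` wiring shows up on the node side purely as input events. A rough sketch of a sink node wired like `plot`, assuming the `dora-node-api` and `eyre` crates used elsewhere in the node hub; it only reports what arrives:

```rust
use dora_node_api::{DoraNode, Event};

fn main() -> eyre::Result<()> {
    // Hypothetical sink: whatever the dataflow maps to this node's `inputs:`
    // arrives here as `Event::Input`, keyed by the input name.
    let (_node, mut events) = DoraNode::init_from_env()?;
    while let Some(event) = events.recv() {
        if let Event::Input { id, data, .. } = event {
            println!("input `{}`: {} values", id.as_str(), data.len());
        }
    }
    Ok(())
}
```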

node-hub/dora-rerun/src/lib.rs (+41, -19)

@@ -190,18 +190,30 @@ pub fn lib_main() -> Result<()> {
             dora_node_api::arrow::datatypes::DataType::Float64 => {
                 let buffer: &Float64Array = data.as_any().downcast_ref().unwrap();

-                let points_3d = buffer.iter().enumerate().map(|(i, z)| {
+                let mut points = vec![];
+                buffer.iter().enumerate().for_each(|(i, z)| {
                     let u = i as f32 % *width as f32; // Calculate x-coordinate (u)
                     let v = i as f32 / *width as f32; // Calculate y-coordinate (v)
-                    let z = z.unwrap_or_default() as f32;

-                    (
-                        (u - resolution[0] as f32) * z / focal_length[0] as f32,
-                        (v - resolution[1] as f32) * z / focal_length[1] as f32,
-                        z,
-                    )
+                    if let Some(z) = z {
+                        let z = z as f32;
+                        // Skip points that have empty depth or are too far away
+                        if z == 0. || z > 8.0 {
+                            points.push((0., 0., 0.));
+                            return;
+                        }
+                        let y = (u - resolution[0] as f32) * z / focal_length[0] as f32;
+                        let x = (v - resolution[1] as f32) * z / focal_length[1] as f32;
+                        let new_x = sin_theta * z + cos_theta * x;
+                        let new_y = -y;
+                        let new_z = cos_theta * z - sin_theta * x;
+
+                        points.push((new_x, new_y, new_z));
+                    } else {
+                        points.push((0., 0., 0.));
+                    }
                 });
-                let points_3d = Points3D::new(points_3d);
+                let points_3d = Points3D::new(points);
                 if let Some(color_buffer) = image_cache.get(&id.replace("depth", "image")) {
                     let colors = if let Some(mask) =
                         mask_cache.get(&id.replace("depth", "masks"))
@@ -240,20 +252,30 @@ pub fn lib_main() -> Result<()> {
             }
             dora_node_api::arrow::datatypes::DataType::UInt16 => {
                 let buffer: &UInt16Array = data.as_any().downcast_ref().unwrap();
-                let points_3d = buffer.iter().enumerate().map(|(i, z)| {
+                let mut points = vec![];
+                buffer.iter().enumerate().for_each(|(i, z)| {
                     let u = i as f32 % *width as f32; // Calculate x-coordinate (u)
                     let v = i as f32 / *width as f32; // Calculate y-coordinate (v)
-                    let z = z.unwrap_or_default() as f32 / 1_000.;
-                    let y = (u - resolution[0] as f32) * z / focal_length[0] as f32;
-                    let x = (v - resolution[1] as f32) * z / focal_length[1] as f32;
-                    let new_x = sin_theta * z + cos_theta * x;
-                    let new_y = -y;
-                    let new_z = cos_theta * z - sin_theta * x;

-                    (new_x, new_y, new_z)
-
+                    if let Some(z) = z {
+                        let z = z as f32;
+                        // Skip points that have empty depth or are too far away
+                        if z == 0. || z > 8.0 {
+                            points.push((0., 0., 0.));
+                            return;
+                        }
+                        let y = (u - resolution[0] as f32) * z / focal_length[0] as f32;
+                        let x = (v - resolution[1] as f32) * z / focal_length[1] as f32;
+                        let new_x = sin_theta * z + cos_theta * x;
+                        let new_y = -y;
+                        let new_z = cos_theta * z - sin_theta * x;
+
+                        points.push((new_x, new_y, new_z));
+                    } else {
+                        points.push((0., 0., 0.));
+                    }
                 });
-                let points_3d = Points3D::new(points_3d);
+                let points_3d = Points3D::new(points);
                 if let Some(color_buffer) = image_cache.get(&id.replace("depth", "image")) {
                     let colors = if let Some(mask) =
                         mask_cache.get(&id.replace("depth", "masks"))
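
The new code path above back-projects every depth pixel through the camera intrinsics and applies a fixed pitch rotation before the points reach `Points3D::new`. The same math as a standalone sketch (function and parameter names are illustrative; `resolution` plays the role of the principal point and `theta` the camera pitch, mirroring the variables above):

```rust
/// Back-project a row-major depth image (in meters) into 3D points,
/// mirroring the dora-rerun logic above: pinhole unprojection, a pitch
/// rotation by `theta`, and (0, 0, 0) for empty or far-away samples.
fn depth_to_points(
    depth_m: &[f32],
    width: usize,
    focal_length: [f32; 2],
    resolution: [f32; 2],
    theta: f32,
) -> Vec<(f32, f32, f32)> {
    let (sin_theta, cos_theta) = theta.sin_cos();
    depth_m
        .iter()
        .enumerate()
        .map(|(i, &z)| {
            // Empty depth or anything beyond 8 m collapses to the origin.
            if z == 0.0 || z > 8.0 {
                return (0.0, 0.0, 0.0);
            }
            let u = (i % width) as f32; // column index
            let v = (i / width) as f32; // row index
            let y = (u - resolution[0]) * z / focal_length[0];
            let x = (v - resolution[1]) * z / focal_length[1];
            // Mix x and z by theta (pitch) and flip the vertical axis.
            (
                sin_theta * z + cos_theta * x,
                -y,
                cos_theta * z - sin_theta * x,
            )
        })
        .collect()
}

fn main() {
    // Tiny 2x2 depth image with made-up intrinsics, just to exercise it.
    let depth = [1.0_f32, 0.0, 2.5, 9.0];
    let points = depth_to_points(&depth, 2, [525.0, 525.0], [1.0, 1.0], 0.3);
    println!("{points:?}");
}
```

In the node itself the resulting tuples are passed to `Points3D::new`, and colors are then looked up from the matching `image` entry as shown in the diff.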

