
fix ci/cd and improve examples

tags/v0.3.11-rc1
haixuantao, 9 months ago
commit a6fe69cbed
8 changed files with 108 additions and 15 deletions
  1. Cargo.lock (+2, -2)
  2. examples/av1-encoding/dataflow.yml (+0, -0)
  3. examples/av1-encoding/dataflow_reachy.yml (+0, -0)
  4. examples/av1-encoding/ios-dev.yaml (+73, -0)
  5. examples/depth_camera/ios-dev.yaml (+3, -0)
  6. node-hub/dora-ios-lidar/dora_ios_lidar/main.py (+26, -9)
  7. node-hub/dora-ios-lidar/pyproject.toml (+2, -2)
  8. node-hub/dora-rerun/src/lib.rs (+2, -2)

Cargo.lock (+2, -2)

@@ -2779,9 +2779,9 @@ checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"

[[package]]
name = "dav1d"
-version = "0.10.3"
+version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d4b54a40baf633a71c6f0fb49494a7e4ee7bc26f3e727212b6cb915aa1ea1e1"
+checksum = "80c3f80814db85397819d464bb553268992c393b4b3b5554b89c1655996d5926"
dependencies = [
"av-data",
"bitflags 2.9.0",


examples/videostream-encoding/dataflow.yml → examples/av1-encoding/dataflow.yml


examples/videostream-encoding/dataflow_reachy.yml → examples/av1-encoding/dataflow_reachy.yml


examples/av1-encoding/ios-dev.yaml (+73, -0)

@@ -0,0 +1,73 @@
nodes:
  - id: camera
    build: pip install -e ../../node-hub/dora-ios-lidar
    path: dora-ios-lidar
    _unstable_deploy:
      machine: encoder-ios
    inputs:
      tick: dora/timer/millis/20
    outputs:
      - image
      - depth
    env:
      IMAGE_WIDTH: 640
      IMAGE_HEIGHT: 480

  - id: rav1e-local
    path: dora-rav1e
    build: cargo build -p dora-rav1e --release
    _unstable_deploy:
      machine: encoder-ios
    inputs:
      image: camera/image
      depth: camera/depth
    outputs:
      - image
      - depth
    env:
      RAV1E_SPEED: 4

  - id: dav1d-remote
    path: dora-dav1d
    build: cargo build -p dora-dav1d --release
    _unstable_deploy:
      machine: decoder
    inputs:
      image: rav1e-local/image
      depth: rav1e-local/depth
    outputs:
      - image
      - depth

  - id: rav1e-remote
    path: dora-rav1e
    build: cargo build -p dora-rav1e --release
    _unstable_deploy:
      machine: decoder
    inputs:
      image: dav1d-remote/image
      depth: dav1d-remote/depth
    outputs:
      - image
      - depth

  - id: dav1d-local
    path: dora-dav1d
    build: cargo build -p dora-dav1d --release
    _unstable_deploy:
      machine: encoder-ios
    inputs:
      image: rav1e-remote/image
      depth: rav1e-remote/depth
    outputs:
      - image
      - depth

  - id: plot
    build: pip install -e ../../node-hub/dora-rerun
    path: dora-rerun
    _unstable_deploy:
      machine: encoder-ios
    inputs:
      image: dav1d-remote/image
      depth: dav1d-remote/depth
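
Note: this dataflow round-trips each frame through two encode/decode hops (camera → rav1e-local → dav1d-remote → rav1e-remote → dav1d-local) before plotting back on the iOS side. A minimal sketch of a debug sink that could be wired into such a dataflow is shown below; it assumes the dora Python event-loop API, and the node id and input wiring are hypothetical, not part of this commit.

"""Hypothetical debug sink for the av1-encoding dataflow (sketch only)."""
from dora import Node

node = Node()

for event in node:
    if event["type"] == "INPUT":
        meta = event["metadata"]
        # Frames arrive as flat pyarrow arrays; width/height/encoding travel
        # in the metadata, mirroring what dora-ios-lidar and the encoder nodes send.
        print(
            f"{event['id']}: {len(event['value'])} values, "
            f"{meta.get('width')}x{meta.get('height')} ({meta.get('encoding')})"
        )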

examples/depth_camera/ios-dev.yaml (+3, -0)

@@ -7,6 +7,9 @@ nodes:
    outputs:
      - image
      - depth
+    env:
+      IMAGE_WIDTH: 640
+      IMAGE_HEIGHT: 480

  - id: plot
    build: pip install -e ../../node-hub/dora-rerun


node-hub/dora-ios-lidar/dora_ios_lidar/main.py (+26, -9)

@@ -1,5 +1,6 @@
"""TODO: Add docstring."""

+import os
from threading import Event

import cv2
@@ -9,6 +10,9 @@ from dora import Node
from record3d import Record3DStream
from scipy.spatial.transform import Rotation

+image_width = os.getenv("IMAGE_WIDTH")
+image_height = os.getenv("IMAGE_HEIGHT")


class DemoApp:
"""TODO: Add docstring."""
@@ -87,10 +91,23 @@ class DemoApp:
            intrinsic_mat = self.get_intrinsic_mat_from_coeffs(
                self.session.get_intrinsic_mat(),
            )
-           pose = self.get_camera_pose()
+           # pose = self.get_camera_pose()

            if depth.shape != rgb.shape:
                rgb = cv2.resize(rgb, (depth.shape[1], depth.shape[0]))
+           if image_width is not None and image_height is not None:
+               f_0 = intrinsic_mat[0, 0] * (int(image_height) / rgb.shape[0])
+               f_1 = intrinsic_mat[1, 1] * (int(image_width) / rgb.shape[1])
+               r_0 = intrinsic_mat[0, 2] * (int(image_height) / rgb.shape[0])
+               r_1 = intrinsic_mat[1, 2] * (int(image_width) / rgb.shape[1])
+               rgb = cv2.resize(rgb, (int(image_width), int(image_height)))
+               depth = cv2.resize(depth, (int(image_width), int(image_height)))
+           else:
+               f_0 = intrinsic_mat[0, 0]
+               f_1 = intrinsic_mat[1, 1]
+               r_0 = intrinsic_mat[0, 2]
+               r_1 = intrinsic_mat[1, 2]

            node.send_output(
                "image",
                pa.array(rgb.ravel()),
@@ -102,7 +119,7 @@ class DemoApp:
            )

-           depth = (np.array(depth) * 1_000).astype(np.uint16)
+           depth = np.clip(depth, 0, 4095)  # Clip depth to uint12
            node.send_output(
                "depth",
                pa.array(depth.ravel()),
@@ -111,16 +128,16 @@ class DemoApp:
"height": depth.shape[0],
"encoding": "mono16",
"focal": [
int(intrinsic_mat[0, 0]),
int(intrinsic_mat[1, 1]),
int(f_0),
int(f_1),
],
"resolution": [
int(intrinsic_mat[0, 2]),
int(intrinsic_mat[1, 2]),
int(r_0),
int(r_1),
],
"roll": pose[3],
"pitch": pose[4],
"yaw": pose[5],
# "roll": pose[3],
# "pitch": pose[4], # Adding 90 degrees to pitch
# "yaw": pose[5],
},
)
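
The f_0/f_1/r_0/r_1 values above rescale the camera intrinsics to match the resized frame: each focal length and principal-point coordinate is multiplied by the ratio of the target size (IMAGE_WIDTH/IMAGE_HEIGHT) to the original frame size. A standalone sketch with made-up numbers, using the same axis/ratio pairing as the node:

"""Sketch of the intrinsics rescaling; the matrix and sizes here are made up."""
import numpy as np

intrinsic_mat = np.array(
    [[600.0, 0.0, 320.0],  # fx, 0, cx
     [0.0, 600.0, 240.0],  # 0, fy, cy
     [0.0, 0.0, 1.0]]
)
src_h, src_w = 720, 960  # original frame size
dst_h, dst_w = 480, 640  # IMAGE_HEIGHT, IMAGE_WIDTH

f_0 = intrinsic_mat[0, 0] * (dst_h / src_h)  # 400.0
f_1 = intrinsic_mat[1, 1] * (dst_w / src_w)  # 400.0
r_0 = intrinsic_mat[0, 2] * (dst_h / src_h)  # ~213.3
r_1 = intrinsic_mat[1, 2] * (dst_w / src_w)  # 160.0
print(f_0, f_1, r_0, r_1)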



node-hub/dora-ios-lidar/pyproject.toml (+2, -2)

@@ -5,9 +5,9 @@ authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-ios-lidar"
license = { text = "MIT" }
readme = "README.md"
-requires-python = ">=3.8"
+requires-python = "==3.8"

-dependencies = ["dora-rs >= 0.3.9", "opencv-python>=4.11.0.86", "record3d>=1.4"]
+dependencies = ["dora-rs >= 0.3.9", "opencv-python>=4.11.0.86", "record3d>=1.4", "scipy"]

[dependency-groups]
dev = ["pytest >=8.1.1", "ruff >=0.9.1"]
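
scipy is added because main.py imports scipy.spatial.transform.Rotation for the camera pose. A short sketch of the quaternion-to-Euler conversion that class provides; the quaternion value is made up, and how the node uses the result is currently commented out in the diff above.

"""Sketch of scipy's Rotation usage; the quaternion is hypothetical."""
import numpy as np
from scipy.spatial.transform import Rotation

quat = np.array([0.0, 0.0, 0.38268343, 0.92387953])  # ~45 deg about z, (x, y, z, w)
roll, pitch, yaw = Rotation.from_quat(quat).as_euler("xyz")  # radians
print(roll, pitch, yaw)  # 0.0 0.0 ~0.785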


node-hub/dora-rerun/src/lib.rs (+2, -2)

@@ -189,8 +189,8 @@ pub fn lib_main() -> Result<()> {
        } else {
            camera_pitch
        };
-       let cos_theta = pitch.cos(); // np.cos(np.deg2rad(180-38))
-       let sin_theta = pitch.sin(); // np.sin(np.deg2rad(180-38))
+       let cos_theta = pitch.cos();
+       let sin_theta = pitch.sin();

        let points = match data.data_type() {
            dora_node_api::arrow::datatypes::DataType::Float64 => {
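
cos_theta and sin_theta feed the pitch rotation that dora-rerun applies to incoming points; the deleted comments referenced the fixed angle np.deg2rad(180 - 38) the code once assumed. A sketch of such a rotation in numpy, with made-up points and an assumed x-axis convention (not taken from lib.rs):

"""Sketch of a pitch rotation like the one cos_theta/sin_theta drive; values are made up."""
import numpy as np

pitch = np.deg2rad(180 - 38)  # the angle the removed comments referenced
cos_theta, sin_theta = np.cos(pitch), np.sin(pitch)

points = np.array([[0.1, 0.2, 1.5],  # hypothetical camera-frame points (x, y, z)
                   [0.0, -0.1, 2.0]])
x, y, z = points.T
rotated = np.stack(
    [x, y * cos_theta - z * sin_theta, y * sin_theta + z * cos_theta], axis=1
)
print(rotated)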

