Browse Source

Merge branch 'main' into rust-api-docs

pull/1056/head
Philipp Oppermann 6 months ago
parent
commit
7036f8dd5f
Failed to extract signature
100 changed files with 3737 additions and 1175 deletions
  1. +1
    -1
      .github/workflows/ci.yml
  2. +31
    -10
      .github/workflows/node_hub_test.sh
  3. +0
    -2
      .github/workflows/pip-release.yml
  4. +136
    -53
      Cargo.lock
  5. +23
    -23
      Cargo.toml
  6. +54
    -0
      Changelog.md
  7. +16
    -10
      README.md
  8. +1
    -1
      apis/python/node/Cargo.toml
  9. +0
    -8
      apis/python/node/pyproject.toml
  10. +3
    -3
      apis/python/node/src/lib.rs
  11. +1
    -0
      apis/rust/node/src/node/mod.rs
  12. +0
    -8
      binaries/cli/pyproject.toml
  13. +41
    -4
      binaries/cli/src/command/build/mod.rs
  14. +41
    -1
      binaries/cli/src/command/check.rs
  15. +66
    -0
      binaries/cli/src/command/coordinator.rs
  16. +91
    -0
      binaries/cli/src/command/daemon.rs
  17. +28
    -0
      binaries/cli/src/command/destroy.rs
  18. +28
    -4
      binaries/cli/src/command/graph.rs
  19. +1
    -0
      binaries/cli/src/command/graph/.gitignore
  20. +0
    -0
      binaries/cli/src/command/graph/mermaid-template.html
  21. +59
    -0
      binaries/cli/src/command/list.rs
  22. +46
    -1
      binaries/cli/src/command/logs.rs
  23. +96
    -52
      binaries/cli/src/command/mod.rs
  24. +21
    -0
      binaries/cli/src/command/new.rs
  25. +36
    -4
      binaries/cli/src/command/run.rs
  26. +15
    -0
      binaries/cli/src/command/runtime.rs
  27. +139
    -0
      binaries/cli/src/command/self_.rs
  28. +1
    -1
      binaries/cli/src/command/start/attach.rs
  29. +80
    -50
      binaries/cli/src/command/start/mod.rs
  30. +111
    -0
      binaries/cli/src/command/stop.rs
  31. +20
    -1
      binaries/cli/src/command/up.rs
  32. +117
    -0
      binaries/cli/src/common.rs
  33. +7
    -743
      binaries/cli/src/lib.rs
  34. +1
    -1
      binaries/coordinator/Cargo.toml
  35. +0
    -13
      binaries/daemon/src/spawn.rs
  36. +1
    -1
      examples/c++-ros2-dataflow/node-rust-api/main.cc
  37. +14
    -0
      examples/keyboard/dataflow.yml
  38. +60
    -0
      examples/so101/Readme.md
  39. +48
    -0
      examples/so101/arm_gamepad_control.yml
  40. +33
    -0
      examples/so101/leader_follower.yml
  41. +3
    -3
      examples/speech-to-speech/README.md
  42. +1
    -1
      examples/speech-to-text/whisper-dev.yml
  43. +30
    -0
      examples/urdf/broken_fanuc.yml
  44. +30
    -0
      examples/urdf/broken_poppy.yml
  45. +35
    -0
      examples/urdf/franka.yml
  46. +30
    -0
      examples/urdf/gen3.yml
  47. +33
    -0
      examples/urdf/kuka.yml
  48. +35
    -0
      examples/urdf/piper.yml
  49. +35
    -0
      examples/urdf/so_arm101.yml
  50. +35
    -0
      examples/urdf/ur5.yml
  51. +70
    -0
      examples/urdf/vggt/franka.yml
  52. +68
    -0
      examples/urdf/vggt/kuka.yml
  53. +69
    -0
      examples/urdf/vggt/so_arm101.yml
  54. +59
    -0
      examples/urdf/vggt/z1.yml
  55. +35
    -0
      examples/urdf/z1.yml
  56. +54
    -0
      examples/vggt/depth-to-avif.yaml
  57. +0
    -8
      examples/vggt/depth.dora-session.yaml
  58. +34
    -0
      examples/vggt/image_saver.py
  59. +53
    -0
      examples/vggt/realsense-to-avif.yaml
  60. +1
    -1
      libraries/core/src/descriptor/validate.rs
  61. +1
    -1
      libraries/extensions/ros2-bridge/python/Cargo.toml
  62. +7
    -8
      libraries/extensions/ros2-bridge/python/src/lib.rs
  63. +4
    -7
      libraries/extensions/ros2-bridge/python/src/qos.rs
  64. +1
    -1
      libraries/extensions/telemetry/tracing/src/telemetry.rs
  65. +1
    -1
      libraries/message/Cargo.toml
  66. +1
    -1
      node-hub/dora-argotranslate/pyproject.toml
  67. +1
    -1
      node-hub/dora-cotracker/pyproject.toml
  68. +1
    -1
      node-hub/dora-distil-whisper/pyproject.toml
  69. +1
    -1
      node-hub/dora-echo/pyproject.toml
  70. +1
    -1
      node-hub/dora-gradio/pyproject.toml
  71. +1
    -1
      node-hub/dora-internvl/pyproject.toml
  72. +1
    -1
      node-hub/dora-ios-lidar/pyproject.toml
  73. +1
    -1
      node-hub/dora-keyboard/pyproject.toml
  74. +1
    -1
      node-hub/dora-kokoro-tts/pyproject.toml
  75. +1
    -1
      node-hub/dora-microphone/pyproject.toml
  76. +9
    -6
      node-hub/dora-mujoco/pyproject.toml
  77. +1
    -1
      node-hub/dora-object-to-pose/Cargo.toml
  78. +1
    -1
      node-hub/dora-openai-server/pyproject.toml
  79. +1
    -1
      node-hub/dora-opus/pyproject.toml
  80. +1
    -1
      node-hub/dora-outtetts/pyproject.toml
  81. +1
    -1
      node-hub/dora-parler/pyproject.toml
  82. +1
    -2
      node-hub/dora-phi4/pyproject.toml
  83. +1309
    -0
      node-hub/dora-phi4/uv.lock
  84. +1
    -1
      node-hub/dora-piper/pyproject.toml
  85. +1
    -1
      node-hub/dora-pyaudio/pyproject.toml
  86. +1
    -1
      node-hub/dora-pyorbbecksdk/pyproject.toml
  87. +1
    -1
      node-hub/dora-pyrealsense/pyproject.toml
  88. +85
    -83
      node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/main.py
  89. +1
    -1
      node-hub/dora-qwen/pyproject.toml
  90. +4
    -0
      node-hub/dora-qwen2-5-vl/dora_qwen2_5_vl/main.py
  91. +1
    -1
      node-hub/dora-qwen2-5-vl/pyproject.toml
  92. +1
    -1
      node-hub/dora-qwenvl/pyproject.toml
  93. +3
    -2
      node-hub/dora-rav1e/Cargo.toml
  94. +80
    -19
      node-hub/dora-rav1e/src/lib.rs
  95. +1
    -1
      node-hub/dora-rdt-1b/pyproject.toml
  96. +1
    -1
      node-hub/dora-reachy2/pyproject.toml
  97. +2
    -1
      node-hub/dora-rerun/Cargo.toml
  98. +3
    -2
      node-hub/dora-rerun/pyproject.toml
  99. +25
    -9
      node-hub/dora-rerun/src/lib.rs
  100. +1
    -1
      node-hub/dora-rerun/src/series.rs

+ 1
- 1
.github/workflows/ci.yml View File

@@ -197,7 +197,7 @@ jobs:
required-ros-distributions: humble
- run: 'source /opt/ros/humble/setup.bash && echo AMENT_PREFIX_PATH=${AMENT_PREFIX_PATH} >> "$GITHUB_ENV"'
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
- name: Install pyarrow


+ 31
- 10
.github/workflows/node_hub_test.sh View File

@@ -1,6 +1,9 @@
#!/bin/bash
set -euo

# Check if we are running in a GitHub Actions environment
CI=${GITHUB_ACTIONS:-false}

# List of ignored modules
ignored_folders=("dora-parler" "dora-opus" "dora-internvl" "dora-magma")

@@ -13,6 +16,32 @@ dir=$(pwd)
# Get the base name of the directory (without the path)
base_dir=$(basename "$dir")

# Large node list requiring space cleanup
large_node=("dora-phi4")

export PYTEST_ADDOPTS="-x"

# Check if the current directory is in the large node list and if we're in the CI environment
if [[ " ${large_node[@]} " =~ " ${base_dir} " ]] && [[ "$CI" == "true" ]]; then
echo "Running cleanup for $base_dir..."
sudo rm -rf /opt/hostedtoolcache/CodeQL || :
# 1.4GB
sudo rm -rf /opt/hostedtoolcache/go || :
# 489MB
sudo rm -rf /opt/hostedtoolcache/PyPy || :
# 376MB
sudo rm -rf /opt/hostedtoolcache/node || :
# Remove Web browser packages
sudo apt purge -y \
firefox \
google-chrome-stable \
microsoft-edge-stable
sudo rm -rf /usr/local/lib/android/
sudo rm -rf /usr/share/dotnet/
sudo rm -rf /opt/ghc/
fi


# Check if the directory name is in the ignored list
if [[ " ${ignored_folders[@]} " =~ " ${base_dir} " ]]; then
echo "Skipping $base_dir as we cannot test it on the CI..."
@@ -60,16 +89,8 @@ else
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
maturin publish --skip-existing
fi
# x86_64-apple-darwin
rustup target add x86_64-apple-darwin
maturin build --target x86_64-apple-darwin --zig --release
# If GITHUB_EVENT_NAME is release or workflow_dispatch, publish the wheel
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
maturin publish --target x86_64-apple-darwin --skip-existing --zig
fi

elif [[ "$(uname)" = "Linux" ]]; then
elif [[ "$(uname)" = "Linux" ]] || [[ "$CI" == "false" ]]; then
if [ -f "$dir/Cargo.toml" ]; then
echo "Running build and tests for Rust project in $dir..."
cargo check
@@ -94,7 +115,7 @@ else
else
uv run pytest
fi
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
if [ "${GITHUB_EVENT_NAME:-false}" == "release" ] || [ "${GITHUB_EVENT_NAME:-false}" == "workflow_dispatch" ]; then
uv build
uv publish --check-url https://pypi.org/simple
fi


+ 0
- 2
.github/workflows/pip-release.yml View File

@@ -192,8 +192,6 @@ jobs:
fail-fast: false
matrix:
platform:
- runner: macos-13
target: x86_64
- runner: macos-13
target: aarch64
repository:


+ 136
- 53
Cargo.lock View File

@@ -1165,9 +1165,9 @@ dependencies = [

[[package]]
name = "avif-serialize"
version = "0.8.3"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e"
checksum = "2ea8ef51aced2b9191c08197f55450d830876d9933f8f48a429b354f1d496b42"
dependencies = [
"arrayvec",
]
@@ -1303,7 +1303,7 @@ dependencies = [

[[package]]
name = "benchmark-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -1316,7 +1316,7 @@ dependencies = [

[[package]]
name = "benchmark-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -2089,7 +2089,7 @@ dependencies = [

[[package]]
name = "communication-layer-pub-sub"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"flume 0.10.14",
"zenoh 0.7.0-rc",
@@ -2097,7 +2097,7 @@ dependencies = [

[[package]]
name = "communication-layer-request-reply"
version = "0.3.11"
version = "0.3.12"

[[package]]
name = "concat-idents"
@@ -2279,6 +2279,21 @@ dependencies = [
"libc",
]

[[package]]
name = "crc"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
dependencies = [
"crc-catalog",
]

[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"

[[package]]
name = "crc32fast"
version = "1.4.2"
@@ -2911,7 +2926,7 @@ dependencies = [

[[package]]
name = "dora-arrow-convert"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"chrono",
@@ -2922,7 +2937,7 @@ dependencies = [

[[package]]
name = "dora-cli"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bat",
"clap 4.5.32",
@@ -2966,7 +2981,7 @@ dependencies = [

[[package]]
name = "dora-coordinator"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"ctrlc",
"dora-core",
@@ -2987,7 +3002,7 @@ dependencies = [

[[package]]
name = "dora-core"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-message",
"dunce",
@@ -3011,7 +3026,7 @@ dependencies = [

[[package]]
name = "dora-daemon"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"async-trait",
@@ -3048,7 +3063,7 @@ dependencies = [

[[package]]
name = "dora-dav1d"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bitstream-io",
"bytemuck",
@@ -3062,7 +3077,7 @@ dependencies = [

[[package]]
name = "dora-download"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"reqwest",
@@ -3092,7 +3107,7 @@ dependencies = [

[[package]]
name = "dora-kit-car"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"dotenv",
@@ -3106,7 +3121,7 @@ dependencies = [

[[package]]
name = "dora-message"
version = "0.5.0-alpha"
version = "0.5.0"
dependencies = [
"aligned-vec",
"arrow-data",
@@ -3127,7 +3142,7 @@ dependencies = [

[[package]]
name = "dora-metrics"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"opentelemetry 0.29.1",
@@ -3148,7 +3163,7 @@ dependencies = [

[[package]]
name = "dora-node-api"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3173,7 +3188,7 @@ dependencies = [

[[package]]
name = "dora-node-api-c"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow-array",
"dora-node-api",
@@ -3183,7 +3198,7 @@ dependencies = [

[[package]]
name = "dora-node-api-cxx"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"cxx",
@@ -3201,7 +3216,7 @@ dependencies = [

[[package]]
name = "dora-node-api-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-cli",
@@ -3215,6 +3230,7 @@ dependencies = [
"futures",
"pyo3",
"pyo3-build-config",
"pyo3_special_method_derive",
"pythonize",
"serde_yaml 0.9.34+deprecated",
"tokio",
@@ -3222,7 +3238,7 @@ dependencies = [

[[package]]
name = "dora-object-to-pose"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -3231,7 +3247,7 @@ dependencies = [

[[package]]
name = "dora-openai-proxy-server"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -3252,7 +3268,7 @@ dependencies = [

[[package]]
name = "dora-operator-api"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-arrow-convert",
"dora-operator-api-macros",
@@ -3261,14 +3277,14 @@ dependencies = [

[[package]]
name = "dora-operator-api-c"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-operator-api-types",
]

[[package]]
name = "dora-operator-api-cxx"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"cxx",
"cxx-build",
@@ -3277,7 +3293,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-macros"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"proc-macro2",
"quote",
@@ -3286,7 +3302,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3302,7 +3318,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-types"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-arrow-convert",
@@ -3311,12 +3327,13 @@ dependencies = [

[[package]]
name = "dora-rav1e"
version = "0.3.11+fix1"
version = "0.3.12"
dependencies = [
"avif-serialize",
"bytemuck",
"dora-node-api",
"eyre",
"little_exif",
"log",
"pyo3",
"rav1e",
@@ -3324,7 +3341,7 @@ dependencies = [

[[package]]
name = "dora-record"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -3336,13 +3353,14 @@ dependencies = [

[[package]]
name = "dora-rerun"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bytemuck",
"dora-node-api",
"eyre",
"k",
"ndarray 0.15.6",
"pinyin",
"pyo3",
"rand 0.9.1",
"rerun",
@@ -3351,7 +3369,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"array-init",
"dora-daemon",
@@ -3374,7 +3392,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge-msg-gen"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"anyhow",
"glob",
@@ -3390,7 +3408,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-ros2-bridge",
@@ -3398,13 +3416,14 @@ dependencies = [
"eyre",
"futures",
"pyo3",
"pyo3_special_method_derive",
"serde",
"serde_assert",
]

[[package]]
name = "dora-runtime"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3443,7 +3462,7 @@ dependencies = [

[[package]]
name = "dora-tracing"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"opentelemetry 0.18.0",
@@ -6378,6 +6397,19 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"

[[package]]
name = "little_exif"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ebaa2b90127fb33dcf2ec1f5be8b496e92d5d1736789decca8eb2798c52f2e7"
dependencies = [
"crc",
"log",
"miniz_oxide",
"paste",
"quick-xml 0.37.5",
]

[[package]]
name = "llguidance"
version = "0.7.0"
@@ -6738,9 +6770,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"

[[package]]
name = "miniz_oxide"
version = "0.8.5"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
"simd-adler32",
@@ -6982,7 +7014,7 @@ dependencies = [

[[package]]
name = "multiple-daemons-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -6993,14 +7025,14 @@ dependencies = [

[[package]]
name = "multiple-daemons-example-operator"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-operator-api",
]

[[package]]
name = "multiple-daemons-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -8420,6 +8452,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"

[[package]]
name = "pinyin"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16f2611cd06a1ac239a0cea4521de9eb068a6ca110324ee00631aa68daa74fc0"

[[package]]
name = "piper"
version = "0.2.4"
@@ -8963,6 +9001,29 @@ dependencies = [
"syn 2.0.101",
]

[[package]]
name = "pyo3_special_method_derive"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b75ccfc0eef7c0478db8aa23d23e97b39c74e29da21d9bf1e1934e63ad0709d"
dependencies = [
"pyo3",
"pyo3_special_method_derive_macro",
]

[[package]]
name = "pyo3_special_method_derive_macro"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfc6e17b94c919f8295729dcddd368b7cd78231c39a3c23cdeec77b5834010cf"
dependencies = [
"proc-macro2",
"pyo3",
"quote",
"quote_into",
"syn 2.0.101",
]

[[package]]
name = "python3-dll-a"
version = "0.2.13"
@@ -9028,9 +9089,9 @@ dependencies = [

[[package]]
name = "quick-xml"
version = "0.37.2"
version = "0.37.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003"
checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb"
dependencies = [
"memchr",
]
@@ -9149,6 +9210,28 @@ dependencies = [
"proc-macro2",
]

[[package]]
name = "quote_into"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93886ed56f228a5d960fc4d26afa3736df12a251872869cf24f5efe5f07699b9"
dependencies = [
"proc-macro2",
"quote",
"quote_into_macro",
]

[[package]]
name = "quote_into_macro"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36b828998c40452b5afe441c75194e93181432e669585f4ceb7b0d32a3f73525"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]

[[package]]
name = "r-efi"
version = "5.2.0"
@@ -11017,7 +11100,7 @@ checksum = "03251193000f4bd3b042892be858ee50e8b3719f2b08e5833ac4353724632430"

[[package]]
name = "receive_data"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -11457,7 +11540,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11468,7 +11551,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11476,7 +11559,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-sink-dynamic"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11484,7 +11567,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-status-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11503,7 +11586,7 @@ dependencies = [

[[package]]
name = "rust-ros2-dataflow-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"dora-ros2-bridge",
@@ -11999,7 +12082,7 @@ dependencies = [
"hyper 1.6.0",
"indicatif",
"log",
"quick-xml 0.37.2",
"quick-xml 0.37.5",
"regex",
"reqwest",
"self-replace",
@@ -12347,7 +12430,7 @@ dependencies = [

[[package]]
name = "shared-memory-server"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bincode",
"eyre",
@@ -13173,7 +13256,7 @@ dependencies = [

[[package]]
name = "terminal-print"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -14604,7 +14687,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "896fdafd5d28145fce7958917d69f2fd44469b1d4e861cb5961bcbeebc6d1484"
dependencies = [
"proc-macro2",
"quick-xml 0.37.2",
"quick-xml 0.37.5",
"quote",
]



+ 23
- 23
Cargo.toml View File

@@ -50,37 +50,37 @@ members = [
[workspace.package]
edition = "2021"
# Make sure to also bump `apis/node/python/__init__.py` version.
version = "0.3.11"
version = "0.3.12"
description = "`dora` goal is to be a low latency, composable, and distributed data flow."
documentation = "https://dora-rs.ai"
license = "Apache-2.0"
repository = "https://github.com/dora-rs/dora/"

[workspace.dependencies]
dora-node-api = { version = "0.3.11", path = "apis/rust/node", default-features = false }
dora-node-api-python = { version = "0.3.11", path = "apis/python/node", default-features = false }
dora-operator-api = { version = "0.3.11", path = "apis/rust/operator", default-features = false }
dora-operator-api-macros = { version = "0.3.11", path = "apis/rust/operator/macros" }
dora-operator-api-types = { version = "0.3.11", path = "apis/rust/operator/types" }
dora-operator-api-python = { version = "0.3.11", path = "apis/python/operator" }
dora-operator-api-c = { version = "0.3.11", path = "apis/c/operator" }
dora-node-api-c = { version = "0.3.11", path = "apis/c/node" }
dora-core = { version = "0.3.11", path = "libraries/core" }
dora-arrow-convert = { version = "0.3.11", path = "libraries/arrow-convert" }
dora-tracing = { version = "0.3.11", path = "libraries/extensions/telemetry/tracing" }
dora-metrics = { version = "0.3.11", path = "libraries/extensions/telemetry/metrics" }
dora-download = { version = "0.3.11", path = "libraries/extensions/download" }
shared-memory-server = { version = "0.3.11", path = "libraries/shared-memory-server" }
communication-layer-request-reply = { version = "0.3.11", path = "libraries/communication-layer/request-reply" }
dora-cli = { version = "0.3.11", path = "binaries/cli" }
dora-runtime = { version = "0.3.11", path = "binaries/runtime" }
dora-daemon = { version = "0.3.11", path = "binaries/daemon" }
dora-coordinator = { version = "0.3.11", path = "binaries/coordinator" }
dora-ros2-bridge = { version = "0.3.11", path = "libraries/extensions/ros2-bridge" }
dora-ros2-bridge-msg-gen = { version = "0.3.11", path = "libraries/extensions/ros2-bridge/msg-gen" }
dora-node-api = { version = "0.3.12", path = "apis/rust/node", default-features = false }
dora-node-api-python = { version = "0.3.12", path = "apis/python/node", default-features = false }
dora-operator-api = { version = "0.3.12", path = "apis/rust/operator", default-features = false }
dora-operator-api-macros = { version = "0.3.12", path = "apis/rust/operator/macros" }
dora-operator-api-types = { version = "0.3.12", path = "apis/rust/operator/types" }
dora-operator-api-python = { version = "0.3.12", path = "apis/python/operator" }
dora-operator-api-c = { version = "0.3.12", path = "apis/c/operator" }
dora-node-api-c = { version = "0.3.12", path = "apis/c/node" }
dora-core = { version = "0.3.12", path = "libraries/core" }
dora-arrow-convert = { version = "0.3.12", path = "libraries/arrow-convert" }
dora-tracing = { version = "0.3.12", path = "libraries/extensions/telemetry/tracing" }
dora-metrics = { version = "0.3.12", path = "libraries/extensions/telemetry/metrics" }
dora-download = { version = "0.3.12", path = "libraries/extensions/download" }
shared-memory-server = { version = "0.3.12", path = "libraries/shared-memory-server" }
communication-layer-request-reply = { version = "0.3.12", path = "libraries/communication-layer/request-reply" }
dora-cli = { version = "0.3.12", path = "binaries/cli" }
dora-runtime = { version = "0.3.12", path = "binaries/runtime" }
dora-daemon = { version = "0.3.12", path = "binaries/daemon" }
dora-coordinator = { version = "0.3.12", path = "binaries/coordinator" }
dora-ros2-bridge = { version = "0.3.12", path = "libraries/extensions/ros2-bridge" }
dora-ros2-bridge-msg-gen = { version = "0.3.12", path = "libraries/extensions/ros2-bridge/msg-gen" }
dora-ros2-bridge-python = { path = "libraries/extensions/ros2-bridge/python" }
# versioned independently from the other dora crates
dora-message = { version = "0.5.0-alpha", path = "libraries/message" }
dora-message = { version = "0.5.0", path = "libraries/message" }
arrow = { version = "54.2.1" }
arrow-schema = { version = "54.2.1" }
arrow-data = { version = "54.2.1" }


+ 54
- 0
Changelog.md View File

@@ -1,5 +1,59 @@
# Changelog

## v0.3.12 (2025-06-30)

## What's Changed

- Implemented dora-cotracker node by @ShashwatPatil in https://github.com/dora-rs/dora/pull/931
- Minor fix and add boxes2d example to facebook/cotracker by @haixuanTao in https://github.com/dora-rs/dora/pull/950
- Update Rust crate tokio to v1.44.2 [SECURITY] by @renovate in https://github.com/dora-rs/dora/pull/951
- Post 3.11 release fix by @haixuanTao in https://github.com/dora-rs/dora/pull/954
- Bump crossbeam-channel from 0.5.14 to 0.5.15 by @dependabot in https://github.com/dora-rs/dora/pull/959
- Added E ruff flag for pydocstyle by @7SOMAY in https://github.com/dora-rs/dora/pull/958
- Revert "Added E ruff flag for better code quality [skip ci]" by @haixuanTao in https://github.com/dora-rs/dora/pull/968
- Ease of use changes in benches for issue #957 by @Ignavar in https://github.com/dora-rs/dora/pull/969
- Reachy cotracker by @haixuanTao in https://github.com/dora-rs/dora/pull/972
- Improve rav1e by @haixuanTao in https://github.com/dora-rs/dora/pull/974
- Fix pyrealsense by @haixuanTao in https://github.com/dora-rs/dora/pull/973
- Added Self Uninstall Command by @Shar-jeel-Sajid in https://github.com/dora-rs/dora/pull/944
- Improve benchmark implementation & Add warning for discarding events by @Mivik in https://github.com/dora-rs/dora/pull/971
- docs: Updated README: Added comprehensive usage documentation with vi… by @LeonRust in https://github.com/dora-rs/dora/pull/983
- Fix rerun-viewer example. by @francocipollone in https://github.com/dora-rs/dora/pull/989
- docs: add license badge by @Radovenchyk in https://github.com/dora-rs/dora/pull/996
- Disable sccache for `musllinux` jobs by @haixuanTao in https://github.com/dora-rs/dora/pull/1000
- Remove unused sysinfo monitor by @Mivik in https://github.com/dora-rs/dora/pull/1007
- Refactor Python CUDA IPC API by @Mivik in https://github.com/dora-rs/dora/pull/1002
- fix terminal not printing stdout on nvml warning by @haixuanTao in https://github.com/dora-rs/dora/pull/1008
- Fix issue #1006: [Brief description of the fix] by @sohamukute in https://github.com/dora-rs/dora/pull/1013
- Improving so100 usability by @haixuanTao in https://github.com/dora-rs/dora/pull/988
- Add dora-mediapipe node for quick human pose estimation by @haixuanTao in https://github.com/dora-rs/dora/pull/986
- Bump torch to 2.7 by @haixuanTao in https://github.com/dora-rs/dora/pull/1015
- refactor(tracing): use builder style by @sjfhsjfh in https://github.com/dora-rs/dora/pull/1009
- Fix spawning runtime through python when it is installed with pip by @haixuanTao in https://github.com/dora-rs/dora/pull/1011
- chore(deps): update dependency numpy to v2 by @renovate in https://github.com/dora-rs/dora/pull/1014
- Fix error when multiple visualization key is active and when urdf_transform env variable is not present by @haixuanTao in https://github.com/dora-rs/dora/pull/1016
- Update pyrealsense2 Dependencies for L515 Support and Fix README wget Link by @kingchou007 in https://github.com/dora-rs/dora/pull/1021
- Minor fix for mujoco sim by @haixuanTao in https://github.com/dora-rs/dora/pull/1023
- dora-mujoco simulation node with example for controlling any arm by @ShashwatPatil in https://github.com/dora-rs/dora/pull/1012
- fix ros CI/CD by @haixuanTao in https://github.com/dora-rs/dora/pull/1027
- dora-vggt by @haixuanTao in https://github.com/dora-rs/dora/pull/1024
- Adding vision to openai server by @haixuanTao in https://github.com/dora-rs/dora/pull/1025
- Revert "Adding vision to openai server" by @haixuanTao in https://github.com/dora-rs/dora/pull/1031
- Expose AllInputClosed message as a Stop message by @haixuanTao in https://github.com/dora-rs/dora/pull/1026
- Add support for git repository sources for nodes by @phil-opp in https://github.com/dora-rs/dora/pull/901
- Adding vision to rust openai proxy server by @haixuanTao in https://github.com/dora-rs/dora/pull/1033
- Add automatic robot descriptions URDF retrieval from https://github.com/robot-descriptions/robot_descriptions.py by @haixuanTao in https://github.com/dora-rs/dora/pull/1032

## New Contributors

- @Mivik made their first contribution in https://github.com/dora-rs/dora/pull/971
- @francocipollone made their first contribution in https://github.com/dora-rs/dora/pull/989
- @sohamukute made their first contribution in https://github.com/dora-rs/dora/pull/1013
- @sjfhsjfh made their first contribution in https://github.com/dora-rs/dora/pull/1009
- @kingchou007 made their first contribution in https://github.com/dora-rs/dora/pull/1021

**Full Changelog**: https://github.com/dora-rs/dora/compare/v0.3.11...v0.3.12

## v0.3.11 (2025-04-07)

## What's Changed


+ 16
- 10
README.md View File

@@ -62,6 +62,8 @@
<details open>
<summary><b>2025</b></summary>

- \[07/25\] Added Kornia rust nodes in the hub for V4L / Gstreamer cameras and Sobel image processing.
- \[06/25\] Add support for git based node, dora-vggt for multi-camera depth estimation, and adding robot_descriptions_py as a default way to get urdfs within dora.
- \[05/25\] Add support for dora-pytorch-kinematics for fk and ik, dora-mediapipe for pose estimation, dora-rustypot for rust serialport read/write, points2d and points3d visualization in rerun.
- \[04/25\] Add support for dora-cotracker to track any point on a frame, dora-rav1e AV1 encoding up to 12bit and dora-dav1d AV1 decoding,
- \[03/25\] Add support for dora async Python.
@@ -74,16 +76,16 @@

## Support Matrix

| | dora-rs |
| --------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **APIs** | Python >= 3.7 including sync ⭐✅ <br> Rust ✅<br> C/C++ 🆗 <br>ROS2 >= Foxy 🆗 |
| **OS** | Linux: Arm 32 ⭐✅ Arm 64 ⭐✅ x64_86 ⭐✅ <br>MacOS: Arm 64 ⭐✅ x64_86 ✅<br>Windows: x64_86 🆗 <br>WSL: x64_86 🆗 <br> Android: 🛠️ (Blocked by: https://github.com/elast0ny/shared_memory/issues/32) <br> IOS: 🛠️ |
| **Message Format** | Arrow ✅ <br> Standard Specification 🛠️ |
| **Local Communication** | Shared Memory ✅ <br> [Cuda IPC](https://arrow.apache.org/docs/python/api/cuda.html) 📐 |
| **Remote Communication** | [Zenoh](https://zenoh.io/) 📐 |
| **Metrics, Tracing, and Logging** | Opentelemetry 📐 |
| **Configuration** | YAML ✅ |
| **Package Manager** | [pip](https://pypi.org/): Python Node ✅ Rust Node ✅ C/C++ Node 🛠️ <br>[cargo](https://crates.io/): Rust Node ✅ |
| | dora-rs |
| --------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **APIs** | Python >= 3.7 including sync ⭐✅ <br> Rust ✅<br> C/C++ 🆗 <br>ROS2 >= Foxy 🆗 |
| **OS** | Linux: Arm 32 ⭐✅ Arm 64 ⭐✅ x64_86 ⭐✅ <br>MacOS: Arm 64 ⭐✅ <br>Windows: x64_86 🆗 <br>WSL: x64_86 🆗 <br> Android: 🛠️ (Blocked by: https://github.com/elast0ny/shared_memory/issues/32) <br> IOS: 🛠️ |
| **Message Format** | Arrow ✅ <br> Standard Specification 🛠️ |
| **Local Communication** | Shared Memory ✅ <br> [Cuda IPC](https://arrow.apache.org/docs/python/api/cuda.html) 📐 |
| **Remote Communication** | [Zenoh](https://zenoh.io/) 📐 |
| **Metrics, Tracing, and Logging** | Opentelemetry 📐 |
| **Configuration** | YAML ✅ |
| **Package Manager** | [pip](https://pypi.org/): Python Node ✅ Rust Node ✅ C/C++ Node 🛠️ <br>[cargo](https://crates.io/): Rust Node ✅ |

> - ⭐ = Recommended
> - ✅ = First Class Support
@@ -102,6 +104,8 @@
| Camera | [PyOrbbeckSDK](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyorbbecksdk) | 📐 | Image and depth from Orbbeck Camera | ![Downloads](https://img.shields.io/pypi/dm/dora-pyorbbecksdk?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyorbbecksdk?label=%20) |
| Camera | [PyRealsense](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyrealsense) | Linux🆗 <br> Mac🛠️ | Image and depth from Realsense | ![Downloads](https://img.shields.io/pypi/dm/dora-pyrealsense?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyrealsense?label=%20) |
| Camera | [OpenCV Video Capture](https://github.com/dora-rs/dora/blob/main/node-hub/opencv-video-capture) | ✅ | Image stream from OpenCV Camera | ![Downloads](https://img.shields.io/pypi/dm/opencv-video-capture?label=%20) | ![License](https://img.shields.io/pypi/l/opencv-video-capture?label=%20) |
| Camera | [Kornia V4L Capture](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-v4l-capture) | ✅ | Video stream for Linux Camera (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |
| Camera | [Kornia GST Capture](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-gst-capture) | ✅ | Video Capture using Gstreamer (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |
| Peripheral | [Keyboard](https://github.com/dora-rs/dora/blob/main/node-hub/dora-keyboard) | ✅ | Keyboard char listener | ![Downloads](https://img.shields.io/pypi/dm/dora-keyboard?label=%20) | ![License](https://img.shields.io/pypi/l/dora-keyboard?label=%20) |
| Peripheral | [Microphone](https://github.com/dora-rs/dora/blob/main/node-hub/dora-microphone) | ✅ | Audio from microphone | ![Downloads](https://img.shields.io/pypi/dm/dora-microphone?label=%20) | ![License](https://img.shields.io/pypi/l/dora-microphone?label=%20) |
| Peripheral | [PyAudio(Speaker)](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyaudio) | ✅ | Output audio from speaker | ![Downloads](https://img.shields.io/pypi/dm/dora-pyaudio?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyaudio?label=%20) |
@@ -134,6 +138,7 @@
| Simulator | [Mujoco](https://github.com/dora-rs/dora-lerobot/blob/main/node-hub/mujoco-client) | 📐 | Mujoco Simulator | | |
| Simulator | [Carla](https://github.com/dora-rs/dora-drives) | 📐 | Carla Simulator | | |
| Simulator | [Gymnasium](https://github.com/dora-rs/dora-lerobot/blob/main/gym_dora) | 📐 | Experimental OpenAI Gymnasium bridge | | |
| Image Processing | [Kornia Sobel Operator](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-imgproc-sobel) | ✅ | Kornia image processing Sobel operator (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |

## Examples

@@ -144,6 +149,7 @@
| Vision | [Vision Language Model(VLM)](https://github.com/dora-rs/dora/blob/main/examples/vlm) | Use a VLM to understand images. | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fvlm&label=%20) |
| Vision | [YOLO](https://github.com/dora-rs/dora/blob/main/examples/python-dataflow) | Use YOLO to detect object within image. | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fpython-dataflow&label=%20) |
| Vision | [Camera](https://github.com/dora-rs/dora/blob/main/examples/camera) | Simple webcam plot example | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fcamera&label=%20) |
| Vision | [Image Processing](https://github.com/kornia/kornia-rs/tree/main/examples/dora) | Multi camera image processing | |
| Model Training | [Piper RDT](https://github.com/dora-rs/dora/blob/main/examples/piper) | Piper RDT Pipeline | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fpiper&label=%20) |
| Model Training | [LeRobot - Alexander Koch](https://raw.githubusercontent.com/dora-rs/dora-lerobot/refs/heads/main/README.md) | Training Alexander Koch Low Cost Robot with LeRobot | ![License](https://img.shields.io/github/last-commit/dora-rs/dora-lerobot?path=robots&label=%20) |
| ROS2 | [C++ ROS2 Example](https://github.com/dora-rs/dora/blob/main/examples/c++-ros2-dataflow) | Example using C++ ROS2 | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fc%2b%2b-ros2-dataflow&label=%20) |


+ 1
- 1
apis/python/node/Cargo.toml View File

@@ -30,7 +30,7 @@ arrow = { workspace = true, features = ["pyarrow"] }
pythonize = { workspace = true }
futures = "0.3.28"
dora-ros2-bridge-python = { workspace = true }
# pyo3_special_method_derive = "0.4.2"
pyo3_special_method_derive = "0.4.3"
tokio = { version = "1.24.2", features = ["rt"] }

[build-dependencies]


+ 0
- 8
apis/python/node/pyproject.toml View File

@@ -22,11 +22,3 @@ extend-select = [
"D", # pydocstyle
"UP",
]

[tool.maturin.target.x86_64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

[tool.maturin.target.aarch64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

+ 3
- 3
apis/python/node/src/lib.rs View File

@@ -17,7 +17,7 @@ use eyre::Context;
use futures::{Stream, StreamExt};
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyDict};
/// use pyo3_special_method_derive::{Dict, Dir, Repr, Str};
use pyo3_special_method_derive::{Dict, Dir, Repr, Str};

/// The custom node API lets you integrate `dora` into your application.
/// It allows you to retrieve input and send output in any fashion you want.
@@ -32,7 +32,7 @@ use pyo3::types::{PyBytes, PyDict};
///
/// :type node_id: str, optional
#[pyclass]
/// #[derive(Dir, Dict, Str, Repr)]
#[derive(Dir, Dict, Str, Repr)]
pub struct Node {
events: Events,
node: DelayedCleanup<DoraNode>,
@@ -381,7 +381,7 @@ pub fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
#[pyfunction]
#[pyo3(signature = (dataflow_path, uv=None))]
pub fn run(dataflow_path: String, uv: Option<bool>) -> eyre::Result<()> {
dora_cli::command::run(dataflow_path, uv.unwrap_or_default())
dora_cli::run_func(dataflow_path, uv.unwrap_or_default())
}

#[pymodule]


+ 1
- 0
apis/rust/node/src/node/mod.rs View File

@@ -58,6 +58,7 @@ mod drop_stream;
/// TCP.
pub const ZERO_COPY_THRESHOLD: usize = 4096;

#[allow(dead_code)]
enum TokioRuntime {
Runtime(Runtime),
Handle(Handle),


+ 0
- 8
binaries/cli/pyproject.toml View File

@@ -18,11 +18,3 @@ extend-select = [
"D", # pydocstyle
"UP",
]

[tool.maturin.target.x86_64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

[tool.maturin.target.aarch64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

+ 41
- 4
binaries/cli/src/command/build/mod.rs View File

@@ -54,9 +54,13 @@ use dora_core::{
};
use dora_message::{descriptor::NodeSource, BuildId};
use eyre::Context;
use std::collections::BTreeMap;
use std::{collections::BTreeMap, net::IpAddr};

use crate::{connect_to_coordinator, resolve_dataflow, session::DataflowSession};
use super::{default_tracing, Executable};
use crate::{
common::{connect_to_coordinator, local_working_dir, resolve_dataflow},
session::DataflowSession,
};

use distributed::{build_distributed_dataflow, wait_until_dataflow_built};
use local::build_dataflow_locally;
@@ -65,9 +69,42 @@ mod distributed;
mod git;
mod local;

// CLI arguments for `dora build`: builds the nodes of a dataflow, either
// locally or through a remote coordinator (clap derives `--help` text from
// the `///` doc comments below).
#[derive(Debug, clap::Args)]
/// Run build commands provided in the given dataflow.
pub struct Build {
    /// Path to the dataflow descriptor file
    #[clap(value_name = "PATH")]
    dataflow: String,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP")]
    coordinator_addr: Option<IpAddr>,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT")]
    coordinator_port: Option<u16>,
    // Use UV to build nodes.
    // NOTE(review): plain `//` comment — unlike the fields above, this does
    // not show up in `--help`. Probably should be `///`; confirm intent.
    #[clap(long, action)]
    uv: bool,
    // Run build on local machine
    // NOTE(review): same `//` vs `///` discrepancy as `uv` above.
    #[clap(long, action)]
    local: bool,
}

impl Executable for Build {
    // Sets up default tracing, then delegates to the free `build` function
    // with the parsed CLI arguments.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        build(
            self.dataflow,
            self.coordinator_addr,
            self.coordinator_port,
            self.uv,
            self.local,
        )
    }
}

pub fn build(
dataflow: String,
coordinator_addr: Option<std::net::IpAddr>,
coordinator_addr: Option<IpAddr>,
coordinator_port: Option<u16>,
uv: bool,
force_local: bool,
@@ -153,7 +190,7 @@ pub fn build(
BuildKind::ThroughCoordinator {
mut coordinator_session,
} => {
let local_working_dir = super::local_working_dir(
let local_working_dir = local_working_dir(
&dataflow_path,
&dataflow_descriptor,
&mut *coordinator_session,


+ 41
- 1
binaries/cli/src/command/check.rs View File

@@ -1,11 +1,15 @@
use crate::connect_to_coordinator;
use super::{default_tracing, Executable};
use crate::{common::connect_to_coordinator, LOCALHOST};
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::descriptor::DescriptorExt;
use dora_core::{descriptor::Descriptor, topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT};
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context};
use std::{
io::{IsTerminal, Write},
net::SocketAddr,
};
use std::{net::IpAddr, path::PathBuf};
use termcolor::{Color, ColorChoice, ColorSpec, WriteColor};

pub fn check_environment(coordinator_addr: SocketAddr) -> eyre::Result<()> {
@@ -75,3 +79,39 @@ pub fn daemon_running(session: &mut TcpRequestReplyConnection) -> Result<bool, e

Ok(running)
}

// CLI arguments for `dora check`: verifies that coordinator and daemon are
// reachable, and optionally validates a dataflow descriptor.
#[derive(Debug, clap::Args)]
/// Check if the coordinator and the daemon is running.
pub struct Check {
    /// Path to the dataflow descriptor file (enables additional checks)
    #[clap(long, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
    dataflow: Option<PathBuf>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Check {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        match self.dataflow {
            // With a dataflow given: validate the descriptor against its
            // parent directory first, then check the environment.
            Some(dataflow) => {
                // The descriptor's `check` resolves relative paths against
                // the canonicalized parent directory of the YAML file.
                let working_dir = dataflow
                    .canonicalize()
                    .context("failed to canonicalize dataflow path")?
                    .parent()
                    .ok_or_else(|| eyre::eyre!("dataflow path has no parent dir"))?
                    .to_owned();
                Descriptor::blocking_read(&dataflow)?.check(&working_dir)?;
                check_environment((self.coordinator_addr, self.coordinator_port).into())?
            }
            // Without a dataflow: only check coordinator/daemon reachability.
            None => check_environment((self.coordinator_addr, self.coordinator_port).into())?,
        }

        Ok(())
    }
}

+ 66
- 0
binaries/cli/src/command/coordinator.rs View File

@@ -0,0 +1,66 @@
use super::Executable;
use crate::LISTEN_WILDCARD;
use dora_coordinator::Event;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, DORA_COORDINATOR_PORT_DEFAULT};

#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;

use eyre::Context;
use std::net::{IpAddr, SocketAddr};
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;

// CLI arguments for `dora coordinator`: runs the coordinator process that
// daemons and the CLI control channel connect to.
#[derive(Debug, clap::Args)]
/// Run coordinator
pub struct Coordinator {
    /// Network interface to bind to for daemon communication
    #[clap(long, default_value_t = LISTEN_WILDCARD)]
    interface: IpAddr,
    /// Port number to bind to for daemon communication
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
    port: u16,
    /// Network interface to bind to for control communication
    #[clap(long, default_value_t = LISTEN_WILDCARD)]
    control_interface: IpAddr,
    /// Port number to bind to for control communication
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    control_port: u16,
    /// Suppresses all log output to stdout.
    #[clap(long)]
    quiet: bool,
}

impl Executable for Coordinator {
    fn execute(self) -> eyre::Result<()> {
        // Tracing setup is compiled in only when the `tracing` feature is
        // enabled; file logging at INFO is always on, stdout only if not quiet.
        #[cfg(feature = "tracing")]
        {
            let name = "dora-coordinator";
            let mut builder = TracingBuilder::new(name);
            if !self.quiet {
                builder = builder.with_stdout("info");
            }
            builder = builder.with_file(name, LevelFilter::INFO)?;
            builder
                .build()
                .wrap_err("failed to set up tracing subscriber")?;
        }

        // The coordinator is async; build a multi-threaded tokio runtime and
        // block on it from this synchronous CLI entry point.
        let rt = Builder::new_multi_thread()
            .enable_all()
            .build()
            .context("tokio runtime failed")?;
        rt.block_on(async {
            let bind = SocketAddr::new(self.interface, self.port);
            let bind_control = SocketAddr::new(self.control_interface, self.control_port);
            // No external event stream is fed in here, hence the empty stream.
            let (port, task) =
                dora_coordinator::start(bind, bind_control, futures::stream::empty::<Event>())
                    .await?;
            if !self.quiet {
                // `port` is the actually-bound daemon port (relevant if 0 was requested).
                println!("Listening for incoming daemon connection on {port}");
            }
            task.await
        })
        .context("failed to run dora-coordinator")
    }
}

+ 91
- 0
binaries/cli/src/command/daemon.rs View File

@@ -0,0 +1,91 @@
use super::Executable;
use crate::{common::handle_dataflow_result, session::DataflowSession};
use dora_core::topics::{
DORA_COORDINATOR_PORT_DEFAULT, DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT, LOCALHOST,
};

use dora_daemon::LogDestination;
#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;

use eyre::Context;
use std::{
net::{IpAddr, SocketAddr},
path::PathBuf,
};
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;

// CLI arguments for `dora daemon`: runs a daemon that connects to a
// coordinator, or (via the hidden `--run-dataflow` flag) directly executes a
// local dataflow without a coordinator.
#[derive(Debug, clap::Args)]
/// Run daemon
pub struct Daemon {
    /// Unique identifier for the machine (required for distributed dataflows)
    #[clap(long)]
    machine_id: Option<String>,
    /// Local listen port for event such as dynamic node.
    #[clap(long, default_value_t = DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT)]
    local_listen_port: u16,
    /// Address and port number of the dora coordinator
    #[clap(long, short, default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
    coordinator_port: u16,
    // Hidden flag: when set, run the given dataflow directly instead of
    // connecting to a coordinator (used internally, e.g. by `dora run`).
    #[clap(long, hide = true)]
    run_dataflow: Option<PathBuf>,
    /// Suppresses all log output to stdout.
    #[clap(long)]
    quiet: bool,
}

impl Executable for Daemon {
    fn execute(self) -> eyre::Result<()> {
        #[cfg(feature = "tracing")]
        {
            let name = "dora-daemon";
            // Log file name includes the machine id, if one was given, so
            // multiple daemons on one host don't clobber each other's logs.
            let filename = self
                .machine_id
                .as_ref()
                .map(|id| format!("{name}-{id}"))
                .unwrap_or(name.to_string());
            let mut builder = TracingBuilder::new(name);
            if !self.quiet {
                // zenoh is noisy at info level, so cap it at warn on stdout.
                builder = builder.with_stdout("info,zenoh=warn");
            }
            builder = builder.with_file(filename, LevelFilter::INFO)?;
            builder
                .build()
                .wrap_err("failed to set up tracing subscriber")?;
        }

        let rt = Builder::new_multi_thread()
            .enable_all()
            .build()
            .context("tokio runtime failed")?;
        rt.block_on(async {
            match self.run_dataflow {
                // Direct local execution path (hidden `--run-dataflow` flag).
                Some(dataflow_path) => {
                    tracing::info!("Starting dataflow `{}`", dataflow_path.display());
                    // A non-default coordinator address is ignored in this
                    // mode; warn the user instead of silently dropping it.
                    if self.coordinator_addr != LOCALHOST {
                        tracing::info!(
                            "Not using coordinator addr {} as `run_dataflow` is for local dataflow only. Please use the `start` command for remote coordinator",
                            self.coordinator_addr
                        );
                    }
                    let dataflow_session =
                        DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;

                    let result = dora_daemon::Daemon::run_dataflow(&dataflow_path,
                        dataflow_session.build_id, dataflow_session.local_build, dataflow_session.session_id, false,
                        LogDestination::Tracing,
                    ).await?;
                    handle_dataflow_result(result, None)
                }
                // Normal daemon mode: connect to the coordinator and serve.
                None => {
                    dora_daemon::Daemon::run(SocketAddr::new(self.coordinator_addr, self.coordinator_port), self.machine_id, self.local_listen_port).await
                }
            }
        })
        .context("failed to run dora-daemon")
    }
}

+ 28
- 0
binaries/cli/src/command/destroy.rs View File

@@ -0,0 +1,28 @@
use super::{default_tracing, up, Executable};
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use std::net::IpAddr;
use std::path::PathBuf;

// CLI arguments for `dora destroy`: tears down a running coordinator and
// daemon via the `up` module's destroy routine.
#[derive(Debug, clap::Args)]
/// Destroy running coordinator and daemon. If some dataflows are still running, they will be stopped first.
pub struct Destroy {
    /// Use a custom configuration
    #[clap(long, hide = true)]
    config: Option<PathBuf>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Destroy {
    // Delegates to `up::destroy` with the (addr, port) pair converted to a
    // `SocketAddr` via the tuple `Into` impl.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        up::destroy(
            self.config.as_deref(),
            (self.coordinator_addr, self.coordinator_port).into(),
        )
    }
}

binaries/cli/src/graph/mod.rs → binaries/cli/src/command/graph.rs View File

@@ -1,11 +1,35 @@
use std::{fs::File, io::Write, path::Path};

use super::Executable;
use dora_core::descriptor::{Descriptor, DescriptorExt};
use eyre::Context;
use std::{
fs::File,
io::Write,
path::{Path, PathBuf},
};

const MERMAID_TEMPLATE: &str = include_str!("graph/mermaid-template.html");

#[derive(Debug, clap::Args)]
/// Generate a visualization of the given graph using mermaid.js. Use --open to open browser.
pub struct Graph {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
dataflow: PathBuf,
/// Visualize the dataflow as a Mermaid diagram (instead of HTML)
#[clap(long, action)]
mermaid: bool,
/// Open the HTML visualization in the browser
#[clap(long, action)]
open: bool,
}

const MERMAID_TEMPLATE: &str = include_str!("mermaid-template.html");
impl Executable for Graph {
fn execute(self) -> eyre::Result<()> {
create(self.dataflow, self.mermaid, self.open)
}
}

pub(crate) fn create(dataflow: std::path::PathBuf, mermaid: bool, open: bool) -> eyre::Result<()> {
fn create(dataflow: std::path::PathBuf, mermaid: bool, open: bool) -> eyre::Result<()> {
if mermaid {
let visualized = visualize_as_mermaid(&dataflow)?;
println!("{visualized}");

+ 1
- 0
binaries/cli/src/command/graph/.gitignore View File

@@ -0,0 +1 @@
!*template.html

binaries/cli/src/graph/mermaid-template.html → binaries/cli/src/command/graph/mermaid-template.html View File


+ 59
- 0
binaries/cli/src/command/list.rs View File

@@ -0,0 +1,59 @@
use std::io::Write;

use super::{default_tracing, Executable};
use crate::{
common::{connect_to_coordinator, query_running_dataflows},
LOCALHOST,
};
use clap::Args;
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT;
use dora_message::coordinator_to_cli::DataflowStatus;
use eyre::eyre;
use tabwriter::TabWriter;

// CLI arguments for `dora list`: queries the coordinator for running
// dataflows and prints them as an aligned table.
#[derive(Debug, Args)]
/// List running dataflows.
pub struct ListArgs {
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    pub coordinator_addr: std::net::IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    pub coordinator_port: u16,
}

impl Executable for ListArgs {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        let mut session =
            connect_to_coordinator((self.coordinator_addr, self.coordinator_port).into())
                .map_err(|_| eyre!("Failed to connect to coordinator"))?;

        list(&mut *session)
    }
}

// Fetches the dataflow list from the coordinator and prints a
// tab-aligned UUID/Name/Status table to stdout.
fn list(session: &mut TcpRequestReplyConnection) -> Result<(), eyre::ErrReport> {
    let list = query_running_dataflows(session)?;

    // TabWriter aligns the tab-separated columns once flushed.
    let mut tw = TabWriter::new(vec![]);
    tw.write_all(b"UUID\tName\tStatus\n")?;
    for entry in list.0 {
        let uuid = entry.id.uuid;
        let name = entry.id.name.unwrap_or_default();
        let status = match entry.status {
            DataflowStatus::Running => "Running",
            DataflowStatus::Finished => "Succeeded",
            DataflowStatus::Failed => "Failed",
        };
        tw.write_all(format!("{uuid}\t{name}\t{status}\n").as_bytes())?;
    }
    tw.flush()?;
    let formatted = String::from_utf8(tw.into_inner()?)?;

    println!("{formatted}");

    Ok(())
}

+ 46
- 1
binaries/cli/src/command/logs.rs View File

@@ -1,9 +1,54 @@
use super::{default_tracing, Executable};
use crate::common::{connect_to_coordinator, query_running_dataflows};
use bat::{Input, PrettyPrinter};
use clap::Args;
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, Result};
use uuid::Uuid;

use bat::{Input, PrettyPrinter};
// CLI arguments for `dora logs`: shows the logs of one node of a dataflow.
// The dataflow may be given as UUID or name; if omitted, the user is
// prompted (or the single active dataflow is picked automatically).
#[derive(Debug, Args)]
/// Show logs of a given dataflow and node.
pub struct LogsArgs {
    /// Identifier of the dataflow
    #[clap(value_name = "UUID_OR_NAME")]
    pub dataflow: Option<String>,
    /// Show logs for the given node
    #[clap(value_name = "NAME")]
    pub node: String,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    pub coordinator_addr: std::net::IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    pub coordinator_port: u16,
}

impl Executable for LogsArgs {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        let mut session =
            connect_to_coordinator((self.coordinator_addr, self.coordinator_port).into())
                .wrap_err("failed to connect to dora coordinator")?;
        let list =
            query_running_dataflows(&mut *session).wrap_err("failed to query running dataflows")?;
        if let Some(dataflow) = self.dataflow {
            // Try to parse the identifier as a UUID first; on failure treat
            // it as a dataflow name (exactly one of `uuid`/`name` is Some).
            let uuid = Uuid::parse_str(&dataflow).ok();
            let name = if uuid.is_some() { None } else { Some(dataflow) };
            logs(&mut *session, uuid, name, self.node)
        } else {
            // No identifier given: pick the only active dataflow, or prompt
            // the user interactively if several are running.
            let active = list.get_active();
            let uuid = match &active[..] {
                [] => bail!("No dataflows are running"),
                [uuid] => uuid.clone(),
                _ => inquire::Select::new("Choose dataflow to show logs:", active).prompt()?,
            };
            logs(&mut *session, Some(uuid.uuid), None, self.node)
        }
    }
}

pub fn logs(
session: &mut TcpRequestReplyConnection,


+ 96
- 52
binaries/cli/src/command/mod.rs View File

@@ -1,60 +1,104 @@
pub use build::build;
pub use logs::logs;
pub use run::run;
pub use start::start;

use std::path::{Path, PathBuf};

use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::descriptor::Descriptor;
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, ContextCompat};

mod build;
pub mod check;
mod check;
mod coordinator;
mod daemon;
mod destroy;
mod graph;
mod list;
mod logs;
mod new;
mod run;
mod runtime;
mod self_;
mod start;
pub mod up;

fn local_working_dir(
dataflow_path: &Path,
dataflow_descriptor: &Descriptor,
coordinator_session: &mut TcpRequestReplyConnection,
) -> eyre::Result<Option<PathBuf>> {
Ok(
if dataflow_descriptor
.nodes
.iter()
.all(|n| n.deploy.as_ref().map(|d| d.machine.as_ref()).is_none())
&& cli_and_daemon_on_same_machine(coordinator_session)?
{
Some(
dunce::canonicalize(dataflow_path)
.context("failed to canonicalize dataflow file path")?
.parent()
.context("dataflow path has no parent dir")?
.to_owned(),
)
} else {
None
},
)
mod stop;
mod up;

pub use run::run_func;

use build::Build;
use check::Check;
use coordinator::Coordinator;
use daemon::Daemon;
use destroy::Destroy;
use eyre::Context;
use graph::Graph;
use list::ListArgs;
use logs::LogsArgs;
use new::NewArgs;
use run::Run;
use runtime::Runtime;
use self_::SelfSubCommand;
use start::Start;
use stop::Stop;
use up::Up;

/// dora-rs cli client
#[derive(Debug, clap::Subcommand)]
pub enum Command {
Check(Check),
Graph(Graph),
Build(Build),
New(NewArgs),
Run(Run),
Up(Up),
Destroy(Destroy),
Start(Start),
Stop(Stop),
List(ListArgs),
// Planned for future releases:
// Dashboard,
#[command(allow_missing_positional = true)]
Logs(LogsArgs),
// Metrics,
// Stats,
// Get,
// Upgrade,
Daemon(Daemon),
Runtime(Runtime),
Coordinator(Coordinator),

Self_ {
#[clap(subcommand)]
command: SelfSubCommand,
},
}

fn default_tracing() -> eyre::Result<()> {
#[cfg(feature = "tracing")]
{
use dora_tracing::TracingBuilder;

TracingBuilder::new("dora-cli")
.with_stdout("warn")
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
Ok(())
}

pub trait Executable {
fn execute(self) -> eyre::Result<()>;
}

fn cli_and_daemon_on_same_machine(session: &mut TcpRequestReplyConnection) -> eyre::Result<bool> {
let reply_raw = session
.request(&serde_json::to_vec(&ControlRequest::CliAndDefaultDaemonOnSameMachine).unwrap())
.wrap_err("failed to send start dataflow message")?;

let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::CliAndDefaultDaemonIps {
default_daemon,
cli,
} => Ok(default_daemon.is_some() && default_daemon == cli),
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected start dataflow reply: {other:?}"),
impl Executable for Command {
fn execute(self) -> eyre::Result<()> {
match self {
Command::Check(args) => args.execute(),
Command::Coordinator(args) => args.execute(),
Command::Graph(args) => args.execute(),
Command::Build(args) => args.execute(),
Command::New(args) => args.execute(),
Command::Run(args) => args.execute(),
Command::Up(args) => args.execute(),
Command::Destroy(args) => args.execute(),
Command::Start(args) => args.execute(),
Command::Stop(args) => args.execute(),
Command::List(args) => args.execute(),
Command::Logs(args) => args.execute(),
Command::Daemon(args) => args.execute(),
Command::Self_ { command } => command.execute(),
Command::Runtime(args) => args.execute(),
}
}
}

+ 21
- 0
binaries/cli/src/command/new.rs View File

@@ -0,0 +1,21 @@
use clap::Args;

use super::{default_tracing, Executable};

// CLI arguments for `dora new`: scaffolds a new project or node from a
// template. The actual argument set lives in `crate::CommandNew` and is
// flattened into this wrapper.
#[derive(Debug, Args)]
/// Generate a new project or node. Choose the language between Rust, Python, C or C++.
pub struct NewArgs {
    #[clap(flatten)]
    // TODO!: better impl
    args: crate::CommandNew,
    /// Internal flag for creating with path dependencies
    #[clap(hide = true, long)]
    pub internal_create_with_path_dependencies: bool,
}

impl Executable for NewArgs {
    // Sets up tracing and delegates to the template generator.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        crate::template::create(self.args, self.internal_create_with_path_dependencies)
    }
}

+ 36
- 4
binaries/cli/src/command/run.rs View File

@@ -5,15 +5,41 @@
//!
//! Use `dora build --local` or manual build commands to build your nodes.

use super::Executable;
use crate::{
common::{handle_dataflow_result, resolve_dataflow},
output::print_log_message,
session::DataflowSession,
};
use dora_daemon::{flume, Daemon, LogDestination};
use dora_tracing::TracingBuilder;
use eyre::Context;
use tokio::runtime::Builder;

use crate::{
handle_dataflow_result, output::print_log_message, resolve_dataflow, session::DataflowSession,
};
// CLI arguments for `dora run`: executes a dataflow locally without a
// coordinator or daemon.
#[derive(Debug, clap::Args)]
/// Run a dataflow locally.
///
/// Directly runs the given dataflow without connecting to a dora
/// coordinator or daemon. The dataflow is executed on the local machine.
pub struct Run {
    /// Path to the dataflow descriptor file
    #[clap(value_name = "PATH")]
    dataflow: String,
    // Use UV to run nodes.
    // NOTE(review): `//` instead of `///` — this flag has no `--help` text;
    // confirm whether that is intentional.
    #[clap(long, action)]
    uv: bool,
}

pub fn run_func(dataflow: String, uv: bool) -> eyre::Result<()> {
#[cfg(feature = "tracing")]
{
let log_level = std::env::var("RUST_LOG").ok().unwrap_or("info".to_string());
TracingBuilder::new("run")
.with_stdout(log_level)
.build()
.wrap_err("failed to set up tracing subscriber")?;
}

pub fn run(dataflow: String, uv: bool) -> Result<(), eyre::Error> {
let dataflow_path = resolve_dataflow(dataflow).context("could not resolve dataflow")?;
let dataflow_session =
DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;
@@ -39,3 +65,9 @@ pub fn run(dataflow: String, uv: bool) -> Result<(), eyre::Error> {
))?;
handle_dataflow_result(result, None)
}

impl Executable for Run {
    // Thin adapter: forwards the parsed CLI arguments to `run_func`, which
    // is also exported for use by the Python bindings.
    fn execute(self) -> eyre::Result<()> {
        run_func(self.dataflow, self.uv)
    }
}

+ 15
- 0
binaries/cli/src/command/runtime.rs View File

@@ -0,0 +1,15 @@
use eyre::Context;

use super::Executable;

// CLI arguments for `dora runtime`: runs the dora node runtime. Takes no
// options, hence the unit struct.
#[derive(Debug, clap::Args)]
/// Run runtime
pub struct Runtime;

impl Executable for Runtime {
    fn execute(self) -> eyre::Result<()> {
        // No tracing: Do not set the runtime in the cli.
        // ref: 72b4be808122574fcfda69650954318e0355cc7b cli::run
        dora_runtime::main().context("Failed to run dora-runtime")
    }
}

+ 139
- 0
binaries/cli/src/command/self_.rs View File

@@ -0,0 +1,139 @@
use super::{default_tracing, Executable};
use clap::Subcommand;
use eyre::{bail, Context};

// Subcommands of `dora self`: update the CLI binary in place, or remove it
// from the system.
#[derive(Debug, Subcommand)]
/// Dora CLI self-management commands
pub enum SelfSubCommand {
    /// Check for updates or update the CLI
    Update {
        /// Only check for updates without installing
        #[clap(long)]
        check_only: bool,
    },
    // NOTE(review): help text reads "Remove The Dora CLI" — "The" should
    // probably be lowercase (kept as-is here since it is user-visible text).
    /// Remove The Dora CLI from the system
    Uninstall {
        /// Force uninstallation without confirmation
        #[clap(long)]
        force: bool,
    },
}

impl Executable for SelfSubCommand {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        match self {
            SelfSubCommand::Update { check_only } => {
                println!("Checking for updates...");

                // Path of the binary inside the release archive differs per
                // platform.
                // NOTE(review): the linux and macos branches are identical;
                // they could be merged into one `#[cfg(any(...))]`.
                #[cfg(target_os = "linux")]
                let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
                #[cfg(target_os = "macos")]
                let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
                #[cfg(target_os = "windows")]
                let bin_path_in_archive = String::from("dora.exe");

                // Configure the self_update GitHub backend against the
                // dora-rs/dora releases.
                let status = self_update::backends::github::Update::configure()
                    .repo_owner("dora-rs")
                    .repo_name("dora")
                    .bin_path_in_archive(&bin_path_in_archive)
                    .bin_name("dora")
                    .show_download_progress(true)
                    .current_version(env!("CARGO_PKG_VERSION"))
                    .build()?;

                if check_only {
                    // Only check if an update is available
                    match status.get_latest_release() {
                        Ok(release) => {
                            let current_version = self_update::cargo_crate_version!();
                            if current_version != release.version {
                                println!(
                                    "An update is available: {}. Run 'dora self update' to update",
                                    release.version
                                );
                            } else {
                                println!(
                                    "Dora CLI is already at the latest version: {}",
                                    current_version
                                );
                            }
                        }
                        // Check failures are reported but do not abort with Err.
                        Err(e) => println!("Failed to check for updates: {}", e),
                    }
                } else {
                    // Perform the actual update
                    match status.update() {
                        Ok(update_status) => match update_status {
                            self_update::Status::UpToDate(version) => {
                                println!("Dora CLI is already at the latest version: {}", version);
                            }
                            self_update::Status::Updated(version) => {
                                println!("Successfully updated Dora CLI to version: {}", version);
                            }
                        },
                        // NOTE(review): update failure is printed but the
                        // command still exits Ok(()) — confirm this is intended.
                        Err(e) => println!("Failed to update: {}", e),
                    }
                }
            }
            SelfSubCommand::Uninstall { force } => {
                // Ask for confirmation unless --force was given.
                if !force {
                    let confirmed =
                        inquire::Confirm::new("Are you sure you want to uninstall Dora CLI?")
                            .with_default(false)
                            .prompt()
                            .wrap_err("Uninstallation cancelled")?;

                    if !confirmed {
                        println!("Uninstallation cancelled");
                        return Ok(());
                    }
                }

                println!("Uninstalling Dora CLI...");
                // When built with the python feature, the CLI was installed
                // as the `dora-rs-cli` package: try `uv pip`, then plain pip.
                #[cfg(feature = "python")]
                {
                    println!("Detected Python installation...");

                    // Try uv pip uninstall first
                    let uv_status = std::process::Command::new("uv")
                        .args(["pip", "uninstall", "dora-rs-cli"])
                        .status();

                    if let Ok(status) = uv_status {
                        if status.success() {
                            println!("Dora CLI has been successfully uninstalled via uv pip.");
                            return Ok(());
                        }
                    }

                    // Fall back to regular pip uninstall
                    println!("Trying with pip...");
                    let status = std::process::Command::new("pip")
                        .args(["uninstall", "-y", "dora-rs-cli"])
                        .status()
                        .wrap_err("Failed to run pip uninstall")?;

                    if status.success() {
                        println!("Dora CLI has been successfully uninstalled via pip.");
                    } else {
                        bail!("Failed to uninstall Dora CLI via pip.");
                    }
                }
                // Non-python builds: delete the running executable itself.
                #[cfg(not(feature = "python"))]
                {
                    match self_replace::self_delete() {
                        Ok(_) => {
                            println!("Dora CLI has been successfully uninstalled.");
                        }
                        Err(e) => {
                            bail!("Failed to uninstall Dora CLI: {}", e);
                        }
                    }
                }
            }
        }
        Ok(())
    }
}

+ 1
- 1
binaries/cli/src/command/start/attach.rs View File

@@ -14,7 +14,7 @@ use std::{path::PathBuf, sync::mpsc, time::Duration};
use tracing::{error, info};
use uuid::Uuid;

use crate::handle_dataflow_result;
use crate::common::handle_dataflow_result;
use crate::output::print_log_message;

pub fn attach_dataflow(


+ 80
- 50
binaries/cli/src/command/start/mod.rs View File

@@ -2,73 +2,104 @@
//!
//! The `dora start` command does not run any build commands, nor update git dependencies or similar. Use `dora build` for that.

use super::{default_tracing, Executable};
use crate::{
command::start::attach::attach_dataflow,
common::{connect_to_coordinator, local_working_dir, resolve_dataflow},
output::print_log_message,
session::DataflowSession,
};
use communication_layer_request_reply::{TcpConnection, TcpRequestReplyConnection};
use dora_core::descriptor::{Descriptor, DescriptorExt};
use dora_core::{
descriptor::{Descriptor, DescriptorExt},
topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST},
};
use dora_message::{
cli_to_coordinator::ControlRequest, common::LogMessage, coordinator_to_cli::ControlRequestReply,
};
use eyre::{bail, Context};
use std::{
net::{SocketAddr, TcpStream},
net::{IpAddr, SocketAddr, TcpStream},
path::PathBuf,
};
use uuid::Uuid;

use crate::{
connect_to_coordinator, output::print_log_message, resolve_dataflow, session::DataflowSession,
};
use attach::attach_dataflow;

mod attach;

pub fn start(
#[derive(Debug, clap::Args)]
/// Start the given dataflow path. Attach a name to the running dataflow by using --name.
pub struct Start {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Assign a name to the dataflow
#[clap(long)]
name: Option<String>,
coordinator_socket: SocketAddr,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
/// Attach to the dataflow and wait for its completion
#[clap(long, action)]
attach: bool,
/// Run the dataflow in background
#[clap(long, action)]
detach: bool,
/// Enable hot reloading (Python only)
#[clap(long, action)]
hot_reload: bool,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
) -> eyre::Result<()> {
let (dataflow, dataflow_descriptor, mut session, dataflow_id) =
start_dataflow(dataflow, name, coordinator_socket, uv)?;

let attach = match (attach, detach) {
(true, true) => eyre::bail!("both `--attach` and `--detach` are given"),
(true, false) => true,
(false, true) => false,
(false, false) => {
println!("attaching to dataflow (use `--detach` to run in background)");
true
}
};
}

if attach {
let log_level = env_logger::Builder::new()
.filter_level(log::LevelFilter::Info)
.parse_default_env()
.build()
.filter();

attach_dataflow(
dataflow_descriptor,
dataflow,
dataflow_id,
&mut *session,
hot_reload,
coordinator_socket,
log_level,
)
} else {
let print_daemon_name = dataflow_descriptor.nodes.iter().any(|n| n.deploy.is_some());
// wait until dataflow is started
wait_until_dataflow_started(
dataflow_id,
&mut session,
coordinator_socket,
log::LevelFilter::Info,
print_daemon_name,
)
impl Executable for Start {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        // Control-server address of the coordinator that should run the dataflow.
        let coordinator_socket = (self.coordinator_addr, self.coordinator_port).into();

        let (dataflow, dataflow_descriptor, mut session, dataflow_id) =
            start_dataflow(self.dataflow, self.name, coordinator_socket, self.uv)?;

        // `--attach` and `--detach` are mutually exclusive; attaching is the
        // default when neither flag is given.
        let attach = match (self.attach, self.detach) {
            (true, true) => eyre::bail!("both `--attach` and `--detach` are given"),
            (true, false) => true,
            (false, true) => false,
            (false, false) => {
                println!("attaching to dataflow (use `--detach` to run in background)");
                true
            }
        };

        if attach {
            // Default to `info`, merging overrides from the environment
            // (e.g. `RUST_LOG`) via `parse_default_env`.
            let log_level = env_logger::Builder::new()
                .filter_level(log::LevelFilter::Info)
                .parse_default_env()
                .build()
                .filter();

            attach_dataflow(
                dataflow_descriptor,
                dataflow,
                dataflow_id,
                &mut *session,
                self.hot_reload,
                coordinator_socket,
                log_level,
            )
        } else {
            // Daemon names are only relevant when some node has a `deploy`
            // section, i.e. may run on another machine.
            let print_daemon_name = dataflow_descriptor.nodes.iter().any(|n| n.deploy.is_some());
            // wait until dataflow is started
            wait_until_dataflow_started(
                dataflow_id,
                &mut session,
                coordinator_socket,
                log::LevelFilter::Info,
                print_daemon_name,
            )
        }
    }
}

@@ -87,8 +118,7 @@ fn start_dataflow(
let mut session = connect_to_coordinator(coordinator_socket)
.wrap_err("failed to connect to dora coordinator")?;

let local_working_dir =
super::local_working_dir(&dataflow, &dataflow_descriptor, &mut *session)?;
let local_working_dir = local_working_dir(&dataflow, &dataflow_descriptor, &mut *session)?;

let dataflow_id = {
let dataflow = dataflow_descriptor.clone();


+ 111
- 0
binaries/cli/src/command/stop.rs View File

@@ -0,0 +1,111 @@
use super::{default_tracing, Executable};
use crate::common::{connect_to_coordinator, handle_dataflow_result, query_running_dataflows};
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use dora_message::cli_to_coordinator::ControlRequest;
use dora_message::coordinator_to_cli::ControlRequestReply;
use duration_str::parse;
use eyre::{bail, Context};
use std::net::IpAddr;
use std::time::Duration;
use uuid::Uuid;

#[derive(Debug, clap::Args)]
/// Stop the given dataflow UUID. If no id is provided, you will be able to choose between the running dataflows.
pub struct Stop {
    /// UUID of the dataflow that should be stopped
    // Positional argument; takes precedence over `--name` when both are given
    // (see `Executable::execute` below).
    uuid: Option<Uuid>,
    /// Name of the dataflow that should be stopped
    #[clap(long)]
    name: Option<String>,
    /// Kill the dataflow if it doesn't stop after the given duration
    // Parsed with `duration_str::parse`, i.e. human-readable durations.
    #[clap(long, value_name = "DURATION")]
    #[arg(value_parser = parse)]
    grace_duration: Option<Duration>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Stop {
    /// Connect to the coordinator and stop a dataflow, selected (in order of
    /// precedence) by UUID, by name, or interactively.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        let coordinator_socket = (self.coordinator_addr, self.coordinator_port).into();
        let mut session = connect_to_coordinator(coordinator_socket)
            .wrap_err("could not connect to dora coordinator")?;
        // A UUID always wins over a name; with neither, prompt the user.
        if let Some(uuid) = self.uuid {
            stop_dataflow(uuid, self.grace_duration, &mut *session)
        } else if let Some(name) = self.name {
            stop_dataflow_by_name(name, self.grace_duration, &mut *session)
        } else {
            stop_dataflow_interactive(self.grace_duration, &mut *session)
        }
    }
}

/// Let the user pick one of the currently running dataflows and stop it.
/// Prints to stderr and succeeds without prompting when none are running.
fn stop_dataflow_interactive(
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<()> {
    // Ask the coordinator which dataflows it currently knows about.
    let running = query_running_dataflows(session).wrap_err("failed to query running dataflows")?;
    let choices = running.get_active();
    if choices.is_empty() {
        // Nothing to stop; not an error.
        eprintln!("No dataflows are running");
        return Ok(());
    }
    let chosen = inquire::Select::new("Choose dataflow to stop:", choices).prompt()?;
    stop_dataflow(chosen.uuid, grace_duration, session)
}

/// Send a `Stop` control request for the dataflow with the given UUID and
/// report its final result as an `eyre` error on failure.
fn stop_dataflow(
    uuid: Uuid,
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
    // Serializing a `ControlRequest` cannot fail, hence the unwrap.
    let request = serde_json::to_vec(&ControlRequest::Stop {
        dataflow_uuid: uuid,
        grace_duration,
    })
    .unwrap();
    let raw = session
        .request(&request)
        .wrap_err("failed to send dataflow stop message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowStopped { uuid, result } => {
            handle_dataflow_result(result, Some(uuid))
        }
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected stop dataflow reply: {other:?}"),
    }
}

/// Send a `StopByName` control request; the coordinator resolves the name to
/// a UUID and the outcome is reported under that UUID.
fn stop_dataflow_by_name(
    name: String,
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
    // Serializing a `ControlRequest` cannot fail, hence the unwrap.
    let request = serde_json::to_vec(&ControlRequest::StopByName {
        name,
        grace_duration,
    })
    .unwrap();
    let raw = session
        .request(&request)
        .wrap_err("failed to send dataflow stop_by_name message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowStopped { uuid, result } => {
            handle_dataflow_result(result, Some(uuid))
        }
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected stop dataflow reply: {other:?}"),
    }
}

+ 20
- 1
binaries/cli/src/command/up.rs View File

@@ -1,8 +1,27 @@
use crate::{command::check::daemon_running, connect_to_coordinator, LOCALHOST};
use super::check::daemon_running;
use super::{default_tracing, Executable};
use crate::{common::connect_to_coordinator, LOCALHOST};
use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT;
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, ContextCompat};
use std::path::PathBuf;
use std::{fs, net::SocketAddr, path::Path, process::Command, time::Duration};

#[derive(Debug, clap::Args)]
/// Spawn coordinator and daemon in local mode (with default config)
pub struct Up {
    /// Use a custom configuration
    // Hidden flag: the on-disk config format (`UpConfig`) is currently empty,
    // so this option is not advertised in `--help`.
    #[clap(long, hide = true, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
    config: Option<PathBuf>,
}

impl Executable for Up {
    // Set up default tracing, then delegate to the free `up` function with the
    // optional custom config path.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        up(self.config.as_deref())
    }
}

// Currently-empty configuration schema for `dora up`; kept as a struct so the
// serialized format can grow fields without breaking existing config files.
#[derive(Debug, Default, serde::Serialize, serde::Deserialize)]
struct UpConfig {}



+ 117
- 0
binaries/cli/src/common.rs View File

@@ -0,0 +1,117 @@
use crate::formatting::FormatDataflowError;
use communication_layer_request_reply::{RequestReplyLayer, TcpLayer, TcpRequestReplyConnection};
use dora_core::descriptor::{source_is_url, Descriptor};
use dora_download::download_file;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, DataflowList, DataflowResult},
};
use eyre::{bail, Context, ContextCompat};
use std::{
env::current_dir,
net::SocketAddr,
path::{Path, PathBuf},
};
use tokio::runtime::Builder;
use uuid::Uuid;

pub(crate) fn handle_dataflow_result(
result: DataflowResult,
uuid: Option<Uuid>,
) -> Result<(), eyre::Error> {
if result.is_ok() {
Ok(())
} else {
Err(match uuid {
Some(uuid) => {
eyre::eyre!("Dataflow {uuid} failed:\n{}", FormatDataflowError(&result))
}
None => {
eyre::eyre!("Dataflow failed:\n{}", FormatDataflowError(&result))
}
})
}
}

/// Ask the coordinator for the list of dataflows it knows about.
pub(crate) fn query_running_dataflows(
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<DataflowList> {
    // Serializing a `ControlRequest` cannot fail, hence the unwrap.
    let request = serde_json::to_vec(&ControlRequest::List).unwrap();
    let raw = session
        .request(&request)
        .wrap_err("failed to send list message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowList(list) => Ok(list),
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected list dataflow reply: {other:?}"),
    }
}

/// Open a blocking TCP request/reply connection to the coordinator's control
/// socket. Failures surface as plain `std::io::Error` so callers can decide
/// how to report an unreachable coordinator.
pub(crate) fn connect_to_coordinator(
    coordinator_addr: SocketAddr,
) -> std::io::Result<Box<TcpRequestReplyConnection>> {
    TcpLayer::new().connect(coordinator_addr)
}

/// Resolve a dataflow argument to a local path: plain paths are returned as-is,
/// URLs are downloaded into the current working directory first.
pub(crate) fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
    if !source_is_url(&dataflow) {
        return Ok(PathBuf::from(dataflow));
    }
    // URL case: fetch the descriptor file next to where the CLI was invoked.
    let target_path = current_dir().context("Could not access the current dir")?;
    // `download_file` is async, so spin up a minimal single-threaded runtime.
    let rt = Builder::new_current_thread()
        .enable_all()
        .build()
        .context("tokio runtime failed")?;
    let downloaded = rt
        .block_on(download_file(&dataflow, &target_path))
        .wrap_err("failed to download dataflow yaml file")?;
    Ok(downloaded)
}

/// Determine the local working directory to send along with a dataflow: the
/// descriptor's parent directory when everything runs locally, `None` when the
/// dataflow involves remote deployment.
pub(crate) fn local_working_dir(
    dataflow_path: &Path,
    dataflow_descriptor: &Descriptor,
    coordinator_session: &mut TcpRequestReplyConnection,
) -> eyre::Result<Option<PathBuf>> {
    // Equivalent to the original `deploy.as_ref().map(|d| d.machine.as_ref()).is_none()`,
    // which is true exactly when `deploy` is `None`.
    // NOTE(review): if the intent was "no node names a deploy *machine*", this
    // would need `and_then` instead of `map` — confirm against the descriptor
    // semantics.
    let no_deploy_section = dataflow_descriptor.nodes.iter().all(|n| n.deploy.is_none());
    // Short-circuit: the coordinator round-trip only happens when no node has
    // a deploy section (same evaluation order as the original `&&`).
    if !(no_deploy_section && cli_and_daemon_on_same_machine(coordinator_session)?) {
        return Ok(None);
    }
    let canonical = dunce::canonicalize(dataflow_path)
        .context("failed to canonicalize dataflow file path")?;
    let parent = canonical
        .parent()
        .context("dataflow path has no parent dir")?;
    Ok(Some(parent.to_owned()))
}

/// Ask the coordinator whether this CLI and the default daemon are on the same
/// machine, based on the IPs the coordinator observed for both connections
/// (`CliAndDefaultDaemonIps`). Returns `false` when no default daemon is known.
pub(crate) fn cli_and_daemon_on_same_machine(
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<bool> {
    let reply_raw = session
        .request(&serde_json::to_vec(&ControlRequest::CliAndDefaultDaemonOnSameMachine).unwrap())
        // fix: the original context said "start dataflow" — a copy-paste from a
        // different request helper; name the request actually sent.
        .wrap_err("failed to send CliAndDefaultDaemonOnSameMachine message")?;

    let result: ControlRequestReply =
        serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
    match result {
        ControlRequestReply::CliAndDefaultDaemonIps {
            default_daemon,
            cli,
        } => Ok(default_daemon.is_some() && default_daemon == cli),
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected CliAndDefaultDaemonOnSameMachine reply: {other:?}"),
    }
}

+ 7
- 743
binaries/cli/src/lib.rs View File

@@ -1,42 +1,19 @@
use colored::Colorize;
use communication_layer_request_reply::{RequestReplyLayer, TcpLayer, TcpRequestReplyConnection};
use dora_coordinator::Event;
use dora_core::{
descriptor::{source_is_url, Descriptor, DescriptorExt},
topics::{
DORA_COORDINATOR_PORT_CONTROL_DEFAULT, DORA_COORDINATOR_PORT_DEFAULT,
DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT,
},
};
use dora_daemon::{Daemon, LogDestination};
use dora_download::download_file;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, DataflowList, DataflowResult, DataflowStatus},
};
#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;
use duration_str::parse;
use eyre::{bail, Context};
use formatting::FormatDataflowError;
use std::{env::current_dir, io::Write, net::SocketAddr};
use command::Executable;
use std::{
net::{IpAddr, Ipv4Addr},
path::PathBuf,
time::Duration,
};
use tabwriter::TabWriter;
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;
use uuid::Uuid;

pub mod command;
mod command;
mod common;
mod formatting;
mod graph;
pub mod output;
pub mod session;
mod template;

pub use command::run_func;

const LOCALHOST: IpAddr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
const LISTEN_WILDCARD: IpAddr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));

@@ -44,228 +21,7 @@ const LISTEN_WILDCARD: IpAddr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));
#[clap(version)]
pub struct Args {
#[clap(subcommand)]
command: Command,
}

/// dora-rs cli client
#[derive(Debug, clap::Subcommand)]
enum Command {
/// Check if the coordinator and the daemon is running.
Check {
/// Path to the dataflow descriptor file (enables additional checks)
#[clap(long, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
dataflow: Option<PathBuf>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// Generate a visualization of the given graph using mermaid.js. Use --open to open browser.
Graph {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
dataflow: PathBuf,
/// Visualize the dataflow as a Mermaid diagram (instead of HTML)
#[clap(long, action)]
mermaid: bool,
/// Open the HTML visualization in the browser
#[clap(long, action)]
open: bool,
},
/// Run build commands provided in the given dataflow.
Build {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Address of the dora coordinator
#[clap(long, value_name = "IP")]
coordinator_addr: Option<IpAddr>,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT")]
coordinator_port: Option<u16>,
// Use UV to build nodes.
#[clap(long, action)]
uv: bool,
// Run build on local machine
#[clap(long, action)]
local: bool,
},
/// Generate a new project or node. Choose the language between Rust, Python, C or C++.
New {
#[clap(flatten)]
args: CommandNew,
#[clap(hide = true, long)]
internal_create_with_path_dependencies: bool,
},
/// Run a dataflow locally.
///
/// Directly runs the given dataflow without connecting to a dora
/// coordinator or daemon. The dataflow is executed on the local machine.
Run {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
},
/// Spawn coordinator and daemon in local mode (with default config)
Up {
/// Use a custom configuration
#[clap(long, hide = true, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
config: Option<PathBuf>,
},
/// Destroy running coordinator and daemon. If some dataflows are still running, they will be stopped first.
Destroy {
/// Use a custom configuration
#[clap(long, hide = true)]
config: Option<PathBuf>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// Start the given dataflow path. Attach a name to the running dataflow by using --name.
Start {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Assign a name to the dataflow
#[clap(long)]
name: Option<String>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
/// Attach to the dataflow and wait for its completion
#[clap(long, action)]
attach: bool,
/// Run the dataflow in background
#[clap(long, action)]
detach: bool,
/// Enable hot reloading (Python only)
#[clap(long, action)]
hot_reload: bool,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
},
/// Stop the given dataflow UUID. If no id is provided, you will be able to choose between the running dataflows.
Stop {
/// UUID of the dataflow that should be stopped
uuid: Option<Uuid>,
/// Name of the dataflow that should be stopped
#[clap(long)]
name: Option<String>,
/// Kill the dataflow if it doesn't stop after the given duration
#[clap(long, value_name = "DURATION")]
#[arg(value_parser = parse)]
grace_duration: Option<Duration>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// List running dataflows.
List {
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
// Planned for future releases:
// Dashboard,
/// Show logs of a given dataflow and node.
#[command(allow_missing_positional = true)]
Logs {
/// Identifier of the dataflow
#[clap(value_name = "UUID_OR_NAME")]
dataflow: Option<String>,
/// Show logs for the given node
#[clap(value_name = "NAME")]
node: String,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
// Metrics,
// Stats,
// Get,
// Upgrade,
/// Run daemon
Daemon {
/// Unique identifier for the machine (required for distributed dataflows)
#[clap(long)]
machine_id: Option<String>,
/// Local listen port for event such as dynamic node.
#[clap(long, default_value_t = DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT)]
local_listen_port: u16,
/// Address and port number of the dora coordinator
#[clap(long, short, default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
coordinator_port: u16,
#[clap(long, hide = true)]
run_dataflow: Option<PathBuf>,
/// Suppresses all log output to stdout.
#[clap(long)]
quiet: bool,
},
/// Run runtime
Runtime,
/// Run coordinator
Coordinator {
/// Network interface to bind to for daemon communication
#[clap(long, default_value_t = LISTEN_WILDCARD)]
interface: IpAddr,
/// Port number to bind to for daemon communication
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
port: u16,
/// Network interface to bind to for control communication
#[clap(long, default_value_t = LISTEN_WILDCARD)]
control_interface: IpAddr,
/// Port number to bind to for control communication
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
control_port: u16,
/// Suppresses all log output to stdout.
#[clap(long)]
quiet: bool,
},
/// Dora CLI self-management commands
Self_ {
#[clap(subcommand)]
command: SelfSubCommand,
},
}

#[derive(Debug, clap::Subcommand)]
enum SelfSubCommand {
/// Check for updates or update the CLI
Update {
/// Only check for updates without installing
#[clap(long)]
check_only: bool,
},
/// Remove The Dora CLI from the system
Uninstall {
/// Force uninstallation without confirmation
#[clap(long)]
force: bool,
},
command: command::Command,
}

#[derive(Debug, clap::Args)]
@@ -298,503 +54,13 @@ enum Lang {
}

pub fn lib_main(args: Args) {
if let Err(err) = run_cli(args) {
if let Err(err) = args.command.execute() {
eprintln!("\n\n{}", "[ERROR]".bold().red());
eprintln!("{err:?}");
std::process::exit(1);
}
}

fn run_cli(args: Args) -> eyre::Result<()> {
tracing_log::LogTracer::init()?;

#[cfg(feature = "tracing")]
match &args.command {
Command::Daemon {
quiet, machine_id, ..
} => {
let name = "dora-daemon";
let filename = machine_id
.as_ref()
.map(|id| format!("{name}-{id}"))
.unwrap_or(name.to_string());

let mut builder = TracingBuilder::new(name);
if !quiet {
builder = builder.with_stdout("info,zenoh=warn");
}
builder = builder.with_file(filename, LevelFilter::INFO)?;
builder
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
Command::Runtime => {
// Do not set the runtime in the cli.
}
Command::Coordinator { quiet, .. } => {
let name = "dora-coordinator";
let mut builder = TracingBuilder::new(name);
if !quiet {
builder = builder.with_stdout("info");
}
builder = builder.with_file(name, LevelFilter::INFO)?;
builder
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
Command::Run { .. } | Command::Build { .. } => {
let log_level = std::env::var("RUST_LOG").ok().unwrap_or("info".to_string());
TracingBuilder::new("run")
.with_stdout(log_level)
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
_ => {
TracingBuilder::new("dora-cli")
.with_stdout("warn")
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
};

match args.command {
Command::Check {
dataflow,
coordinator_addr,
coordinator_port,
} => match dataflow {
Some(dataflow) => {
let working_dir = dataflow
.canonicalize()
.context("failed to canonicalize dataflow path")?
.parent()
.ok_or_else(|| eyre::eyre!("dataflow path has no parent dir"))?
.to_owned();
Descriptor::blocking_read(&dataflow)?.check(&working_dir)?;
command::check::check_environment((coordinator_addr, coordinator_port).into())?
}
None => command::check::check_environment((coordinator_addr, coordinator_port).into())?,
},
Command::Graph {
dataflow,
mermaid,
open,
} => {
graph::create(dataflow, mermaid, open)?;
}
Command::Build {
dataflow,
coordinator_addr,
coordinator_port,
uv,
local,
} => command::build(dataflow, coordinator_addr, coordinator_port, uv, local)?,
Command::New {
args,
internal_create_with_path_dependencies,
} => template::create(args, internal_create_with_path_dependencies)?,
Command::Run { dataflow, uv } => command::run(dataflow, uv)?,
Command::Up { config } => {
command::up::up(config.as_deref())?;
}
Command::Logs {
dataflow,
node,
coordinator_addr,
coordinator_port,
} => {
let mut session = connect_to_coordinator((coordinator_addr, coordinator_port).into())
.wrap_err("failed to connect to dora coordinator")?;
let list = query_running_dataflows(&mut *session)
.wrap_err("failed to query running dataflows")?;
if let Some(dataflow) = dataflow {
let uuid = Uuid::parse_str(&dataflow).ok();
let name = if uuid.is_some() { None } else { Some(dataflow) };
command::logs(&mut *session, uuid, name, node)?
} else {
let active: Vec<dora_message::coordinator_to_cli::DataflowIdAndName> =
list.get_active();
let uuid = match &active[..] {
[] => bail!("No dataflows are running"),
[uuid] => uuid.clone(),
_ => inquire::Select::new("Choose dataflow to show logs:", active).prompt()?,
};
command::logs(&mut *session, Some(uuid.uuid), None, node)?
}
}
Command::Start {
dataflow,
name,
coordinator_addr,
coordinator_port,
attach,
detach,
hot_reload,
uv,
} => {
let coordinator_socket = (coordinator_addr, coordinator_port).into();
command::start(
dataflow,
name,
coordinator_socket,
attach,
detach,
hot_reload,
uv,
)?
}
Command::List {
coordinator_addr,
coordinator_port,
} => match connect_to_coordinator((coordinator_addr, coordinator_port).into()) {
Ok(mut session) => list(&mut *session)?,
Err(_) => {
bail!("No dora coordinator seems to be running.");
}
},
Command::Stop {
uuid,
name,
grace_duration,
coordinator_addr,
coordinator_port,
} => {
let mut session = connect_to_coordinator((coordinator_addr, coordinator_port).into())
.wrap_err("could not connect to dora coordinator")?;
match (uuid, name) {
(Some(uuid), _) => stop_dataflow(uuid, grace_duration, &mut *session)?,
(None, Some(name)) => stop_dataflow_by_name(name, grace_duration, &mut *session)?,
(None, None) => stop_dataflow_interactive(grace_duration, &mut *session)?,
}
}
Command::Destroy {
config,
coordinator_addr,
coordinator_port,
} => command::up::destroy(
config.as_deref(),
(coordinator_addr, coordinator_port).into(),
)?,
Command::Coordinator {
interface,
port,
control_interface,
control_port,
quiet,
} => {
let rt = Builder::new_multi_thread()
.enable_all()
.build()
.context("tokio runtime failed")?;
rt.block_on(async {
let bind = SocketAddr::new(interface, port);
let bind_control = SocketAddr::new(control_interface, control_port);
let (port, task) =
dora_coordinator::start(bind, bind_control, futures::stream::empty::<Event>())
.await?;
if !quiet {
println!("Listening for incoming daemon connection on {port}");
}
task.await
})
.context("failed to run dora-coordinator")?
}
Command::Daemon {
coordinator_addr,
coordinator_port,
local_listen_port,
machine_id,
run_dataflow,
quiet: _,
} => {
let rt = Builder::new_multi_thread()
.enable_all()
.build()
.context("tokio runtime failed")?;
rt.block_on(async {
match run_dataflow {
Some(dataflow_path) => {
tracing::info!("Starting dataflow `{}`", dataflow_path.display());
if coordinator_addr != LOCALHOST {
tracing::info!(
"Not using coordinator addr {} as `run_dataflow` is for local dataflow only. Please use the `start` command for remote coordinator",
coordinator_addr
);
}
let dataflow_session =
DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;

let result = Daemon::run_dataflow(&dataflow_path,
dataflow_session.build_id, dataflow_session.local_build, dataflow_session.session_id, false,
LogDestination::Tracing,
).await?;
handle_dataflow_result(result, None)
}
None => {
Daemon::run(SocketAddr::new(coordinator_addr, coordinator_port), machine_id, local_listen_port).await
}
}
})
.context("failed to run dora-daemon")?
}
Command::Runtime => dora_runtime::main().context("Failed to run dora-runtime")?,
Command::Self_ { command } => match command {
SelfSubCommand::Update { check_only } => {
println!("Checking for updates...");

#[cfg(target_os = "linux")]
let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
#[cfg(target_os = "macos")]
let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
#[cfg(target_os = "windows")]
let bin_path_in_archive = String::from("dora.exe");

let status = self_update::backends::github::Update::configure()
.repo_owner("dora-rs")
.repo_name("dora")
.bin_path_in_archive(&bin_path_in_archive)
.bin_name("dora")
.show_download_progress(true)
.current_version(env!("CARGO_PKG_VERSION"))
.build()?;

if check_only {
// Only check if an update is available
match status.get_latest_release() {
Ok(release) => {
let current_version = self_update::cargo_crate_version!();
if current_version != release.version {
println!(
"An update is available: {}. Run 'dora self update' to update",
release.version
);
} else {
println!(
"Dora CLI is already at the latest version: {}",
current_version
);
}
}
Err(e) => println!("Failed to check for updates: {}", e),
}
} else {
// Perform the actual update
match status.update() {
Ok(update_status) => match update_status {
self_update::Status::UpToDate(version) => {
println!("Dora CLI is already at the latest version: {}", version);
}
self_update::Status::Updated(version) => {
println!("Successfully updated Dora CLI to version: {}", version);
}
},
Err(e) => println!("Failed to update: {}", e),
}
}
}
SelfSubCommand::Uninstall { force } => {
if !force {
let confirmed =
inquire::Confirm::new("Are you sure you want to uninstall Dora CLI?")
.with_default(false)
.prompt()
.wrap_err("Uninstallation cancelled")?;

if !confirmed {
println!("Uninstallation cancelled");
return Ok(());
}
}

println!("Uninstalling Dora CLI...");
#[cfg(feature = "python")]
{
println!("Detected Python installation...");

// Try uv pip uninstall first
let uv_status = std::process::Command::new("uv")
.args(["pip", "uninstall", "dora-rs-cli"])
.status();

if let Ok(status) = uv_status {
if status.success() {
println!("Dora CLI has been successfully uninstalled via uv pip.");
return Ok(());
}
}

// Fall back to regular pip uninstall
println!("Trying with pip...");
let status = std::process::Command::new("pip")
.args(["uninstall", "-y", "dora-rs-cli"])
.status()
.wrap_err("Failed to run pip uninstall")?;

if status.success() {
println!("Dora CLI has been successfully uninstalled via pip.");
} else {
bail!("Failed to uninstall Dora CLI via pip.");
}
}
#[cfg(not(feature = "python"))]
{
match self_replace::self_delete() {
Ok(_) => {
println!("Dora CLI has been successfully uninstalled.");
}
Err(e) => {
bail!("Failed to uninstall Dora CLI: {}", e);
}
}
}
}
},
};

Ok(())
}

fn stop_dataflow_interactive(
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> eyre::Result<()> {
let list = query_running_dataflows(session).wrap_err("failed to query running dataflows")?;
let active = list.get_active();
if active.is_empty() {
eprintln!("No dataflows are running");
} else {
let selection = inquire::Select::new("Choose dataflow to stop:", active).prompt()?;
stop_dataflow(selection.uuid, grace_duration, session)?;
}

Ok(())
}

fn stop_dataflow(
uuid: Uuid,
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
let reply_raw = session
.request(
&serde_json::to_vec(&ControlRequest::Stop {
dataflow_uuid: uuid,
grace_duration,
})
.unwrap(),
)
.wrap_err("failed to send dataflow stop message")?;
let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::DataflowStopped { uuid, result } => {
handle_dataflow_result(result, Some(uuid))
}
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected stop dataflow reply: {other:?}"),
}
}

fn handle_dataflow_result(result: DataflowResult, uuid: Option<Uuid>) -> Result<(), eyre::Error> {
if result.is_ok() {
Ok(())
} else {
Err(match uuid {
Some(uuid) => {
eyre::eyre!("Dataflow {uuid} failed:\n{}", FormatDataflowError(&result))
}
None => {
eyre::eyre!("Dataflow failed:\n{}", FormatDataflowError(&result))
}
})
}
}

fn stop_dataflow_by_name(
name: String,
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
let reply_raw = session
.request(
&serde_json::to_vec(&ControlRequest::StopByName {
name,
grace_duration,
})
.unwrap(),
)
.wrap_err("failed to send dataflow stop_by_name message")?;
let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::DataflowStopped { uuid, result } => {
handle_dataflow_result(result, Some(uuid))
}
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected stop dataflow reply: {other:?}"),
}
}

/// Print a tab-aligned table of all dataflows known to the coordinator,
/// one row per dataflow: UUID, name (empty if unnamed), and status.
fn list(session: &mut TcpRequestReplyConnection) -> Result<(), eyre::ErrReport> {
    let dataflows = query_running_dataflows(session)?;

    let mut table = TabWriter::new(vec![]);
    table.write_all(b"UUID\tName\tStatus\n")?;
    for entry in dataflows.0 {
        let status = match entry.status {
            DataflowStatus::Running => "Running",
            DataflowStatus::Finished => "Succeeded",
            DataflowStatus::Failed => "Failed",
        };
        let row = format!(
            "{}\t{}\t{status}\n",
            entry.id.uuid,
            entry.id.name.unwrap_or_default()
        );
        table.write_all(row.as_bytes())?;
    }
    table.flush()?;

    let formatted = String::from_utf8(table.into_inner()?)?;
    println!("{formatted}");

    Ok(())
}

/// Fetch the coordinator's list of dataflows via `ControlRequest::List`.
fn query_running_dataflows(session: &mut TcpRequestReplyConnection) -> eyre::Result<DataflowList> {
    // Serializing our own request type is infallible in practice.
    let raw_request = serde_json::to_vec(&ControlRequest::List).unwrap();
    let reply_raw = session
        .request(&raw_request)
        .wrap_err("failed to send list message")?;
    let reply = serde_json::from_slice::<ControlRequestReply>(&reply_raw)
        .wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowList(list) => Ok(list),
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected list dataflow reply: {other:?}"),
    }
}

/// Open a TCP request/reply control connection to the coordinator at the
/// given address.
fn connect_to_coordinator(
    coordinator_addr: SocketAddr,
) -> std::io::Result<Box<TcpRequestReplyConnection>> {
    let layer = TcpLayer::new();
    layer.connect(coordinator_addr)
}

/// Resolve a dataflow argument to a local file path.
///
/// If `dataflow` is a URL, the referenced dataflow YAML file is downloaded
/// into the current working directory and the downloaded path is returned;
/// otherwise the argument is treated as a local path and returned as-is.
fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
    if !source_is_url(&dataflow) {
        return Ok(PathBuf::from(dataflow));
    }

    // URL case: download the dataflow YAML into the current directory.
    // `download_file` is async, so spin up a minimal single-threaded runtime
    // just for this one blocking call.
    let target_path = current_dir().context("Could not access the current dir")?;
    let runtime = Builder::new_current_thread()
        .enable_all()
        .build()
        .context("tokio runtime failed")?;
    runtime
        .block_on(download_file(&dataflow, &target_path))
        .wrap_err("failed to download dataflow yaml file")
}

#[cfg(feature = "python")]
use clap::Parser;
#[cfg(feature = "python")]
@@ -804,8 +70,6 @@ use pyo3::{
wrap_pyfunction, Bound, PyResult, Python,
};

use crate::session::DataflowSession;

#[cfg(feature = "python")]
#[pyfunction]
fn py_main(_py: Python) -> PyResult<()> {


+ 1
- 1
binaries/coordinator/Cargo.toml View File

@@ -19,7 +19,7 @@ futures = "0.3.21"
tokio = { version = "1.24.2", features = ["full"] }
tokio-stream = { version = "0.1.8", features = ["io-util", "net"] }
uuid = { version = "1.2.1" }
dora-core = { workspace = true }
dora-core = { workspace = true, features = ["build"] }
tracing = "0.1.36"
dora-tracing = { workspace = true, optional = true }
futures-concurrency = "7.1.0"


+ 0
- 13
binaries/daemon/src/spawn.rs View File

@@ -430,19 +430,6 @@ impl PreparedNode {
}
};

if buffer.contains("TRACE")
|| buffer.contains("INFO")
|| buffer.contains("DEBUG")
|| buffer.contains("WARN")
|| buffer.contains("ERROR")
{
// tracing output, potentially multi-line -> keep reading following lines
// until double-newline
if !buffer.ends_with("\n\n") && !finished {
continue;
}
}

// send the buffered lines
let lines = std::mem::take(&mut buffer);
let sent = stdout_tx.send(lines.clone()).await;


+ 1
- 1
examples/c++-ros2-dataflow/node-rust-api/main.cc View File

@@ -73,7 +73,7 @@ int main()
std::cerr << "Unknown event type " << static_cast<int>(ty) << std::endl;
}

if (received_ticks > 20)
if (received_ticks > 20 && responses_received > 0)
{
break;
}


+ 14
- 0
examples/keyboard/dataflow.yml View File

@@ -0,0 +1,14 @@
nodes:
- id: keyboard
build: pip install -e ../../node-hub/dora-keyboard
path: dora-keyboard
outputs:
- char
env:
DISPLAY: $DISPLAY

- id: rerun
path: dora-rerun
build: pip install -e ../../node-hub/dora-rerun
inputs:
text_input: keyboard/char

+ 60
- 0
examples/so101/Readme.md View File

@@ -0,0 +1,60 @@
## SO101 Arm Control

This example provides gamepad control and leader-follower functionality for the SO-101 robotic arm.

### Install Dependencies

Install the required Python packages for Rerun visualization (optional):

```bash
# Install the URDF loader for Rerun visualization
pip install git+https://github.com/dora-rs/rerun-loader-python-urdf
```

### Hardware Setup

1. Connect your SO-101 arm(s) to your computer via USB/serial
2. Note the serial port names (e.g., on Linux: `/dev/ttyACM0`, `/dev/ttyACM1`)
3. Connect your gamepad controller
4. Update the `PORT` environment variable in the YAML files

#### Single Arm Control (arm_gamepad_control.yml)

Control a single SO-101 arm with gamepad input and visualization:

```bash
dora build arm.yml
dora run arm.yml
```

#### Leader-Follower Mode (leader_follower.yml)

Use one arm as a leader to control another follower arm:

```bash
dora build leader.yml
dora run leader.yml
```

#### Serial Port Configuration

Update the `PORT` environment variable in the YAML files:

```yaml
env:
PORT: /dev/ttyACM0 # Change to your actual port
```

## Troubleshooting

### Serial Connection Issues
- Check that the arm is powered on and connected
- Verify the correct serial port in the YAML configuration
- Ensure you have read/write permission on the port, e.g. `sudo chmod a+rw /dev/ttyACM0`, or add your user to the `dialout` group

### Gamepad Not Detected
- Verify gamepad is connected and recognized by the system
- Test with `jstest /dev/input/js0` (Linux)

## Safety Notes
- Always ensure the arm has sufficient clearance before operation

+ 48
- 0
examples/so101/arm_gamepad_control.yml View File

@@ -0,0 +1,48 @@
nodes:
- id: so101
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
pose: pytorch_kinematics/cmd_vel
outputs:
- pose
env:
PORT: /dev/ttyACM0
IDS: 1 2 3 4 5

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "so_arm101_description"
END_EFFECTOR_LINK: "gripper"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.01
ROTATION_TOLERANCE: 0.03

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

# comment below path if you don't want to visualize the arm in rerun
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: so101/pose
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: "0. 0. 0. 1. 0. 0. 0."

+ 33
- 0
examples/so101/leader_follower.yml View File

@@ -0,0 +1,33 @@
nodes:
- id: so101
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
pose: leader_interface/pose
outputs:
- pose
env:
PORT: /dev/ttyACM0
IDS: 1 2 3 4 5 6

- id: leader_interface
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
outputs:
- pose
env:
PORT: /dev/ttyACM1
IDS: 1 2 3 4 5 6

# comment below path if you don't want to visualize the arms in rerun
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: so101/pose
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: "0. 0. 0. 1. 0. 0. 0."

+ 3
- 3
examples/speech-to-speech/README.md View File

@@ -1,4 +1,4 @@
# Dora Speech to Text example
# Dora Speech to Speech example

Make sure to have, dora, pip and cargo installed.

@@ -23,6 +23,6 @@ sudo apt-get install espeak
```bash
uv venv --seed -p 3.11
uv pip install -e ../../apis/python/node --reinstall
dora build kokoro-dev.yml
dora run kokoro-dev.yml
dora build kokoro-dev.yml --uv
dora run kokoro-dev.yml --uv
```

+ 1
- 1
examples/speech-to-text/whisper-dev.yml View File

@@ -28,7 +28,7 @@ nodes:
# USE_MODELSCOPE_HUB: true

- id: dora-rerun
build: cargo build -p dora-rerun --release
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
original_text: dora-distil-whisper/text

+ 30
- 0
examples/urdf/broken_fanuc.yml View File

@@ -0,0 +1,30 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_m710ic70: pytorch_kinematics/cmd_vel
env:
m710ic70_urdf: "fanuc_m710ic_description"
m710ic70_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "fanuc_m710ic_description"
END_EFFECTOR_LINK: "tool0"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."

+ 30
- 0
examples/urdf/broken_poppy.yml View File

@@ -0,0 +1,30 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_poppy_ergo_jr: pytorch_kinematics/cmd_vel
env:
poppy_ergo_jr_urdf: "poppy_ergo_jr_description"
poppy_ergo_jr_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "poppy_ergo_jr_description"
END_EFFECTOR_LINK: "section_5"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."

+ 35
- 0
examples/urdf/franka.yml View File

@@ -0,0 +1,35 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_panda: pytorch_kinematics/cmd_vel
env:
panda_urdf: "panda_description"
panda_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "panda_description"
END_EFFECTOR_LINK: "panda_link8"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

+ 30
- 0
examples/urdf/gen3.yml View File

@@ -0,0 +1,30 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_jaco: pytorch_kinematics/cmd_vel
env:
jaco_urdf: "gen3_description"
jaco_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "gen3_description"
END_EFFECTOR_LINK: "j2n6s300_end_effector"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."

+ 33
- 0
examples/urdf/kuka.yml View File

@@ -0,0 +1,33 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_iiwa14_primitive_collision: pytorch_kinematics/cmd_vel
env:
iiwa14_primitive_collision_urdf: "iiwa14_description"
iiwa14_primitive_collision_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.02
MAX_ANGULAR_SPEED: 0.10

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "iiwa14_description"
END_EFFECTOR_LINK: "iiwa_link_7"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."

+ 35
- 0
examples/urdf/piper.yml View File

@@ -0,0 +1,35 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_piper_description: pytorch_kinematics/cmd_vel
env:
piper_description_urdf: "piper_description"
piper_description_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "piper_description"
END_EFFECTOR_LINK: "link6"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

+ 35
- 0
examples/urdf/so_arm101.yml View File

@@ -0,0 +1,35 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: pytorch_kinematics/cmd_vel
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "so_arm101_description"
END_EFFECTOR_LINK: "gripper"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.01
ROTATION_TOLERANCE: 0.03

+ 35
- 0
examples/urdf/ur5.yml View File

@@ -0,0 +1,35 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_ur5_robot: pytorch_kinematics/cmd_vel
env:
ur5_robot_urdf: "ur5_description"
ur5_robot_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "ur5_description"
END_EFFECTOR_LINK: "tool0"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

+ 70
- 0
examples/urdf/vggt/franka.yml View File

@@ -0,0 +1,70 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_panda: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
panda_urdf: "panda_description"
panda_transform: .5 -0. -0.1 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "panda_description"
END_EFFECTOR_LINK: "panda_link8"
TRANSFORM: .5 -0. -0.1 1. 0. 0. 0.
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 68
- 0
examples/urdf/vggt/kuka.yml View File

@@ -0,0 +1,68 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_iiwa14_primitive_collision: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
iiwa14_primitive_collision_urdf: "iiwa14_description"
iiwa14_primitive_collision_transform: .5 -0. -0.1 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.02
MAX_ANGULAR_SPEED: 0.10

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "iiwa14_description"
END_EFFECTOR_LINK: "iiwa_link_7"
TRANSFORM: .5 -0. -0.1 1. 0. 0. 0.

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 69
- 0
examples/urdf/vggt/so_arm101.yml View File

@@ -0,0 +1,69 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: .14 -0. 0.4 -.5 .5 .5 -.5

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "so_arm101_description"
END_EFFECTOR_LINK: "gripper"
TRANSFORM: .14 -0. 0.4 -.5 .5 .5 -.5
POSITION_TOLERANCE: 0.01
ROTATION_TOLERANCE: 0.03

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 59
- 0
examples/urdf/vggt/z1.yml View File

@@ -0,0 +1,59 @@
nodes:
- id: plot
build: pip install -e ../../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_z1: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
z1_urdf: z1_description
z1_transform: .5 -0.2 -0.11 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "z1_description"
END_EFFECTOR_LINK: "link06"
TRANSFORM: .5 -0.2 -0.11 1. 0. 0. 0.
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.88

+ 35
- 0
examples/urdf/z1.yml View File

@@ -0,0 +1,35 @@
nodes:
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_z1: pytorch_kinematics/cmd_vel
env:
z1_urdf: "z1_description"
z1_transform: "0. 0. 0. 1. 0. 0. 0."

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "z1_description"
END_EFFECTOR_LINK: "link06"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

+ 54
- 0
examples/vggt/depth-to-avif.yaml View File

@@ -0,0 +1,54 @@
nodes:
- id: camera
build: pip install opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 1

- id: dora-vggt
build: pip install -e ../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
DEPTH_ENCODING: mono16

- id: rav1e-depth
path: dora-rav1e
build: pip install -e ../../node-hub/dora-rav1e
inputs:
depth: dora-vggt/depth
outputs:
- depth
env:
ENCODING: avif

- id: rav1e-image
path: dora-rav1e
build: pip install -e ../../node-hub/dora-rav1e
inputs:
image: dora-vggt/image
outputs:
- image
env:
ENCODING: avif

- id: bench
path: image_saver.py
inputs:
vggt_image: rav1e-image/image
vggt_depth: rav1e-depth/depth

- id: plot
build: pip install dora-rerun
path: dora-rerun
inputs:
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth

+ 0
- 8
examples/vggt/depth.dora-session.yaml View File

@@ -1,8 +0,0 @@
build_id: 2b402c1e-e52e-45e9-86e5-236b33a77369
session_id: 275de19c-e605-4865-bc5f-2f15916bade9
git_sources: {}
local_build:
node_working_dirs:
camera: /Users/xaviertao/Documents/work/dora/examples/vggt
dora-vggt: /Users/xaviertao/Documents/work/dora/examples/vggt
plot: /Users/xaviertao/Documents/work/dora/examples/vggt

+ 34
- 0
examples/vggt/image_saver.py View File

@@ -0,0 +1,34 @@
from dora import Node

node = Node()

index_dict = {}
i = 0

LEAD_TOPIC = "vggt_depth"

for event in node:
if event["type"] == "INPUT":
if LEAD_TOPIC in event["id"]:
storage = event["value"]
metadata = event["metadata"]
encoding = metadata["encoding"]
width = metadata["width"]
height = metadata["height"]

# Save to file
filename = f"out/{event['id']}_{i}.{encoding}"
with open(filename, "wb") as f:
f.write(storage.to_numpy())
for key, value in index_dict.items():
filename = f"out/{key}_{i}.{value['metadata']['encoding']}"
with open(filename, "wb") as f:
f.write(value["value"])
i += 1
else:
# Store the event in the index dictionary
index_dict[event["id"]] = {
"type": event["type"],
"value": event["value"].to_numpy(),
"metadata": event["metadata"],
}

+ 53
- 0
examples/vggt/realsense-to-avif.yaml View File

@@ -0,0 +1,53 @@
nodes:
- id: camera
build: pip install -e ../../node-hub/dora-pyrealsense
path: dora-pyrealsense
inputs:
tick: dora/timer/millis/100
outputs:
- image
- depth

- id: dora-vggt
build: pip install -e ../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
DEPTH_ENCODING: mono16

- id: rav1e-depth-vggt
path: dora-rav1e
build: cargo build -p dora-rav1e --release
inputs:
depth: dora-vggt/depth
outputs:
- depth
env:
ENCODING: avif

- id: rav1e-depth-realsense
path: dora-rav1e
build: cargo build -p dora-rav1e --release
inputs:
depth: camera/depth
outputs:
- depth
env:
ENCODING: avif

- id: bench
path: image_saver.py
inputs:
camera_depth: rav1e-depth-vggt/depth
vggt_depth: rav1e-depth-realsense/depth

- id: plot
build: pip install dora-rerun
path: dora-rerun
inputs:
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth

+ 1
- 1
libraries/core/src/descriptor/validate.rs View File

@@ -54,7 +54,7 @@ pub fn check_dataflow(
};
}
},
dora_message::descriptor::NodeSource::GitBranch { repo, rev } => {
dora_message::descriptor::NodeSource::GitBranch { .. } => {
info!("skipping check for node with git source");
}
},


+ 1
- 1
libraries/extensions/ros2-bridge/python/Cargo.toml View File

@@ -12,7 +12,7 @@ eyre = "0.6"
serde = "1.0.166"
arrow = { workspace = true, features = ["pyarrow"] }
futures = "0.3.28"
# pyo3_special_method_derive = "0.4.2"
pyo3_special_method_derive = "0.4.3"

[dev-dependencies]
serde_assert = "0.7.1"

+ 7
- 8
libraries/extensions/ros2-bridge/python/src/lib.rs View File

@@ -18,7 +18,7 @@ use pyo3::{
types::{PyAnyMethods, PyDict, PyList, PyModule, PyModuleMethods},
Bound, PyAny, PyObject, PyResult, Python,
};
/// use pyo3_special_method_derive::{Dict, Dir, Repr, Str};
use pyo3_special_method_derive::{Dict, Dir, Repr, Str};
use typed::{deserialize::StructDeserializer, TypeInfo, TypedValue};

pub mod qos;
@@ -46,7 +46,7 @@ pub mod typed;
/// :type ros_paths: typing.List[str], optional
///
#[pyclass]
/// #[derive(Str, Repr, Dir, Dict)]
#[derive(Str, Repr, Dir, Dict)]
pub struct Ros2Context {
context: ros2_client::Context,
messages: Arc<HashMap<String, HashMap<String, Message>>>,
@@ -150,7 +150,7 @@ impl Ros2Context {
/// See: https://github.com/jhelovuo/ros2-client/issues/4
///
#[pyclass]
/// #[derive(Str, Repr, Dir, Dict)]
#[derive(Str, Repr, Dir, Dict)]
pub struct Ros2Node {
node: ros2_client::Node,
messages: Arc<HashMap<String, HashMap<String, Message>>>,
@@ -257,8 +257,7 @@ impl Ros2Node {
/// ROS2 Node Options
/// :type rosout: bool, optional
///
#[derive(Clone, Default)]
/// , Str, Repr, Dir, Dict)]
#[derive(Clone, Default, Str, Repr, Dir, Dict)]
#[pyclass]
#[non_exhaustive]
pub struct Ros2NodeOptions {
@@ -289,7 +288,7 @@ impl From<Ros2NodeOptions> for ros2_client::NodeOptions {
/// - dora Ros2 bridge functionality is considered **unstable**. It may be changed
/// at any point without it being considered a breaking change.
#[pyclass]
/// #[derive(Str, Repr, Dir, Dict)]
#[derive(Str, Repr, Dir, Dict)]
#[non_exhaustive]
pub struct Ros2Topic {
topic: rustdds::Topic,
@@ -302,7 +301,7 @@ pub struct Ros2Topic {
/// - dora Ros2 bridge functionality is considered **unstable**. It may be changed
/// at any point without it being considered a breaking change.
#[pyclass]
/// #[derive(Str, Repr, Dir, Dict)]
#[derive(Str, Repr, Dir, Dict)]
#[non_exhaustive]
pub struct Ros2Publisher {
publisher: ros2_client::Publisher<TypedValue<'static>>,
@@ -373,7 +372,7 @@ impl Ros2Publisher {
/// - dora Ros2 bridge functionality is considered **unstable**. It may be changed
/// at any point without it being considered a breaking change.
#[pyclass]
/// #[derive(Str, Repr, Dir, Dict)]
#[derive(Str, Repr, Dir, Dict)]
#[non_exhaustive]
pub struct Ros2Subscription {
deserializer: StructDeserializer<'static>,


+ 4
- 7
libraries/extensions/ros2-bridge/python/src/qos.rs View File

@@ -1,6 +1,6 @@
use ::dora_ros2_bridge::rustdds::{self, policy};
use pyo3::prelude::{pyclass, pymethods};
/// use pyo3_special_method_derive::{Dict, Dir, Repr, Str};
use pyo3_special_method_derive::{Dict, Dir, Repr, Str};

/// ROS2 QoS Policy
///
@@ -13,8 +13,7 @@ use pyo3::prelude::{pyclass, pymethods};
/// :type keep_last: int, optional
/// :rtype: dora.Ros2QoSPolicies
///
#[derive(Clone)]
/// , Str, Repr, Dir, Dict)]
#[derive(Clone, Str, Repr, Dir, Dict)]
#[pyclass]
#[non_exhaustive]
pub struct Ros2QosPolicies {
@@ -80,8 +79,7 @@ impl From<Ros2QosPolicies> for rustdds::QosPolicies {
/// DDS 2.2.3.4 DURABILITY
///
/// :rtype: dora.Ros2Durability
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
/// , Str, Repr, Dir, Dict)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Str, Repr, Dir, Dict)]
#[pyclass(eq, eq_int)]
pub enum Ros2Durability {
Volatile,
@@ -107,8 +105,7 @@ impl From<Ros2Durability> for policy::Durability {

/// DDS 2.2.3.11 LIVELINESS
/// :rtype: dora.Ros2Liveliness
#[derive(Copy, Clone, PartialEq)]
/// , Str, Repr, Dir, Dict)]
#[derive(Copy, Clone, PartialEq, Str, Repr, Dir, Dict)]
#[pyclass(eq, eq_int)]
pub enum Ros2Liveliness {
Automatic,


+ 1
- 1
libraries/extensions/telemetry/tracing/src/telemetry.rs View File

@@ -6,7 +6,7 @@ use std::collections::HashMap;

struct MetadataMap<'a>(HashMap<&'a str, &'a str>);

impl<'a> Extractor for MetadataMap<'a> {
impl Extractor for MetadataMap<'_> {
/// Get a value for a key from the MetadataMap. If the value can't be converted to &str, returns None
fn get(&self, key: &str) -> Option<&str> {
self.0.get(key).cloned()


+ 1
- 1
libraries/message/Cargo.toml View File

@@ -1,7 +1,7 @@
[package]
name = "dora-message"
# versioned separately from the other dora crates
version = "0.5.0-alpha"
version = "0.5.0"
edition.workspace = true
documentation.workspace = true
description.workspace = true


+ 1
- 1
node-hub/dora-argotranslate/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-argotranslate"
version = "0.3.11"
version = "0.3.12"
description = "Dora Node for Text translating using Argostranslate"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },


+ 1
- 1
node-hub/dora-cotracker/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-cotracker"
version = "0.1.0"
version = "0.3.12"
authors = [{ name = "Shashwat Patil", email = "shashwatpatil974@gmail.com" }]
description = "A Dora node implementing real-time object tracking using Facebook's CoTracker model"
license = "CC-BY-1.0"


+ 1
- 1
node-hub/dora-distil-whisper/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-distil-whisper"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-echo/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-echo"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-gradio/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-gradio"
version = "0.2.0"
version = "0.3.12"
authors = [{ name = "Shashwat Patil", email = "email@email.com" }]
description = "dora-gradio"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-internvl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-internvl"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-ios-lidar/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-ios-lidar"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-ios-lidar"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-keyboard/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-keyboard"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-kokoro-tts/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-kokoro-tts"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-kokoro-tts"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-microphone/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-microphone"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 9
- 6
node-hub/dora-mujoco/pyproject.toml View File

@@ -8,11 +8,11 @@ readme = "README.md"
requires-python = ">=3.8"

dependencies = [
"dora-rs >= 0.3.9",
"mujoco >= 3.1.6",
"numpy >= 1.21.0",
"pyarrow >= 14.0.1",
"robot_descriptions >= 1.12.0",
"dora-rs >= 0.3.9",
"mujoco >= 3.1.6",
"numpy >= 1.21.0",
"pyarrow >= 14.0.1",
"robot_descriptions",
]

[dependency-groups]
@@ -23,9 +23,12 @@ dora-mujoco = "dora_mujoco.main:main"

[tool.ruff.lint]
extend-select = [
"UP", # pyupgrade
"UP", # pyupgrade
"PERF", # Ruff's PERF rule
"RET", # Ruff's RET rule
"RSE", # Ruff's RSE rule
"N", # Ruff's N rule
]

[tool.uv.sources]
robot-descriptions = { git = "https://github.com/robot-descriptions/robot_descriptions.py.git" }

+ 1
- 1
node-hub/dora-object-to-pose/Cargo.toml View File

@@ -1,6 +1,6 @@
[package]
name = "dora-object-to-pose"
version = "0.3.11"
version = "0.3.12"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


+ 1
- 1
node-hub/dora-openai-server/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-openai-server"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-opus/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-opus"
version = "0.3.11"
version = "0.3.12"
description = "Dora Node for Text translating using Opus"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },


+ 1
- 1
node-hub/dora-outtetts/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-outtetts"
version = "0.3.11"
version = "0.3.12"
authors = []
description = "dora-outtetts"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-parler/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-parler"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 2
node-hub/dora-phi4/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-phi4"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Somay", email = "ssomay2002@gmail.com" }]
description = "DORA node for Phi-4 multimodal model"
license = { text = "MIT" }
@@ -18,7 +18,6 @@ dependencies = [
"scipy==1.15.2",
"backoff==2.2.1",
"peft==0.13.2",
"bitsandbytes>=0.42.0",
"opencv-python",
"requests",
]


+ 1309
- 0
node-hub/dora-phi4/uv.lock
File diff suppressed because it is too large
View File


+ 1
- 1
node-hub/dora-piper/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-piper"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for using Agilex piper"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-pyaudio/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyaudio"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
license = { text = "MIT" }
readme = "README.md"


+ 1
- 1
node-hub/dora-pyorbbecksdk/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyorbbecksdk"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Xiang Yang", email = "Ryu-Yang@qq.com" },


+ 1
- 1
node-hub/dora-pyrealsense/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyrealsense"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for capturing video with Pyrealsense"
license = { text = "MIT" }


+ 85
- 83
node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/main.py View File

@@ -11,6 +11,8 @@ import torch
from dora import Node
from pytorch_kinematics.transforms.rotation_conversions import matrix_to_euler_angles

POSITION_TOLERANCE = float(os.getenv("POSITION_TOLERANCE", "0.005"))
ROTATION_TOLERANCE = float(os.getenv("ROTATION_TOLERANCE", "0.05")) # in radians
TRANSFORM = np.array(os.getenv("TRANSFORM", "0. 0. 0. 1. 0. 0. 0.").split(" ")).astype(
np.float32,
) # wxyz format
@@ -229,18 +231,21 @@ class RobotKinematics:
# Instantiate and run the IK solver (core pytorch_kinematics objects/methods)
ik_solver = pk.PseudoInverseIK(
self.chain,
max_iterations=1_000,
max_iterations=100_000,
retry_configs=q_init,
joint_limits=torch.tensor(self.chain.get_joint_limits()),
early_stopping_any_converged=True,
early_stopping_no_improvement="all",
# early_stopping_no_improvement="all",
debug=False,
lr=0.05,
rot_tolerance=1e-4,
pos_tolerance=1e-3,
rot_tolerance=ROTATION_TOLERANCE,
pos_tolerance=POSITION_TOLERANCE,
)
solution_angles = ik_solver.solve(target_pose)
if solution_angles.err_rot > 1e-3 or solution_angles.err_pos > 1e-2:
if (
solution_angles.err_rot > ROTATION_TOLERANCE
or solution_angles.err_pos > POSITION_TOLERANCE
):
print(
f"IK did not converge: pos_err={solution_angles.err_pos}, rot_err={solution_angles.err_rot} for target {target_pose}",
)
@@ -357,99 +362,96 @@ def main():
robot = RobotKinematics(
urdf_path=model, urdf="", end_effector_link=end_effector_link
)
last_known_state = None
last_known_state = robot._default_q

for event in node:
if event["type"] == "INPUT":
metadata = event["metadata"]

if event["id"] == "cmd_vel":
if last_known_state is not None:
target_vel = event["value"].to_numpy() # expect 100ms
# Apply Forward Kinematics
target = robot.compute_fk(last_known_state)
target = (
np.array(get_xyz_rpy_array_from_transform3d(target))
+ target_vel
)
target = pa.array(target.ravel(), type=pa.float32())
target = pk.Transform3d(
pos=target[:3],
rot=pk.transforms.euler_angles_to_matrix(
torch.tensor(target[3:6]),
convention="XYZ",
),
target_vel = event["value"].to_numpy() # expect 100ms
# Apply Forward Kinematics
target = robot.compute_fk(last_known_state)
target = (
np.array(get_xyz_rpy_array_from_transform3d(target).detach())
+ target_vel
)
target = pa.array(target.ravel(), type=pa.float32())
target = pk.Transform3d(
pos=target[:3],
rot=pk.transforms.euler_angles_to_matrix(
torch.tensor(target[3:6]),
convention="XYZ",
),
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
if solution is None:
print(
"No IK Solution for :",
target,
"skipping this frame.",
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
if solution is None:
print(
"No IK Solution for :",
target,
"skipping this frame.",
)
continue
solution = solution.numpy().ravel()
metadata["encoding"] = "jointstate"
last_known_state = solution
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
continue
solution = solution.numpy().ravel()
metadata["encoding"] = "jointstate"
last_known_state = solution
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
else:
match metadata["encoding"]:
case "xyzquat":
# Apply Inverse Kinematics
if last_known_state is not None:
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3],
rot=torch.tensor(target[3:7]),
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
metadata["encoding"] = "jointstate"
last_known_state = solution.numpy().ravel()
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3],
rot=torch.tensor(target[3:7]),
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
metadata["encoding"] = "jointstate"
last_known_state = solution.numpy().ravel()
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
case "xyzrpy":
# Apply Inverse Kinematics
if last_known_state is not None:
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3],
rot=pk.transforms.euler_angles_to_matrix(
torch.tensor(target[3:6]),
convention="XYZ",
),
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3],
rot=pk.transforms.euler_angles_to_matrix(
torch.tensor(target[3:6]),
convention="XYZ",
),
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
if solution is None:
print(
"No IK Solution for :",
target,
"skipping this frame.",
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
if solution is None:
print(
"No IK Solution for :",
target,
"skipping this frame.",
)
continue

solution = solution.numpy().ravel()
delta_angles = (
solution - last_known_state[: len(solution)]
) # match with dof

valid = np.all(
(delta_angles >= -np.pi) & (delta_angles <= np.pi),
continue

solution = solution.numpy().ravel()
delta_angles = (
solution - last_known_state[: len(solution)]
) # match with dof

valid = np.all(
(delta_angles >= -np.pi) & (delta_angles <= np.pi),
)
if not valid:
print(
"IK solution is not valid, as the rotation are too wide. skipping.",
)
if not valid:
print(
"IK solution is not valid, as the rotation are too wide. skipping.",
)
continue
metadata["encoding"] = "jointstate"
last_known_state = solution
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
continue
metadata["encoding"] = "jointstate"
last_known_state = solution
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
case "jointstate":
target = event["value"].to_numpy()
last_known_state = target


+ 1
- 1
node-hub/dora-qwen/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwen"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-qwen"
license = { text = "MIT" }


+ 4
- 0
node-hub/dora-qwen2-5-vl/dora_qwen2_5_vl/main.py View File

@@ -73,6 +73,10 @@ def generate(

messages = []

# If the texts is string, convert it to a list
if isinstance(texts, str):
texts = [texts]

for text in texts:
if text.startswith("<|system|>\n"):
messages.append(


+ 1
- 1
node-hub/dora-qwen2-5-vl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwen2-5-vl"
version = "0.3.11"
version = "0.3.12.post1"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-qwenvl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwenvl"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 3
- 2
node-hub/dora-rav1e/Cargo.toml View File

@@ -1,7 +1,7 @@
[package]
name = "dora-rav1e"
edition = "2021"
version = "0.3.11+fix1"
version = "0.3.12"
description.workspace = true
documentation.workspace = true
license = "BSD-2-Clause"
@@ -25,7 +25,8 @@ pyo3 = { workspace = true, features = [
"eyre",
"generate-import-lib",
], optional = true }
avif-serialize = "0.8.3"
avif-serialize = { version = "0.8.5" }
little_exif = { version = "0.6.9" }


[lib]


+ 80
- 19
node-hub/dora-rav1e/src/lib.rs View File

@@ -8,12 +8,16 @@
// PATENTS file, you can obtain it at www.aomedia.org/license/patent.

use std::env::var;
use std::vec;

use dora_node_api::arrow::array::AsArray;
use dora_node_api::arrow::datatypes::{UInt16Type, UInt8Type};
use dora_node_api::dora_core::config::DataId;
use dora_node_api::{DoraNode, Event, IntoArrow, Metadata, Parameter};
use dora_node_api::{DoraNode, Event, IntoArrow, Metadata, MetadataParameters, Parameter};
use eyre::{Context as EyreContext, Result};
use little_exif::exif_tag::ExifTag;
use little_exif::metadata::Metadata as ExifMetadata;
use little_exif::rational::uR64;
use log::warn;
use rav1e::color::{ColorDescription, MatrixCoefficients};
// Encode the same tiny blank frame 30 times
@@ -56,6 +60,25 @@ pub fn fill_zeros_toward_center_y_plane_in_place(y: &mut [u16], width: usize, he
}
}

fn metadata_to_exif(metadata: &MetadataParameters) -> Result<Vec<u8>> {
let mut metadata_exif = ExifMetadata::new();
metadata_exif.set_tag(ExifTag::Software("dora-rs".to_string()));
if let Some(Parameter::ListInt(focal_lengths)) = metadata.get("focal") {
metadata_exif.set_tag(ExifTag::FocalLength(
focal_lengths
.iter()
.map(|&f| uR64 {
nominator: f as u32,
denominator: 1,
})
.collect::<Vec<_>>(),
));
}

let vector = metadata_exif.as_u8_vec(little_exif::filetype::FileExtension::HEIF)?;
return Ok(vector);
}

fn bgr8_to_yuv420(bgr_data: Vec<u8>, width: usize, height: usize) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
let mut y_plane = vec![0; width * height];
let mut u_plane = vec![0; (width / 2) * (height / 2)];
@@ -107,6 +130,7 @@ fn get_yuv_planes(buffer: &[u8], width: usize, height: usize) -> (&[u8], &[u8],
(y_plane, u_plane, v_plane)
}

#[allow(clippy::too_many_arguments)]
fn send_yuv(
y: &[u8],
u: &[u8],
@@ -118,7 +142,7 @@ fn send_yuv(
id: DataId,
metadata: &mut Metadata,
output_encoding: &str,
) -> () {
) {
// Create a new Arrow array for the YUV420 data
let cfg = Config::new().with_encoder_config(enc.clone());
let mut ctx: Context<u8> = cfg.new_context().unwrap();
@@ -126,13 +150,13 @@ fn send_yuv(

let xdec = f.planes[0].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[0].copy_from_raw_u8(&y, stride, 1);
f.planes[0].copy_from_raw_u8(y, stride, 1);
let xdec = f.planes[1].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[1].copy_from_raw_u8(&u, stride, 1);
f.planes[1].copy_from_raw_u8(u, stride, 1);
let xdec = f.planes[2].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[2].copy_from_raw_u8(&v, stride, 1);
f.planes[2].copy_from_raw_u8(v, stride, 1);

match ctx.send_frame(f) {
Ok(_) => {}
@@ -159,9 +183,18 @@ fn send_yuv(
} else {
MatrixCoefficients::BT709
};
let data = avif_serialize::Aviffy::new()
let mut aviffy = avif_serialize::Aviffy::new();
aviffy
.set_chroma_subsampling((true, true))
.set_seq_profile(0)
.set_seq_profile(0);

let aviffy = if let Ok(exif) = metadata_to_exif(&metadata.parameters) {
aviffy.set_exif(exif)
} else {
&mut aviffy
};

let data = aviffy
.matrix_coefficients(match matrix_coefficients {
MatrixCoefficients::Identity => {
avif_serialize::constants::MatrixCoefficients::Rgb
@@ -289,12 +322,9 @@ pub fn lib_main() -> Result<()> {
chroma_sampling: color::ChromaSampling::Cs420,
..Default::default()
};
match encoding {
"mono16" => {
enc.bit_depth = 12;
enc.chroma_sampling = color::ChromaSampling::Cs400;
}
_ => {}
if encoding == "mono16" {
enc.bit_depth = 12;
enc.chroma_sampling = color::ChromaSampling::Cs400;
}

if encoding == "bgr8" {
@@ -320,9 +350,9 @@ pub fn lib_main() -> Result<()> {

let (y, u, v) = get_yuv_planes(buffer, width, height);
send_yuv(
&y,
&u,
&v,
y,
u,
v,
enc,
width,
height,
@@ -336,13 +366,13 @@ pub fn lib_main() -> Result<()> {
if let Some(buffer) = data.as_primitive_opt::<UInt16Type>() {
let mut buffer = buffer.values().to_vec();
if std::env::var("FILL_ZEROS")
.map(|s| s != "false")
.map(|s| s.to_lowercase() != "false")
.unwrap_or(true)
{
fill_zeros_toward_center_y_plane_in_place(&mut buffer, width, height);
}

let bytes: &[u8] = &bytemuck::cast_slice(&buffer);
let bytes: &[u8] = bytemuck::cast_slice(&buffer);

let cfg = Config::new().with_encoder_config(enc.clone());
let mut ctx: Context<u16> = cfg.new_context().unwrap();
@@ -370,7 +400,38 @@ pub fn lib_main() -> Result<()> {
let data = pkt.data;
match output_encoding.as_str() {
"avif" => {
warn!("avif encoding not supported for mono16");
metadata.parameters.insert(
"encoding".to_string(),
Parameter::String("avif".to_string()),
);

let mut aviffy = avif_serialize::Aviffy::new();
aviffy
.full_color_range(false)
.set_seq_profile(0)
.set_monochrome(true);

let aviffy = if let Ok(exif) =
metadata_to_exif(&metadata.parameters)
{
aviffy.set_exif(exif)
} else {
&mut aviffy
};

let data = aviffy.to_vec(
&data,
None,
enc.width as u32,
enc.height as u32,
enc.bit_depth as u8,
);

let arrow = data.into_arrow();

node.send_output(id, metadata.parameters.clone(), arrow)
.context("could not send output")
.unwrap();
}
_ => {
metadata.parameters.insert(


+ 1
- 1
node-hub/dora-rdt-1b/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-rdt-1b"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for RDT 1B"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-reachy2/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-reachy2"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-reachy2"
license = { text = "MIT" }


+ 2
- 1
node-hub/dora-rerun/Cargo.toml View File

@@ -17,7 +17,7 @@ python = ["pyo3"]
dora-node-api = { workspace = true, features = ["tracing"] }
eyre = "0.6.8"
tokio = { version = "1.24.2", features = ["rt"] }
rerun = { version = "0.23.1", features = ["web_viewer", "image"] }
rerun = { version = "0.23.3", features = ["web_viewer", "image"] }
ndarray = "0.15.6"
k = "0.32"
pyo3 = { workspace = true, features = [
@@ -28,6 +28,7 @@ pyo3 = { workspace = true, features = [
], optional = true }
bytemuck = "1.20.0"
rand = "0.9.1"
pinyin = "0.10.0"


[lib]


+ 3
- 2
node-hub/dora-rerun/pyproject.toml View File

@@ -8,7 +8,7 @@ dynamic = ["version"]
license = { text = "MIT" }
requires-python = ">=3.10"

dependencies = ["rerun-loader-urdf", 'rerun_sdk==0.23.1']
dependencies = ["rerun-loader-urdf", 'rerun_sdk>=0.23.1', "robot-descriptions"]

scripts = { "dora-rerun" = "dora_rerun:py_main" }

@@ -28,4 +28,5 @@ extend-select = [
]

[tool.uv.sources]
rerun-loader-urdf = { git = "https://github.com/haixuanTao/rerun-loader-python-example-urdf.git", branch = "patch-2" }
rerun-loader-urdf = { git = "https://github.com/dora-rs/rerun-loader-python-urdf.git" }
robot-descriptions = { git = "https://github.com/robot-descriptions/robot_descriptions.py.git" }

+ 25
- 9
node-hub/dora-rerun/src/lib.rs View File

@@ -12,6 +12,7 @@ use dora_node_api::{
};
use eyre::{bail, eyre, Context, Result};

use pinyin::ToPinyin;
use rerun::{
components::ImageBuffer, external::log::warn, ImageFormat, Points2D, Points3D, SpawnOptions,
};
@@ -160,8 +161,7 @@ pub fn lib_main() -> Result<()> {
let buffer: Vec<u8> =
buffer.chunks(3).flat_map(|x| [x[2], x[1], x[0]]).collect();
image_cache.insert(id.clone(), buffer.clone());
let image_buffer = ImageBuffer::try_from(buffer)
.context("Could not convert buffer to image buffer")?;
let image_buffer = ImageBuffer::from(buffer);
// let tensordata = ImageBuffer(buffer);

let image = rerun::Image::new(
@@ -174,8 +174,7 @@ pub fn lib_main() -> Result<()> {
let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
image_cache.insert(id.clone(), buffer.values().to_vec());
let buffer: &[u8] = buffer.values();
let image_buffer = ImageBuffer::try_from(buffer)
.context("Could not convert buffer to image buffer")?;
let image_buffer = ImageBuffer::from(buffer);

let image = rerun::Image::new(
image_buffer,
@@ -317,7 +316,24 @@ pub fn lib_main() -> Result<()> {
let buffer: StringArray = data.to_data().into();
buffer.iter().try_for_each(|string| -> Result<()> {
if let Some(str) = string {
rec.log(id.as_str(), &rerun::TextLog::new(str))
let chars = str.chars().collect::<Vec<_>>();
let mut new_string = vec![];
for char in chars {
// Check if the character is a Chinese character
if char.is_ascii() || char.is_control() {
new_string.push(char);
continue;
}
// If it is a Chinese character, replace it with its pinyin
if let Some(pinyin) = char.to_pinyin() {
for char in pinyin.with_tone().chars() {
new_string.push(char);
}
new_string.push(' ');
}
}
let pinyined_str = new_string.iter().collect::<String>();
rec.log(id.as_str(), &rerun::TextLog::new(pinyined_str))
.wrap_err("Could not log text")
} else {
Ok(())
@@ -385,12 +401,12 @@ pub fn lib_main() -> Result<()> {
// Get color or assign random color in cache
let color = color_cache.get(&id);
let color = if let Some(color) = color {
color.clone()
*color
} else {
let color =
rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());

color_cache.insert(id.clone(), color.clone());
color_cache.insert(id.clone(), color);
color
};
let dataid = id;
@@ -412,12 +428,12 @@ pub fn lib_main() -> Result<()> {
// Get color or assign random color in cache
let color = color_cache.get(&id);
let color = if let Some(color) = color {
color.clone()
*color
} else {
let color =
rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());

color_cache.insert(id.clone(), color.clone());
color_cache.insert(id.clone(), color);
color
};
let dataid = id;


+ 1
- 1
node-hub/dora-rerun/src/series.rs View File

@@ -7,7 +7,7 @@ pub fn update_series(rec: &RecordingStream, id: DataId, data: ArrowData) -> Resu
for (i, value) in series.iter().enumerate() {
rec.log(
format!("{}_{}", id.as_str(), i),
&rerun::Scalar::new(*value as f64),
&rerun::Scalars::new([*value]),
)
.wrap_err("could not log series")?;
}


Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save