Browse Source

Merge branch 'main' into dataset_record

pull/1041/head
Haixuan Xavier Tao GitHub 6 months ago
parent
commit
c430c307d0
No known key found for this signature in database GPG Key ID: B5690EEEBB952194
98 changed files with 3296 additions and 1109 deletions
  1. +1
    -1
      .github/workflows/ci.yml
  2. +29
    -10
      .github/workflows/node_hub_test.sh
  3. +0
    -2
      .github/workflows/pip-release.yml
  4. +82
    -53
      Cargo.lock
  5. +23
    -23
      Cargo.toml
  6. +54
    -0
      Changelog.md
  7. +16
    -10
      README.md
  8. +0
    -8
      apis/python/node/pyproject.toml
  9. +1
    -1
      apis/python/node/src/lib.rs
  10. +2
    -2
      apis/rust/node/src/node/arrow_utils.rs
  11. +1
    -0
      apis/rust/node/src/node/mod.rs
  12. +0
    -8
      binaries/cli/pyproject.toml
  13. +41
    -4
      binaries/cli/src/command/build/mod.rs
  14. +41
    -1
      binaries/cli/src/command/check.rs
  15. +66
    -0
      binaries/cli/src/command/coordinator.rs
  16. +91
    -0
      binaries/cli/src/command/daemon.rs
  17. +28
    -0
      binaries/cli/src/command/destroy.rs
  18. +28
    -4
      binaries/cli/src/command/graph.rs
  19. +1
    -0
      binaries/cli/src/command/graph/.gitignore
  20. +0
    -0
      binaries/cli/src/command/graph/mermaid-template.html
  21. +59
    -0
      binaries/cli/src/command/list.rs
  22. +46
    -1
      binaries/cli/src/command/logs.rs
  23. +96
    -52
      binaries/cli/src/command/mod.rs
  24. +21
    -0
      binaries/cli/src/command/new.rs
  25. +36
    -4
      binaries/cli/src/command/run.rs
  26. +15
    -0
      binaries/cli/src/command/runtime.rs
  27. +139
    -0
      binaries/cli/src/command/self_.rs
  28. +1
    -1
      binaries/cli/src/command/start/attach.rs
  29. +78
    -48
      binaries/cli/src/command/start/mod.rs
  30. +111
    -0
      binaries/cli/src/command/stop.rs
  31. +20
    -1
      binaries/cli/src/command/up.rs
  32. +117
    -0
      binaries/cli/src/common.rs
  33. +7
    -743
      binaries/cli/src/lib.rs
  34. +1
    -1
      binaries/coordinator/Cargo.toml
  35. +0
    -13
      binaries/daemon/src/spawn.rs
  36. +1
    -1
      examples/c++-ros2-dataflow/node-rust-api/main.cc
  37. +60
    -0
      examples/so101/Readme.md
  38. +48
    -0
      examples/so101/arm_gamepad_control.yml
  39. +33
    -0
      examples/so101/leader_follower.yml
  40. +3
    -3
      examples/speech-to-speech/README.md
  41. +70
    -0
      examples/urdf/vggt/franka.yml
  42. +68
    -0
      examples/urdf/vggt/kuka.yml
  43. +69
    -0
      examples/urdf/vggt/so_arm101.yml
  44. +59
    -0
      examples/urdf/vggt/z1.yml
  45. +54
    -0
      examples/vggt/depth-to-avif.yaml
  46. +0
    -8
      examples/vggt/depth.dora-session.yaml
  47. +34
    -0
      examples/vggt/image_saver.py
  48. +53
    -0
      examples/vggt/realsense-to-avif.yaml
  49. +1
    -1
      libraries/core/src/descriptor/validate.rs
  50. +1
    -1
      libraries/extensions/telemetry/tracing/src/telemetry.rs
  51. +1
    -1
      libraries/message/Cargo.toml
  52. +1
    -1
      node-hub/dora-argotranslate/pyproject.toml
  53. +1
    -1
      node-hub/dora-cotracker/pyproject.toml
  54. +1
    -1
      node-hub/dora-distil-whisper/pyproject.toml
  55. +1
    -1
      node-hub/dora-echo/pyproject.toml
  56. +1
    -1
      node-hub/dora-gradio/pyproject.toml
  57. +1
    -1
      node-hub/dora-internvl/pyproject.toml
  58. +1
    -1
      node-hub/dora-ios-lidar/pyproject.toml
  59. +1
    -1
      node-hub/dora-keyboard/pyproject.toml
  60. +1
    -1
      node-hub/dora-kokoro-tts/pyproject.toml
  61. +1
    -1
      node-hub/dora-microphone/pyproject.toml
  62. +1
    -1
      node-hub/dora-object-to-pose/Cargo.toml
  63. +1
    -1
      node-hub/dora-openai-server/pyproject.toml
  64. +1
    -1
      node-hub/dora-opus/pyproject.toml
  65. +1
    -1
      node-hub/dora-outtetts/pyproject.toml
  66. +1
    -1
      node-hub/dora-parler/pyproject.toml
  67. +1
    -2
      node-hub/dora-phi4/pyproject.toml
  68. +1309
    -0
      node-hub/dora-phi4/uv.lock
  69. +1
    -1
      node-hub/dora-piper/pyproject.toml
  70. +1
    -1
      node-hub/dora-pyaudio/pyproject.toml
  71. +1
    -1
      node-hub/dora-pyorbbecksdk/pyproject.toml
  72. +1
    -1
      node-hub/dora-pyrealsense/pyproject.toml
  73. +1
    -1
      node-hub/dora-qwen/pyproject.toml
  74. +4
    -0
      node-hub/dora-qwen2-5-vl/dora_qwen2_5_vl/main.py
  75. +1
    -1
      node-hub/dora-qwen2-5-vl/pyproject.toml
  76. +1
    -1
      node-hub/dora-qwenvl/pyproject.toml
  77. +3
    -2
      node-hub/dora-rav1e/Cargo.toml
  78. +80
    -19
      node-hub/dora-rav1e/src/lib.rs
  79. +1
    -1
      node-hub/dora-rdt-1b/pyproject.toml
  80. +1
    -1
      node-hub/dora-reachy2/pyproject.toml
  81. +6
    -8
      node-hub/dora-rerun/src/lib.rs
  82. +1
    -1
      node-hub/dora-rerun/src/series.rs
  83. +1
    -1
      node-hub/dora-rerun/src/urdf.rs
  84. +8
    -8
      node-hub/dora-rustypot/src/lib.rs
  85. +1
    -1
      node-hub/dora-sam2/pyproject.toml
  86. +1
    -1
      node-hub/dora-ugv/pyproject.toml
  87. +1
    -1
      node-hub/dora-vad/pyproject.toml
  88. +37
    -24
      node-hub/dora-vggt/dora_vggt/main.py
  89. +1
    -1
      node-hub/dora-yolo/pyproject.toml
  90. +1
    -1
      node-hub/feetech-client/pyproject.toml
  91. +1
    -1
      node-hub/gamepad/pyproject.toml
  92. +1
    -1
      node-hub/llama-factory-recorder/pyproject.toml
  93. +3
    -2
      node-hub/openai-proxy-server/src/main.rs
  94. +1
    -1
      node-hub/opencv-plot/pyproject.toml
  95. +1
    -1
      node-hub/opencv-video-capture/pyproject.toml
  96. +1
    -1
      node-hub/pyarrow-assert/pyproject.toml
  97. +1
    -1
      node-hub/pyarrow-sender/pyproject.toml
  98. +1
    -1
      node-hub/terminal-input/pyproject.toml

+ 1
- 1
.github/workflows/ci.yml View File

@@ -197,7 +197,7 @@ jobs:
required-ros-distributions: humble
- run: 'source /opt/ros/humble/setup.bash && echo AMENT_PREFIX_PATH=${AMENT_PREFIX_PATH} >> "$GITHUB_ENV"'
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
- name: Install pyarrow


+ 29
- 10
.github/workflows/node_hub_test.sh View File

@@ -1,6 +1,9 @@
#!/bin/bash
set -euo

# Check if we are running in a GitHub Actions environment
CI=${GITHUB_ACTIONS:-false}

# List of ignored modules
ignored_folders=("dora-parler" "dora-opus" "dora-internvl" "dora-magma")

@@ -14,6 +17,30 @@ dir=$(pwd)
base_dir=$(basename "$dir")

export GIT_LFS_SKIP_SMUDGE=1
# Large node list requiring space cleanup
large_node=("dora-phi4")

export PYTEST_ADDOPTS="-x"

# Check if the current directory is in the large node list and if we're in the CI environment
if [[ " ${large_node[@]} " =~ " ${base_dir} " ]] && [[ "$CI" == "true" ]]; then
echo "Running cleanup for $base_dir..."
sudo rm -rf /opt/hostedtoolcache/CodeQL || :
# 1.4GB
sudo rm -rf /opt/hostedtoolcache/go || :
# 489MB
sudo rm -rf /opt/hostedtoolcache/PyPy || :
# 376MB
sudo rm -rf /opt/hostedtoolcache/node || :
# Remove Web browser packages
sudo apt purge -y \
firefox \
google-chrome-stable \
microsoft-edge-stable
sudo rm -rf /usr/local/lib/android/
sudo rm -rf /usr/share/dotnet/
sudo rm -rf /opt/ghc/
fi

# Check if the directory name is in the ignored list
if [[ " ${ignored_folders[@]} " =~ " ${base_dir} " ]]; then
@@ -62,16 +89,8 @@ else
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
maturin publish --skip-existing
fi
# x86_64-apple-darwin
rustup target add x86_64-apple-darwin
maturin build --target x86_64-apple-darwin --zig --release
# If GITHUB_EVENT_NAME is release or workflow_dispatch, publish the wheel
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
maturin publish --target x86_64-apple-darwin --skip-existing --zig
fi

elif [[ "$(uname)" = "Linux" ]]; then
elif [[ "$(uname)" = "Linux" ]] || [[ "$CI" == "false" ]]; then
if [ -f "$dir/Cargo.toml" ]; then
echo "Running build and tests for Rust project in $dir..."
cargo check
@@ -96,7 +115,7 @@ else
else
uv run pytest
fi
if [ "$GITHUB_EVENT_NAME" == "release" ] || [ "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]; then
if [ "${GITHUB_EVENT_NAME:-false}" == "release" ] || [ "${GITHUB_EVENT_NAME:-false}" == "workflow_dispatch" ]; then
uv build
uv publish --check-url https://pypi.org/simple
fi


+ 0
- 2
.github/workflows/pip-release.yml View File

@@ -192,8 +192,6 @@ jobs:
fail-fast: false
matrix:
platform:
- runner: macos-13
target: x86_64
- runner: macos-13
target: aarch64
repository:


+ 82
- 53
Cargo.lock View File

@@ -1165,9 +1165,9 @@ dependencies = [

[[package]]
name = "avif-serialize"
version = "0.8.3"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98922d6a4cfbcb08820c69d8eeccc05bb1f29bfa06b4f5b1dbfe9a868bd7608e"
checksum = "2ea8ef51aced2b9191c08197f55450d830876d9933f8f48a429b354f1d496b42"
dependencies = [
"arrayvec",
]
@@ -1303,7 +1303,7 @@ dependencies = [

[[package]]
name = "benchmark-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -1316,7 +1316,7 @@ dependencies = [

[[package]]
name = "benchmark-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -2089,7 +2089,7 @@ dependencies = [

[[package]]
name = "communication-layer-pub-sub"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"flume 0.10.14",
"zenoh 0.7.0-rc",
@@ -2097,7 +2097,7 @@ dependencies = [

[[package]]
name = "communication-layer-request-reply"
version = "0.3.11"
version = "0.3.12"

[[package]]
name = "concat-idents"
@@ -2279,6 +2279,21 @@ dependencies = [
"libc",
]

[[package]]
name = "crc"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
dependencies = [
"crc-catalog",
]

[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"

[[package]]
name = "crc32fast"
version = "1.4.2"
@@ -2911,7 +2926,7 @@ dependencies = [

[[package]]
name = "dora-arrow-convert"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"chrono",
@@ -2922,7 +2937,7 @@ dependencies = [

[[package]]
name = "dora-cli"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bat",
"clap 4.5.32",
@@ -2966,7 +2981,7 @@ dependencies = [

[[package]]
name = "dora-coordinator"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"ctrlc",
"dora-core",
@@ -2987,7 +3002,7 @@ dependencies = [

[[package]]
name = "dora-core"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-message",
"dunce",
@@ -3011,7 +3026,7 @@ dependencies = [

[[package]]
name = "dora-daemon"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"async-trait",
@@ -3048,7 +3063,7 @@ dependencies = [

[[package]]
name = "dora-dav1d"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bitstream-io",
"bytemuck",
@@ -3062,7 +3077,7 @@ dependencies = [

[[package]]
name = "dora-download"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"reqwest",
@@ -3092,7 +3107,7 @@ dependencies = [

[[package]]
name = "dora-kit-car"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"dotenv",
@@ -3106,7 +3121,7 @@ dependencies = [

[[package]]
name = "dora-message"
version = "0.5.0-alpha"
version = "0.5.0"
dependencies = [
"aligned-vec",
"arrow-data",
@@ -3127,7 +3142,7 @@ dependencies = [

[[package]]
name = "dora-metrics"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"opentelemetry 0.29.1",
@@ -3148,7 +3163,7 @@ dependencies = [

[[package]]
name = "dora-node-api"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3173,7 +3188,7 @@ dependencies = [

[[package]]
name = "dora-node-api-c"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow-array",
"dora-node-api",
@@ -3183,7 +3198,7 @@ dependencies = [

[[package]]
name = "dora-node-api-cxx"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"cxx",
@@ -3201,7 +3216,7 @@ dependencies = [

[[package]]
name = "dora-node-api-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-cli",
@@ -3222,7 +3237,7 @@ dependencies = [

[[package]]
name = "dora-object-to-pose"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -3231,7 +3246,7 @@ dependencies = [

[[package]]
name = "dora-openai-proxy-server"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -3252,7 +3267,7 @@ dependencies = [

[[package]]
name = "dora-operator-api"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-arrow-convert",
"dora-operator-api-macros",
@@ -3261,14 +3276,14 @@ dependencies = [

[[package]]
name = "dora-operator-api-c"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-operator-api-types",
]

[[package]]
name = "dora-operator-api-cxx"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"cxx",
"cxx-build",
@@ -3277,7 +3292,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-macros"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"proc-macro2",
"quote",
@@ -3286,7 +3301,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3302,7 +3317,7 @@ dependencies = [

[[package]]
name = "dora-operator-api-types"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-arrow-convert",
@@ -3311,12 +3326,13 @@ dependencies = [

[[package]]
name = "dora-rav1e"
version = "0.3.11+fix1"
version = "0.3.12"
dependencies = [
"avif-serialize",
"bytemuck",
"dora-node-api",
"eyre",
"little_exif",
"log",
"pyo3",
"rav1e",
@@ -3324,7 +3340,7 @@ dependencies = [

[[package]]
name = "dora-record"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -3336,7 +3352,7 @@ dependencies = [

[[package]]
name = "dora-rerun"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bytemuck",
"dora-node-api",
@@ -3351,7 +3367,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"array-init",
"dora-daemon",
@@ -3374,7 +3390,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge-msg-gen"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"anyhow",
"glob",
@@ -3390,7 +3406,7 @@ dependencies = [

[[package]]
name = "dora-ros2-bridge-python"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"arrow",
"dora-ros2-bridge",
@@ -3404,7 +3420,7 @@ dependencies = [

[[package]]
name = "dora-runtime"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"aligned-vec",
"arrow",
@@ -3443,7 +3459,7 @@ dependencies = [

[[package]]
name = "dora-tracing"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"eyre",
"opentelemetry 0.18.0",
@@ -6378,6 +6394,19 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"

[[package]]
name = "little_exif"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ebaa2b90127fb33dcf2ec1f5be8b496e92d5d1736789decca8eb2798c52f2e7"
dependencies = [
"crc",
"log",
"miniz_oxide",
"paste",
"quick-xml 0.37.5",
]

[[package]]
name = "llguidance"
version = "0.7.0"
@@ -6738,9 +6767,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"

[[package]]
name = "miniz_oxide"
version = "0.8.5"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
"simd-adler32",
@@ -6982,7 +7011,7 @@ dependencies = [

[[package]]
name = "multiple-daemons-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -6993,14 +7022,14 @@ dependencies = [

[[package]]
name = "multiple-daemons-example-operator"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-operator-api",
]

[[package]]
name = "multiple-daemons-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -9028,9 +9057,9 @@ dependencies = [

[[package]]
name = "quick-xml"
version = "0.37.2"
version = "0.37.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "165859e9e55f79d67b96c5d96f4e88b6f2695a1972849c15a6a3f5c59fc2c003"
checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb"
dependencies = [
"memchr",
]
@@ -11017,7 +11046,7 @@ checksum = "03251193000f4bd3b042892be858ee50e8b3719f2b08e5833ac4353724632430"

[[package]]
name = "receive_data"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"chrono",
"dora-node-api",
@@ -11457,7 +11486,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11468,7 +11497,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-sink"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11476,7 +11505,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-sink-dynamic"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11484,7 +11513,7 @@ dependencies = [

[[package]]
name = "rust-dataflow-example-status-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -11503,7 +11532,7 @@ dependencies = [

[[package]]
name = "rust-ros2-dataflow-example-node"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"dora-ros2-bridge",
@@ -11999,7 +12028,7 @@ dependencies = [
"hyper 1.6.0",
"indicatif",
"log",
"quick-xml 0.37.2",
"quick-xml 0.37.5",
"regex",
"reqwest",
"self-replace",
@@ -12347,7 +12376,7 @@ dependencies = [

[[package]]
name = "shared-memory-server"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"bincode",
"eyre",
@@ -13173,7 +13202,7 @@ dependencies = [

[[package]]
name = "terminal-print"
version = "0.3.11"
version = "0.3.12"
dependencies = [
"dora-node-api",
"eyre",
@@ -14604,7 +14633,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "896fdafd5d28145fce7958917d69f2fd44469b1d4e861cb5961bcbeebc6d1484"
dependencies = [
"proc-macro2",
"quick-xml 0.37.2",
"quick-xml 0.37.5",
"quote",
]



+ 23
- 23
Cargo.toml View File

@@ -50,37 +50,37 @@ members = [
[workspace.package]
edition = "2021"
# Make sure to also bump `apis/node/python/__init__.py` version.
version = "0.3.11"
version = "0.3.12"
description = "`dora` goal is to be a low latency, composable, and distributed data flow."
documentation = "https://dora-rs.ai"
license = "Apache-2.0"
repository = "https://github.com/dora-rs/dora/"

[workspace.dependencies]
dora-node-api = { version = "0.3.11", path = "apis/rust/node", default-features = false }
dora-node-api-python = { version = "0.3.11", path = "apis/python/node", default-features = false }
dora-operator-api = { version = "0.3.11", path = "apis/rust/operator", default-features = false }
dora-operator-api-macros = { version = "0.3.11", path = "apis/rust/operator/macros" }
dora-operator-api-types = { version = "0.3.11", path = "apis/rust/operator/types" }
dora-operator-api-python = { version = "0.3.11", path = "apis/python/operator" }
dora-operator-api-c = { version = "0.3.11", path = "apis/c/operator" }
dora-node-api-c = { version = "0.3.11", path = "apis/c/node" }
dora-core = { version = "0.3.11", path = "libraries/core" }
dora-arrow-convert = { version = "0.3.11", path = "libraries/arrow-convert" }
dora-tracing = { version = "0.3.11", path = "libraries/extensions/telemetry/tracing" }
dora-metrics = { version = "0.3.11", path = "libraries/extensions/telemetry/metrics" }
dora-download = { version = "0.3.11", path = "libraries/extensions/download" }
shared-memory-server = { version = "0.3.11", path = "libraries/shared-memory-server" }
communication-layer-request-reply = { version = "0.3.11", path = "libraries/communication-layer/request-reply" }
dora-cli = { version = "0.3.11", path = "binaries/cli" }
dora-runtime = { version = "0.3.11", path = "binaries/runtime" }
dora-daemon = { version = "0.3.11", path = "binaries/daemon" }
dora-coordinator = { version = "0.3.11", path = "binaries/coordinator" }
dora-ros2-bridge = { version = "0.3.11", path = "libraries/extensions/ros2-bridge" }
dora-ros2-bridge-msg-gen = { version = "0.3.11", path = "libraries/extensions/ros2-bridge/msg-gen" }
dora-node-api = { version = "0.3.12", path = "apis/rust/node", default-features = false }
dora-node-api-python = { version = "0.3.12", path = "apis/python/node", default-features = false }
dora-operator-api = { version = "0.3.12", path = "apis/rust/operator", default-features = false }
dora-operator-api-macros = { version = "0.3.12", path = "apis/rust/operator/macros" }
dora-operator-api-types = { version = "0.3.12", path = "apis/rust/operator/types" }
dora-operator-api-python = { version = "0.3.12", path = "apis/python/operator" }
dora-operator-api-c = { version = "0.3.12", path = "apis/c/operator" }
dora-node-api-c = { version = "0.3.12", path = "apis/c/node" }
dora-core = { version = "0.3.12", path = "libraries/core" }
dora-arrow-convert = { version = "0.3.12", path = "libraries/arrow-convert" }
dora-tracing = { version = "0.3.12", path = "libraries/extensions/telemetry/tracing" }
dora-metrics = { version = "0.3.12", path = "libraries/extensions/telemetry/metrics" }
dora-download = { version = "0.3.12", path = "libraries/extensions/download" }
shared-memory-server = { version = "0.3.12", path = "libraries/shared-memory-server" }
communication-layer-request-reply = { version = "0.3.12", path = "libraries/communication-layer/request-reply" }
dora-cli = { version = "0.3.12", path = "binaries/cli" }
dora-runtime = { version = "0.3.12", path = "binaries/runtime" }
dora-daemon = { version = "0.3.12", path = "binaries/daemon" }
dora-coordinator = { version = "0.3.12", path = "binaries/coordinator" }
dora-ros2-bridge = { version = "0.3.12", path = "libraries/extensions/ros2-bridge" }
dora-ros2-bridge-msg-gen = { version = "0.3.12", path = "libraries/extensions/ros2-bridge/msg-gen" }
dora-ros2-bridge-python = { path = "libraries/extensions/ros2-bridge/python" }
# versioned independently from the other dora crates
dora-message = { version = "0.5.0-alpha", path = "libraries/message" }
dora-message = { version = "0.5.0", path = "libraries/message" }
arrow = { version = "54.2.1" }
arrow-schema = { version = "54.2.1" }
arrow-data = { version = "54.2.1" }


+ 54
- 0
Changelog.md View File

@@ -1,5 +1,59 @@
# Changelog

## v0.3.12 (2025-06-30)

## What's Changed

- Implemented dora-cotracker node by @ShashwatPatil in https://github.com/dora-rs/dora/pull/931
- Minor fix and add boxes2d example to facebook/cotracker by @haixuanTao in https://github.com/dora-rs/dora/pull/950
- Update Rust crate tokio to v1.44.2 [SECURITY] by @renovate in https://github.com/dora-rs/dora/pull/951
- Post 3.11 release fix by @haixuanTao in https://github.com/dora-rs/dora/pull/954
- Bump crossbeam-channel from 0.5.14 to 0.5.15 by @dependabot in https://github.com/dora-rs/dora/pull/959
- Added E ruff flag for pydocstyle by @7SOMAY in https://github.com/dora-rs/dora/pull/958
- Revert "Added E ruff flag for better code quality [skip ci]" by @haixuanTao in https://github.com/dora-rs/dora/pull/968
- Ease of use changes in benches for issue #957 by @Ignavar in https://github.com/dora-rs/dora/pull/969
- Reachy cotracker by @haixuanTao in https://github.com/dora-rs/dora/pull/972
- Improve rav1e by @haixuanTao in https://github.com/dora-rs/dora/pull/974
- Fix pyrealsense by @haixuanTao in https://github.com/dora-rs/dora/pull/973
- Added Self Uninstall Command by @Shar-jeel-Sajid in https://github.com/dora-rs/dora/pull/944
- Improve benchmark implementation & Add warning for discarding events by @Mivik in https://github.com/dora-rs/dora/pull/971
- docs: Updated README: Added comprehensive usage documentation with vi… by @LeonRust in https://github.com/dora-rs/dora/pull/983
- Fix rerun-viewer example. by @francocipollone in https://github.com/dora-rs/dora/pull/989
- docs: add license badge by @Radovenchyk in https://github.com/dora-rs/dora/pull/996
- Disable sccache for `musllinux` jobs by @haixuanTao in https://github.com/dora-rs/dora/pull/1000
- Remove unused sysinfo monitor by @Mivik in https://github.com/dora-rs/dora/pull/1007
- Refactor Python CUDA IPC API by @Mivik in https://github.com/dora-rs/dora/pull/1002
- fix terminal not printing stdout on nvml warning by @haixuanTao in https://github.com/dora-rs/dora/pull/1008
- Fix issue #1006: [Brief description of the fix] by @sohamukute in https://github.com/dora-rs/dora/pull/1013
- Improving so100 usability by @haixuanTao in https://github.com/dora-rs/dora/pull/988
- Add dora-mediapipe node for quick human pose estimation by @haixuanTao in https://github.com/dora-rs/dora/pull/986
- Bump torch to 2.7 by @haixuanTao in https://github.com/dora-rs/dora/pull/1015
- refactor(tracing): use builder style by @sjfhsjfh in https://github.com/dora-rs/dora/pull/1009
- Fix spawning runtime through python when it is installed with pip by @haixuanTao in https://github.com/dora-rs/dora/pull/1011
- chore(deps): update dependency numpy to v2 by @renovate in https://github.com/dora-rs/dora/pull/1014
- Fix error when multiple visualization key is active and when urdf_transform env variable is not present by @haixuanTao in https://github.com/dora-rs/dora/pull/1016
- Update pyrealsense2 Dependencies for L515 Support and Fix README wget Link by @kingchou007 in https://github.com/dora-rs/dora/pull/1021
- Minor fix for mujoco sim by @haixuanTao in https://github.com/dora-rs/dora/pull/1023
- dora-mujoco simulation node with example for controlling any arm by @ShashwatPatil in https://github.com/dora-rs/dora/pull/1012
- fix ros CI/CD by @haixuanTao in https://github.com/dora-rs/dora/pull/1027
- dora-vggt by @haixuanTao in https://github.com/dora-rs/dora/pull/1024
- Adding vision to openai server by @haixuanTao in https://github.com/dora-rs/dora/pull/1025
- Revert "Adding vision to openai server" by @haixuanTao in https://github.com/dora-rs/dora/pull/1031
- Expose AllInputClosed message as a Stop message by @haixuanTao in https://github.com/dora-rs/dora/pull/1026
- Add support for git repository sources for nodes by @phil-opp in https://github.com/dora-rs/dora/pull/901
- Adding vision to rust openai proxy server by @haixuanTao in https://github.com/dora-rs/dora/pull/1033
- Add automatic robot descriptions URDF retrieval from https://github.com/robot-descriptions/robot_descriptions.py by @haixuanTao in https://github.com/dora-rs/dora/pull/1032

## New Contributors

- @Mivik made their first contribution in https://github.com/dora-rs/dora/pull/971
- @francocipollone made their first contribution in https://github.com/dora-rs/dora/pull/989
- @sohamukute made their first contribution in https://github.com/dora-rs/dora/pull/1013
- @sjfhsjfh made their first contribution in https://github.com/dora-rs/dora/pull/1009
- @kingchou007 made their first contribution in https://github.com/dora-rs/dora/pull/1021

**Full Changelog**: https://github.com/dora-rs/dora/compare/v0.3.11...v0.3.12

## v0.3.11 (2025-04-07)

## What's Changed


+ 16
- 10
README.md View File

@@ -62,6 +62,8 @@
<details open>
<summary><b>2025</b></summary>

- \[07/25\] Added Kornia rust nodes in the hub for V4L / Gstreamer cameras and Sobel image processing.
- \[06/25\] Add support for git based node, dora-vggt for multi-camera depth estimation, and adding robot_descriptions_py as a default way to get urdfs within dora.
- \[05/25\] Add support for dora-pytorch-kinematics for fk and ik, dora-mediapipe for pose estimation, dora-rustypot for rust serialport read/write, points2d and points3d visualization in rerun.
- \[04/25\] Add support for dora-cotracker to track any point on a frame, dora-rav1e AV1 encoding up to 12bit and dora-dav1d AV1 decoding,
- \[03/25\] Add support for dora async Python.
@@ -74,16 +76,16 @@

## Support Matrix

| | dora-rs |
| --------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **APIs** | Python >= 3.7 including sync ⭐✅ <br> Rust ✅<br> C/C++ 🆗 <br>ROS2 >= Foxy 🆗 |
| **OS** | Linux: Arm 32 ⭐✅ Arm 64 ⭐✅ x64_86 ⭐✅ <br>MacOS: Arm 64 ⭐✅ x64_86 ✅<br>Windows: x64_86 🆗 <br>WSL: x64_86 🆗 <br> Android: 🛠️ (Blocked by: https://github.com/elast0ny/shared_memory/issues/32) <br> IOS: 🛠️ |
| **Message Format** | Arrow ✅ <br> Standard Specification 🛠️ |
| **Local Communication** | Shared Memory ✅ <br> [Cuda IPC](https://arrow.apache.org/docs/python/api/cuda.html) 📐 |
| **Remote Communication** | [Zenoh](https://zenoh.io/) 📐 |
| **Metrics, Tracing, and Logging** | Opentelemetry 📐 |
| **Configuration** | YAML ✅ |
| **Package Manager** | [pip](https://pypi.org/): Python Node ✅ Rust Node ✅ C/C++ Node 🛠️ <br>[cargo](https://crates.io/): Rust Node ✅ |
| | dora-rs |
| --------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| **APIs** | Python >= 3.7 including sync ⭐✅ <br> Rust ✅<br> C/C++ 🆗 <br>ROS2 >= Foxy 🆗 |
| **OS** | Linux: Arm 32 ⭐✅ Arm 64 ⭐✅ x64_86 ⭐✅ <br>MacOS: Arm 64 ⭐✅ <br>Windows: x64_86 🆗 <br>WSL: x64_86 🆗 <br> Android: 🛠️ (Blocked by: https://github.com/elast0ny/shared_memory/issues/32) <br> IOS: 🛠️ |
| **Message Format** | Arrow ✅ <br> Standard Specification 🛠️ |
| **Local Communication** | Shared Memory ✅ <br> [Cuda IPC](https://arrow.apache.org/docs/python/api/cuda.html) 📐 |
| **Remote Communication** | [Zenoh](https://zenoh.io/) 📐 |
| **Metrics, Tracing, and Logging** | Opentelemetry 📐 |
| **Configuration** | YAML ✅ |
| **Package Manager** | [pip](https://pypi.org/): Python Node ✅ Rust Node ✅ C/C++ Node 🛠️ <br>[cargo](https://crates.io/): Rust Node ✅ |

> - ⭐ = Recommended
> - ✅ = First Class Support
@@ -102,6 +104,8 @@
| Camera | [PyOrbbeckSDK](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyorbbecksdk) | 📐 | Image and depth from Orbbeck Camera | ![Downloads](https://img.shields.io/pypi/dm/dora-pyorbbecksdk?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyorbbecksdk?label=%20) |
| Camera | [PyRealsense](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyrealsense) | Linux🆗 <br> Mac🛠️ | Image and depth from Realsense | ![Downloads](https://img.shields.io/pypi/dm/dora-pyrealsense?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyrealsense?label=%20) |
| Camera | [OpenCV Video Capture](https://github.com/dora-rs/dora/blob/main/node-hub/opencv-video-capture) | ✅ | Image stream from OpenCV Camera | ![Downloads](https://img.shields.io/pypi/dm/opencv-video-capture?label=%20) | ![License](https://img.shields.io/pypi/l/opencv-video-capture?label=%20) |
| Camera | [Kornia V4L Capture](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-v4l-capture) | ✅ | Video stream for Linux Camera (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |
| Camera | [Kornia GST Capture](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-gst-capture) | ✅ | Video Capture using Gstreamer (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |
| Peripheral | [Keyboard](https://github.com/dora-rs/dora/blob/main/node-hub/dora-keyboard) | ✅ | Keyboard char listener | ![Downloads](https://img.shields.io/pypi/dm/dora-keyboard?label=%20) | ![License](https://img.shields.io/pypi/l/dora-keyboard?label=%20) |
| Peripheral | [Microphone](https://github.com/dora-rs/dora/blob/main/node-hub/dora-microphone) | ✅ | Audio from microphone | ![Downloads](https://img.shields.io/pypi/dm/dora-microphone?label=%20) | ![License](https://img.shields.io/pypi/l/dora-microphone?label=%20) |
| Peripheral | [PyAudio(Speaker)](https://github.com/dora-rs/dora/blob/main/node-hub/dora-pyaudio) | ✅ | Output audio from speaker | ![Downloads](https://img.shields.io/pypi/dm/dora-pyaudio?label=%20) | ![License](https://img.shields.io/pypi/l/dora-pyaudio?label=%20) |
@@ -134,6 +138,7 @@
| Simulator | [Mujoco](https://github.com/dora-rs/dora-lerobot/blob/main/node-hub/mujoco-client) | 📐 | Mujoco Simulator | | |
| Simulator | [Carla](https://github.com/dora-rs/dora-drives) | 📐 | Carla Simulator | | |
| Simulator | [Gymnasium](https://github.com/dora-rs/dora-lerobot/blob/main/gym_dora) | 📐 | Experimental OpenAI Gymnasium bridge | | |
| Image Processing | [Kornia Sobel Operator](https://github.com/kornia/dora-nodes-hub/tree/main/kornia-imgproc-sobel) | ✅ | Kornia image processing Sobel operator (rust) | | ![License](https://img.shields.io/badge/license-Apache%202-blue) |

## Examples

@@ -144,6 +149,7 @@
| Vision | [Vision Language Model(VLM)](https://github.com/dora-rs/dora/blob/main/examples/vlm) | Use a VLM to understand images. | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fvlm&label=%20) |
| Vision | [YOLO](https://github.com/dora-rs/dora/blob/main/examples/python-dataflow) | Use YOLO to detect objects within an image. | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fpython-dataflow&label=%20) |
| Vision | [Camera](https://github.com/dora-rs/dora/blob/main/examples/camera) | Simple webcam plot example | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fcamera&label=%20) |
| Vision | [Image Processing](https://github.com/kornia/kornia-rs/tree/main/examples/dora) | Multi camera image processing | |
| Model Training | [Piper RDT](https://github.com/dora-rs/dora/blob/main/examples/piper) | Piper RDT Pipeline | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fpiper&label=%20) |
| Model Training | [LeRobot - Alexander Koch](https://raw.githubusercontent.com/dora-rs/dora-lerobot/refs/heads/main/README.md) | Training Alexander Koch Low Cost Robot with LeRobot | ![License](https://img.shields.io/github/last-commit/dora-rs/dora-lerobot?path=robots&label=%20) |
| ROS2 | [C++ ROS2 Example](https://github.com/dora-rs/dora/blob/main/examples/c++-ros2-dataflow) | Example using C++ ROS2 | ![License](https://img.shields.io/github/last-commit/dora-rs/dora?path=examples%2Fc%2b%2b-ros2-dataflow&label=%20) |


+ 0
- 8
apis/python/node/pyproject.toml View File

@@ -22,11 +22,3 @@ extend-select = [
"D", # pydocstyle
"UP",
]

[tool.maturin.target.x86_64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

[tool.maturin.target.aarch64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

+ 1
- 1
apis/python/node/src/lib.rs View File

@@ -381,7 +381,7 @@ pub fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
#[pyfunction]
#[pyo3(signature = (dataflow_path, uv=None))]
pub fn run(dataflow_path: String, uv: Option<bool>) -> eyre::Result<()> {
dora_cli::command::run(dataflow_path, uv.unwrap_or_default())
dora_cli::run_func(dataflow_path, uv.unwrap_or_default())
}

#[pymodule]


+ 2
- 2
apis/rust/node/src/node/arrow_utils.rs View File

@@ -11,7 +11,7 @@ fn required_data_size_inner(array: &ArrayData, next_offset: &mut usize) {
for (buffer, spec) in array.buffers().iter().zip(&layout.buffers) {
// consider alignment padding
if let BufferSpec::FixedWidth { alignment, .. } = spec {
*next_offset = (*next_offset + alignment - 1) / alignment * alignment;
*next_offset = (*next_offset).div_ceil(*alignment) * alignment;
}
*next_offset += buffer.len();
}
@@ -42,7 +42,7 @@ fn copy_array_into_sample_inner(
);
// add alignment padding
if let BufferSpec::FixedWidth { alignment, .. } = spec {
*next_offset = (*next_offset + alignment - 1) / alignment * alignment;
*next_offset = (*next_offset).div_ceil(*alignment) * alignment;
}

target_buffer[*next_offset..][..len].copy_from_slice(buffer.as_slice());


+ 1
- 0
apis/rust/node/src/node/mod.rs View File

@@ -44,6 +44,7 @@ mod drop_stream;

pub const ZERO_COPY_THRESHOLD: usize = 4096;

#[allow(dead_code)]
enum TokioRuntime {
Runtime(Runtime),
Handle(Handle),


+ 0
- 8
binaries/cli/pyproject.toml View File

@@ -18,11 +18,3 @@ extend-select = [
"D", # pydocstyle
"UP",
]

[tool.maturin.target.x86_64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

[tool.maturin.target.aarch64-apple-darwin]
# macOS deployment target SDK version
macos-deployment-target = "14.5"

+ 41
- 4
binaries/cli/src/command/build/mod.rs View File

@@ -5,9 +5,13 @@ use dora_core::{
};
use dora_message::{descriptor::NodeSource, BuildId};
use eyre::Context;
use std::collections::BTreeMap;
use std::{collections::BTreeMap, net::IpAddr};

use crate::{connect_to_coordinator, resolve_dataflow, session::DataflowSession};
use super::{default_tracing, Executable};
use crate::{
common::{connect_to_coordinator, local_working_dir, resolve_dataflow},
session::DataflowSession,
};

use distributed::{build_distributed_dataflow, wait_until_dataflow_built};
use local::build_dataflow_locally;
@@ -16,9 +20,42 @@ mod distributed;
mod git;
mod local;

#[derive(Debug, clap::Args)]
/// Run build commands provided in the given dataflow.
pub struct Build {
    /// Path to the dataflow descriptor file
    #[clap(value_name = "PATH")]
    dataflow: String,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP")]
    coordinator_addr: Option<IpAddr>,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT")]
    coordinator_port: Option<u16>,
    /// Use UV to build nodes.
    // Was a plain `//` comment before, which clap ignores; `///` doc comments
    // become the `--uv` help text.
    #[clap(long, action)]
    uv: bool,
    /// Run build on local machine
    #[clap(long, action)]
    local: bool,
}

impl Executable for Build {
    fn execute(self) -> eyre::Result<()> {
        // Install the CLI's default tracing subscriber, then forward all
        // parsed arguments to the standalone `build` entry point.
        default_tracing()?;
        build(
            self.dataflow,
            self.coordinator_addr,
            self.coordinator_port,
            self.uv,
            self.local,
        )
    }
}

pub fn build(
dataflow: String,
coordinator_addr: Option<std::net::IpAddr>,
coordinator_addr: Option<IpAddr>,
coordinator_port: Option<u16>,
uv: bool,
force_local: bool,
@@ -104,7 +141,7 @@ pub fn build(
BuildKind::ThroughCoordinator {
mut coordinator_session,
} => {
let local_working_dir = super::local_working_dir(
let local_working_dir = local_working_dir(
&dataflow_path,
&dataflow_descriptor,
&mut *coordinator_session,


+ 41
- 1
binaries/cli/src/command/check.rs View File

@@ -1,11 +1,15 @@
use crate::connect_to_coordinator;
use super::{default_tracing, Executable};
use crate::{common::connect_to_coordinator, LOCALHOST};
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::descriptor::DescriptorExt;
use dora_core::{descriptor::Descriptor, topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT};
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context};
use std::{
io::{IsTerminal, Write},
net::SocketAddr,
};
use std::{net::IpAddr, path::PathBuf};
use termcolor::{Color, ColorChoice, ColorSpec, WriteColor};

pub fn check_environment(coordinator_addr: SocketAddr) -> eyre::Result<()> {
@@ -75,3 +79,39 @@ pub fn daemon_running(session: &mut TcpRequestReplyConnection) -> Result<bool, e

Ok(running)
}

#[derive(Debug, clap::Args)]
/// Check if the coordinator and the daemon are running.
pub struct Check {
    /// Path to the dataflow descriptor file (enables additional checks)
    #[clap(long, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
    dataflow: Option<PathBuf>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Check {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        // If a dataflow descriptor was given, validate it against its parent
        // directory first; the environment check runs in either case, so it
        // is hoisted out of the match (the original called it in both arms).
        if let Some(dataflow) = self.dataflow {
            let working_dir = dataflow
                .canonicalize()
                .context("failed to canonicalize dataflow path")?
                .parent()
                .ok_or_else(|| eyre::eyre!("dataflow path has no parent dir"))?
                .to_owned();
            Descriptor::blocking_read(&dataflow)?.check(&working_dir)?;
        }

        // Probe coordinator and daemon reachability.
        check_environment((self.coordinator_addr, self.coordinator_port).into())
    }
}

+ 66
- 0
binaries/cli/src/command/coordinator.rs View File

@@ -0,0 +1,66 @@
use super::Executable;
use crate::LISTEN_WILDCARD;
use dora_coordinator::Event;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, DORA_COORDINATOR_PORT_DEFAULT};

#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;

use eyre::Context;
use std::net::{IpAddr, SocketAddr};
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;

#[derive(Debug, clap::Args)]
/// Run coordinator
// Arguments for the long-running coordinator process: separate bind
// address/port pairs for daemon traffic and for CLI control traffic.
pub struct Coordinator {
    /// Network interface to bind to for daemon communication
    // LISTEN_WILDCARD — presumably the all-interfaces address; TODO confirm.
    #[clap(long, default_value_t = LISTEN_WILDCARD)]
    interface: IpAddr,
    /// Port number to bind to for daemon communication
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
    port: u16,
    /// Network interface to bind to for control communication
    #[clap(long, default_value_t = LISTEN_WILDCARD)]
    control_interface: IpAddr,
    /// Port number to bind to for control communication
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    control_port: u16,
    /// Suppresses all log output to stdout.
    #[clap(long)]
    quiet: bool,
}

impl Executable for Coordinator {
    fn execute(self) -> eyre::Result<()> {
        #[cfg(feature = "tracing")]
        {
            // Log to stdout at "info" (unless --quiet) and always to a file.
            let name = "dora-coordinator";
            let mut builder = TracingBuilder::new(name);
            if !self.quiet {
                builder = builder.with_stdout("info");
            }
            builder = builder.with_file(name, LevelFilter::INFO)?;
            builder
                .build()
                .wrap_err("failed to set up tracing subscriber")?;
        }

        // The coordinator is async; drive it on a dedicated multi-threaded
        // tokio runtime and block this thread until it exits.
        let rt = Builder::new_multi_thread()
            .enable_all()
            .build()
            .context("tokio runtime failed")?;
        rt.block_on(async {
            let bind = SocketAddr::new(self.interface, self.port);
            let bind_control = SocketAddr::new(self.control_interface, self.control_port);
            // No external events are injected into the coordinator here,
            // hence the empty event stream.
            let (port, task) =
                dora_coordinator::start(bind, bind_control, futures::stream::empty::<Event>())
                    .await?;
            if !self.quiet {
                println!("Listening for incoming daemon connection on {port}");
            }
            task.await
        })
        .context("failed to run dora-coordinator")
    }
}

+ 91
- 0
binaries/cli/src/command/daemon.rs View File

@@ -0,0 +1,91 @@
use super::Executable;
use crate::{common::handle_dataflow_result, session::DataflowSession};
use dora_core::topics::{
DORA_COORDINATOR_PORT_DEFAULT, DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT, LOCALHOST,
};

use dora_daemon::LogDestination;
#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;

use eyre::Context;
use std::{
net::{IpAddr, SocketAddr},
path::PathBuf,
};
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;

#[derive(Debug, clap::Args)]
/// Run daemon
pub struct Daemon {
    /// Unique identifier for the machine (required for distributed dataflows)
    #[clap(long)]
    machine_id: Option<String>,
    /// Local listen port for events such as dynamic nodes.
    // (Help-text grammar fixed; previously "for event such as dynamic node".)
    #[clap(long, default_value_t = DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT)]
    local_listen_port: u16,
    /// Address and port number of the dora coordinator
    #[clap(long, short, default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
    coordinator_port: u16,
    // Hidden flag: when set, the daemon directly runs the given dataflow
    // locally instead of connecting to a coordinator (see `execute`).
    #[clap(long, hide = true)]
    run_dataflow: Option<PathBuf>,
    /// Suppresses all log output to stdout.
    #[clap(long)]
    quiet: bool,
}

impl Executable for Daemon {
    fn execute(self) -> eyre::Result<()> {
        #[cfg(feature = "tracing")]
        {
            // The log file name includes the machine id so multiple daemons
            // sharing a filesystem do not clobber each other's logs.
            let name = "dora-daemon";
            let filename = self
                .machine_id
                .as_ref()
                .map(|id| format!("{name}-{id}"))
                .unwrap_or(name.to_string());
            let mut builder = TracingBuilder::new(name);
            if !self.quiet {
                builder = builder.with_stdout("info,zenoh=warn");
            }
            builder = builder.with_file(filename, LevelFilter::INFO)?;
            builder
                .build()
                .wrap_err("failed to set up tracing subscriber")?;
        }

        let rt = Builder::new_multi_thread()
            .enable_all()
            .build()
            .context("tokio runtime failed")?;
        rt.block_on(async {
            match self.run_dataflow {
                // Hidden mode (--run-dataflow): run a single local dataflow
                // directly, ignoring any remote coordinator address.
                Some(dataflow_path) => {
                    tracing::info!("Starting dataflow `{}`", dataflow_path.display());
                    if self.coordinator_addr != LOCALHOST {
                        tracing::info!(
                            "Not using coordinator addr {} as `run_dataflow` is for local dataflow only. Please use the `start` command for remote coordinator",
                            self.coordinator_addr
                        );
                    }
                    let dataflow_session =
                        DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;

                    let result = dora_daemon::Daemon::run_dataflow(&dataflow_path,
                        dataflow_session.build_id, dataflow_session.local_build, dataflow_session.session_id, false,
                        LogDestination::Tracing,
                    ).await?;
                    handle_dataflow_result(result, None)
                }
                // Normal mode: connect to the coordinator and serve dataflows.
                None => {
                    dora_daemon::Daemon::run(SocketAddr::new(self.coordinator_addr, self.coordinator_port), self.machine_id, self.local_listen_port).await
                }
            }
        })
        .context("failed to run dora-daemon")
    }
}

+ 28
- 0
binaries/cli/src/command/destroy.rs View File

@@ -0,0 +1,28 @@
use super::{default_tracing, up, Executable};
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use std::net::IpAddr;
use std::path::PathBuf;

#[derive(Debug, clap::Args)]
/// Destroy running coordinator and daemon. If some dataflows are still running, they will be stopped first.
pub struct Destroy {
    /// Use a custom configuration
    // Hidden option forwarded to `up::destroy`; presumably kept for
    // backward compatibility — TODO confirm.
    #[clap(long, hide = true)]
    config: Option<PathBuf>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Destroy {
    fn execute(self) -> eyre::Result<()> {
        // Initialize default CLI tracing before tearing anything down.
        default_tracing()?;
        // Delegate the actual teardown to the `up` module, which owns the
        // coordinator/daemon lifecycle logic.
        let coordinator_socket = (self.coordinator_addr, self.coordinator_port).into();
        up::destroy(self.config.as_deref(), coordinator_socket)
    }
}

binaries/cli/src/graph/mod.rs → binaries/cli/src/command/graph.rs View File

@@ -1,11 +1,35 @@
use std::{fs::File, io::Write, path::Path};

use super::Executable;
use dora_core::descriptor::{Descriptor, DescriptorExt};
use eyre::Context;
use std::{
fs::File,
io::Write,
path::{Path, PathBuf},
};

const MERMAID_TEMPLATE: &str = include_str!("graph/mermaid-template.html");

#[derive(Debug, clap::Args)]
/// Generate a visualization of the given graph using mermaid.js. Use --open to open browser.
pub struct Graph {
    /// Path to the dataflow descriptor file
    #[clap(value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
    dataflow: PathBuf,
    /// Visualize the dataflow as a Mermaid diagram (instead of HTML)
    // When set, the raw Mermaid source is printed to stdout.
    #[clap(long, action)]
    mermaid: bool,
    /// Open the HTML visualization in the browser
    #[clap(long, action)]
    open: bool,
}

const MERMAID_TEMPLATE: &str = include_str!("mermaid-template.html");
impl Executable for Graph {
    fn execute(self) -> eyre::Result<()> {
        // NOTE(review): unlike most commands this does not call
        // default_tracing(); appears intentional since rendering is purely
        // local — confirm.
        create(self.dataflow, self.mermaid, self.open)
    }
}

pub(crate) fn create(dataflow: std::path::PathBuf, mermaid: bool, open: bool) -> eyre::Result<()> {
fn create(dataflow: std::path::PathBuf, mermaid: bool, open: bool) -> eyre::Result<()> {
if mermaid {
let visualized = visualize_as_mermaid(&dataflow)?;
println!("{visualized}");

+ 1
- 0
binaries/cli/src/command/graph/.gitignore View File

@@ -0,0 +1 @@
!*template.html

binaries/cli/src/graph/mermaid-template.html → binaries/cli/src/command/graph/mermaid-template.html View File


+ 59
- 0
binaries/cli/src/command/list.rs View File

@@ -0,0 +1,59 @@
use std::io::Write;

use super::{default_tracing, Executable};
use crate::{
common::{connect_to_coordinator, query_running_dataflows},
LOCALHOST,
};
use clap::Args;
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT;
use dora_message::coordinator_to_cli::DataflowStatus;
use eyre::eyre;
use tabwriter::TabWriter;

#[derive(Debug, Args)]
/// List running dataflows.
// Connects to the coordinator's control port and prints a table of all
// known dataflows (running, finished, failed).
pub struct ListArgs {
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    pub coordinator_addr: std::net::IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    pub coordinator_port: u16,
}

impl Executable for ListArgs {
    fn execute(self) -> eyre::Result<()> {
        // Set up default CLI tracing before contacting the coordinator.
        default_tracing()?;

        let coordinator_socket = (self.coordinator_addr, self.coordinator_port).into();
        let mut session = connect_to_coordinator(coordinator_socket)
            .map_err(|_| eyre!("Failed to connect to coordinator"))?;

        // Render the table of dataflows over the open control session.
        list(&mut *session)
    }
}

// Query the coordinator for all dataflows and print them as a
// tab-aligned table with UUID, name, and status columns.
fn list(session: &mut TcpRequestReplyConnection) -> Result<(), eyre::ErrReport> {
    let list = query_running_dataflows(session)?;

    let mut tw = TabWriter::new(vec![]);
    tw.write_all(b"UUID\tName\tStatus\n")?;
    for entry in list.0 {
        // Map the status enum to its display label first, then the id parts.
        let status = match entry.status {
            DataflowStatus::Running => "Running",
            DataflowStatus::Finished => "Succeeded",
            DataflowStatus::Failed => "Failed",
        };
        let uuid = entry.id.uuid;
        let name = entry.id.name.unwrap_or_default();
        tw.write_all(format!("{uuid}\t{name}\t{status}\n").as_bytes())?;
    }
    tw.flush()?;

    let formatted = String::from_utf8(tw.into_inner()?)?;
    println!("{formatted}");

    Ok(())
}

+ 46
- 1
binaries/cli/src/command/logs.rs View File

@@ -1,9 +1,54 @@
use super::{default_tracing, Executable};
use crate::common::{connect_to_coordinator, query_running_dataflows};
use bat::{Input, PrettyPrinter};
use clap::Args;
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, Result};
use uuid::Uuid;

use bat::{Input, PrettyPrinter};
#[derive(Debug, Args)]
/// Show logs of a given dataflow and node.
pub struct LogsArgs {
    /// Identifier of the dataflow
    // Optional: when omitted, the command picks the single running dataflow
    // or prompts the user to choose one interactively (see `execute`).
    #[clap(value_name = "UUID_OR_NAME")]
    pub dataflow: Option<String>,
    /// Show logs for the given node
    #[clap(value_name = "NAME")]
    pub node: String,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    pub coordinator_addr: std::net::IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    pub coordinator_port: u16,
}

impl Executable for LogsArgs {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        let mut session =
            connect_to_coordinator((self.coordinator_addr, self.coordinator_port).into())
                .wrap_err("failed to connect to dora coordinator")?;
        if let Some(dataflow) = self.dataflow {
            // The argument is a UUID when it parses as one, otherwise a name.
            let uuid = Uuid::parse_str(&dataflow).ok();
            let name = if uuid.is_some() { None } else { Some(dataflow) };
            logs(&mut *session, uuid, name, self.node)
        } else {
            // Only query the dataflow list when the user did not specify one
            // (the original queried it unconditionally and discarded the
            // result in the explicit-dataflow path).
            let list = query_running_dataflows(&mut *session)
                .wrap_err("failed to query running dataflows")?;
            let active = list.get_active();
            let uuid = match &active[..] {
                [] => bail!("No dataflows are running"),
                [uuid] => uuid.clone(),
                _ => inquire::Select::new("Choose dataflow to show logs:", active).prompt()?,
            };
            logs(&mut *session, Some(uuid.uuid), None, self.node)
        }
    }
}

pub fn logs(
session: &mut TcpRequestReplyConnection,


+ 96
- 52
binaries/cli/src/command/mod.rs View File

@@ -1,60 +1,104 @@
pub use build::build;
pub use logs::logs;
pub use run::run;
pub use start::start;

use std::path::{Path, PathBuf};

use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::descriptor::Descriptor;
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, ContextCompat};

mod build;
pub mod check;
mod check;
mod coordinator;
mod daemon;
mod destroy;
mod graph;
mod list;
mod logs;
mod new;
mod run;
mod runtime;
mod self_;
mod start;
pub mod up;

fn local_working_dir(
dataflow_path: &Path,
dataflow_descriptor: &Descriptor,
coordinator_session: &mut TcpRequestReplyConnection,
) -> eyre::Result<Option<PathBuf>> {
Ok(
if dataflow_descriptor
.nodes
.iter()
.all(|n| n.deploy.as_ref().map(|d| d.machine.as_ref()).is_none())
&& cli_and_daemon_on_same_machine(coordinator_session)?
{
Some(
dunce::canonicalize(dataflow_path)
.context("failed to canonicalize dataflow file path")?
.parent()
.context("dataflow path has no parent dir")?
.to_owned(),
)
} else {
None
},
)
mod stop;
mod up;

pub use run::run_func;

use build::Build;
use check::Check;
use coordinator::Coordinator;
use daemon::Daemon;
use destroy::Destroy;
use eyre::Context;
use graph::Graph;
use list::ListArgs;
use logs::LogsArgs;
use new::NewArgs;
use run::Run;
use runtime::Runtime;
use self_::SelfSubCommand;
use start::Start;
use stop::Stop;
use up::Up;

/// dora-rs cli client
// Top-level subcommand enum; each variant wraps the parsed arguments for
// that command (per-command help text lives on the argument structs).
#[derive(Debug, clap::Subcommand)]
pub enum Command {
    // Dataflow authoring and inspection.
    Check(Check),
    Graph(Graph),
    Build(Build),
    New(NewArgs),
    Run(Run),
    // Coordinator/daemon lifecycle management.
    Up(Up),
    Destroy(Destroy),
    Start(Start),
    Stop(Stop),
    List(ListArgs),
    // Planned for future releases:
    // Dashboard,
    #[command(allow_missing_positional = true)]
    Logs(LogsArgs),
    // Metrics,
    // Stats,
    // Get,
    // Upgrade,
    // Long-running infrastructure processes.
    Daemon(Daemon),
    Runtime(Runtime),
    Coordinator(Coordinator),

    // CLI self-management (update / uninstall).
    Self_ {
        #[clap(subcommand)]
        command: SelfSubCommand,
    },
}

// Install the CLI-wide tracing subscriber (warn-level stdout output).
// Compiles to a no-op when the `tracing` feature is disabled.
fn default_tracing() -> eyre::Result<()> {
    #[cfg(feature = "tracing")]
    {
        use dora_tracing::TracingBuilder;

        let builder = TracingBuilder::new("dora-cli").with_stdout("warn");
        builder
            .build()
            .wrap_err("failed to set up tracing subscriber")?;
    }
    Ok(())
}

/// A parsed CLI command that can be executed, consuming its arguments.
pub trait Executable {
    fn execute(self) -> eyre::Result<()>;
}

fn cli_and_daemon_on_same_machine(session: &mut TcpRequestReplyConnection) -> eyre::Result<bool> {
let reply_raw = session
.request(&serde_json::to_vec(&ControlRequest::CliAndDefaultDaemonOnSameMachine).unwrap())
.wrap_err("failed to send start dataflow message")?;

let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::CliAndDefaultDaemonIps {
default_daemon,
cli,
} => Ok(default_daemon.is_some() && default_daemon == cli),
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected start dataflow reply: {other:?}"),
impl Executable for Command {
fn execute(self) -> eyre::Result<()> {
match self {
Command::Check(args) => args.execute(),
Command::Coordinator(args) => args.execute(),
Command::Graph(args) => args.execute(),
Command::Build(args) => args.execute(),
Command::New(args) => args.execute(),
Command::Run(args) => args.execute(),
Command::Up(args) => args.execute(),
Command::Destroy(args) => args.execute(),
Command::Start(args) => args.execute(),
Command::Stop(args) => args.execute(),
Command::List(args) => args.execute(),
Command::Logs(args) => args.execute(),
Command::Daemon(args) => args.execute(),
Command::Self_ { command } => command.execute(),
Command::Runtime(args) => args.execute(),
}
}
}

+ 21
- 0
binaries/cli/src/command/new.rs View File

@@ -0,0 +1,21 @@
use clap::Args;

use super::{default_tracing, Executable};

#[derive(Debug, Args)]
/// Generate a new project or node. Choose the language between Rust, Python, C or C++.
pub struct NewArgs {
    #[clap(flatten)]
    // TODO!: better impl
    // Flattened shared arguments from `crate::CommandNew` — see its
    // definition for the individual fields.
    args: crate::CommandNew,
    /// Internal flag for creating with path dependencies
    #[clap(hide = true, long)]
    pub internal_create_with_path_dependencies: bool,
}

impl Executable for NewArgs {
fn execute(self) -> eyre::Result<()> {
default_tracing()?;
crate::template::create(self.args, self.internal_create_with_path_dependencies)
}
}

+ 36
- 4
binaries/cli/src/command/run.rs View File

@@ -1,12 +1,38 @@
use super::Executable;
use crate::{
common::{handle_dataflow_result, resolve_dataflow},
output::print_log_message,
session::DataflowSession,
};
use dora_daemon::{flume, Daemon, LogDestination};
use dora_tracing::TracingBuilder;
use eyre::Context;
use tokio::runtime::Builder;

use crate::{
handle_dataflow_result, output::print_log_message, resolve_dataflow, session::DataflowSession,
};
#[derive(Debug, clap::Args)]
/// Run a dataflow locally.
///
/// Directly runs the given dataflow without connecting to a dora
/// coordinator or daemon. The dataflow is executed on the local machine.
pub struct Run {
    /// Path to the dataflow descriptor file
    #[clap(value_name = "PATH")]
    dataflow: String,
    /// Use UV to run nodes.
    // Was a plain `//` comment before, which clap ignores; `///` doc
    // comments become the `--uv` help text.
    #[clap(long, action)]
    uv: bool,
}

pub fn run_func(dataflow: String, uv: bool) -> eyre::Result<()> {
#[cfg(feature = "tracing")]
{
let log_level = std::env::var("RUST_LOG").ok().unwrap_or("info".to_string());
TracingBuilder::new("run")
.with_stdout(log_level)
.build()
.wrap_err("failed to set up tracing subscriber")?;
}

pub fn run(dataflow: String, uv: bool) -> Result<(), eyre::Error> {
let dataflow_path = resolve_dataflow(dataflow).context("could not resolve dataflow")?;
let dataflow_session =
DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;
@@ -32,3 +58,9 @@ pub fn run(dataflow: String, uv: bool) -> Result<(), eyre::Error> {
))?;
handle_dataflow_result(result, None)
}

impl Executable for Run {
    fn execute(self) -> eyre::Result<()> {
        // Delegate to `run_func`, which is also exposed to the Python API
        // (see apis/python/node/src/lib.rs).
        run_func(self.dataflow, self.uv)
    }
}

+ 15
- 0
binaries/cli/src/command/runtime.rs View File

@@ -0,0 +1,15 @@
use eyre::Context;

use super::Executable;

#[derive(Debug, clap::Args)]
/// Run runtime
// Unit struct: the runtime subcommand takes no CLI arguments of its own.
pub struct Runtime;

impl Executable for Runtime {
    fn execute(self) -> eyre::Result<()> {
        // Deliberately no tracing setup: the runtime configures its own.
        // ref: 72b4be808122574fcfda69650954318e0355cc7b cli::run
        let result = dora_runtime::main();
        result.context("Failed to run dora-runtime")
    }
}

+ 139
- 0
binaries/cli/src/command/self_.rs View File

@@ -0,0 +1,139 @@
use super::{default_tracing, Executable};
use clap::Subcommand;
use eyre::{bail, Context};

#[derive(Debug, Subcommand)]
/// Dora CLI self-management commands
pub enum SelfSubCommand {
    /// Check for updates or update the CLI
    Update {
        /// Only check for updates without installing
        #[clap(long)]
        check_only: bool,
    },
    /// Remove the Dora CLI from the system
    // (Help-text capitalization fixed; previously "Remove The Dora CLI".)
    Uninstall {
        /// Force uninstallation without confirmation
        #[clap(long)]
        force: bool,
    },
}

impl Executable for SelfSubCommand {
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        match self {
            SelfSubCommand::Update { check_only } => {
                println!("Checking for updates...");

                // The Linux and macOS release archives share the same layout,
                // so a single cfg branch covers both (the original duplicated
                // an identical `format!` per OS).
                #[cfg(any(target_os = "linux", target_os = "macos"))]
                let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
                #[cfg(target_os = "windows")]
                let bin_path_in_archive = String::from("dora.exe");

                let status = self_update::backends::github::Update::configure()
                    .repo_owner("dora-rs")
                    .repo_name("dora")
                    .bin_path_in_archive(&bin_path_in_archive)
                    .bin_name("dora")
                    .show_download_progress(true)
                    .current_version(env!("CARGO_PKG_VERSION"))
                    .build()?;

                if check_only {
                    // Only check if an update is available, without installing.
                    match status.get_latest_release() {
                        Ok(release) => {
                            let current_version = self_update::cargo_crate_version!();
                            if current_version != release.version {
                                println!(
                                    "An update is available: {}. Run 'dora self update' to update",
                                    release.version
                                );
                            } else {
                                println!(
                                    "Dora CLI is already at the latest version: {}",
                                    current_version
                                );
                            }
                        }
                        // Best-effort: report check failures instead of erroring out.
                        Err(e) => println!("Failed to check for updates: {}", e),
                    }
                } else {
                    // Perform the actual update
                    match status.update() {
                        Ok(update_status) => match update_status {
                            self_update::Status::UpToDate(version) => {
                                println!("Dora CLI is already at the latest version: {}", version);
                            }
                            self_update::Status::Updated(version) => {
                                println!("Successfully updated Dora CLI to version: {}", version);
                            }
                        },
                        // Best-effort: report update failures instead of erroring out.
                        Err(e) => println!("Failed to update: {}", e),
                    }
                }
            }
            SelfSubCommand::Uninstall { force } => {
                // Ask for confirmation unless --force was given.
                if !force {
                    let confirmed =
                        inquire::Confirm::new("Are you sure you want to uninstall Dora CLI?")
                            .with_default(false)
                            .prompt()
                            .wrap_err("Uninstallation cancelled")?;

                    if !confirmed {
                        println!("Uninstallation cancelled");
                        return Ok(());
                    }
                }

                println!("Uninstalling Dora CLI...");
                #[cfg(feature = "python")]
                {
                    // Installed via Python packaging: remove the `dora-rs-cli`
                    // distribution, preferring uv and falling back to pip.
                    println!("Detected Python installation...");

                    // Try uv pip uninstall first
                    let uv_status = std::process::Command::new("uv")
                        .args(["pip", "uninstall", "dora-rs-cli"])
                        .status();

                    if let Ok(status) = uv_status {
                        if status.success() {
                            println!("Dora CLI has been successfully uninstalled via uv pip.");
                            return Ok(());
                        }
                    }

                    // Fall back to regular pip uninstall
                    println!("Trying with pip...");
                    let status = std::process::Command::new("pip")
                        .args(["uninstall", "-y", "dora-rs-cli"])
                        .status()
                        .wrap_err("Failed to run pip uninstall")?;

                    if status.success() {
                        println!("Dora CLI has been successfully uninstalled via pip.");
                    } else {
                        bail!("Failed to uninstall Dora CLI via pip.");
                    }
                }
                #[cfg(not(feature = "python"))]
                {
                    // Standalone binary: delete the running executable itself.
                    match self_replace::self_delete() {
                        Ok(_) => {
                            println!("Dora CLI has been successfully uninstalled.");
                        }
                        Err(e) => {
                            bail!("Failed to uninstall Dora CLI: {}", e);
                        }
                    }
                }
            }
        }
        Ok(())
    }
}

+ 1
- 1
binaries/cli/src/command/start/attach.rs View File

@@ -14,7 +14,7 @@ use std::{path::PathBuf, sync::mpsc, time::Duration};
use tracing::{error, info};
use uuid::Uuid;

use crate::handle_dataflow_result;
use crate::common::handle_dataflow_result;
use crate::output::print_log_message;

pub fn attach_dataflow(


+ 78
- 48
binaries/cli/src/command/start/mod.rs View File

@@ -1,70 +1,101 @@
use super::{default_tracing, Executable};
use crate::{
command::start::attach::attach_dataflow,
common::{connect_to_coordinator, local_working_dir, resolve_dataflow},
output::print_log_message,
session::DataflowSession,
};
use communication_layer_request_reply::{TcpConnection, TcpRequestReplyConnection};
use dora_core::descriptor::{Descriptor, DescriptorExt};
use dora_core::{
descriptor::{Descriptor, DescriptorExt},
topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST},
};
use dora_message::{
cli_to_coordinator::ControlRequest, common::LogMessage, coordinator_to_cli::ControlRequestReply,
};
use eyre::{bail, Context};
use std::{
net::{SocketAddr, TcpStream},
net::{IpAddr, SocketAddr, TcpStream},
path::PathBuf,
};
use uuid::Uuid;

use crate::{
connect_to_coordinator, output::print_log_message, resolve_dataflow, session::DataflowSession,
};
use attach::attach_dataflow;

mod attach;

pub fn start(
#[derive(Debug, clap::Args)]
/// Start the given dataflow path. Attach a name to the running dataflow by using --name.
pub struct Start {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Assign a name to the dataflow
#[clap(long)]
name: Option<String>,
coordinator_socket: SocketAddr,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
/// Attach to the dataflow and wait for its completion
#[clap(long, action)]
attach: bool,
/// Run the dataflow in background
#[clap(long, action)]
detach: bool,
/// Enable hot reloading (Python only)
#[clap(long, action)]
hot_reload: bool,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
) -> eyre::Result<()> {
let (dataflow, dataflow_descriptor, mut session, dataflow_id) =
start_dataflow(dataflow, name, coordinator_socket, uv)?;
}

let attach = match (attach, detach) {
(true, true) => eyre::bail!("both `--attach` and `--detach` are given"),
(true, false) => true,
(false, true) => false,
(false, false) => {
println!("attaching to dataflow (use `--detach` to run in background)");
true
}
};
impl Executable for Start {
fn execute(self) -> eyre::Result<()> {
default_tracing()?;
let coordinator_socket = (self.coordinator_addr, self.coordinator_port).into();

if attach {
let log_level = env_logger::Builder::new()
.filter_level(log::LevelFilter::Info)
.parse_default_env()
.build()
.filter();
let (dataflow, dataflow_descriptor, mut session, dataflow_id) =
start_dataflow(self.dataflow, self.name, coordinator_socket, self.uv)?;

attach_dataflow(
dataflow_descriptor,
dataflow,
dataflow_id,
&mut *session,
hot_reload,
coordinator_socket,
log_level,
)
} else {
let print_daemon_name = dataflow_descriptor.nodes.iter().any(|n| n.deploy.is_some());
// wait until dataflow is started
wait_until_dataflow_started(
dataflow_id,
&mut session,
coordinator_socket,
log::LevelFilter::Info,
print_daemon_name,
)
let attach = match (self.attach, self.detach) {
(true, true) => eyre::bail!("both `--attach` and `--detach` are given"),
(true, false) => true,
(false, true) => false,
(false, false) => {
println!("attaching to dataflow (use `--detach` to run in background)");
true
}
};

if attach {
let log_level = env_logger::Builder::new()
.filter_level(log::LevelFilter::Info)
.parse_default_env()
.build()
.filter();

attach_dataflow(
dataflow_descriptor,
dataflow,
dataflow_id,
&mut *session,
self.hot_reload,
coordinator_socket,
log_level,
)
} else {
let print_daemon_name = dataflow_descriptor.nodes.iter().any(|n| n.deploy.is_some());
// wait until dataflow is started
wait_until_dataflow_started(
dataflow_id,
&mut session,
coordinator_socket,
log::LevelFilter::Info,
print_daemon_name,
)
}
}
}

@@ -83,8 +114,7 @@ fn start_dataflow(
let mut session = connect_to_coordinator(coordinator_socket)
.wrap_err("failed to connect to dora coordinator")?;

let local_working_dir =
super::local_working_dir(&dataflow, &dataflow_descriptor, &mut *session)?;
let local_working_dir = local_working_dir(&dataflow, &dataflow_descriptor, &mut *session)?;

let dataflow_id = {
let dataflow = dataflow_descriptor.clone();


+ 111
- 0
binaries/cli/src/command/stop.rs View File

@@ -0,0 +1,111 @@
use super::{default_tracing, Executable};
use crate::common::{connect_to_coordinator, handle_dataflow_result, query_running_dataflows};
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST};
use dora_message::cli_to_coordinator::ControlRequest;
use dora_message::coordinator_to_cli::ControlRequestReply;
use duration_str::parse;
use eyre::{bail, Context};
use std::net::IpAddr;
use std::time::Duration;
use uuid::Uuid;

#[derive(Debug, clap::Args)]
/// Stop the given dataflow UUID. If no id is provided, you will be able to choose between the running dataflows.
pub struct Stop {
    /// UUID of the dataflow that should be stopped
    // Positional argument; when both a UUID and --name are given, the UUID takes precedence.
    uuid: Option<Uuid>,
    /// Name of the dataflow that should be stopped
    #[clap(long)]
    name: Option<String>,
    /// Kill the dataflow if it doesn't stop after the given duration
    // Parsed with `duration_str::parse`, which accepts human-readable duration strings.
    #[clap(long, value_name = "DURATION")]
    #[arg(value_parser = parse)]
    grace_duration: Option<Duration>,
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    coordinator_addr: IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    coordinator_port: u16,
}

impl Executable for Stop {
    /// Connect to the coordinator and stop a dataflow, selected by UUID,
    /// by name, or interactively when neither is given.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        let coordinator = (self.coordinator_addr, self.coordinator_port).into();
        let mut session = connect_to_coordinator(coordinator)
            .wrap_err("could not connect to dora coordinator")?;
        // An explicit UUID wins over --name; with neither, prompt the user.
        if let Some(uuid) = self.uuid {
            stop_dataflow(uuid, self.grace_duration, &mut *session)
        } else if let Some(name) = self.name {
            stop_dataflow_by_name(name, self.grace_duration, &mut *session)
        } else {
            stop_dataflow_interactive(self.grace_duration, &mut *session)
        }
    }
}

/// Let the user pick one of the currently running dataflows and stop it.
/// Prints a notice (and succeeds) when nothing is running.
fn stop_dataflow_interactive(
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<()> {
    let running = query_running_dataflows(session)
        .wrap_err("failed to query running dataflows")?
        .get_active();
    if running.is_empty() {
        eprintln!("No dataflows are running");
        return Ok(());
    }
    let chosen = inquire::Select::new("Choose dataflow to stop:", running).prompt()?;
    stop_dataflow(chosen.uuid, grace_duration, session)
}

/// Send a `Stop` control request for the given dataflow UUID and translate
/// the coordinator's reply into a `Result`.
fn stop_dataflow(
    uuid: Uuid,
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
    let request = serde_json::to_vec(&ControlRequest::Stop {
        dataflow_uuid: uuid,
        grace_duration,
    })
    .unwrap();
    let raw_reply = session
        .request(&request)
        .wrap_err("failed to send dataflow stop message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw_reply).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowStopped { uuid, result } => {
            handle_dataflow_result(result, Some(uuid))
        }
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected stop dataflow reply: {other:?}"),
    }
}

/// Send a `StopByName` control request for the named dataflow and translate
/// the coordinator's reply into a `Result`.
fn stop_dataflow_by_name(
    name: String,
    grace_duration: Option<Duration>,
    session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
    let request = serde_json::to_vec(&ControlRequest::StopByName {
        name,
        grace_duration,
    })
    .unwrap();
    let raw_reply = session
        .request(&request)
        .wrap_err("failed to send dataflow stop_by_name message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw_reply).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowStopped { uuid, result } => {
            handle_dataflow_result(result, Some(uuid))
        }
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected stop dataflow reply: {other:?}"),
    }
}

+ 20
- 1
binaries/cli/src/command/up.rs View File

@@ -1,8 +1,27 @@
use crate::{command::check::daemon_running, connect_to_coordinator, LOCALHOST};
use super::check::daemon_running;
use super::{default_tracing, Executable};
use crate::{common::connect_to_coordinator, LOCALHOST};
use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT;
use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply};
use eyre::{bail, Context, ContextCompat};
use std::path::PathBuf;
use std::{fs, net::SocketAddr, path::Path, process::Command, time::Duration};

#[derive(Debug, clap::Args)]
/// Spawn coordinator and daemon in local mode (with default config)
pub struct Up {
    /// Use a custom configuration
    // Hidden from --help; the config format (`UpConfig`) is currently an empty struct.
    #[clap(long, hide = true, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
    config: Option<PathBuf>,
}

impl Executable for Up {
    /// Set up tracing, then bring up a local coordinator and daemon.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;
        let config_path = self.config.as_deref();
        up(config_path)
    }
}

// Placeholder for the `dora up` configuration file format; currently has no fields.
#[derive(Debug, Default, serde::Serialize, serde::Deserialize)]
struct UpConfig {}



+ 117
- 0
binaries/cli/src/common.rs View File

@@ -0,0 +1,117 @@
use crate::formatting::FormatDataflowError;
use communication_layer_request_reply::{RequestReplyLayer, TcpLayer, TcpRequestReplyConnection};
use dora_core::descriptor::{source_is_url, Descriptor};
use dora_download::download_file;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, DataflowList, DataflowResult},
};
use eyre::{bail, Context, ContextCompat};
use std::{
env::current_dir,
net::SocketAddr,
path::{Path, PathBuf},
};
use tokio::runtime::Builder;
use uuid::Uuid;

pub(crate) fn handle_dataflow_result(
result: DataflowResult,
uuid: Option<Uuid>,
) -> Result<(), eyre::Error> {
if result.is_ok() {
Ok(())
} else {
Err(match uuid {
Some(uuid) => {
eyre::eyre!("Dataflow {uuid} failed:\n{}", FormatDataflowError(&result))
}
None => {
eyre::eyre!("Dataflow failed:\n{}", FormatDataflowError(&result))
}
})
}
}

/// Ask the coordinator for the list of dataflows it currently knows about.
pub(crate) fn query_running_dataflows(
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<DataflowList> {
    let request = serde_json::to_vec(&ControlRequest::List).unwrap();
    let raw_reply = session
        .request(&request)
        .wrap_err("failed to send list message")?;
    let reply: ControlRequestReply =
        serde_json::from_slice(&raw_reply).wrap_err("failed to parse reply")?;
    match reply {
        ControlRequestReply::DataflowList(list) => Ok(list),
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected list dataflow reply: {other:?}"),
    }
}

/// Open a TCP request/reply connection to the coordinator's control socket.
pub(crate) fn connect_to_coordinator(
    coordinator_addr: SocketAddr,
) -> std::io::Result<Box<TcpRequestReplyConnection>> {
    let layer = TcpLayer::new();
    layer.connect(coordinator_addr)
}

/// Resolve a dataflow argument to a local path, downloading the descriptor
/// into the current working directory first when the argument is a URL.
pub(crate) fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
    if !source_is_url(&dataflow) {
        return Ok(PathBuf::from(dataflow));
    }
    // URL case: fetch the yaml file with a one-off single-threaded runtime.
    let target_path = current_dir().context("Could not access the current dir")?;
    let rt = Builder::new_current_thread()
        .enable_all()
        .build()
        .context("tokio runtime failed")?;
    let downloaded = rt
        .block_on(download_file(&dataflow, &target_path))
        .wrap_err("failed to download dataflow yaml file")?;
    Ok(downloaded)
}

/// Determine the working directory to report to the coordinator.
///
/// Returns the dataflow file's canonicalized parent directory when the
/// dataflow is purely local — no node targets a remote machine and the CLI
/// runs on the same machine as the default daemon — and `None` otherwise.
///
/// # Errors
/// Fails if the coordinator query fails or the dataflow path cannot be
/// canonicalized / has no parent directory.
pub(crate) fn local_working_dir(
    dataflow_path: &Path,
    dataflow_descriptor: &Descriptor,
    coordinator_session: &mut TcpRequestReplyConnection,
) -> eyre::Result<Option<PathBuf>> {
    // NOTE(review): this previously used `Option::map`, which wraps the value
    // in a nested `Option`, so `is_none` only tested whether `deploy` itself
    // was set and never looked at `deploy.machine`. `and_then` flattens the
    // nesting so only a configured `machine` marks the dataflow as remote.
    let no_remote_machines = dataflow_descriptor
        .nodes
        .iter()
        .all(|n| n.deploy.as_ref().and_then(|d| d.machine.as_ref()).is_none());

    if no_remote_machines && cli_and_daemon_on_same_machine(coordinator_session)? {
        let dir = dunce::canonicalize(dataflow_path)
            .context("failed to canonicalize dataflow file path")?
            .parent()
            .context("dataflow path has no parent dir")?
            .to_owned();
        Ok(Some(dir))
    } else {
        Ok(None)
    }
}

/// Ask the coordinator whether this CLI runs on the same machine as the
/// default daemon, by comparing the IP addresses the coordinator observed
/// for both connections.
pub(crate) fn cli_and_daemon_on_same_machine(
    session: &mut TcpRequestReplyConnection,
) -> eyre::Result<bool> {
    let reply_raw = session
        .request(&serde_json::to_vec(&ControlRequest::CliAndDefaultDaemonOnSameMachine).unwrap())
        // The previous messages said "start dataflow" — copy-paste residue from
        // another request helper; they now name the request actually sent.
        .wrap_err("failed to send CLI/daemon same-machine query")?;

    let result: ControlRequestReply =
        serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
    match result {
        ControlRequestReply::CliAndDefaultDaemonIps {
            default_daemon,
            cli,
        } => Ok(default_daemon.is_some() && default_daemon == cli),
        ControlRequestReply::Error(err) => bail!("{err}"),
        other => bail!("unexpected CLI/daemon same-machine reply: {other:?}"),
    }
}

+ 7
- 743
binaries/cli/src/lib.rs View File

@@ -1,42 +1,19 @@
use colored::Colorize;
use communication_layer_request_reply::{RequestReplyLayer, TcpLayer, TcpRequestReplyConnection};
use dora_coordinator::Event;
use dora_core::{
descriptor::{source_is_url, Descriptor, DescriptorExt},
topics::{
DORA_COORDINATOR_PORT_CONTROL_DEFAULT, DORA_COORDINATOR_PORT_DEFAULT,
DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT,
},
};
use dora_daemon::{Daemon, LogDestination};
use dora_download::download_file;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, DataflowList, DataflowResult, DataflowStatus},
};
#[cfg(feature = "tracing")]
use dora_tracing::TracingBuilder;
use duration_str::parse;
use eyre::{bail, Context};
use formatting::FormatDataflowError;
use std::{env::current_dir, io::Write, net::SocketAddr};
use command::Executable;
use std::{
net::{IpAddr, Ipv4Addr},
path::PathBuf,
time::Duration,
};
use tabwriter::TabWriter;
use tokio::runtime::Builder;
use tracing::level_filters::LevelFilter;
use uuid::Uuid;

pub mod command;
mod command;
mod common;
mod formatting;
mod graph;
pub mod output;
pub mod session;
mod template;

pub use command::run_func;

const LOCALHOST: IpAddr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
const LISTEN_WILDCARD: IpAddr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));

@@ -44,228 +21,7 @@ const LISTEN_WILDCARD: IpAddr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0));
#[clap(version)]
pub struct Args {
#[clap(subcommand)]
command: Command,
}

/// dora-rs cli client
#[derive(Debug, clap::Subcommand)]
enum Command {
/// Check if the coordinator and the daemon is running.
Check {
/// Path to the dataflow descriptor file (enables additional checks)
#[clap(long, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
dataflow: Option<PathBuf>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// Generate a visualization of the given graph using mermaid.js. Use --open to open browser.
Graph {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
dataflow: PathBuf,
/// Visualize the dataflow as a Mermaid diagram (instead of HTML)
#[clap(long, action)]
mermaid: bool,
/// Open the HTML visualization in the browser
#[clap(long, action)]
open: bool,
},
/// Run build commands provided in the given dataflow.
Build {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Address of the dora coordinator
#[clap(long, value_name = "IP")]
coordinator_addr: Option<IpAddr>,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT")]
coordinator_port: Option<u16>,
// Use UV to build nodes.
#[clap(long, action)]
uv: bool,
// Run build on local machine
#[clap(long, action)]
local: bool,
},
/// Generate a new project or node. Choose the language between Rust, Python, C or C++.
New {
#[clap(flatten)]
args: CommandNew,
#[clap(hide = true, long)]
internal_create_with_path_dependencies: bool,
},
/// Run a dataflow locally.
///
/// Directly runs the given dataflow without connecting to a dora
/// coordinator or daemon. The dataflow is executed on the local machine.
Run {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
},
/// Spawn coordinator and daemon in local mode (with default config)
Up {
/// Use a custom configuration
#[clap(long, hide = true, value_name = "PATH", value_hint = clap::ValueHint::FilePath)]
config: Option<PathBuf>,
},
/// Destroy running coordinator and daemon. If some dataflows are still running, they will be stopped first.
Destroy {
/// Use a custom configuration
#[clap(long, hide = true)]
config: Option<PathBuf>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// Start the given dataflow path. Attach a name to the running dataflow by using --name.
Start {
/// Path to the dataflow descriptor file
#[clap(value_name = "PATH")]
dataflow: String,
/// Assign a name to the dataflow
#[clap(long)]
name: Option<String>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
/// Attach to the dataflow and wait for its completion
#[clap(long, action)]
attach: bool,
/// Run the dataflow in background
#[clap(long, action)]
detach: bool,
/// Enable hot reloading (Python only)
#[clap(long, action)]
hot_reload: bool,
// Use UV to run nodes.
#[clap(long, action)]
uv: bool,
},
/// Stop the given dataflow UUID. If no id is provided, you will be able to choose between the running dataflows.
Stop {
/// UUID of the dataflow that should be stopped
uuid: Option<Uuid>,
/// Name of the dataflow that should be stopped
#[clap(long)]
name: Option<String>,
/// Kill the dataflow if it doesn't stop after the given duration
#[clap(long, value_name = "DURATION")]
#[arg(value_parser = parse)]
grace_duration: Option<Duration>,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
/// List running dataflows.
List {
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
// Planned for future releases:
// Dashboard,
/// Show logs of a given dataflow and node.
#[command(allow_missing_positional = true)]
Logs {
/// Identifier of the dataflow
#[clap(value_name = "UUID_OR_NAME")]
dataflow: Option<String>,
/// Show logs for the given node
#[clap(value_name = "NAME")]
node: String,
/// Address of the dora coordinator
#[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
coordinator_port: u16,
},
// Metrics,
// Stats,
// Get,
// Upgrade,
/// Run daemon
Daemon {
/// Unique identifier for the machine (required for distributed dataflows)
#[clap(long)]
machine_id: Option<String>,
/// Local listen port for event such as dynamic node.
#[clap(long, default_value_t = DORA_DAEMON_LOCAL_LISTEN_PORT_DEFAULT)]
local_listen_port: u16,
/// Address and port number of the dora coordinator
#[clap(long, short, default_value_t = LOCALHOST)]
coordinator_addr: IpAddr,
/// Port number of the coordinator control server
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
coordinator_port: u16,
#[clap(long, hide = true)]
run_dataflow: Option<PathBuf>,
/// Suppresses all log output to stdout.
#[clap(long)]
quiet: bool,
},
/// Run runtime
Runtime,
/// Run coordinator
Coordinator {
/// Network interface to bind to for daemon communication
#[clap(long, default_value_t = LISTEN_WILDCARD)]
interface: IpAddr,
/// Port number to bind to for daemon communication
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_DEFAULT)]
port: u16,
/// Network interface to bind to for control communication
#[clap(long, default_value_t = LISTEN_WILDCARD)]
control_interface: IpAddr,
/// Port number to bind to for control communication
#[clap(long, default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
control_port: u16,
/// Suppresses all log output to stdout.
#[clap(long)]
quiet: bool,
},
/// Dora CLI self-management commands
Self_ {
#[clap(subcommand)]
command: SelfSubCommand,
},
}

#[derive(Debug, clap::Subcommand)]
enum SelfSubCommand {
/// Check for updates or update the CLI
Update {
/// Only check for updates without installing
#[clap(long)]
check_only: bool,
},
/// Remove The Dora CLI from the system
Uninstall {
/// Force uninstallation without confirmation
#[clap(long)]
force: bool,
},
command: command::Command,
}

#[derive(Debug, clap::Args)]
@@ -298,503 +54,13 @@ enum Lang {
}

pub fn lib_main(args: Args) {
if let Err(err) = run_cli(args) {
if let Err(err) = args.command.execute() {
eprintln!("\n\n{}", "[ERROR]".bold().red());
eprintln!("{err:?}");
std::process::exit(1);
}
}

fn run_cli(args: Args) -> eyre::Result<()> {
tracing_log::LogTracer::init()?;

#[cfg(feature = "tracing")]
match &args.command {
Command::Daemon {
quiet, machine_id, ..
} => {
let name = "dora-daemon";
let filename = machine_id
.as_ref()
.map(|id| format!("{name}-{id}"))
.unwrap_or(name.to_string());

let mut builder = TracingBuilder::new(name);
if !quiet {
builder = builder.with_stdout("info,zenoh=warn");
}
builder = builder.with_file(filename, LevelFilter::INFO)?;
builder
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
Command::Runtime => {
// Do not set the runtime in the cli.
}
Command::Coordinator { quiet, .. } => {
let name = "dora-coordinator";
let mut builder = TracingBuilder::new(name);
if !quiet {
builder = builder.with_stdout("info");
}
builder = builder.with_file(name, LevelFilter::INFO)?;
builder
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
Command::Run { .. } | Command::Build { .. } => {
let log_level = std::env::var("RUST_LOG").ok().unwrap_or("info".to_string());
TracingBuilder::new("run")
.with_stdout(log_level)
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
_ => {
TracingBuilder::new("dora-cli")
.with_stdout("warn")
.build()
.wrap_err("failed to set up tracing subscriber")?;
}
};

match args.command {
Command::Check {
dataflow,
coordinator_addr,
coordinator_port,
} => match dataflow {
Some(dataflow) => {
let working_dir = dataflow
.canonicalize()
.context("failed to canonicalize dataflow path")?
.parent()
.ok_or_else(|| eyre::eyre!("dataflow path has no parent dir"))?
.to_owned();
Descriptor::blocking_read(&dataflow)?.check(&working_dir)?;
command::check::check_environment((coordinator_addr, coordinator_port).into())?
}
None => command::check::check_environment((coordinator_addr, coordinator_port).into())?,
},
Command::Graph {
dataflow,
mermaid,
open,
} => {
graph::create(dataflow, mermaid, open)?;
}
Command::Build {
dataflow,
coordinator_addr,
coordinator_port,
uv,
local,
} => command::build(dataflow, coordinator_addr, coordinator_port, uv, local)?,
Command::New {
args,
internal_create_with_path_dependencies,
} => template::create(args, internal_create_with_path_dependencies)?,
Command::Run { dataflow, uv } => command::run(dataflow, uv)?,
Command::Up { config } => {
command::up::up(config.as_deref())?;
}
Command::Logs {
dataflow,
node,
coordinator_addr,
coordinator_port,
} => {
let mut session = connect_to_coordinator((coordinator_addr, coordinator_port).into())
.wrap_err("failed to connect to dora coordinator")?;
let list = query_running_dataflows(&mut *session)
.wrap_err("failed to query running dataflows")?;
if let Some(dataflow) = dataflow {
let uuid = Uuid::parse_str(&dataflow).ok();
let name = if uuid.is_some() { None } else { Some(dataflow) };
command::logs(&mut *session, uuid, name, node)?
} else {
let active: Vec<dora_message::coordinator_to_cli::DataflowIdAndName> =
list.get_active();
let uuid = match &active[..] {
[] => bail!("No dataflows are running"),
[uuid] => uuid.clone(),
_ => inquire::Select::new("Choose dataflow to show logs:", active).prompt()?,
};
command::logs(&mut *session, Some(uuid.uuid), None, node)?
}
}
Command::Start {
dataflow,
name,
coordinator_addr,
coordinator_port,
attach,
detach,
hot_reload,
uv,
} => {
let coordinator_socket = (coordinator_addr, coordinator_port).into();
command::start(
dataflow,
name,
coordinator_socket,
attach,
detach,
hot_reload,
uv,
)?
}
Command::List {
coordinator_addr,
coordinator_port,
} => match connect_to_coordinator((coordinator_addr, coordinator_port).into()) {
Ok(mut session) => list(&mut *session)?,
Err(_) => {
bail!("No dora coordinator seems to be running.");
}
},
Command::Stop {
uuid,
name,
grace_duration,
coordinator_addr,
coordinator_port,
} => {
let mut session = connect_to_coordinator((coordinator_addr, coordinator_port).into())
.wrap_err("could not connect to dora coordinator")?;
match (uuid, name) {
(Some(uuid), _) => stop_dataflow(uuid, grace_duration, &mut *session)?,
(None, Some(name)) => stop_dataflow_by_name(name, grace_duration, &mut *session)?,
(None, None) => stop_dataflow_interactive(grace_duration, &mut *session)?,
}
}
Command::Destroy {
config,
coordinator_addr,
coordinator_port,
} => command::up::destroy(
config.as_deref(),
(coordinator_addr, coordinator_port).into(),
)?,
Command::Coordinator {
interface,
port,
control_interface,
control_port,
quiet,
} => {
let rt = Builder::new_multi_thread()
.enable_all()
.build()
.context("tokio runtime failed")?;
rt.block_on(async {
let bind = SocketAddr::new(interface, port);
let bind_control = SocketAddr::new(control_interface, control_port);
let (port, task) =
dora_coordinator::start(bind, bind_control, futures::stream::empty::<Event>())
.await?;
if !quiet {
println!("Listening for incoming daemon connection on {port}");
}
task.await
})
.context("failed to run dora-coordinator")?
}
Command::Daemon {
coordinator_addr,
coordinator_port,
local_listen_port,
machine_id,
run_dataflow,
quiet: _,
} => {
let rt = Builder::new_multi_thread()
.enable_all()
.build()
.context("tokio runtime failed")?;
rt.block_on(async {
match run_dataflow {
Some(dataflow_path) => {
tracing::info!("Starting dataflow `{}`", dataflow_path.display());
if coordinator_addr != LOCALHOST {
tracing::info!(
"Not using coordinator addr {} as `run_dataflow` is for local dataflow only. Please use the `start` command for remote coordinator",
coordinator_addr
);
}
let dataflow_session =
DataflowSession::read_session(&dataflow_path).context("failed to read DataflowSession")?;

let result = Daemon::run_dataflow(&dataflow_path,
dataflow_session.build_id, dataflow_session.local_build, dataflow_session.session_id, false,
LogDestination::Tracing,
).await?;
handle_dataflow_result(result, None)
}
None => {
Daemon::run(SocketAddr::new(coordinator_addr, coordinator_port), machine_id, local_listen_port).await
}
}
})
.context("failed to run dora-daemon")?
}
Command::Runtime => dora_runtime::main().context("Failed to run dora-runtime")?,
Command::Self_ { command } => match command {
SelfSubCommand::Update { check_only } => {
println!("Checking for updates...");

#[cfg(target_os = "linux")]
let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
#[cfg(target_os = "macos")]
let bin_path_in_archive = format!("dora-cli-{}/dora", env!("TARGET"));
#[cfg(target_os = "windows")]
let bin_path_in_archive = String::from("dora.exe");

let status = self_update::backends::github::Update::configure()
.repo_owner("dora-rs")
.repo_name("dora")
.bin_path_in_archive(&bin_path_in_archive)
.bin_name("dora")
.show_download_progress(true)
.current_version(env!("CARGO_PKG_VERSION"))
.build()?;

if check_only {
// Only check if an update is available
match status.get_latest_release() {
Ok(release) => {
let current_version = self_update::cargo_crate_version!();
if current_version != release.version {
println!(
"An update is available: {}. Run 'dora self update' to update",
release.version
);
} else {
println!(
"Dora CLI is already at the latest version: {}",
current_version
);
}
}
Err(e) => println!("Failed to check for updates: {}", e),
}
} else {
// Perform the actual update
match status.update() {
Ok(update_status) => match update_status {
self_update::Status::UpToDate(version) => {
println!("Dora CLI is already at the latest version: {}", version);
}
self_update::Status::Updated(version) => {
println!("Successfully updated Dora CLI to version: {}", version);
}
},
Err(e) => println!("Failed to update: {}", e),
}
}
}
SelfSubCommand::Uninstall { force } => {
if !force {
let confirmed =
inquire::Confirm::new("Are you sure you want to uninstall Dora CLI?")
.with_default(false)
.prompt()
.wrap_err("Uninstallation cancelled")?;

if !confirmed {
println!("Uninstallation cancelled");
return Ok(());
}
}

println!("Uninstalling Dora CLI...");
#[cfg(feature = "python")]
{
println!("Detected Python installation...");

// Try uv pip uninstall first
let uv_status = std::process::Command::new("uv")
.args(["pip", "uninstall", "dora-rs-cli"])
.status();

if let Ok(status) = uv_status {
if status.success() {
println!("Dora CLI has been successfully uninstalled via uv pip.");
return Ok(());
}
}

// Fall back to regular pip uninstall
println!("Trying with pip...");
let status = std::process::Command::new("pip")
.args(["uninstall", "-y", "dora-rs-cli"])
.status()
.wrap_err("Failed to run pip uninstall")?;

if status.success() {
println!("Dora CLI has been successfully uninstalled via pip.");
} else {
bail!("Failed to uninstall Dora CLI via pip.");
}
}
#[cfg(not(feature = "python"))]
{
match self_replace::self_delete() {
Ok(_) => {
println!("Dora CLI has been successfully uninstalled.");
}
Err(e) => {
bail!("Failed to uninstall Dora CLI: {}", e);
}
}
}
}
},
};

Ok(())
}

fn stop_dataflow_interactive(
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> eyre::Result<()> {
let list = query_running_dataflows(session).wrap_err("failed to query running dataflows")?;
let active = list.get_active();
if active.is_empty() {
eprintln!("No dataflows are running");
} else {
let selection = inquire::Select::new("Choose dataflow to stop:", active).prompt()?;
stop_dataflow(selection.uuid, grace_duration, session)?;
}

Ok(())
}

fn stop_dataflow(
uuid: Uuid,
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
let reply_raw = session
.request(
&serde_json::to_vec(&ControlRequest::Stop {
dataflow_uuid: uuid,
grace_duration,
})
.unwrap(),
)
.wrap_err("failed to send dataflow stop message")?;
let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::DataflowStopped { uuid, result } => {
handle_dataflow_result(result, Some(uuid))
}
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected stop dataflow reply: {other:?}"),
}
}

fn handle_dataflow_result(result: DataflowResult, uuid: Option<Uuid>) -> Result<(), eyre::Error> {
if result.is_ok() {
Ok(())
} else {
Err(match uuid {
Some(uuid) => {
eyre::eyre!("Dataflow {uuid} failed:\n{}", FormatDataflowError(&result))
}
None => {
eyre::eyre!("Dataflow failed:\n{}", FormatDataflowError(&result))
}
})
}
}

fn stop_dataflow_by_name(
name: String,
grace_duration: Option<Duration>,
session: &mut TcpRequestReplyConnection,
) -> Result<(), eyre::ErrReport> {
let reply_raw = session
.request(
&serde_json::to_vec(&ControlRequest::StopByName {
name,
grace_duration,
})
.unwrap(),
)
.wrap_err("failed to send dataflow stop_by_name message")?;
let result: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
match result {
ControlRequestReply::DataflowStopped { uuid, result } => {
handle_dataflow_result(result, Some(uuid))
}
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected stop dataflow reply: {other:?}"),
}
}

fn list(session: &mut TcpRequestReplyConnection) -> Result<(), eyre::ErrReport> {
let list = query_running_dataflows(session)?;

let mut tw = TabWriter::new(vec![]);
tw.write_all(b"UUID\tName\tStatus\n")?;
for entry in list.0 {
let uuid = entry.id.uuid;
let name = entry.id.name.unwrap_or_default();
let status = match entry.status {
DataflowStatus::Running => "Running",
DataflowStatus::Finished => "Succeeded",
DataflowStatus::Failed => "Failed",
};
tw.write_all(format!("{uuid}\t{name}\t{status}\n").as_bytes())?;
}
tw.flush()?;
let formatted = String::from_utf8(tw.into_inner()?)?;

println!("{formatted}");

Ok(())
}

fn query_running_dataflows(session: &mut TcpRequestReplyConnection) -> eyre::Result<DataflowList> {
let reply_raw = session
.request(&serde_json::to_vec(&ControlRequest::List).unwrap())
.wrap_err("failed to send list message")?;
let reply: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;
let ids = match reply {
ControlRequestReply::DataflowList(list) => list,
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected list dataflow reply: {other:?}"),
};

Ok(ids)
}

fn connect_to_coordinator(
coordinator_addr: SocketAddr,
) -> std::io::Result<Box<TcpRequestReplyConnection>> {
TcpLayer::new().connect(coordinator_addr)
}

fn resolve_dataflow(dataflow: String) -> eyre::Result<PathBuf> {
let dataflow = if source_is_url(&dataflow) {
// try to download the shared library
let target_path = current_dir().context("Could not access the current dir")?;
let rt = Builder::new_current_thread()
.enable_all()
.build()
.context("tokio runtime failed")?;
rt.block_on(async { download_file(&dataflow, &target_path).await })
.wrap_err("failed to download dataflow yaml file")?
} else {
PathBuf::from(dataflow)
};
Ok(dataflow)
}

#[cfg(feature = "python")]
use clap::Parser;
#[cfg(feature = "python")]
@@ -804,8 +70,6 @@ use pyo3::{
wrap_pyfunction, Bound, PyResult, Python,
};

use crate::session::DataflowSession;

#[cfg(feature = "python")]
#[pyfunction]
fn py_main(_py: Python) -> PyResult<()> {


+ 1
- 1
binaries/coordinator/Cargo.toml View File

@@ -19,7 +19,7 @@ futures = "0.3.21"
tokio = { version = "1.24.2", features = ["full"] }
tokio-stream = { version = "0.1.8", features = ["io-util", "net"] }
uuid = { version = "1.2.1" }
dora-core = { workspace = true }
dora-core = { workspace = true, features = ["build"] }
tracing = "0.1.36"
dora-tracing = { workspace = true, optional = true }
futures-concurrency = "7.1.0"


+ 0
- 13
binaries/daemon/src/spawn.rs View File

@@ -430,19 +430,6 @@ impl PreparedNode {
}
};

if buffer.contains("TRACE")
|| buffer.contains("INFO")
|| buffer.contains("DEBUG")
|| buffer.contains("WARN")
|| buffer.contains("ERROR")
{
// tracing output, potentially multi-line -> keep reading following lines
// until double-newline
if !buffer.ends_with("\n\n") && !finished {
continue;
}
}

// send the buffered lines
let lines = std::mem::take(&mut buffer);
let sent = stdout_tx.send(lines.clone()).await;


+ 1
- 1
examples/c++-ros2-dataflow/node-rust-api/main.cc View File

@@ -73,7 +73,7 @@ int main()
std::cerr << "Unknown event type " << static_cast<int>(ty) << std::endl;
}

if (received_ticks > 20)
if (received_ticks > 20 && responses_received > 0)
{
break;
}


+ 60
- 0
examples/so101/Readme.md View File

@@ -0,0 +1,60 @@
## SO101 Arm Control

This example provides gamepad control and leader-follower functionality for the SO-101 robotic arm.

### Install Dependencies

Install the optional Python packages for Rerun visualization:

```bash
# Install the URDF loader for Rerun visualization
pip install git+https://github.com/dora-rs/rerun-loader-python-urdf
```

### Hardware Setup

1. Connect your SO-101 arm(s) to your computer via USB/serial
2. Note the serial port names (e.g., for Linux `/dev/ttyACM0`, `/dev/ttyACM1`)
3. Connect your gamepad controller
4. Update the `PORT` environment variable in the YAML files

#### Single Arm Control (arm_gamepad_control.yml)

Control a single SO-101 arm with gamepad input and visualization:

```bash
dora build arm_gamepad_control.yml
dora run arm_gamepad_control.yml
```

#### Leader-Follower Mode (leader_follower.yml)

Use one arm as a leader to control another follower arm:

```bash
dora build leader_follower.yml
dora run leader_follower.yml
```

#### Serial Port Configuration

Update the `PORT` environment variable in the YAML files:

```yaml
env:
PORT: /dev/ttyACM0 # Change to your actual port
```

## Troubleshooting

### Serial Connection Issues
- Check that the arm is powered on and connected
- Verify the correct serial port in the YAML configuration
- Ensure read/write permissions on the serial port, e.g. `sudo chmod 666 /dev/ttyACM0`, or add your user to the `dialout` group

### Gamepad Not Detected
- Verify gamepad is connected and recognized by the system
- Test with `jstest /dev/input/js0` (Linux)

## Safety Notes
- Always ensure the arm has sufficient clearance before operation

+ 48
- 0
examples/so101/arm_gamepad_control.yml View File

@@ -0,0 +1,48 @@
nodes:
- id: so101
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
pose: pytorch_kinematics/cmd_vel
outputs:
- pose
env:
PORT: /dev/ttyACM0
IDS: 1 2 3 4 5

- id: pytorch_kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "so_arm101_description"
END_EFFECTOR_LINK: "gripper"
TRANSFORM: "0. 0. 0. 1. 0. 0. 0."
POSITION_TOLERANCE: 0.01
ROTATION_TOLERANCE: 0.03

- id: gamepad
build: pip install -e ../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

# comment below path if you don't want to visualize the arm in rerun
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: so101/pose
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: "0. 0. 0. 1. 0. 0. 0."

+ 33
- 0
examples/so101/leader_follower.yml View File

@@ -0,0 +1,33 @@
nodes:
- id: so101
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
pose: leader_interface/pose
outputs:
- pose
env:
PORT: /dev/ttyACM0
IDS: 1 2 3 4 5 6

- id: leader_interface
build: pip install -e ../../node-hub/dora-rustypot
path: dora-rustypot
inputs:
tick: dora/timer/millis/10
outputs:
- pose
env:
PORT: /dev/ttyACM1
IDS: 1 2 3 4 5 6

# comment below path if you don't want to visualize the arms in rerun
- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: so101/pose
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: "0. 0. 0. 1. 0. 0. 0."

+ 3
- 3
examples/speech-to-speech/README.md View File

@@ -1,4 +1,4 @@
# Dora Speech to Text example
# Dora Speech to Speech example

Make sure to have, dora, pip and cargo installed.

@@ -23,6 +23,6 @@ sudo apt-get install espeak
```bash
uv venv --seed -p 3.11
uv pip install -e ../../apis/python/node --reinstall
dora build kokoro-dev.yml
dora run kokoro-dev.yml
dora build kokoro-dev.yml --uv
dora run kokoro-dev.yml --uv
```

+ 70
- 0
examples/urdf/vggt/franka.yml View File

@@ -0,0 +1,70 @@
nodes:
- id: plot
build: pip install -e ../../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_panda: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
panda_urdf: "panda_description"
panda_transform: .5 -0. -0.1 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "panda_description"
END_EFFECTOR_LINK: "panda_link8"
TRANSFORM: .5 -0. -0.1 1. 0. 0. 0.
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 68
- 0
examples/urdf/vggt/kuka.yml View File

@@ -0,0 +1,68 @@
nodes:
- id: plot
build: pip install -e ../../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_iiwa14_primitive_collision: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
iiwa14_primitive_collision_urdf: "iiwa14_description"
iiwa14_primitive_collision_transform: .5 -0. -0.1 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.02
MAX_ANGULAR_SPEED: 0.10

- id: pytorch_kinematics
build: pip install -e ../../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "iiwa14_description"
END_EFFECTOR_LINK: "iiwa_link_7"
TRANSFORM: .5 -0. -0.1 1. 0. 0. 0.

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 69
- 0
examples/urdf/vggt/so_arm101.yml View File

@@ -0,0 +1,69 @@
nodes:
- id: plot
build: pip install -e ../../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_so101_new_calib: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
so101_new_calib_urdf: "so_arm101_description"
so101_new_calib_transform: .14 -0. 0.4 -.5 .5 .5 -.5

- id: gamepad
build: pip install -e ../../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "so_arm101_description"
END_EFFECTOR_LINK: "gripper"
TRANSFORM: .14 -0. 0.4 -.5 .5 .5 -.5
POSITION_TOLERANCE: 0.01
ROTATION_TOLERANCE: 0.03

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: camera2
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 6

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
image2: camera2/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.9

+ 59
- 0
examples/urdf/vggt/z1.yml View File

@@ -0,0 +1,59 @@
nodes:
- id: plot
build: pip install -e ../../../node-hub/dora-rerun
path: dora-rerun
inputs:
jointstate_z1: pytorch_kinematics/cmd_vel
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth
env:
z1_urdf: z1_description
z1_transform: .5 -0.2 -0.11 1. 0. 0. 0.
CAMERA_PITCH: 1.5708

- id: gamepad
build: pip install -e ../../../node-hub/gamepad
path: gamepad
outputs:
- cmd_vel
- raw_control
inputs:
tick: dora/timer/millis/10
env:
MAX_LINEAR_SPEED: 0.01
MAX_ANGULAR_SPEED: 0.05

- id: pytorch_kinematics
build: pip install -e ../../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
cmd_vel: gamepad/cmd_vel
outputs:
- cmd_vel
env:
MODEL_NAME: "z1_description"
END_EFFECTOR_LINK: "link06"
TRANSFORM: .5 -0.2 -0.11 1. 0. 0. 0.
POSITION_TOLERANCE: 0.001
ROTATION_TOLERANCE: 0.001

- id: camera
build: pip install -e ../../../node-hub/opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 4

- id: dora-vggt
build: pip install -e ../../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
SCALE_FACTOR: 0.88

+ 54
- 0
examples/vggt/depth-to-avif.yaml View File

@@ -0,0 +1,54 @@
nodes:
- id: camera
build: pip install opencv-video-capture
path: opencv-video-capture
inputs:
tick: dora/timer/millis/100
outputs:
- image
env:
CAPTURE_PATH: 1

- id: dora-vggt
build: pip install -e ../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
DEPTH_ENCODING: mono16

- id: rav1e-depth
path: dora-rav1e
build: pip install -e ../../node-hub/dora-rav1e
inputs:
depth: dora-vggt/depth
outputs:
- depth
env:
ENCODING: avif

- id: rav1e-image
path: dora-rav1e
build: pip install -e ../../node-hub/dora-rav1e
inputs:
image: dora-vggt/image
outputs:
- image
env:
ENCODING: avif

- id: bench
path: image_saver.py
inputs:
vggt_image: rav1e-image/image
vggt_depth: rav1e-depth/depth

- id: plot
build: pip install dora-rerun
path: dora-rerun
inputs:
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth

+ 0
- 8
examples/vggt/depth.dora-session.yaml View File

@@ -1,8 +0,0 @@
build_id: 2b402c1e-e52e-45e9-86e5-236b33a77369
session_id: 275de19c-e605-4865-bc5f-2f15916bade9
git_sources: {}
local_build:
node_working_dirs:
camera: /Users/xaviertao/Documents/work/dora/examples/vggt
dora-vggt: /Users/xaviertao/Documents/work/dora/examples/vggt
plot: /Users/xaviertao/Documents/work/dora/examples/vggt

+ 34
- 0
examples/vggt/image_saver.py View File

@@ -0,0 +1,34 @@
"""Save incoming encoded image/depth frames to the ``out/`` directory.

Buffers the most recent frame of every non-lead input and, each time the
lead topic arrives, writes the lead frame plus all buffered frames to disk
under a shared frame index so matching files can be paired up later.
"""

import os

from dora import Node

node = Node()

# Most recent buffered event per non-lead input, keyed by event id.
index_dict = {}
# Shared frame counter, bumped once per lead-topic frame.
i = 0

# Frames are flushed to disk whenever this topic arrives; all other inputs
# are only buffered until then.
LEAD_TOPIC = "vggt_depth"

# Make sure the output directory exists before the first write.
os.makedirs("out", exist_ok=True)

for event in node:
    if event["type"] == "INPUT":
        if LEAD_TOPIC in event["id"]:
            storage = event["value"]
            metadata = event["metadata"]
            encoding = metadata["encoding"]

            # Save the lead frame, then every buffered companion frame,
            # all tagged with the same index `i`.
            filename = f"out/{event['id']}_{i}.{encoding}"
            with open(filename, "wb") as f:
                f.write(storage.to_numpy())
            for key, value in index_dict.items():
                filename = f"out/{key}_{i}.{value['metadata']['encoding']}"
                with open(filename, "wb") as f:
                    f.write(value["value"])
            i += 1
        else:
            # Buffer the latest frame for this input until the next lead frame.
            index_dict[event["id"]] = {
                "type": event["type"],
                "value": event["value"].to_numpy(),
                "metadata": event["metadata"],
            }

+ 53
- 0
examples/vggt/realsense-to-avif.yaml View File

@@ -0,0 +1,53 @@
nodes:
- id: camera
build: pip install -e ../../node-hub/dora-pyrealsense
path: dora-pyrealsense
inputs:
tick: dora/timer/millis/100
outputs:
- image
- depth

- id: dora-vggt
build: pip install -e ../../node-hub/dora-vggt
path: dora-vggt
inputs:
image: camera/image
outputs:
- depth
- image
env:
DEPTH_ENCODING: mono16

- id: rav1e-depth-vggt
path: dora-rav1e
build: cargo build -p dora-rav1e --release
inputs:
depth: dora-vggt/depth
outputs:
- depth
env:
ENCODING: avif

- id: rav1e-depth-realsense
path: dora-rav1e
build: cargo build -p dora-rav1e --release
inputs:
depth: camera/depth
outputs:
- depth
env:
ENCODING: avif

- id: bench
path: image_saver.py
inputs:
camera_depth: rav1e-depth-vggt/depth
vggt_depth: rav1e-depth-realsense/depth

- id: plot
build: pip install dora-rerun
path: dora-rerun
inputs:
camera/image: dora-vggt/image
camera/depth: dora-vggt/depth

+ 1
- 1
libraries/core/src/descriptor/validate.rs View File

@@ -54,7 +54,7 @@ pub fn check_dataflow(
};
}
},
dora_message::descriptor::NodeSource::GitBranch { repo, rev } => {
dora_message::descriptor::NodeSource::GitBranch { .. } => {
info!("skipping check for node with git source");
}
},


+ 1
- 1
libraries/extensions/telemetry/tracing/src/telemetry.rs View File

@@ -6,7 +6,7 @@ use std::collections::HashMap;

struct MetadataMap<'a>(HashMap<&'a str, &'a str>);

impl<'a> Extractor for MetadataMap<'a> {
impl Extractor for MetadataMap<'_> {
/// Get a value for a key from the MetadataMap. If the value can't be converted to &str, returns None
fn get(&self, key: &str) -> Option<&str> {
self.0.get(key).cloned()


+ 1
- 1
libraries/message/Cargo.toml View File

@@ -1,7 +1,7 @@
[package]
name = "dora-message"
# versioned separately from the other dora crates
version = "0.5.0-alpha"
version = "0.5.0"
edition.workspace = true
documentation.workspace = true
description.workspace = true


+ 1
- 1
node-hub/dora-argotranslate/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-argotranslate"
version = "0.3.11"
version = "0.3.12"
description = "Dora Node for Text translating using Argostranslate"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },


+ 1
- 1
node-hub/dora-cotracker/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-cotracker"
version = "0.1.0"
version = "0.3.12"
authors = [{ name = "Shashwat Patil", email = "shashwatpatil974@gmail.com" }]
description = "A Dora node implementing real-time object tracking using Facebook's CoTracker model"
license = "CC-BY-1.0"


+ 1
- 1
node-hub/dora-distil-whisper/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-distil-whisper"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-echo/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-echo"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-gradio/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-gradio"
version = "0.2.0"
version = "0.3.12"
authors = [{ name = "Shashwat Patil", email = "email@email.com" }]
description = "dora-gradio"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-internvl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-internvl"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-ios-lidar/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-ios-lidar"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-ios-lidar"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-keyboard/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-keyboard"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-kokoro-tts/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-kokoro-tts"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-kokoro-tts"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-microphone/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-microphone"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-object-to-pose/Cargo.toml View File

@@ -1,6 +1,6 @@
[package]
name = "dora-object-to-pose"
version = "0.3.11"
version = "0.3.12"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html


+ 1
- 1
node-hub/dora-openai-server/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-openai-server"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-opus/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-opus"
version = "0.3.11"
version = "0.3.12"
description = "Dora Node for Text translating using Opus"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },


+ 1
- 1
node-hub/dora-outtetts/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-outtetts"
version = "0.3.11"
version = "0.3.12"
authors = []
description = "dora-outtetts"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-parler/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-parler"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 2
node-hub/dora-phi4/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-phi4"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Somay", email = "ssomay2002@gmail.com" }]
description = "DORA node for Phi-4 multimodal model"
license = { text = "MIT" }
@@ -18,7 +18,6 @@ dependencies = [
"scipy==1.15.2",
"backoff==2.2.1",
"peft==0.13.2",
"bitsandbytes>=0.42.0",
"opencv-python",
"requests",
]


+ 1309
- 0
node-hub/dora-phi4/uv.lock
File diff suppressed because it is too large
View File


+ 1
- 1
node-hub/dora-piper/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-piper"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for using Agilex piper"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-pyaudio/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyaudio"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
license = { text = "MIT" }
readme = "README.md"


+ 1
- 1
node-hub/dora-pyorbbecksdk/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyorbbecksdk"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Xiang Yang", email = "Ryu-Yang@qq.com" },


+ 1
- 1
node-hub/dora-pyrealsense/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-pyrealsense"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for capturing video with Pyrealsense"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-qwen/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwen"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-qwen"
license = { text = "MIT" }


+ 4
- 0
node-hub/dora-qwen2-5-vl/dora_qwen2_5_vl/main.py View File

@@ -73,6 +73,10 @@ def generate(

messages = []

# If the texts is string, convert it to a list
if isinstance(texts, str):
texts = [texts]

for text in texts:
if text.startswith("<|system|>\n"):
messages.append(


+ 1
- 1
node-hub/dora-qwen2-5-vl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwen2-5-vl"
version = "0.3.11"
version = "0.3.12.post1"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/dora-qwenvl/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-qwenvl"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 3
- 2
node-hub/dora-rav1e/Cargo.toml View File

@@ -1,7 +1,7 @@
[package]
name = "dora-rav1e"
edition = "2021"
version = "0.3.11+fix1"
version = "0.3.12"
description.workspace = true
documentation.workspace = true
license = "BSD-2-Clause"
@@ -25,7 +25,8 @@ pyo3 = { workspace = true, features = [
"eyre",
"generate-import-lib",
], optional = true }
avif-serialize = "0.8.3"
avif-serialize = { version = "0.8.5" }
little_exif = { version = "0.6.9" }


[lib]


+ 80
- 19
node-hub/dora-rav1e/src/lib.rs View File

@@ -8,12 +8,16 @@
// PATENTS file, you can obtain it at www.aomedia.org/license/patent.

use std::env::var;
use std::vec;

use dora_node_api::arrow::array::AsArray;
use dora_node_api::arrow::datatypes::{UInt16Type, UInt8Type};
use dora_node_api::dora_core::config::DataId;
use dora_node_api::{DoraNode, Event, IntoArrow, Metadata, Parameter};
use dora_node_api::{DoraNode, Event, IntoArrow, Metadata, MetadataParameters, Parameter};
use eyre::{Context as EyreContext, Result};
use little_exif::exif_tag::ExifTag;
use little_exif::metadata::Metadata as ExifMetadata;
use little_exif::rational::uR64;
use log::warn;
use rav1e::color::{ColorDescription, MatrixCoefficients};
// Encode the same tiny blank frame 30 times
@@ -56,6 +60,25 @@ pub fn fill_zeros_toward_center_y_plane_in_place(y: &mut [u16], width: usize, he
}
}

fn metadata_to_exif(metadata: &MetadataParameters) -> Result<Vec<u8>> {
let mut metadata_exif = ExifMetadata::new();
metadata_exif.set_tag(ExifTag::Software("dora-rs".to_string()));
if let Some(Parameter::ListInt(focal_lengths)) = metadata.get("focal") {
metadata_exif.set_tag(ExifTag::FocalLength(
focal_lengths
.iter()
.map(|&f| uR64 {
nominator: f as u32,
denominator: 1,
})
.collect::<Vec<_>>(),
));
}

let vector = metadata_exif.as_u8_vec(little_exif::filetype::FileExtension::HEIF)?;
return Ok(vector);
}

fn bgr8_to_yuv420(bgr_data: Vec<u8>, width: usize, height: usize) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
let mut y_plane = vec![0; width * height];
let mut u_plane = vec![0; (width / 2) * (height / 2)];
@@ -107,6 +130,7 @@ fn get_yuv_planes(buffer: &[u8], width: usize, height: usize) -> (&[u8], &[u8],
(y_plane, u_plane, v_plane)
}

#[allow(clippy::too_many_arguments)]
fn send_yuv(
y: &[u8],
u: &[u8],
@@ -118,7 +142,7 @@ fn send_yuv(
id: DataId,
metadata: &mut Metadata,
output_encoding: &str,
) -> () {
) {
// Create a new Arrow array for the YUV420 data
let cfg = Config::new().with_encoder_config(enc.clone());
let mut ctx: Context<u8> = cfg.new_context().unwrap();
@@ -126,13 +150,13 @@ fn send_yuv(

let xdec = f.planes[0].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[0].copy_from_raw_u8(&y, stride, 1);
f.planes[0].copy_from_raw_u8(y, stride, 1);
let xdec = f.planes[1].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[1].copy_from_raw_u8(&u, stride, 1);
f.planes[1].copy_from_raw_u8(u, stride, 1);
let xdec = f.planes[2].cfg.xdec;
let stride = (width + xdec) >> xdec;
f.planes[2].copy_from_raw_u8(&v, stride, 1);
f.planes[2].copy_from_raw_u8(v, stride, 1);

match ctx.send_frame(f) {
Ok(_) => {}
@@ -159,9 +183,18 @@ fn send_yuv(
} else {
MatrixCoefficients::BT709
};
let data = avif_serialize::Aviffy::new()
let mut aviffy = avif_serialize::Aviffy::new();
aviffy
.set_chroma_subsampling((true, true))
.set_seq_profile(0)
.set_seq_profile(0);

let aviffy = if let Ok(exif) = metadata_to_exif(&metadata.parameters) {
aviffy.set_exif(exif)
} else {
&mut aviffy
};

let data = aviffy
.matrix_coefficients(match matrix_coefficients {
MatrixCoefficients::Identity => {
avif_serialize::constants::MatrixCoefficients::Rgb
@@ -289,12 +322,9 @@ pub fn lib_main() -> Result<()> {
chroma_sampling: color::ChromaSampling::Cs420,
..Default::default()
};
match encoding {
"mono16" => {
enc.bit_depth = 12;
enc.chroma_sampling = color::ChromaSampling::Cs400;
}
_ => {}
if encoding == "mono16" {
enc.bit_depth = 12;
enc.chroma_sampling = color::ChromaSampling::Cs400;
}

if encoding == "bgr8" {
@@ -320,9 +350,9 @@ pub fn lib_main() -> Result<()> {

let (y, u, v) = get_yuv_planes(buffer, width, height);
send_yuv(
&y,
&u,
&v,
y,
u,
v,
enc,
width,
height,
@@ -336,13 +366,13 @@ pub fn lib_main() -> Result<()> {
if let Some(buffer) = data.as_primitive_opt::<UInt16Type>() {
let mut buffer = buffer.values().to_vec();
if std::env::var("FILL_ZEROS")
.map(|s| s != "false")
.map(|s| s.to_lowercase() != "false")
.unwrap_or(true)
{
fill_zeros_toward_center_y_plane_in_place(&mut buffer, width, height);
}

let bytes: &[u8] = &bytemuck::cast_slice(&buffer);
let bytes: &[u8] = bytemuck::cast_slice(&buffer);

let cfg = Config::new().with_encoder_config(enc.clone());
let mut ctx: Context<u16> = cfg.new_context().unwrap();
@@ -370,7 +400,38 @@ pub fn lib_main() -> Result<()> {
let data = pkt.data;
match output_encoding.as_str() {
"avif" => {
warn!("avif encoding not supported for mono16");
metadata.parameters.insert(
"encoding".to_string(),
Parameter::String("avif".to_string()),
);

let mut aviffy = avif_serialize::Aviffy::new();
aviffy
.full_color_range(false)
.set_seq_profile(0)
.set_monochrome(true);

let aviffy = if let Ok(exif) =
metadata_to_exif(&metadata.parameters)
{
aviffy.set_exif(exif)
} else {
&mut aviffy
};

let data = aviffy.to_vec(
&data,
None,
enc.width as u32,
enc.height as u32,
enc.bit_depth as u8,
);

let arrow = data.into_arrow();

node.send_output(id, metadata.parameters.clone(), arrow)
.context("could not send output")
.unwrap();
}
_ => {
metadata.parameters.insert(


+ 1
- 1
node-hub/dora-rdt-1b/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-rdt-1b"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for RDT 1B"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-reachy2/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-reachy2"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-reachy2"
license = { text = "MIT" }


+ 6
- 8
node-hub/dora-rerun/src/lib.rs View File

@@ -160,8 +160,7 @@ pub fn lib_main() -> Result<()> {
let buffer: Vec<u8> =
buffer.chunks(3).flat_map(|x| [x[2], x[1], x[0]]).collect();
image_cache.insert(id.clone(), buffer.clone());
let image_buffer = ImageBuffer::try_from(buffer)
.context("Could not convert buffer to image buffer")?;
let image_buffer = ImageBuffer::from(buffer);
// let tensordata = ImageBuffer(buffer);

let image = rerun::Image::new(
@@ -174,8 +173,7 @@ pub fn lib_main() -> Result<()> {
let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
image_cache.insert(id.clone(), buffer.values().to_vec());
let buffer: &[u8] = buffer.values();
let image_buffer = ImageBuffer::try_from(buffer)
.context("Could not convert buffer to image buffer")?;
let image_buffer = ImageBuffer::from(buffer);

let image = rerun::Image::new(
image_buffer,
@@ -385,12 +383,12 @@ pub fn lib_main() -> Result<()> {
// Get color or assign random color in cache
let color = color_cache.get(&id);
let color = if let Some(color) = color {
color.clone()
*color
} else {
let color =
rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());

color_cache.insert(id.clone(), color.clone());
color_cache.insert(id.clone(), color);
color
};
let dataid = id;
@@ -412,12 +410,12 @@ pub fn lib_main() -> Result<()> {
// Get color or assign random color in cache
let color = color_cache.get(&id);
let color = if let Some(color) = color {
color.clone()
*color
} else {
let color =
rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());

color_cache.insert(id.clone(), color.clone());
color_cache.insert(id.clone(), color);
color
};
let dataid = id;


+ 1
- 1
node-hub/dora-rerun/src/series.rs View File

@@ -7,7 +7,7 @@ pub fn update_series(rec: &RecordingStream, id: DataId, data: ArrowData) -> Resu
for (i, value) in series.iter().enumerate() {
rec.log(
format!("{}_{}", id.as_str(), i),
&rerun::Scalar::new(*value as f64),
&rerun::Scalars::new([*value]),
)
.wrap_err("could not log series")?;
}


+ 1
- 1
node-hub/dora-rerun/src/urdf.rs View File

@@ -1,4 +1,4 @@
use std::{collections::HashMap, fmt::format, path::PathBuf};
use std::{collections::HashMap, path::PathBuf};

use eyre::{Context, ContextCompat, Result};
use k::{nalgebra::Quaternion, Chain, Translation3, UnitQuaternion};


+ 8
- 8
node-hub/dora-rustypot/src/lib.rs View File

@@ -44,12 +44,13 @@ pub fn lib_main() -> Result<()> {
}

while let Some(event) = events.recv() {
match event {
Event::Input {
id,
metadata: _,
data,
} => match id.as_str() {
if let Event::Input {
id,
metadata: _,
data,
} = event
{
match id.as_str() {
"tick" => {
if let Ok(joints) = c.read_present_position(&ids) {
let mut parameter = BTreeMap::new();
@@ -70,8 +71,7 @@ pub fn lib_main() -> Result<()> {
c.write_goal_position(&ids, &data).unwrap();
}
other => eprintln!("Received input `{other}`"),
},
_ => {}
}
}
}



+ 1
- 1
node-hub/dora-sam2/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-sam2"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-sam2"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-ugv/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-ugv"
version = "0.3.11"
version = "0.3.12"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
description = "Dora Node for using Agilex UGV"
license = { text = "MIT" }


+ 1
- 1
node-hub/dora-vad/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-vad"
version = "0.3.11"
version = "0.3.12"
description = "Dora Node for Text translating using Argostranslate"
authors = [{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" }]
license = { text = "MIT" }


+ 37
- 24
node-hub/dora-vggt/dora_vggt/main.py View File

@@ -1,7 +1,8 @@
"""TODO: Add docstring."""

import io
from collections import deque as Deque
import os
from collections import deque


import cv2
import numpy as np
@@ -13,26 +14,31 @@ from vggt.models.vggt import VGGT
from vggt.utils.load_fn import load_and_preprocess_images
from vggt.utils.pose_enc import pose_encoding_to_extri_intri

SCALE_FACTOR = float(os.getenv("SCALE_FACTOR", "1"))
VGGT_NUM_IMAGES = int(os.getenv("VGGT_NUM_IMAGES", "2"))
# bfloat16 is supported on Ampere GPUs (Compute Capability 8.0+)

dtype = torch.bfloat16

# Check if cuda is available and set the device accordingly
device = "cuda" if torch.cuda.is_available() else "cpu"

# Initialize the model and load the pretrained weights.
# This will automatically download the model weights the first time it's run, which may take a while.
model = VGGT.from_pretrained("facebook/VGGT-1B").to("cuda")
model = VGGT.from_pretrained("facebook/VGGT-1B").to(device)
model.eval()

DEPTH_ENCODING = os.environ.get("DEPTH_ENCODING", "float64")
# Import vecdeque


def main():
"""TODO: Add docstring."""
node = Node()
raw_images = Deque(maxlen=2)
raw_images = deque(maxlen=VGGT_NUM_IMAGES)

for event in node:
if event["type"] == "INPUT":

if "image" in event["id"]:
storage = event["value"]
metadata = event["metadata"]
@@ -80,7 +86,7 @@ def main():
raw_images.append(buffer)

with torch.no_grad():
images = load_and_preprocess_images(raw_images).to("cuda")
images = load_and_preprocess_images(raw_images).to(device)

images = images[None] # add batch dimension
aggregated_tokens_list, ps_idx = model.aggregator(images)
@@ -88,7 +94,7 @@ def main():
pose_enc = model.camera_head(aggregated_tokens_list)[-1]
# Extrinsic and intrinsic matrices, following OpenCV convention (camera from world)
extrinsic, intrinsic = pose_encoding_to_extri_intri(
pose_enc, images.shape[-2:]
pose_enc, images.shape[-2:],
)
intrinsic = intrinsic[-1][-1]
f_0 = intrinsic[0, 0]
@@ -98,29 +104,32 @@ def main():

# Predict Depth Maps
depth_map, depth_conf = model.depth_head(
aggregated_tokens_list, images, ps_idx
aggregated_tokens_list, images, ps_idx,
)
print(depth_conf.max())
depth_map[depth_conf < 1.0] = 0.0 # Set low confidence pixels to 0
depth_map = depth_map.to(torch.float64)

depth_map = depth_map[-1][-1].cpu().numpy()
depth_map = SCALE_FACTOR * depth_map
# Warning: the dataflow YAML must declare a matching "depth" output for each "image" input,
# since the output id is derived from the event id below.
if DEPTH_ENCODING == "mono16":
depth_map = (depth_map * 1000).astype(np.uint16)

node.send_output(
output_id="depth",
output_id=event["id"].replace("image", "depth"),
data=pa.array(depth_map.ravel()),
metadata={
"width": depth_map.shape[1],
"height": depth_map.shape[0],
"focal": [
int(f_0),
int(f_1),
],
"resolution": [
int(r_0),
int(r_1),
],
"encoding": DEPTH_ENCODING,
"focal": [
int(f_0),
int(f_1),
],
"resolution": [
int(r_0),
int(r_1),
],
},
)

@@ -129,18 +138,22 @@ def main():
# reorder pixels to be in last dimension
image = image.transpose(1, 2, 0)

print(
f"Image shape: {image.shape}, dtype: {image.dtype} and depth map shape: {depth_map.shape}, dtype: {depth_map.dtype}"
)

# Warning: the dataflow YAML must declare an output with the same id as each
# incoming "image" event, since the event id is reused as the output id below.
node.send_output(
output_id="image",
output_id=event["id"],
data=pa.array(image.ravel()),
metadata={
"encoding": "rgb8",
"width": image.shape[1],
"height": image.shape[0],
"focal": [
int(f_0),
int(f_1),
],
"resolution": [
int(r_0),
int(r_1),
],
},
)



+ 1
- 1
node-hub/dora-yolo/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "dora-yolo"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/feetech-client/pyproject.toml View File

@@ -8,7 +8,7 @@ readme = "README.md"
requires-python = ">=3.9"

dependencies = [
"dora-rs == 0.3.11",
"dora-rs == 0.3.12",
"numpy <= 2.0.0",
"feetech-servo-sdk == 1.0.0",
"pwm-position-control",


+ 1
- 1
node-hub/gamepad/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "gamepad"
version = "0.1.0"
version = "0.3.12"
authors = [{ name = "Shashwat Patil", email = "email@email.com" }]
description = "gamepad"
license = { text = "MIT" }


+ 1
- 1
node-hub/llama-factory-recorder/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "llama-factory-recorder"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 3
- 2
node-hub/openai-proxy-server/src/main.rs View File

@@ -96,7 +96,7 @@ async fn main() -> eyre::Result<()> {
)
.context("failed to send dora output")?;

reply_channels.push_back((reply, 0 as u64, request.model));
reply_channels.push_back((reply, 0_u64, request.model));
}
},
dora_node_api::merged::MergedEvent::Dora(event) => match event {
@@ -112,7 +112,7 @@ async fn main() -> eyre::Result<()> {
let data = data.as_string::<i32>();
let string = data.iter().fold("".to_string(), |mut acc, s| {
if let Some(s) = s {
acc.push_str("\n");
acc.push('\n');
acc.push_str(s);
}
acc
@@ -164,6 +164,7 @@ async fn main() -> eyre::Result<()> {
Ok(())
}

#[allow(clippy::large_enum_variant)]
enum ServerEvent {
Result(eyre::Result<()>),
ChatCompletionRequest {


+ 1
- 1
node-hub/opencv-plot/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "opencv-plot"
version = "0.3.11"
version = "0.3.12"
license = { file = "MIT" }
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },


+ 1
- 1
node-hub/opencv-video-capture/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "opencv-video-capture"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/pyarrow-assert/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "pyarrow-assert"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/pyarrow-sender/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "pyarrow-sender"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


+ 1
- 1
node-hub/terminal-input/pyproject.toml View File

@@ -1,6 +1,6 @@
[project]
name = "terminal-input"
version = "0.3.11"
version = "0.3.12"
authors = [
{ name = "Haixuan Xavier Tao", email = "tao.xavier@outlook.com" },
{ name = "Enzo Le Van", email = "dev@enzo-le-van.fr" },


Loading…
Cancel
Save