Compare commits

...

Author SHA1 Message Date
  haixuanTao ad85deec67 modified IP and fix flip error prompting 8 months ago
  haixuanTao 8eeca6a3b9 Final demo code 8 months ago
  haixuanTao e54cad8396 Minor improvement on reachy demo! 8 months ago
  haixuanTao c1e1d0b202 Minor improvement 8 months ago
  haixuanTao 0494ef753b Minor adjustment 8 months ago
  haixuanTao b1a3cd67f4 Additional minor improvement 8 months ago
  haixuanTao 32d0e0ac87 Improve reachy2 remote example 8 months ago
  haixuanTao 0788248697 Remove unnecessary files 8 months ago
  haixuanTao f4c10569f7 Fix pytorch kinematics 8 months ago
  haixuanTao e8395df1bd Minor improvement 8 months ago
  haixuanTao 8b2e831c48 Improved IK and positioning 8 months ago
  haixuanTao 4ef9fe3b68 working so100 inference code with qwenvl 8 months ago
  haixuanTao f6980ebee7 Fix CI/CD And improve example 8 months ago
  haixuanTao 2d9b2d4a04 Add path check 8 months ago
  haixuanTao 85aa693e58 Fix rerun urdf loader 8 months ago
  haixuanTao e253bf0823 Update chain with env variable transform 8 months ago
  haixuanTao 2930906e35 Add checks on IK to avoid breakage 9 months ago
  haixuanTao 521b1a54c0 Use fixed urdf to do ik 9 months ago
  haixuanTao c2ff3b8122 Add visualization alternative for rerun urdf 9 months ago
  haixuanTao 3cf134c736 Improve convergence 9 months ago
  haixuanTao ddc14c0126 Use default linux port 9 months ago
  haixuanTao 8f0fa67d39 draft: add arctan to angle computation 9 months ago
  haixuantao 87373a27e7 Adding rustypot to cargo.toml 9 months ago
  haixuantao 615bfd49b1 Adding example dataflow 9 months ago
  haixuantao 66bf1b74a4 fix pytorch kinematics 9 months ago
  haixuantao 2f985d623b Add rustypot 9 months ago
  haixuantao 483d4793ec add so-100 remote example 9 months ago
46 changed files with 4717 additions and 821 deletions
  1. +1 -1 .github/workflows/node-hub-ci-cd.yml
  2. +175 -129 Cargo.lock
  3. +1 -0 Cargo.toml
  4. +7 -0 examples/reachy2-remote/boot_reachy.sh
  5. +88 -100 examples/reachy2-remote/dataflow_reachy.yml
  6. +45 -0 examples/reachy2-remote/no_torque copy.yml
  7. +45 -0 examples/reachy2-remote/no_torque.yml
  8. +45 -0 examples/reachy2-remote/no_torque_remote.yml
  9. +1 -1 examples/reachy2-remote/parse_bbox.py
  10. +13 -15 examples/reachy2-remote/parse_point.py
  11. +322 -171 examples/reachy2-remote/parse_pose.py
  12. +137 -110 examples/reachy2-remote/parse_whisper.py
  13. +46 -0 examples/so100-remote/no_torque.yml
  14. +69 -0 examples/so100-remote/parse_bbox.py
  15. +160 -0 examples/so100-remote/parse_pose.py
  16. +32 -0 examples/so100-remote/parse_whisper.py
  17. +185 -0 examples/so100-remote/qwenvl-remote.yml
  18. +141 -0 examples/so100-remote/qwenvl.yml
  19. +384 -0 examples/so100-remote/so100.urdf
  20. +384 -0 examples/so100-remote/so100_inference.urdf
  21. +1 -39 node-hub/dora-cotracker/dora_cotracker/main.py
  22. +2 -2 node-hub/dora-kokoro-tts/dora_kokoro_tts/main.py
  23. +65 -11 node-hub/dora-object-to-pose/src/lib.rs
  24. +40 -0 node-hub/dora-pytorch-kinematics/README.md
  25. +13 -0 node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/__init__.py
  26. +6 -0 node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/__main__.py
  27. +299 -0 node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/main.py
  28. +25 -0 node-hub/dora-pytorch-kinematics/pyproject.toml
  29. +13 -0 node-hub/dora-pytorch-kinematics/tests/test_dora_pytorch_kinematics.py
  30. +1397 -0 node-hub/dora-pytorch-kinematics/uv.lock
  31. +18 -28 node-hub/dora-reachy2/dora_reachy2/camera.py
  32. +2 -1 node-hub/dora-reachy2/dora_reachy2/head.py
  33. +19 -32 node-hub/dora-reachy2/dora_reachy2/left_arm.py
  34. +17 -5 node-hub/dora-reachy2/dora_reachy2/mobile_base.py
  35. +19 -31 node-hub/dora-reachy2/dora_reachy2/right_arm.py
  36. +1 -0 node-hub/dora-rerun/Cargo.toml
  37. +49 -12 node-hub/dora-rerun/src/lib.rs
  38. +35 -24 node-hub/dora-rerun/src/urdf.rs
  39. +12 -0 node-hub/dora-rustypot/Cargo.toml
  40. +79 -0 node-hub/dora-rustypot/src/main.rs
  41. +5 -1 node-hub/feetech-client/README.md
  42. +7 -2 node-hub/feetech-client/feetech_client/bus.py
  43. +200 -0 node-hub/feetech-client/feetech_client/configure.py
  44. +10 -11 node-hub/feetech-client/feetech_client/main.py
  45. +9 -5 node-hub/feetech-client/pyproject.toml
  46. +93 -90 node-hub/feetech-client/uv.lock

+ 1 - 1 .github/workflows/node-hub-ci-cd.yml

@@ -54,7 +54,7 @@ jobs:
 run: |
   sudo apt update
   sudo apt-get install portaudio19-dev
-  sudo apt-get install libdav1d-dev nasm
+  sudo apt-get install libdav1d-dev nasm libudev-dev
   mkdir -p $HOME/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/x86_64-unknown-linux-gnu/lib
   ln -s /lib/x86_64-linux-gnu/libdav1d.so $HOME/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/x86_64-unknown-linux-gnu/lib/libdav1d.so




+ 175 - 129 Cargo.lock
File diff suppressed because it is too large


+ 1 - 0 Cargo.toml

@@ -39,6 +39,7 @@ members = [
"node-hub/dora-mistral-rs", "node-hub/dora-mistral-rs",
"node-hub/dora-rav1e", "node-hub/dora-rav1e",
"node-hub/dora-dav1d", "node-hub/dora-dav1d",
"node-hub/dora-rustypot",
"libraries/extensions/ros2-bridge", "libraries/extensions/ros2-bridge",
"libraries/extensions/ros2-bridge/msg-gen", "libraries/extensions/ros2-bridge/msg-gen",
"libraries/extensions/ros2-bridge/python", "libraries/extensions/ros2-bridge/python",


+ 7 - 0 examples/reachy2-remote/boot_reachy.sh

@@ -0,0 +1,7 @@
ssh bedrock@reachy2-pvt02.local


docker exec -it core bash


ros2 service call /SetZuuuSafety zuuu_interfaces/srv/SetZuuuSafety "{safety_on: False}"

+ 88 - 100 examples/reachy2-remote/dataflow_reachy.yml

@@ -1,88 +1,74 @@
nodes: nodes:
- id: camera - id: camera
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera path: dora-reachy2-camera
_unstable_deploy:
machine: encoder
inputs: inputs:
tick: dora/timer/millis/20
image_left: dora/timer/millis/50
outputs: outputs:
- image_left - image_left
- image_depth
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: reachy-head
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-head
inputs:
look: parse_pose/look
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: camera_depth
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
inputs:
depth: dora/timer/millis/50
outputs:
- depth - depth
- image_depth
env: env:
IMAGE_WIDTH: 640
IMAGE_HEIGHT: 480
ROBOT_IP: 127.0.0.1
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242


- id: reachy-left-arm - id: reachy-left-arm
build: pip install -e ../../node-hub/dora-reachy2 build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-left-arm path: dora-reachy2-left-arm
_unstable_deploy:
machine: encoder
inputs: inputs:
pose: parse_pose/action_l_arm pose: parse_pose/action_l_arm
outputs: outputs:
- response_l_arm - response_l_arm
env: env:
ROBOT_IP: 127.0.0.1
ROBOT_IP: 10.42.0.242


- id: reachy-right-arm - id: reachy-right-arm
build: pip install -e ../../node-hub/dora-reachy2 build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-right-arm path: dora-reachy2-right-arm
_unstable_deploy:
machine: encoder
inputs: inputs:
pose: parse_pose/action_r_arm pose: parse_pose/action_r_arm
outputs: outputs:
- response_r_arm - response_r_arm
env: env:
ROBOT_IP: 127.0.0.1
ROBOT_IP: 10.42.0.242


- id: rav1e-local-image
path: dora-rav1e
build: cargo build -p dora-rav1e --release
_unstable_deploy:
machine: encoder
inputs:
image_depth: camera/image_depth
image_left: camera/image_left
outputs:
- image_left
- image_depth
env:
RAV1E_SPEED: 10

- id: rav1e-local-depth
path: dora-rav1e
build: cargo build -p dora-rav1e --release
_unstable_deploy:
machine: encoder
- id: reachy-mobile-base
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-mobile-base
inputs: inputs:
depth: camera/depth
action_base: parse_point/action
action_whisper: parse_whisper/action
translate_base: parse_pose/translate_base
outputs: outputs:
- depth
- response_base
env: env:
RAV1E_SPEED: 7

- id: dav1d-remote
path: dora-dav1d
build: cargo build -p dora-dav1d --release
_unstable_deploy:
machine: gpu
inputs:
image_depth: rav1e-local-image/image_depth
image_left: rav1e-local-image/image_left
depth: rav1e-local-depth/depth
outputs:
- image_left
- image_depth
- depth
ROBOT_IP: 10.42.0.242


- id: dora-microphone - id: dora-microphone
build: pip install -e ../../node-hub/dora-microphone build: pip install -e ../../node-hub/dora-microphone
path: dora-microphone path: dora-microphone
_unstable_deploy:
machine: macbook
inputs: inputs:
tick: dora/timer/millis/2000 tick: dora/timer/millis/2000
outputs: outputs:
@@ -90,8 +76,6 @@ nodes:


- id: dora-vad - id: dora-vad
build: pip install -e ../../node-hub/dora-vad build: pip install -e ../../node-hub/dora-vad
_unstable_deploy:
machine: macbook
path: dora-vad path: dora-vad
inputs: inputs:
audio: dora-microphone/audio audio: dora-microphone/audio
@@ -100,8 +84,6 @@ nodes:


- id: dora-distil-whisper - id: dora-distil-whisper
build: pip install -e ../../node-hub/dora-distil-whisper build: pip install -e ../../node-hub/dora-distil-whisper
_unstable_deploy:
machine: gpu
path: dora-distil-whisper path: dora-distil-whisper
inputs: inputs:
input: dora-vad/audio input: dora-vad/audio
@@ -112,11 +94,10 @@ nodes:


- id: parse_whisper - id: parse_whisper
path: parse_whisper.py path: parse_whisper.py
_unstable_deploy:
machine: gpu
inputs: inputs:
text: dora-distil-whisper/text text: dora-distil-whisper/text
arrived: parse_point/arrived arrived: parse_point/arrived
success: parse_pose/success
outputs: outputs:
- bbox - bbox
- action - action
@@ -124,18 +105,23 @@ nodes:
- text - text
- action_release_left - action_release_left
- action_release_right - action_release_right
- follow_pose
- raise_arm_pose
- look_ahead
- emotion
- speech
env: env:
IMAGE_RESIZE_RATIO: "1.0" IMAGE_RESIZE_RATIO: "1.0"


- id: dora-qwenvl - id: dora-qwenvl
build: pip install -e ../../node-hub/dora-qwen2-5-vl build: pip install -e ../../node-hub/dora-qwen2-5-vl
path: dora-qwen2-5-vl path: dora-qwen2-5-vl
_unstable_deploy:
machine: gpu
inputs: inputs:
image_left: dav1d-remote/image_left
image_depth: dav1d-remote/image_depth
text: parse_whisper/text
image_left: camera/image_left
image_depth: camera_depth/image_depth
text:
source: parse_whisper/text
queue_size: 1000
outputs: outputs:
- text - text
env: env:
@@ -144,35 +130,29 @@ nodes:


- id: parse_bbox - id: parse_bbox
path: parse_bbox.py path: parse_bbox.py
_unstable_deploy:
machine: gpu
inputs: inputs:
text: dora-qwenvl/text text: dora-qwenvl/text
points: parse_whisper/points points: parse_whisper/points
outputs: outputs:
- bbox_track - bbox_track
- bbox_grab
- bbox_pick
env: env:
IMAGE_RESIZE_RATIO: "1.0" IMAGE_RESIZE_RATIO: "1.0"


- id: sam2 - id: sam2
build: pip install -e ../../node-hub/dora-sam2 build: pip install -e ../../node-hub/dora-sam2
path: dora-sam2 path: dora-sam2
_unstable_deploy:
machine: gpu
inputs: inputs:
image_depth: dav1d-remote/image_depth
boxes2d: parse_bbox/bbox_grab
image_depth: camera_depth/image_depth
boxes2d: parse_bbox/bbox_pick
outputs: outputs:
- masks - masks


- id: tracker - id: tracker
build: pip install -e ../../node-hub/dora-cotracker build: pip install -e ../../node-hub/dora-cotracker
path: dora-cotracker path: dora-cotracker
_unstable_deploy:
machine: gpu
inputs: inputs:
image: dav1d-remote/image_left
image: camera/image_left
boxes2d: parse_bbox/bbox_track boxes2d: parse_bbox/bbox_track
outputs: outputs:
- tracked_image - tracked_image
@@ -183,34 +163,36 @@ nodes:
- id: box_coordinates - id: box_coordinates
build: pip install -e ../../node-hub/dora-object-to-pose build: pip install -e ../../node-hub/dora-object-to-pose
path: dora-object-to-pose path: dora-object-to-pose
_unstable_deploy:
machine: gpu
inputs: inputs:
depth: dav1d-remote/depth
depth: camera_depth/depth
masks: sam2/masks masks: sam2/masks
outputs: outputs:
- pose - pose


- id: parse_pose - id: parse_pose
path: parse_pose.py path: parse_pose.py
_unstable_deploy:
machine: gpu
inputs: inputs:
pose: box_coordinates/pose pose: box_coordinates/pose
response_r_arm: reachy-right-arm/response_r_arm response_r_arm: reachy-right-arm/response_r_arm
response_l_arm: reachy-left-arm/response_l_arm response_l_arm: reachy-left-arm/response_l_arm
release_left: parse_whisper/action_release_left release_left: parse_whisper/action_release_left
release_right: parse_whisper/action_release_right release_right: parse_whisper/action_release_right
follow_pose: parse_whisper/follow_pose
raise_arm_pose: parse_whisper/raise_arm_pose
look_ahead: parse_whisper/look_ahead
translate_base: reachy-mobile-base/response_base
emotion: parse_whisper/emotion
outputs: outputs:
- action_r_arm - action_r_arm
- action_l_arm - action_l_arm
- translate_base
- look
- success
env: env:
IMAGE_RESIZE_RATIO: "1.0" IMAGE_RESIZE_RATIO: "1.0"


- id: parse_point - id: parse_point
path: parse_point.py path: parse_point.py
_unstable_deploy:
machine: gpu
inputs: inputs:
points: tracker/points points: tracker/points
outputs: outputs:
@@ -219,32 +201,38 @@ nodes:
env: env:
IMAGE_RESIZE_RATIO: "1.0" IMAGE_RESIZE_RATIO: "1.0"


- id: reachy-mobile-base
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-mobile-base
_unstable_deploy:
machine: encoder
inputs:
action_base: parse_point/action
action_whisper: parse_whisper/action
outputs:
- response_base
env:
ROBOT_IP: 127.0.0.1

- id: plot - id: plot
build: pip install -e ../../node-hub/dora-rerun build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun path: dora-rerun
_unstable_deploy:
machine: macbook
inputs: inputs:
image: dav1d-remote/image_left
torso/image: dav1d-remote/image_depth
torso/depth: dav1d-remote/depth
torso/boxes2d: parse_bbox/bbox
head/boxes2d: parse_bbox/bbox_track
head/image: camera/image_left
torso/image: camera_depth/image_depth
torso/depth: camera_depth/depth
torso/boxes2d: parse_bbox/bbox_pick
torso/masks: sam2/masks
original_text: dora-distil-whisper/text original_text: dora-distil-whisper/text
emotion_text: parse_whisper/emotion
parsed_text: parse_whisper/text parsed_text: parse_whisper/text
qwenvl_text: dora-qwenvl/text qwenvl_text: dora-qwenvl/text
cotracker/tracked_image: tracker/tracked_image
head/points2d: tracker/points
env:
RERUN_MEMORY_LIMIT: 25%
CAMERA_PITCH: 2.40

- id: dora-kokoro-tts
build: pip install -e ../../node-hub/dora-kokoro-tts
path: dora-kokoro-tts
inputs:
text: parse_whisper/speech
outputs:
- audio
env: env:
RERUN_MEMORY_LIMIT: 5%
CAMERA_PITCH: 2.47
VOICE: bm_fable

- id: dora-pyaudio
build: pip install -e ../../node-hub/dora-pyaudio
path: dora-pyaudio
inputs:
audio: dora-kokoro-tts/audio

+ 45 - 0 examples/reachy2-remote/no_torque copy.yml

@@ -0,0 +1,45 @@
nodes:
- id: camera
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
image_left: dora/timer/millis/20
outputs:
#- image_depth
#- depth
- image_left
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: camera_depth
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
depth: dora/timer/millis/20
outputs:
- image_depth
- depth
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
_unstable_deploy:
machine: macbook
inputs:
torso/depth: camera_depth/depth
torso/image: camera_depth/image_depth
head/image_left: camera/image_left
#torso/depth: camera/depth
env:
RERUN_MEMORY_LIMIT: 5%
CAMERA_PITCH: 2.47

+ 45 - 0 examples/reachy2-remote/no_torque.yml

@@ -0,0 +1,45 @@
nodes:
- id: camera
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
image_left: dora/timer/millis/20
outputs:
#- image_depth
#- depth
- image_left
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 172.18.131.66

- id: camera_depth
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
depth: dora/timer/millis/20
outputs:
- image_depth
- depth
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 172.18.131.66

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
_unstable_deploy:
machine: macbook
inputs:
torso/depth: camera_depth/depth
torso/image: camera_depth/image_depth
head/image_left: camera/image_left
#torso/depth: camera/depth
env:
RERUN_MEMORY_LIMIT: 5%
CAMERA_PITCH: 2.40

+ 45 - 0 examples/reachy2-remote/no_torque_remote.yml

@@ -0,0 +1,45 @@
nodes:
- id: camera
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
image_left: dora/timer/millis/20
outputs:
#- image_depth
#- depth
- image_left
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: camera_depth
build: pip install -e ../../node-hub/dora-reachy2
path: dora-reachy2-camera
_unstable_deploy:
machine: macbook
inputs:
depth: dora/timer/millis/20
outputs:
- image_depth
- depth
env:
# IMAGE_WIDTH: 640
# IMAGE_HEIGHT: 480
ROBOT_IP: 10.42.0.242

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
_unstable_deploy:
machine: macbook
inputs:
torso/depth: camera_depth/depth
torso/image: camera_depth/image_depth
head/image_left: camera/image_left
#torso/depth: camera/depth
env:
RERUN_MEMORY_LIMIT: 5%
CAMERA_PITCH: 2.47

+ 1 - 1 examples/reachy2-remote/parse_bbox.py

@@ -70,7 +70,7 @@ for event in node:
         )
     elif image_id == "image_depth":
         node.send_output(
-            "bbox_grab",
+            "bbox_pick",
             pa.array(bboxes.ravel()),
             metadata,
         )

+ 13 - 15 examples/reachy2-remote/parse_point.py

@@ -13,6 +13,7 @@ node = Node()
IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0")) IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0"))


arrive_time = time.time() arrive_time = time.time()
arrived = False


for event in node: for event in node:
if event["type"] == "INPUT": if event["type"] == "INPUT":
@@ -25,38 +26,35 @@ for event in node:
# Do point 0 first # Do point 0 first
if len(values) == 0: if len(values) == 0:
continue continue
elif len(values) > 1:
elif len(values) >= 1:
point = values[-1] point = values[-1]


rz = int((width / 2) - point[0]) / (width / 2) rz = int((width / 2) - point[0]) / (width / 2)
x_distance = min(height, height - point[1]) x_distance = min(height, height - point[1])
y = 0 y = 0
if abs(rz) > 0.75: if abs(rz) > 0.75:
rz = np.deg2rad(45) * np.sign(rz)
rz = np.deg2rad(70) * np.sign(rz)
elif abs(rz) > 0.5: elif abs(rz) > 0.5:
rz = np.deg2rad(30) * np.sign(rz)
rz = np.deg2rad(60) * np.sign(rz)
elif abs(rz) > 0.3: elif abs(rz) > 0.3:
rz = np.deg2rad(20) * np.sign(rz)
rz = np.deg2rad(30) * np.sign(rz)
elif abs(rz) > 0.1: elif abs(rz) > 0.1:
rz = np.deg2rad(10) * np.sign(rz) rz = np.deg2rad(10) * np.sign(rz)
else: else:
x = 0 x = 0


if x_distance > (height * 0.8):
x = 1.0
if x_distance > (height * 0.7): if x_distance > (height * 0.7):
x = 0.7
x = 1.0
elif x_distance > (height * 0.5): elif x_distance > (height * 0.5):
x = 0.6
x = 0.8
elif x_distance > (height * 0.25): elif x_distance > (height * 0.25):
x = 0.5
x = 0.61
elif x_distance > (height * 0.1):
x = 0.3
else: else:
x = 0 x = 0
if x_distance < (height * 0.25):
print("ARRIVED!")
time.sleep(1.0)
if time.time() - arrive_time > 4.0:
node.send_output("arrived", pa.array([]))
arrive_time = time.time()
# Action
node.send_output("arrived", pa.array([]))
action = pa.array([x, y, 0, 0, 0, rz]) action = pa.array([x, y, 0, 0, 0, rz])
node.send_output("action", action) node.send_output("action", action)

+ 322 - 171 examples/reachy2-remote/parse_pose.py

@@ -1,8 +1,6 @@
"""TODO: Add docstring.""" """TODO: Add docstring."""


import json
import os import os

import numpy as np import numpy as np
import pyarrow as pa import pyarrow as pa
from dora import Node from dora import Node
@@ -15,64 +13,21 @@ IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0"))
l_init_pose = [ l_init_pose = [
-7.0631310641087435, -7.0631310641087435,
-10.432298603362307, -10.432298603362307,
24.429809104404114,
-132.15000828778648,
-1.5494749438811133,
-21.749917789205202,
8.099312596108344,
57.429809104404114,
-126.15000828778648,
-3.5494749438811133,
-35.749917789205202,
0.999312596108344,
100, 100,
] ]
r_init_pose = [ r_init_pose = [
-5.60273587426976, -5.60273587426976,
10.780818397272316, 10.780818397272316,
-27.868146823156042,
-57.868146823156042,
-126.15650363072193, -126.15650363072193,
3.961108018106834, 3.961108018106834,
-35.43682799906162, -35.43682799906162,
350.9236448374495,
100,
]
r_release_closed_pose = [
-26.1507947940993,
12.16735021387949,
-2.2657319092611976,
-97.63648867582175,
-19.91084837404425,
22.10184328619011,
366.71351223614494,
0,
]

r_release_opened_pose = [
-26.1507947940993,
12.16735021387949,
-2.2657319092611976,
-97.63648867582175,
-19.91084837404425,
22.10184328619011,
366.71351223614494,
100,
]

l_release_opened_pose = [
-30.04330081906935,
-7.415231584691132,
3.6972339048071468,
-97.7274736257555,
12.996718740452982,
30.838020649757016,
-1.5572310505704858,
0,
]

l_release_closed_pose = [
-30.04330081906935,
-7.415231584691132,
3.6972339048071468,
-97.7274736257555,
12.996718740452982,
30.838020649757016,
-1.5572310505704858,
0.9236448374495,
100, 100,
] ]


@@ -98,17 +53,7 @@ cache = {}




## ---- INIT --- ## ---- INIT ---
node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": 2},
)
node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": 2},
)
node.send_output("look", pa.array([0.35, 0, 0]))
node.send_output("look", pa.array([1.0, 0, 0]))




for event in node: for event in node:
@@ -123,134 +68,325 @@ for event in node:
y = values[1] y = values[1]
z = values[2] z = values[2]
action = event["metadata"]["action"] action = event["metadata"]["action"]
node.send_output("look", pa.array([x, y, z]))


match action: match action:
case "grab":
case "pick":
if len(values) == 0: if len(values) == 0:
continue continue
x = x + 0.01
x = x + 0.05
z = z + 0.03


## Clip the Maximum and minim values for the height of the arm to avoid collision or weird movement.
trajectory = np.array(
[
[x, y, -0.16, 0, 0, 0, 100],
[x, y, z, 0, 0, 0, 0],
[x, y, -0.16, 0, 0, 0, 0],
],
).ravel()

if y < 0:
if y < 0: # right arm
node.send_output("look", pa.array([x, y, z]))

# Send a mobile base command to move slightly left to facilitate the grasp
node.send_output(
"translate_base",
pa.array([0.0, y + 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
node.send_output("look", pa.array([x, -0.3, z]))

trajectory = np.array(
[
[x, -0.3, z + 0.1, 0, 0, 0, 100],
[x, -0.3, z, 0, 0, 0, 0],
[x, -0.3, z + 0.1, 0, 0, 0, 0],
[0.3, -0.3, -0.16, 0, 0, 0, 0],
],
).ravel()
node.send_output( node.send_output(
"action_r_arm", "action_r_arm",
pa.array(trajectory), pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.75"},
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_r_arm", timeout=5)
if event is not None and event[0]:
print("Success")
arm_holding_object = "right"
node.send_output(
"action_r_arm",
pa.array([0.1, -0.2, -0.16, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "1"},
)
else:
print("Failed: x: ", x, " y: ", y, " z: ", z)
node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": "1"},
)
event = wait_for_event(id="response_r_arm")
else:
y += 0.03
event = wait_for_event(id="response_r_arm")

node.send_output("look", pa.array([x, -0.3, z]))

node.send_output(
"translate_base",
pa.array([0.0, -(y + 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
arm_holding_object = "right"
node.send_output("look", pa.array([0.4, 0, 0]))
node.send_output("success", pa.array([True]))

else: # left arm
# Send a mobile base command to move slightly left to facilitate the grasp
node.send_output("look", pa.array([x, y, z]))

## Move the base
node.send_output(
"translate_base",
pa.array([0.0, y - 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")

node.send_output("look", pa.array([x, 0.3, z]))
trajectory = np.array(
[
[x, 0.3, z + 0.1, 0, 0, 0, 100],
[x, 0.3, z, 0, 0, 0, 0],
[x, 0.3, z + 0.1, 0, 0, 0, 0],
[0.3, 0.3, -0.16, 0, 0, 0, 0],
],
).ravel()
node.send_output("look", pa.array([x, 0.3, z]))

node.send_output( node.send_output(
"action_l_arm", "action_l_arm",
pa.array(trajectory), pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.75"},
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_l_arm", timeout=5)
if event is not None and event[0]:
print("Success")
arm_holding_object = "left"
node.send_output(
"action_l_arm",
pa.array([0.1, 0.2, -0.16, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "1"},
)
else:
print("Failed")

event = wait_for_event(id="response_l_arm")
if not (
event is not None
and event[0] is not None
and event[0][0].as_py()
):
node.send_output( node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": "1"},
)
"action_l_arm",
pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)

event = wait_for_event(id="response_l_arm") event = wait_for_event(id="response_l_arm")
node.send_output(
"translate_base",
pa.array([0.0, -(y - 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
node.send_output("look", pa.array([0.4, 0.0, 0.0]))

arm_holding_object = "left"
node.send_output("success", pa.array([True]))
case "flip":
if len(values) == 0:
continue
x = x + 0.05
z = z + 0.03

if y < 0: # right arm
node.send_output("look", pa.array([x, y, z]))

# Send a mobile base command to move slightly left to facilitate the grasp
node.send_output(
"translate_base",
pa.array([0.0, y + 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
node.send_output("look", pa.array([x, -0.3, z]))

trajectory = np.array(
[
[x, -0.3, z + 0.1, 0, 0, 0, 100],
[x, -0.3, z, 0, 0, 0, 0],
[x, -0.3, z + 0.1, 0, 0, 0, 0],
[x, -0.3, z + 0.1, -np.pi, 0, 0, 0],
[x, -0.3, z, -np.pi, 0, 0, 100],
[x, -0.3, z + 0.1, -np.pi, 0, 0, 100],
[x, -0.3, z + 0.1, 0, 0, 0, 100],
],
).ravel()
node.send_output(
"action_r_arm",
pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.8"},
)
event = wait_for_event(id="response_r_arm")

node.send_output("look", pa.array([x, -0.3, z]))

node.send_output(
"translate_base",
pa.array([0.0, -(y + 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
arm_holding_object = "right"
node.send_output("look", pa.array([0.4, 0, 0]))
node.send_output("success", pa.array([True]))

else: # left arm
# Send a mobile base command to move slightly left to facilitate the grasp
node.send_output("look", pa.array([x, y, z]))

## Move the base
node.send_output(
"translate_base",
pa.array([0.0, y - 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")

node.send_output("look", pa.array([x, 0.3, z]))
trajectory = np.array(
[
[x, 0.3, z + 0.1, 0, 0, 0, 100],
[x, 0.3, z, 0, 0, 0, 0],
[x, 0.3, z + 0.1, 0, 0, 0, 0],
[x, 0.3, z + 0.1, -np.pi, 0, 0, 0],
[x, 0.3, z, -np.pi, 0, 0, 100],
[x, 0.3, z + 0.1, -np.pi, 0, 0, 100],
[x, 0.3, z + 0.1, 0, 0, 0, 100],
],
).ravel()
node.send_output("look", pa.array([x, 0.3, z]))

node.send_output(
"action_l_arm",
pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.8"},
)

event = wait_for_event(id="response_l_arm")

node.send_output(
"translate_base",
pa.array([0.0, -(y - 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
node.send_output("look", pa.array([0.4, 0.0, 0.0]))

arm_holding_object = "left"
node.send_output("success", pa.array([True]))

case "release": case "release":
if len(values) == 0: if len(values) == 0:
continue continue
x = x + 0.01
if z < -0.4:
z = -0.16
x = x + 0.05
z = z + 0.13


## Clip the Maximum and minim values for the height of the arm to avoid collision or weird movement. ## Clip the Maximum and minim values for the height of the arm to avoid collision or weird movement.
trajectory = np.array(
[
[x, y, z + 0.1, 0, 0, 0, 100],
],
).ravel()

node.send_output("look", pa.array([x, y, z]))
if abs(y) > 0.5:
continue
if arm_holding_object is None: if arm_holding_object is None:
print("No arm holding object!!!")
continue continue
elif arm_holding_object == "right": elif arm_holding_object == "right":
# Send a mobile base command to move slightly left to facilitate the grasp
node.send_output(
"translate_base",
pa.array([0.0, y + 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base", timeout=10)
node.send_output("look", pa.array([x, -0.3, z]))
trajectory = np.array(
[
[x, -0.3, z + 0.1, 0, 0, 0, 0],
[x, -0.3, z, 0, 0, 0, 100],
[x, -0.3, z + 0.1, 0, 0, 0, 100],
],
).ravel()

node.send_output( node.send_output(
"action_r_arm", "action_r_arm",
pa.array(trajectory), pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.75"},
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_r_arm", timeout=5)
if event is not None and event[0]:
print("Success release right with", event[0])
arm_holding_object = "right"
node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": 1},
)
arm_holding_object = None
else:
print("Failed: x: ", x, " y: ", y, " z: ", z)
event = wait_for_event(id="response_r_arm", timeout=10)
if not (
event is not None
and event[0] is not None
and event[0][0].as_py()
):
trajectory = np.array(
[
[x, 0.3, -0.2, 0, 0, 0, 0],
[x, 0.3, -0.2, 0, 0, 0, 100],
[x, 0.3, -0.2, 0, 0, 0, 100],
],
).ravel()

node.send_output( node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": "1"},
"action_l_arm",
pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_r_arm")
node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": "0.7"},
)

event = wait_for_event(id="response_r_arm", timeout=10)
node.send_output(
"translate_base",
pa.array([0.0, -(y + 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base", timeout=10)
node.send_output("success", pa.array([True]))
node.send_output("look", pa.array([0.4, 0.0, 0.0]))

else: else:
y += 0.03
node.send_output(
"translate_base",
pa.array([0.0, y - 0.3, 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
trajectory = np.array(
[
[x, 0.3, z + 0.1, 0, 0, 0, 0],
[x, 0.3, z, 0, 0, 0, 100],
[x, 0.3, z + 0.1, 0, 0, 0, 100],
],
).ravel()

node.send_output("look", pa.array([x, 0.3, z]))
node.send_output( node.send_output(
"action_l_arm", "action_l_arm",
pa.array(trajectory), pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.75"},
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_l_arm", timeout=5)
if event is not None and event[0]:
print("Success release left with", event[0])
arm_holding_object = "left"
node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": 1},
)
arm_holding_object = None
else:
print("Failed")
event = wait_for_event(id="response_l_arm")
if not (
event is not None
and event[0] is not None
and event[0][0].as_py()
):
trajectory = np.array(
[
[x, 0.3, -0.2, 0, 0, 0, 0],
[x, 0.3, -0.2, 0, 0, 0, 100],
[x, 0.3, -0.2, 0, 0, 0, 100],
],
).ravel()

node.send_output( node.send_output(
"action_l_arm", "action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": "1"},
pa.array(trajectory),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
) )
event = wait_for_event(id="response_l_arm")

node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": "0.7"},
)
event = wait_for_event(id="response_l_arm")

node.send_output(
"translate_base",
pa.array([0.0, -(y - 0.3), 0, 0, 0, 0]),
metadata={"encoding": "xyzrpy", "duration": "0.3"},
)
event = wait_for_event(id="translate_base")
node.send_output("success", pa.array([True]))
node.send_output("look", pa.array([0.4, 0.0, 0.0]))


elif event["id"] == "release_right": elif event["id"] == "release_right":
node.send_output( node.send_output(
@@ -275,26 +411,41 @@ for event in node:
pa.array(r_init_pose), pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": 1}, metadata={"encoding": "jointstate", "duration": 1},
) )
elif event["id"] == "release_left":

elif event["id"] == "follow_pose":
node.send_output( node.send_output(
"action_l_arm", "action_l_arm",
pa.array( pa.array(
[
0.4,
0,
-0.16,
0,
0,
0,
100,
],
[0, -15, 0, 0, 0, 0, 0, 0],
), ),
metadata={"encoding": "xyzrpy", "duration": "0.75"},
metadata={"encoding": "jointstate", "duration": "0.75"},
) )

event, cache = wait_for_event(id="response_l_arm", cache=cache) event, cache = wait_for_event(id="response_l_arm", cache=cache)
if event is not None and event[0]:
node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": 1},
)

node.send_output(
"action_r_arm",
pa.array(
[0, 15, 0, 0, 0, 0, 0, 0],
),
metadata={"encoding": "jointstate", "duration": "0.75"},
)
event, cache = wait_for_event(id="response_r_arm", cache=cache)

elif event["id"] == "raise_arm_pose":
node.send_output(
"action_r_arm",
pa.array(r_init_pose),
metadata={"encoding": "jointstate", "duration": 2},
)
event = wait_for_event(id="response_r_arm", timeout=10)

node.send_output(
"action_l_arm",
pa.array(l_init_pose),
metadata={"encoding": "jointstate", "duration": 2},
)
event = wait_for_event(id="response_l_arm", timeout=10)

elif event["id"] == "look_ahead":
node.send_output("look", pa.array([0.7, 0, 0]))

+ 137 - 110 examples/reachy2-remote/parse_whisper.py

@@ -1,135 +1,162 @@
"""TODO: Add docstring.""" """TODO: Add docstring."""


import json
import os import os
import time import time
import re


import numpy as np import numpy as np
import pyarrow as pa import pyarrow as pa
from dora import Node from dora import Node



node = Node() node = Node()


IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0")) IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0"))


queue = []


def extract_bboxes(json_text):
"""Extract bounding boxes from a JSON string with markdown markers and return them as a NumPy array.
last_prompt = ""


Parameters
----------
json_text : str
JSON string containing bounding box data, including ```json markers.


Returns
-------
np.ndarray: NumPy array of bounding boxes.
def handle_event(text: str):
global queue
if "stop" in text:
node.send_output("points", pa.array([], type=pa.float64()))
elif "follow" in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the given followed object"
node.send_output("look_ahead", pa.array([1.0]))
time.sleep(0.5) # Sync image
node.send_output("text", pa.array([text]), {"image_id": "image_left"})
node.send_output("follow_pose", pa.array([1.0]))
elif "raise your arms" in text:
node.send_output("raise_arm_pose", pa.array([1.0]))


"""
# Ensure all lines are stripped of whitespace and markers
lines = json_text.strip().splitlines()
elif (
("pick" in text and "place" in text)
or ("make a hot dog" in text)
or ("make a vegetarian hot dog" in text)
or ("cook" in text)
):
if "make a hot dog" in text:
text = "pick the sausage, place it on the black grill, wait, flip the sausage on the grill, wait, pick the sausage on the grill, place it on the hot dog bun, speak it's ready!"
if "make a vegetarian hot dog" in text:
text = "pick the cucumber, place it on the black grill, wait, flip the cucumber on the grill, wait, pick the cucumber on the grill, place it on the hot dog bun, speak it's ready!"
elif "cook" in text:
# Match 'grill' followed by two words
match = re.search(r"\bcook\b\s+(\w+)\s+(\w+)", text)


# Filter out lines that are markdown markers
clean_lines = [line for line in lines if not line.strip().startswith("```")]
if match:
word1 = match.group(1)
word2 = match.group(2)
grilled_item = word1 + " " + word2
text = f"pick {grilled_item}, place it on the black grill, wait, flip {grilled_item}, wait, pick {grilled_item} again, place it on the white plate, speak {grilled_item} is ready!"


# Join the lines back into a single string
clean_text = "\n".join(clean_lines)
# Parse the cleaned JSON text
try:
data = json.loads(clean_text)
if "," or "." in text:
prompts = re.split(r"[,.]", text)
queue = prompts
first_prompt = queue[0]
queue = queue[1:]
handle_event(first_prompt)


# Extract bounding boxes
bboxes = [item["bbox_2d"] for item in data]
labels = [item["label"] for item in data]
elif "pick " in text:
text = text.replace("can you", "")
text = text.replace("please", "")
text = text.replace("reachy", "")

node.send_output("speech", pa.array(["I'm going to " + text]))

text = f"Given the prompt: {text}. Output the bounding boxes for the object to be picked"

node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "pick"},
)
elif "place " in text:
text = text.replace("can you", "")
text = text.replace("please", "")
text = text.replace("reachy", "")

node.send_output("speech", pa.array(["I'm going to " + text]))

text = f"Given the prompt: {text}. Output the bounding boxes for the place to place the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "release"},
)
elif " wait" in text:
node.send_output("speech", pa.array(["I'm going to wait for 5 seconds."]))

time.sleep(5)
if len(queue) > 0:
first_prompt = queue[0]
queue = queue[1:]
handle_event(first_prompt)
elif " speak" in text:
node.send_output("speech", pa.array([text.replace("speak ", "")]))

if len(queue) > 0:
first_prompt = queue[0]
queue = queue[1:]
handle_event(first_prompt)
elif " flip" in text:
text = text.replace("can you", "")
text = text.replace("please", "")
text = text.replace("reachy", "")

node.send_output("speech", pa.array(["I'm going to " + text]))

text = f"Given the prompt: {text}. Output the bounding boxes for the object to flip"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "flip"},
)
# elif "flip " in text:
# node.send_output("flip", pa.array([True]))
elif "release left" in text:
node.send_output("action_release_left", pa.array([1.0]))
elif "release right" in text:
node.send_output("action_release_right", pa.array([1.0]))
elif "turn left" in text:
action = pa.array([0.0, 0, 0, 0, 0, np.deg2rad(30)])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "turn right" in text:
action = pa.array([0.0, 0, 0, 0, 0, -np.deg2rad(30)])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "turn around" in text:
action = pa.array([0.0, 0, 0, 0, 0, -np.deg2rad(180)])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move left" in text:
action = pa.array([0.0, 0.2, 0, 0, 0, 0])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move right" in text:
action = pa.array([0.0, -0.2, 0, 0, 0, 0])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move forward" in text:
action = pa.array([0.2, 0, 0, 0, 0, 0])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move backward" in text:
action = pa.array([-0.2, 0, 0, 0, 0, 0])
node.send_output("action", action)
node.send_output("points", pa.array([]))


return np.array(bboxes), np.array(labels)
except Exception as _e: # noqa
pass
return None, None


last_prompt = ""
for event in node: for event in node:
if event["type"] == "INPUT": if event["type"] == "INPUT":
if event["id"] == "text": if event["id"] == "text":
text = event["value"][0].as_py().lower()

if "stop" in text:
node.send_output("points", pa.array([], type=pa.float64()))
elif "follow" in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the given followed object"
node.send_output("text", pa.array([text]), {"image_id": "image_left"})
elif "grab " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the given grabbed object"
node.send_output(
"text", pa.array([text]), {"image_id": "image_depth", "action": "grab"}
)
elif "get " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the object"
node.send_output(
"text", pa.array([text]), {"image_id": "image_left", "action": "grab"}
)
last_prompt = text
elif "put " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the place to put the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_left", "action": "release"},
)
last_prompt = text
elif "drop " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the place to drop the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "release"},
)
elif "release left" in text:
node.send_output("action_release_left", pa.array([1.0]))
elif "release right" in text:
node.send_output("action_release_right", pa.array([1.0]))
elif "turn left" in text:
action = pa.array([0.0, 0, 0, 0, 0, np.deg2rad(160)])
node.send_output("action", action)
time.sleep(0.25)
action = pa.array([0.0, 0, 0, 0, 0, np.deg2rad(160)])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "turn right" in text:
action = pa.array([0.0, 0, 0, 0, 0, -np.deg2rad(160)])
node.send_output("action", action)
time.sleep(0.25)
action = pa.array([0.0, 0, 0, 0, 0, -np.deg2rad(160)])
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move forward" in text:
action = pa.array([0.5, 0, 0, 0, 0, 0])
node.send_output("action", action)
time.sleep(0.25)
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif "move backward" in text:
action = pa.array([-0.5, 0, 0, 0, 0, 0])
node.send_output("action", action)
time.sleep(0.25)
node.send_output("action", action)
node.send_output("points", pa.array([]))
elif event["id"] == "arrived":
text = last_prompt
print("received arrived message")
node.send_output("points", pa.array([]))
if "get " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the place to put the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "grab"},
)
elif "put " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the place to put the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image_depth", "action": "release"},
)
event_text = event["value"][0].as_py().lower()
handle_event(event_text)
elif event["id"] == "success":
if len(queue) > 0:
time.sleep(0.3)
first_prompt = queue[0]
queue = queue[1:]
handle_event(first_prompt)

+ 46 - 0 examples/so100-remote/no_torque.yml

@@ -0,0 +1,46 @@
nodes:
- id: so100
path: dora-rustypot
inputs:
tick: dora/timer/millis/33
#pose: pytorch-kinematics/action
outputs:
- pose
env:
PORT: /dev/ttyACM0
IDS: 1 2 3 4 5 6

- id: camera
build: pip install -e ../../node-hub/dora-pyrealsense
path: dora-pyrealsense
inputs:
tick: dora/timer/millis/33
outputs:
- image
- depth

- id: pytorch-kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
pose: so100/pose
outputs:
- pose
- action
env:
URDF_PATH: so100.urdf
END_EFFECTOR_LINK: "Moving Jaw"
TRANSFORM: -0.18 0.02 -0.65 0.7 0 0 0.7

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
series_fk: pytorch-kinematics/pose
jointstate_so100: so100/pose
camera/image: camera/image
camera/depth: camera/depth
env:
so100_urdf: so100.urdf
so100_transform: -0.18 0.02 -0.65 0.7 0 0 0.7
CAMERA_PITCH: -3.1415

+ 69 - 0 examples/so100-remote/parse_bbox.py

@@ -0,0 +1,69 @@
"""TODO: Add docstring."""

import json
import os

import numpy as np
import pyarrow as pa
from dora import Node

node = Node()

IMAGE_RESIZE_RATIO = float(os.getenv("IMAGE_RESIZE_RATIO", "1.0"))


def extract_bboxes(json_text):
"""Extract bounding boxes from a JSON string with markdown markers and return them as a NumPy array.

Parameters
----------
json_text : str
JSON string containing bounding box data, including ```json markers.

Returns
-------
np.ndarray: NumPy array of bounding boxes.

"""
# Ensure all lines are stripped of whitespace and markers
lines = json_text.strip().splitlines()

# Filter out lines that are markdown markers
clean_lines = [line for line in lines if not line.strip().startswith("```")]

# Join the lines back into a single string
clean_text = "\n".join(clean_lines)
# Parse the cleaned JSON text
try:
data = json.loads(clean_text)

# Extract bounding boxes
bboxes = [item["bbox_2d"] for item in data]
labels = [item["label"] for item in data]

return np.array(bboxes), np.array(labels)
except Exception as _e: # noqa
pass
return None, None


for event in node:
if event["type"] == "INPUT":
if len(event["value"]) == 0:
node.send_output("bbox_track", pa.array([]))
continue

text = event["value"][0].as_py()
metadata = event["metadata"]
image_id = event["metadata"]["image_id"]

bboxes, labels = extract_bboxes(text)
if bboxes is not None and len(bboxes) > 0:
bboxes = bboxes * int(1 / IMAGE_RESIZE_RATIO)
metadata["image_id"] = image_id
metadata["encoding"] = "xyxy"
node.send_output(
"bbox",
pa.array(bboxes.ravel()),
metadata,
)
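For reference, a minimal sketch of how the extract_bboxes helper above behaves on Qwen-VL-style output; the sample values below are made up for illustration and assume the definitions in this file:

sample = '```json\n[{"bbox_2d": [120, 80, 340, 260], "label": "suitcase"}]\n```'
bboxes, labels = extract_bboxes(sample)
# bboxes -> array([[120,  80, 340, 260]]); labels -> array(['suitcase'])
# On malformed JSON the helper returns (None, None), which the event loop above skips.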

+ 160 - 0 examples/so100-remote/parse_pose.py

@@ -0,0 +1,160 @@
"""TODO: Add docstring."""

import time
import numpy as np
import pyarrow as pa
from dora import Node

node = Node()
top_z = -0.5
low_z = -0.57

roll = 1.86
pitch = 1.43
yaw_open = 0.8
yaw_close = -0.5


def grab(target_x, target_y, low_z, top_z, roll, pitch, yaw_open, yaw_close, last_x, last_y):

node.send_output(
"action",
pa.array([target_x, target_y, top_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)

time.sleep(0.6)

node.send_output(
"action",
pa.array([target_x, target_y, low_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)
time.sleep(0.2)


node.send_output(
"action",
pa.array([target_x, target_y, low_z, roll, pitch, yaw_close]),
metadata={"encoding": "xyzrpy"},
)

time.sleep(0.4)

node.send_output(
"action",
pa.array([target_x, target_y, top_z, roll, pitch, yaw_close]),
metadata={"encoding": "xyzrpy"},
)

time.sleep(0.5)

node.send_output(
"action",
pa.array([0.05, 0.04, top_z, roll, pitch, yaw_close]),
metadata={"encoding": "xyzrpy"},
)

def place(place_x, place_y, place_z, top_z, roll, pitch, yaw_open, yaw_close, last_x, last_y):


node.send_output(
"action",
pa.array([place_x, place_y, top_z, roll, pitch, yaw_close]),
metadata={"encoding": "xyzrpy"},
)

time.sleep(0.6)

node.send_output(
"action",
pa.array([place_x, place_y, place_z, roll, pitch, yaw_close]),
metadata={"encoding": "xyzrpy"},
)

time.sleep(0.2)


node.send_output(
"action",
pa.array([place_x, place_y, place_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)
time.sleep(0.3)


node.send_output(
"action",
pa.array([place_x, place_y, top_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)
time.sleep(0.3)

node.send_output(
"action",
pa.array([0.05, 0.04, top_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)
time.sleep(0.6)


node.send_output(
"action",
pa.array([0.05, 0.04, top_z, roll, pitch, yaw_open]),
metadata={"encoding": "xyzrpy"},
)

last_x = 0
last_y = 0
last_z = 0

for event in node:
if event["type"] == "INPUT":
if event["id"] == "pose":
values = event["value"]
values = values.to_numpy()
print(values)
if len(values) == 0:
continue
x = values[0]
y = values[1]
z = values[2]
action = event["metadata"]["action"]

# Adjust z with the size of the gripper
z = z + 0.063
match action:
case "grab":
grab(
x,
y,
z,
top_z,
roll,
pitch,
yaw_open,
yaw_close,
last_x,
last_y
)
case "release":
place(
x,
y,
z,
top_z,
roll,
pitch,
yaw_open,
yaw_close,
last_x,
last_y
)
last_x = -0.05
last_y = 0.04
last_z = z

+ 32 - 0 examples/so100-remote/parse_whisper.py

@@ -0,0 +1,32 @@
"""TODO: Add docstring."""

import json
import os
import time

import numpy as np
import pyarrow as pa
from dora import Node

node = Node()


last_prompt = ""
for event in node:
if event["type"] == "INPUT":
if event["id"] == "text":
text = event["value"][0].as_py().lower()

if "grab " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the given object"
node.send_output(
"text", pa.array([text]), {"image_id": "image", "action": "grab"}
)

elif "put " in text:
text = f"Given the prompt: {text}. Output the bounding boxes for the place to put the object"
node.send_output(
"text",
pa.array([text]),
{"image_id": "image", "action": "release"},
)
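As a concrete illustration of the "grab " branch above (the transcription is hypothetical), a spoken command is rewritten into a VLM prompt before being forwarded:

text = "grab the red cube"
prompt = f"Given the prompt: {text}. Output the bounding boxes for the given object"
# forwarded as: node.send_output("text", pa.array([prompt]), {"image_id": "image", "action": "grab"})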

+ 185 - 0 examples/so100-remote/qwenvl-remote.yml

@@ -0,0 +1,185 @@
nodes:
- id: so100
path: dora-rustypot
_unstable_deploy:
machine: local
inputs:
tick: dora/timer/millis/33
pose:
source: pytorch-kinematics/action
queue_size: 100
outputs:
- pose
env:
PORT: /dev/ttyACM0
TORQUE: 2000
IDS: 1 2 3 4 5 6

- id: camera
build: pip install -e ../../node-hub/dora-pyrealsense
path: dora-pyrealsense
_unstable_deploy:
machine: local
inputs:
tick: dora/timer/millis/33
outputs:
- image
- depth

- id: pytorch-kinematics
build: pip install node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
_unstable_deploy:
machine: gpu
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
inputs:
pose: so100/pose
action:
source: parse_pose/action
queue_size: 100
outputs:
- pose
- action
env:
URDF_PATH: so100.urdf
END_EFFECTOR_LINK: "Moving Jaw"
TRANSFORM: -0.18 0.02 -0.65 0.7 0 0 0.7

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
_unstable_deploy:
machine: local
inputs:
#series_so100: so100/pose
# series_pose: pytorch-kinematics/pose
jointstate_so100: so100/pose
jointstate_so100_inference: pytorch-kinematics/action
camera/image: camera/image
camera/depth: camera/depth
text_whisper: dora-distil-whisper/text
text_vlm: dora-qwenvl/text
camera/boxes2d: parse_bbox/bbox
camera/masks: sam2/masks
env:
so100_urdf: so100.urdf
so100_inference_urdf: so100_inference.urdf
so100_transform: -0.18 0.02 -0.65 0.7 0 0 0.7
so100_inference_transform: -0.18 0.02 -0.65 0.7 0 0 0.7
CAMERA_PITCH: -3.1415

- id: dora-microphone
build: pip install node-hub/dora-microphone
path: dora-microphone
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: local
inputs:
tick: dora/timer/millis/2000
outputs:
- audio

- id: parse_whisper
path: examples/remote-so100/parse_whisper.py
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
text: dora-distil-whisper/text
outputs:
- text

- id: dora-qwenvl
build: pip install node-hub/dora-qwen2-5-vl
path: dora-qwen2-5-vl
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
image: camera/image
text: parse_whisper/text
outputs:
- text
env:
DEFAULT_QUESTION: Output the bounding box of the suitcase.
IMAGE_RESIZE_RATIO: "1.0"

- id: parse_bbox
path: examples/remote-so100/parse_bbox.py
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
text: dora-qwenvl/text
outputs:
- bbox
env:
IMAGE_RESIZE_RATIO: "1.0"

- id: sam2
build: pip install node-hub/dora-sam2
path: dora-sam2
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpus
inputs:
image: camera/image
boxes2d: parse_bbox/bbox
outputs:
- masks

- id: box_coordinates
build: pip install node-hub/dora-object-to-pose
path: dora-object-to-pose
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
depth: camera/depth
masks: sam2/masks
outputs:
- pose
env:
CAMERA_PITCH: -3.1415

- id: parse_pose
path: examples/remote-so100/parse_pose.py
_unstable_deploy:
machine: gpu
inputs:
pose: box_coordinates/pose
outputs:
- action

- id: dora-vad
build: pip install node-hub/dora-vad
path: dora-vad
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
audio: dora-microphone/audio
outputs:
- audio

- id: dora-distil-whisper
build: pip install node-hub/dora-distil-whisper
path: dora-distil-whisper
git: https://github.com/dora-rs/dora.git
rev: f4c10569f7c44e04f44f15fe42e2a82eca4b6cb2 # pinned commit, update this when changing the message crate
_unstable_deploy:
machine: gpu
inputs:
input: dora-vad/audio
outputs:
- text
env:
TARGET_LANGUAGE: english

+ 141 - 0 examples/so100-remote/qwenvl.yml

@@ -0,0 +1,141 @@
nodes:
- id: so100
path: dora-rustypot
inputs:
tick: dora/timer/millis/33
pose:
source: pytorch-kinematics/action
queue_size: 100
outputs:
- pose
env:
PORT: /dev/ttyACM0
TORQUE: 2000
IDS: 1 2 3 4 5 6

- id: camera
build: pip install -e ../../node-hub/dora-pyrealsense
path: dora-pyrealsense
inputs:
tick: dora/timer/millis/33
outputs:
- image
- depth

- id: pytorch-kinematics
build: pip install -e ../../node-hub/dora-pytorch-kinematics
path: dora-pytorch-kinematics
inputs:
pose: so100/pose
action:
source: parse_pose/action
queue_size: 100
outputs:
- pose
- action
env:
URDF_PATH: so100.urdf
END_EFFECTOR_LINK: "Moving Jaw"
TRANSFORM: -0.18 0.02 -0.65 0.7 0 0 0.7

- id: plot
build: pip install -e ../../node-hub/dora-rerun
path: dora-rerun
inputs:
#series_so100: so100/pose
# series_pose: pytorch-kinematics/pose
jointstate_so100: so100/pose
jointstate_so100_inference: pytorch-kinematics/action
camera/image: camera/image
camera/depth: camera/depth
text_whisper: dora-distil-whisper/text
text_vlm: dora-qwenvl/text
camera/boxes2d: parse_bbox/bbox
camera/masks: sam2/masks
env:
so100_urdf: so100.urdf
so100_inference_urdf: so100_inference.urdf
so100_transform: -0.18 0.02 -0.65 0.7 0 0 0.7
so100_inference_transform: -0.18 0.02 -0.65 0.7 0 0 0.7
CAMERA_PITCH: -3.1415

- id: dora-microphone
build: pip install -e ../../node-hub/dora-microphone
path: dora-microphone
inputs:
tick: dora/timer/millis/2000
outputs:
- audio

- id: parse_whisper
path: parse_whisper.py
inputs:
text: dora-distil-whisper/text
outputs:
- text

- id: dora-qwenvl
build: pip install -e ../../node-hub/dora-qwen2-5-vl
path: dora-qwen2-5-vl
inputs:
image: camera/image
text: parse_whisper/text
outputs:
- text
env:
DEFAULT_QUESTION: Output the bounding box of the suitcase.
IMAGE_RESIZE_RATIO: "1.0"

- id: parse_bbox
path: parse_bbox.py
inputs:
text: dora-qwenvl/text
outputs:
- bbox
env:
IMAGE_RESIZE_RATIO: "1.0"

- id: sam2
build: pip install -e ../../node-hub/dora-sam2
path: dora-sam2
inputs:
image: camera/image
boxes2d: parse_bbox/bbox
outputs:
- masks

- id: box_coordinates
build: pip install -e ../../node-hub/dora-object-to-pose
path: dora-object-to-pose
inputs:
depth: camera/depth
masks: sam2/masks
outputs:
- pose
env:
CAMERA_PITCH: -3.1415

- id: parse_pose
path: parse_pose.py
inputs:
pose: box_coordinates/pose
outputs:
- action

- id: dora-vad
build: pip install -e ../../node-hub/dora-vad
path: dora-vad
inputs:
audio: dora-microphone/audio
outputs:
- audio

- id: dora-distil-whisper
build: pip install -e ../../node-hub/dora-distil-whisper
path: dora-distil-whisper
inputs:
input: dora-vad/audio
outputs:
- text
env:
TARGET_LANGUAGE: english

+ 384
- 0
examples/so100-remote/so100.urdf View File

@@ -0,0 +1,384 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- This URDF was automatically created by SolidWorks to URDF Exporter! Originally created by Stephen Brawner (brawner@gmail.com)
Commit Version: 1.6.0-1-g15f4949 Build Version: 1.6.7594.29634
For more information, please see http://wiki.ros.org/sw_urdf_exporter -->
<robot
name="SO_5DOF_ARM100_8j_URDF.SLDASM">
<link
name="base_link">
<inertial>
<origin
xyz="-2.45960666746703E-07 0.0311418169687909 0.0175746661003382"
rpy="0 0 0" />
<mass
value="0.193184127927598" />
<inertia
ixx="0.000137030709467877"
ixy="2.10136126944992E-08"
ixz="4.24087422551286E-09"
iyy="0.000169089551209259"
iyz="2.26514711036514E-05"
izz="0.000145097720857224" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Base.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Base.STL" />
</geometry>
</collision>
</link>
<link
name="Rotation_Pitch">
<inertial>
<origin
xyz="-9.07886224712597E-05 0.0590971820568318 0.031089016892169"
rpy="0 0 0" />
<mass
value="0.119226314127197" />
<inertia
ixx="5.90408775624429E-05"
ixy="4.90800532852998E-07"
ixz="-5.90451772654387E-08"
iyy="3.21498601038881E-05"
iyz="-4.58026206663885E-06"
izz="5.86058514263952E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Rotation_Pitch.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Rotation_Pitch.STL" />
</geometry>
</collision>
</link>
<joint
name="Rotation"
type="revolute">
<origin
xyz="0 -0.0452 0.0165"
rpy="1.5708 0 0" />
<parent
link="base_link" />
<child
link="Rotation_Pitch" />
<axis
xyz="0 -1 0" />
<!-- note for the so100 arm there is no well defined effort/velocity limits at the moment -->
</joint>
<link
name="Upper_Arm">
<inertial>
<origin
xyz="-1.7205170190925E-05 0.0701802156327694 0.00310545118155671"
rpy="0 0 0" />
<mass
value="0.162409284599177" />
<inertia
ixx="0.000167153146617081"
ixy="1.03902689187701E-06"
ixz="-1.20161820645189E-08"
iyy="7.01946992214245E-05"
iyz="2.11884806298698E-06"
izz="0.000213280241160769" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Upper_Arm.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Upper_Arm.STL" />
</geometry>
</collision>
</link>
<joint
name="Pitch"
type="revolute">
<origin
xyz="0 0.1025 0.0306"
rpy="0 0 0" />
<parent
link="Rotation_Pitch" />
<child
link="Upper_Arm" />
<axis
xyz="1 0 0" />
</joint>
<link
name="Lower_Arm">
<inertial>
<origin
xyz="-0.00339603710186651 0.00137796353960074 0.0768006751156044"
rpy="0 0 0" />
<mass
value="0.147967774582291" />
<inertia
ixx="0.000105333995841409"
ixy="1.73059237226499E-07"
ixz="-1.1720305455211E-05"
iyy="0.000138766654485212"
iyz="1.77429964684103E-06"
izz="5.08741652515214E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Lower_Arm.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Lower_Arm.STL" />
</geometry>
</collision>
</link>
<joint
name="Elbow"
type="revolute">
<origin
xyz="0 0.11257 0.028"
rpy="-0.1 0 0" />
<parent
link="Upper_Arm" />
<child
link="Lower_Arm" />
<axis
xyz="1 0 0" />
</joint>
<link
name="Wrist_Pitch_Roll">
<inertial>
<origin
xyz="-0.00852653127372418 -0.0352278997897927 -2.34622481569413E-05"
rpy="0 0 0" />
<mass
value="0.066132067097723" />
<inertia
ixx="1.95717492443445E-05"
ixy="-6.62714374412293E-07"
ixz="5.20089016442066E-09"
iyy="2.38028417569933E-05"
iyz="4.09549055863776E-08"
izz="3.4540143384536E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Pitch_Roll.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Pitch_Roll.STL" />
</geometry>
</collision>
</link>
<joint
name="Wrist_Pitch"
type="revolute">
<origin
xyz="0 0.0052 0.1349"
rpy="-1.5708 0 0" />
<parent
link="Lower_Arm" />
<child
link="Wrist_Pitch_Roll" />
<axis
xyz="1 0 0" />
<limit
lower="-1.8"
upper="1.8"
effort="0"
velocity="0" />
</joint>
<link
name="Fixed_Jaw">
<inertial>
<origin
xyz="0.00552376906426563 -0.0280167153359021 0.000483582592841092"
rpy="0 0 0" />
<mass
value="0.0929859131176897" />
<inertia
ixx="4.3328249304211E-05"
ixy="7.09654328670947E-06"
ixz="5.99838530879484E-07"
iyy="3.04451747368212E-05"
iyz="-1.58743247545413E-07"
izz="5.02460913506734E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Fixed_Jaw.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<!-- <collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Fixed_Jaw.STL" />
</geometry>
</collision> -->
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Roll_Follower_SO101.STL" />
</geometry>
</collision>
</link>
<joint
name="Wrist_Roll"
type="revolute">
<origin
xyz="0 -0.0601 0"
rpy="0 1.5708 0" />
<parent
link="Wrist_Pitch_Roll" />
<child
link="Fixed_Jaw" />
<axis
xyz="0 1 0" />
</joint>
<link
name="Moving Jaw">
<inertial>
<origin
xyz="-0.00161744605468241 -0.0303472584046471 0.000449645961853651"
rpy="0 0 0" />
<mass
value="0.0202443794940372" />
<inertia
ixx="1.10911325081525E-05"
ixy="-5.35076503033314E-07"
ixz="-9.46105662101403E-09"
iyy="3.03576451001973E-06"
iyz="-1.71146075110632E-07"
izz="8.9916083370498E-06" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Moving Jaw.STL" />
</geometry>
<material
name="">
<color
rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Moving_Jaw_SO101.STL" />
</geometry>
</collision>
</link>
<joint
name="Jaw"
type="revolute">
<origin
xyz="-0.0202 -0.0244 0"
rpy="3.1416 0 3.33" />
<parent
link="Fixed_Jaw" />
<child
link="Moving Jaw" />
<axis
xyz="0 0 1" />
</joint>
</robot>

+ 384
- 0
examples/so100-remote/so100_inference.urdf View File

@@ -0,0 +1,384 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- This URDF was automatically created by SolidWorks to URDF Exporter! Originally created by Stephen Brawner (brawner@gmail.com)
Commit Version: 1.6.0-1-g15f4949 Build Version: 1.6.7594.29634
For more information, please see http://wiki.ros.org/sw_urdf_exporter -->
<robot
name="SO_5DOF_ARM100_8j_URDF.SLDASM">
<link
name="base_link">
<inertial>
<origin
xyz="-2.45960666746703E-07 0.0311418169687909 0.0175746661003382"
rpy="0 0 0" />
<mass
value="0.193184127927598" />
<inertia
ixx="0.000137030709467877"
ixy="2.10136126944992E-08"
ixz="4.24087422551286E-09"
iyy="0.000169089551209259"
iyz="2.26514711036514E-05"
izz="0.000145097720857224" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Base.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Base.STL" />
</geometry>
</collision>
</link>
<link
name="Rotation_Pitch">
<inertial>
<origin
xyz="-9.07886224712597E-05 0.0590971820568318 0.031089016892169"
rpy="0 0 0" />
<mass
value="0.119226314127197" />
<inertia
ixx="5.90408775624429E-05"
ixy="4.90800532852998E-07"
ixz="-5.90451772654387E-08"
iyy="3.21498601038881E-05"
iyz="-4.58026206663885E-06"
izz="5.86058514263952E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Rotation_Pitch.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Rotation_Pitch.STL" />
</geometry>
</collision>
</link>
<joint
name="Rotation"
type="revolute">
<origin
xyz="0 -0.0452 0.0165"
rpy="1.5708 0 0" />
<parent
link="base_link" />
<child
link="Rotation_Pitch" />
<axis
xyz="0 -1 0" />
<!-- note for the so100 arm there is no well defined effort/velocity limits at the moment -->
</joint>
<link
name="Upper_Arm">
<inertial>
<origin
xyz="-1.7205170190925E-05 0.0701802156327694 0.00310545118155671"
rpy="0 0 0" />
<mass
value="0.162409284599177" />
<inertia
ixx="0.000167153146617081"
ixy="1.03902689187701E-06"
ixz="-1.20161820645189E-08"
iyy="7.01946992214245E-05"
iyz="2.11884806298698E-06"
izz="0.000213280241160769" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Upper_Arm.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Upper_Arm.STL" />
</geometry>
</collision>
</link>
<joint
name="Pitch"
type="revolute">
<origin
xyz="0 0.1025 0.0306"
rpy="0 0 0" />
<parent
link="Rotation_Pitch" />
<child
link="Upper_Arm" />
<axis
xyz="1 0 0" />
</joint>
<link
name="Lower_Arm">
<inertial>
<origin
xyz="-0.00339603710186651 0.00137796353960074 0.0768006751156044"
rpy="0 0 0" />
<mass
value="0.147967774582291" />
<inertia
ixx="0.000105333995841409"
ixy="1.73059237226499E-07"
ixz="-1.1720305455211E-05"
iyy="0.000138766654485212"
iyz="1.77429964684103E-06"
izz="5.08741652515214E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Lower_Arm.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Lower_Arm.STL" />
</geometry>
</collision>
</link>
<joint
name="Elbow"
type="revolute">
<origin
xyz="0 0.11257 0.028"
rpy="-0.1 0 0" />
<parent
link="Upper_Arm" />
<child
link="Lower_Arm" />
<axis
xyz="1 0 0" />
</joint>
<link
name="Wrist_Pitch_Roll">
<inertial>
<origin
xyz="-0.00852653127372418 -0.0352278997897927 -2.34622481569413E-05"
rpy="0 0 0" />
<mass
value="0.066132067097723" />
<inertia
ixx="1.95717492443445E-05"
ixy="-6.62714374412293E-07"
ixz="5.20089016442066E-09"
iyy="2.38028417569933E-05"
iyz="4.09549055863776E-08"
izz="3.4540143384536E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Pitch_Roll.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Pitch_Roll.STL" />
</geometry>
</collision>
</link>
<joint
name="Wrist_Pitch"
type="revolute">
<origin
xyz="0 0.0052 0.1349"
rpy="-1.5708 0 0" />
<parent
link="Lower_Arm" />
<child
link="Wrist_Pitch_Roll" />
<axis
xyz="1 0 0" />
<limit
lower="-1.8"
upper="1.8"
effort="0"
velocity="0" />
</joint>
<link
name="Fixed_Jaw">
<inertial>
<origin
xyz="0.00552376906426563 -0.0280167153359021 0.000483582592841092"
rpy="0 0 0" />
<mass
value="0.0929859131176897" />
<inertia
ixx="4.3328249304211E-05"
ixy="7.09654328670947E-06"
ixz="5.99838530879484E-07"
iyy="3.04451747368212E-05"
iyz="-1.58743247545413E-07"
izz="5.02460913506734E-05" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Fixed_Jaw.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<!-- <collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Fixed_Jaw.STL" />
</geometry>
</collision> -->
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Wrist_Roll_Follower_SO101.STL" />
</geometry>
</collision>
</link>
<joint
name="Wrist_Roll"
type="revolute">
<origin
xyz="0 -0.0601 0"
rpy="0 1.5708 0" />
<parent
link="Wrist_Pitch_Roll" />
<child
link="Fixed_Jaw" />
<axis
xyz="0 1 0" />
</joint>
<link
name="Moving Jaw">
<inertial>
<origin
xyz="-0.00161744605468241 -0.0303472584046471 0.000449645961853651"
rpy="0 0 0" />
<mass
value="0.0202443794940372" />
<inertia
ixx="1.10911325081525E-05"
ixy="-5.35076503033314E-07"
ixz="-9.46105662101403E-09"
iyy="3.03576451001973E-06"
iyz="-1.71146075110632E-07"
izz="8.9916083370498E-06" />
</inertial>
<visual>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Moving Jaw.STL" />
</geometry>
<material
name="">
<color
rgba="0.2 0.819607843137255 0.2 1" />
</material>
</visual>
<collision>
<origin
xyz="0 0 0"
rpy="0 0 0" />
<geometry>
<mesh
filename="Moving_Jaw_SO101.STL" />
</geometry>
</collision>
</link>
<joint
name="Jaw"
type="revolute">
<origin
xyz="-0.0202 -0.0244 0"
rpy="3.1416 0 3.33" />
<parent
link="Fixed_Jaw" />
<child
link="Moving Jaw" />
<axis
xyz="0 0 1" />
</joint>
</robot>

+ 1
- 39
node-hub/dora-cotracker/dora_cotracker/main.py View File

@@ -69,45 +69,7 @@ class VideoTrackingNode:
visible_tracks.append([int(pt[0]), int(pt[1])])
visible_tracks = np.array(visible_tracks, dtype=np.float32)

- frame_viz = frame.copy()
- num_input_stream = len(self.input_points)
- # Draw input points in red
- for i, (pt, vis) in enumerate(
- zip(tracks[:num_input_stream], visibility[:num_input_stream])
- ):
- if vis > 0.5:
- x, y = int(pt[0]), int(pt[1])
- cv2.circle(
- frame_viz, (x, y), radius=3, color=(0, 255, 0), thickness=-1
- )
- cv2.putText(
- frame_viz,
- f"I{i}",
- (x + 5, y - 5),
- cv2.FONT_HERSHEY_SIMPLEX,
- 0.5,
- (0, 255, 0),
- 1,
- )
-
- # Draw clicked points in red
- for i, (pt, vis) in enumerate(
- zip(tracks[num_input_stream:], visibility[num_input_stream:])
- ):
- if vis > 0.5:
- x, y = int(pt[0]), int(pt[1])
- cv2.circle(
- frame_viz, (x, y), radius=3, color=(0, 0, 255), thickness=-1
- )
- cv2.putText(
- frame_viz,
- f"C{i}",
- (x + 5, y - 5),
- cv2.FONT_HERSHEY_SIMPLEX,
- 0.5,
- (0, 0, 255),
- 1,
- )

# Send tracked points
if len(visible_tracks) > 0:
@@ -123,7 +85,7 @@ class VideoTrackingNode:
},
)

- return frame, frame_viz
+ return frame, None

return None, None




+ 2
- 2
node-hub/dora-kokoro-tts/dora_kokoro_tts/main.py View File

@@ -7,7 +7,7 @@ from dora import Node
from kokoro import KPipeline


LANGUAGE = os.getenv("LANGUAGE", "en")
+ VOICE = os.getenv("VOICE", "af_heart")
def main():
"""TODO: Add docstring."""
if LANGUAGE in ["en", "english"]:
@@ -31,7 +31,7 @@ def main():

generator = pipeline(
text,
- voice="af_heart", # <= change voice here
+ voice=VOICE, # <= change voice here
speed=1.2,
split_pattern=r"\n+",
)


+ 65
- 11
node-hub/dora-object-to-pose/src/lib.rs View File

@@ -14,17 +14,17 @@ fn points_to_pose(points: &[(f32, f32, f32)]) -> Vec<f32> {
let (
_sum_x,
_sum_y,
- _sum_z,
- sum_xy,
- sum_x2,
- sum_y2,
+ sum_z,
+ _sum_xy,
+ _sum_x2,
+ _sum_y2,
n,
x_min,
x_max,
y_min,
y_max,
- z_min,
- z_max,
+ _z_min,
+ _z_max,
) = points.iter().fold(
(
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.0, -10.0, 10.0, -10.0, 10., -10.0,
@@ -62,11 +62,65 @@ fn points_to_pose(points: &[(f32, f32, f32)]) -> Vec<f32> {
)
},
);
- let (mean_x, mean_y, mean_z) = (
- (x_max + x_min) / 2.,
- (y_max + y_min) / 2.,
- (z_max + z_min) / 2.,
- );
+ let (_mean_x, _mean_y, mean_z) = ((x_max + x_min) / 2., (y_max + y_min) / 2., sum_z / n);

// Second processing based on mean
let (
_sum_x,
_sum_y,
sum_z,
sum_xy,
sum_x2,
sum_y2,
n,
x_min,
x_max,
y_min,
y_max,
_z_min,
_z_max,
) = points
.iter()
.filter(|(_x, _y, z)| (z - mean_z).abs() < 0.05)
.fold(
(
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 10.0, -10.0, 10.0, -10.0, 10., -10.0,
),
|(
acc_x,
acc_y,
acc_z,
acc_xy,
acc_x2,
acc_y2,
acc_n,
acc_x_min,
acc_x_max,
acc_y_min,
acc_y_max,
acc_z_min,
acc_z_max,
),
(x, y, z)| {
(
acc_x + x,
acc_y + y,
acc_z + z,
acc_xy + x * y,
acc_x2 + x * x,
acc_y2 + y * y,
acc_n + 1.,
f32::min(acc_x_min, *x),
f32::max(acc_x_max, *x),
f32::min(acc_y_min, *y),
f32::max(acc_y_max, *y),
f32::min(acc_z_min, *z),
f32::max(acc_z_max, *z),
)
},
);
let (mean_x, mean_y, mean_z) = ((x_max + x_min) / 2., (y_max + y_min) / 2., sum_z / n);


// Compute covariance and standard deviations
let cov = sum_xy / n - mean_x * mean_y;


+ 40
- 0
node-hub/dora-pytorch-kinematics/README.md View File

@@ -0,0 +1,40 @@
# dora-pytorch-kinematics

## Getting started

- Install it with uv:

```bash
uv venv -p 3.11 --seed
uv pip install -e .
```

## Contribution Guide

- Format with [ruff](https://docs.astral.sh/ruff/):

```bash
uv pip install ruff
uv run ruff check . --fix
```

- Lint with ruff:

```bash
uv run ruff check .
```

- Test with [pytest](https://github.com/pytest-dev/pytest)

```bash
uv pip install pytest
uv run pytest . # Test
```

## YAML Specification
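A minimal sketch of a dataflow entry for this node, mirroring the `examples/so100-remote/qwenvl.yml` example added alongside it; the URDF path, end-effector link name, and `TRANSFORM` (base offset as translation plus quaternion) are values taken from that example and should be adapted to your own robot.

```yaml
- id: pytorch-kinematics
  build: pip install -e ../../node-hub/dora-pytorch-kinematics
  path: dora-pytorch-kinematics
  inputs:
    pose: so100/pose          # jointstate-encoded joint positions -> forward kinematics
    action: parse_pose/action # xyzrpy / xyzquat target pose -> inverse kinematics
  outputs:
    - pose
    - action
  env:
    URDF_PATH: so100.urdf
    END_EFFECTOR_LINK: "Moving Jaw"
    TRANSFORM: -0.18 0.02 -0.65 0.7 0 0 0.7
```

Each input is echoed back on an output with the same id: a `jointstate` input yields an `xyzrpy` pose, while `xyzrpy` or `xyzquat` inputs yield a `jointstate` IK solution.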

## Examples
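A complete dataflow that combines this node with `dora-rustypot`, `dora-sam2`, and `dora-rerun` is provided in `examples/so100-remote/qwenvl.yml`.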

## License

dora-pytorch-kinematics's code is released under the MIT License.

+ 13
- 0
node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/__init__.py View File

@@ -0,0 +1,13 @@
"""TODO: Add docstring."""

import os

# Define the path to the README file relative to the package directory
readme_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "README.md")

# Read the content of the README file
try:
with open(readme_path, encoding="utf-8") as f:
__doc__ = f.read()
except FileNotFoundError:
__doc__ = "README file not found."

+ 6
- 0
node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/__main__.py View File

@@ -0,0 +1,6 @@
"""TODO: Add docstring."""

from .main import main

if __name__ == "__main__":
main()

+ 299
- 0
node-hub/dora-pytorch-kinematics/dora_pytorch_kinematics/main.py View File

@@ -0,0 +1,299 @@
"""TODO: Add docstring."""

import os
from typing import List, Optional, Union

import numpy as np
import pyarrow as pa
import pytorch_kinematics as pk
import torch
from dora import Node
from pytorch_kinematics.transforms.rotation_conversions import matrix_to_euler_angles

TRANSFORM = np.array(os.getenv("TRANSFORM", "0. 0. 0. 0. 0. 0. 1.").split(" ")).astype(
np.float32,
)
pos = torch.tensor([TRANSFORM[0], TRANSFORM[1], TRANSFORM[2]])
rot = torch.tensor(
[
TRANSFORM[3],
TRANSFORM[4],
TRANSFORM[5],
TRANSFORM[6],
],
)
ROB_TF = pk.Transform3d(pos=pos, rot=rot)


def get_xyz_rpy_array_from_transform3d(
transform: "pk.transforms.Transform3d", convention: str = "XYZ",
) -> torch.Tensor:
"""XYZ-RPY conversion.
Extract translation (xyz) and rotation (RPY Euler angles in radians)
from a pytorch_kinematics Transform3d object and returns them concatenated
into a single tensor.

Args:
transform: A Transform3d object or a batch of Transform3d objects.
Expected to have a method like get_matrix() or directly
access attributes like .R and .T.
convention: The Euler angle convention (e.g., "XYZ", "ZYX").
RPY typically corresponds to "XYZ" (roll around X,
pitch around Y, yaw around Z). Adjust if needed.

Returns:
A single tensor of shape (..., 6) where the last dimension contains
[x, y, z, roll, pitch, yaw] in radians.

"""
# Attempt to get the full 4x4 transformation matrix
# Common methods are get_matrix() or accessing internal properties.
# This might need adjustment based on the specific API version.
matrix = transform.get_matrix() # Shape (..., 4, 4)

# Extract translation (first 3 elements of the last column)
xyz = matrix[..., :3, 3]

# Extract rotation matrix (top-left 3x3 submatrix)
rotation_matrix = matrix[..., :3, :3]

# Convert the rotation matrix to Euler angles in radians
# The matrix_to_euler_angles function likely exists based on pytorch3d's structure
rpy = matrix_to_euler_angles(
rotation_matrix, convention=convention,
) # Shape (..., 3)

# Concatenate xyz and rpy along the last dimension
return torch.cat((xyz, rpy), dim=-1) # Shape (..., 6)
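# A small usage sketch (illustrative only): the identity transform maps to a zero
# [x, y, z, roll, pitch, yaw] vector of shape (1, 6).
#   identity = pk.Transform3d()
#   get_xyz_rpy_array_from_transform3d(identity)  # tensor([[0., 0., 0., 0., 0., 0.]])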


class RobotKinematics:
"""Wrapper for pytorch_kinematics FK and IK operations."""

def __init__(
self,
urdf_path: str,
end_effector_link: str,
device: Union[str, torch.device] = "cpu",
):
"""Initialize the kinematic chain from a URDF.

Args:
urdf_path (str): Path to the URDF file.
end_effector_link (str): Name of the end-effector link.
device (Union[str, torch.device]): Computation device ('cpu' or 'cuda').

"""
self.device = torch.device(device)
try:
# Load kinematic chain (core pytorch_kinematics object)
self.chain = pk.build_serial_chain_from_urdf(
open(urdf_path, mode="rb").read(), end_effector_link,
).to(device=self.device)
except Exception as e:
raise RuntimeError(f"Failed to build chain from URDF: {urdf_path}") from e

self.num_joints = len(self.chain.get_joint_parameter_names(exclude_fixed=True))
# Default initial guess for IK if none provided
self._default_q = torch.zeros(
(1, self.num_joints), device=self.device, dtype=torch.float32,
)
# print(f"Initialized RobotKinematicsConcise: {self.num_joints} joints, EE='{end_effector_link}', device='{self.device}'") # Optional print

def _prepare_joint_tensor(
self, joints: Union[List[float], torch.Tensor], batch_dim_required: bool = True,
) -> torch.Tensor:
"""Validate and formats joint angles input tensor."""
if isinstance(joints, list):
j = torch.tensor(joints, dtype=torch.float32, device=self.device)
elif isinstance(joints, np.ndarray):
j = torch.tensor(joints, device=self.device, dtype=torch.float32)
elif isinstance(joints, torch.Tensor):
j = joints.to(device=self.device, dtype=torch.float32)
else:
raise TypeError(
f"Joints must be a list, np.ndarray, or torch.Tensor, got: {type(joints)}",
)

if j.ndim == 1:
if j.shape[0] != self.num_joints:
raise ValueError(f"Expected {self.num_joints} joints, got {j.shape[0]}")
if batch_dim_required:
j = j.unsqueeze(0) # Add batch dimension if needed (e.g., shape (1, N))
elif j.ndim == 2:
if j.shape[1] != self.num_joints:
raise ValueError(
f"Expected {self.num_joints} joints (dim 1), got {j.shape[1]}",
)
if batch_dim_required and j.shape[0] != 1:
# Most common IK solvers expect batch=1 for initial guess, FK can handle batches
# Relaxing this check slightly, but user should be aware for IK
pass
else:
raise ValueError(f"Joint tensor must have 1 or 2 dimensions, got {j.ndim}")
return j

def compute_fk(
self, joint_angles: Union[List[float], torch.Tensor],
) -> pk.Transform3d:
"""Compute Forward Kinematics (FK).

Args:
joint_angles (Union[List[float], torch.Tensor]): Joint angles (radians).
Shape (num_joints,) or (B, num_joints).

Returns:
pk.Transform3d: End-effector pose(s). Batched if input is batched.

"""
# robot frame

angles_tensor = self._prepare_joint_tensor(
joint_angles, batch_dim_required=False,
) # FK handles batches naturally
# Direct call to pytorch_kinematics FK
goal_in_rob_frame_tf = self.chain.forward_kinematics(
angles_tensor, end_only=True,
)
goal_tf = ROB_TF.compose(goal_in_rob_frame_tf)
return goal_tf

def compute_ik(
self,
target_pose: pk.Transform3d,
initial_guess: Optional[Union[List[float], torch.Tensor]] = None,
iterations: int = 100,
lr: float = 0.1,
error_tolerance: float = 1e-4,
) -> Optional[torch.Tensor]:
"""Compute Inverse Kinematics (IK) using PseudoInverseIK.

Args:
target_pose (pk.Transform3d): Target end-effector pose (batch size 1).
initial_guess (Optional): Initial joint angles guess. Uses zeros if None.
Shape (num_joints,) or (1, num_joints).
iterations (int): Max solver iterations.
lr (float): Solver learning rate.
error_tolerance (float): Solver convergence tolerance.

Returns:
Optional[torch.Tensor]: Solution joint angles (1, num_joints), or None if the solver did not converge.

"""
if not isinstance(target_pose, pk.Transform3d):
raise TypeError("target_pose must be a pk.Transform3d")
target_pose = target_pose.to(device=self.device)
if target_pose.get_matrix().shape[0] != 1:
raise ValueError(
"target_pose must have batch size 1 for this IK method.",
) # Common limitation

# Prepare initial guess (q_init)
q_init = self._prepare_joint_tensor(
initial_guess if initial_guess is not None else self._default_q,
batch_dim_required=True,
)
if q_init.shape[0] != 1:
raise ValueError(
"initial_guess must result in batch size 1 for this IK setup.",
) # Enforce batch=1 for guess

q_init = q_init.requires_grad_(True) # Need gradient for solver

# Instantiate and run the IK solver (core pytorch_kinematics objects/methods)
ik_solver = pk.PseudoInverseIK(
self.chain,
max_iterations=1_000,
retry_configs=q_init,
joint_limits=torch.tensor(self.chain.get_joint_limits()),
early_stopping_any_converged=True,
early_stopping_no_improvement="all",
debug=False,
lr=0.05,
rot_tolerance=1e-4,
pos_tolerance=1e-3,
)
solution_angles = ik_solver.solve(target_pose)
if solution_angles.err_rot > 1e-3 or solution_angles.err_pos > 1e-2:
print(
f"IK did not converge: pos_err={solution_angles.err_pos}, rot_err={solution_angles.err_rot} for target {target_pose}",
)
return None
return solution_angles.solutions.detach()
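# Usage sketch (URDF path, link name, and joint values are assumptions borrowed from the
# so100-remote example, not part of this module):
#   robot = RobotKinematics("so100.urdf", "Moving Jaw")
#   ee_pose = robot.compute_fk([0.0] * robot.num_joints)  # pk.Transform3d including ROB_TF
#   joints = robot.compute_ik(ee_pose)                     # (1, num_joints) tensor, or None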


def main():
"""TODO: Add docstring."""
node = Node()

urdf_path = os.getenv("URDF_PATH")
end_effector_link = os.getenv("END_EFFECTOR_LINK")
robot = RobotKinematics(urdf_path, end_effector_link)
last_known_state = None

for event in node:
if event["type"] == "INPUT":
metadata = event["metadata"]

match metadata["encoding"]:
case "xyzquat":
# Apply Inverse Kinematics
if last_known_state is not None:
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3], rot=torch.tensor(target[3:7]),
)
solution = robot.compute_ik(target, last_known_state)
if solution is None:
print(
"No IK solution for:", target, "skipping this frame.",
)
continue
metadata["encoding"] = "jointstate"
last_known_state = solution.numpy().ravel()
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
case "xyzrpy":
# Apply Inverse Kinematics
if last_known_state is not None:
target = event["value"].to_numpy()
target = target.astype(np.float32)
target = pk.Transform3d(
pos=target[:3],
rot=pk.transforms.euler_angles_to_matrix(
torch.tensor(target[3:6]), convention="XYZ",
),
)
rob_target = ROB_TF.inverse().compose(target)
solution = robot.compute_ik(rob_target, last_known_state)
if solution is None:
print(
"No IK solution for:", target, "skipping this frame.",
)
continue

solution = solution.numpy().ravel()
delta_angles = solution - last_known_state

valid = np.all(
(delta_angles >= -np.pi) & (delta_angles <= np.pi),
)
if not valid:
print(
"IK solution is not valid, as the rotations are too wide. Skipping.",
)
continue
metadata["encoding"] = "jointstate"
last_known_state = solution
solution = pa.array(last_known_state)
node.send_output(event["id"], solution, metadata=metadata)
case "jointstate":
target = event["value"].to_numpy()
last_known_state = target
# Apply Forward Kinematics
target = robot.compute_fk(target)
target = np.array(get_xyz_rpy_array_from_transform3d(target))
target = pa.array(target.ravel(), type=pa.float32())
metadata["encoding"] = "xyzrpy"
node.send_output(event["id"], target, metadata=metadata)


if __name__ == "__main__":
main()

+ 25
- 0
node-hub/dora-pytorch-kinematics/pyproject.toml View File

@@ -0,0 +1,25 @@
[project]
name = "dora-pytorch-kinematics"
version = "0.0.0"
authors = [{ name = "Your Name", email = "email@email.com" }]
description = "dora-pytorch-kinematics"
license = { text = "MIT" }
readme = "README.md"
requires-python = ">=3.8"

dependencies = [
"dora-rs >= 0.3.9",
"pytorch-kinematics>=0.7.5",
]

[dependency-groups]
dev = ["pytest >=8.1.1", "ruff >=0.9.1"]

[project.scripts]
dora-pytorch-kinematics = "dora_pytorch_kinematics.main:main"

[tool.ruff.lint]
extend-select = [
"D", # pydocstyle
"UP"
]

+ 13
- 0
node-hub/dora-pytorch-kinematics/tests/test_dora_pytorch_kinematics.py View File

@@ -0,0 +1,13 @@
"""Test module for dora_pytorch_kinematics package."""

import pytest


def test_import_main():
"""Test importing and running the main function."""
from dora_pytorch_kinematics.main import main

# Check that everything is working, and catch Dora RuntimeError
# as we're not running in a Dora dataflow.
with pytest.raises(RuntimeError):
main()

+ 1397
- 0
node-hub/dora-pytorch-kinematics/uv.lock
File diff suppressed because it is too large
View File


+ 18
- 28
node-hub/dora-reachy2/dora_reachy2/camera.py View File

@@ -11,31 +11,16 @@ from reachy2_sdk.media.camera import CameraView


def main():
"""TODO: Add docstring."""
- robot_ip = os.getenv("ROBOT_IP", "10.42.0.80")
-
- for _ in range(10):
- reachy = ReachySDK(robot_ip)
- try:
- reachy.cameras.teleop.get_frame(view=CameraView.LEFT)
- params = reachy.cameras.depth.get_parameters(view=CameraView.DEPTH)
- if params is not None:
- break
- except Exception as e:
- print(e)
- import time
-
- time.sleep(1)
-
- reachy.cameras.teleop.get_frame(view=CameraView.LEFT)
- params = reachy.cameras.depth.get_parameters(view=CameraView.DEPTH)
- height, width, _distortion_model, _d, k, _r, _p = params
+ robot_ip = os.getenv("ROBOT_IP", "127.0.0.1")
+
+ reachy = ReachySDK(robot_ip)
+ params = None
node = Node()

for event in node:
if event["type"] == "INPUT":
- if event["id"] == "tick":
- (image_left, _) = reachy.cameras.teleop.get_frame(view=CameraView.LEFT)
+ if event["id"] == "image_left":
+ (image_left, _) = reachy.cameras.teleop.get_frame()

if image_left is int:
continue
@@ -49,7 +34,7 @@ def main():
"height": image_left.shape[0],
},
)
+ elif event["id"] == "image_right":
(image_right, _) = reachy.cameras.teleop.get_frame(
view=CameraView.RIGHT,
)
@@ -66,8 +51,9 @@ def main():
"height": image_right.shape[0],
},
)
- (depth_image, _) = reachy.cameras.depth.get_frame()
+ elif event["id"] == "depth":
+ (depth_image, ts) = reachy.cameras.depth.get_frame()

node.send_output(
"image_depth",
@@ -78,12 +64,11 @@ def main():
"height": depth_image.shape[0],
},
)
- (depth_frame, _) = reachy.cameras.depth.get_depth_frame()
+ (depth_frame, ts_depth) = reachy.cameras.depth.get_depth_frame()
if params is not None and depth_frame is not None:
- depth_frame = depth_frame.ravel().astype(np.float64) / 1_000.0
+ depth_frame = depth_frame.ravel()
node.send_output(
"depth",
pa.array(depth_frame),
@@ -92,8 +77,13 @@ def main():
"height": height,
"focal": [int(k[0, 0]), int(k[1, 1])],
"resolution": [int(k[0, 2]), int(k[1, 2])],
+ "encoding": "mono16"
},
)
+ else:
+ params = reachy.cameras.depth.get_parameters(view=CameraView.DEPTH)
+ height, width, _distortion_model, _d, k, _r, _p = params

if __name__ == "__main__":


+ 2
- 1
node-hub/dora-reachy2/dora_reachy2/head.py View File

@@ -50,8 +50,9 @@ def main():
[roll, _pitch, yaw] = reachy.head.get_current_positions()
if "look" in event["id"]:
[x, y, z] = event["value"].to_numpy()
+ duration = event["metadata"].get("duration", 0.5)
reachy.head.cancel_all_goto()
- reachy.head.look_at(x, y, z, duration=0.5)
+ reachy.head.look_at(x, y, z, duration=float(duration))

if reachy.head is not None:
reachy.head.turn_off()


+ 19
- 32
node-hub/dora-reachy2/dora_reachy2/left_arm.py View File

@@ -9,62 +9,49 @@ from dora import Node
from reachy2_sdk import ReachySDK
from scipy.spatial.transform import Rotation

- ROBOT_IP = os.getenv("ROBOT_IP", "10.42.0.80")
+ ROBOT_IP = os.getenv("ROBOT_IP", "127.0.0.1")

- l_default_pose = [
- 42.212611297240635,
- -16.95827541661092,
- 15.241872783848812,
- -131.11770715700908,
- 0.1682905250638251,
- -1.6613469324618695,
- 2.1666679127563904,
- ]

- def l_arm_go_to_mixed_angles(reachy, x, y, z):
+ def l_arm_go_to_mixed_angles(reachy, x, y, z, roll):
"""TODO: Add docstring."""
- for theta in range(-80, -60, 10):
+ for theta in range(-46, -46, 10):
+ theta = -60
r = Rotation.from_euler("zyx", [0, theta, 0], degrees=True)
transform = np.eye(4)
transform[:3, :3] = r.as_matrix()
transform[:3, 3] = [x, y, z]

try:
- return reachy.l_arm.inverse_kinematics(transform)
+ joint=reachy.l_arm.inverse_kinematics(transform)
+ if roll:
+ joint[-1] = joint[-1] - 180
+ return joint
except ValueError:
continue

- for yaw in range(0, 90, 30):
+ for yaw in range(0, 30, 10):

## First try turning left
- pitch = -90
+ pitch = -60

r = Rotation.from_euler("ZYX", (-yaw, 0, 0), degrees=True) * Rotation.from_euler(
"ZYX",
(0, pitch, 0),
degrees=True,
)
transform = np.eye(4)
transform[:3, :3] = r.as_matrix()
transform[:3, 3] = [x, y, z]

try:
- return reachy.l_arm.inverse_kinematics(transform)
+ joint=reachy.l_arm.inverse_kinematics(transform)
+ if roll:
+ joint[-1] = joint[-1] - 180
+ return joint
except ValueError:
pass

- try:
- return reachy.l_arm.inverse_kinematics(transform)
-
- except ValueError:
- continue
-
- # Fallback to default pose if we need the arm to be within x and z limits.
- if x < 0.3 and z > -0.2:
- return l_default_pose

print("Left arm: No solution found for x, y, z: ", x, y, z)
return []


@@ -118,11 +105,11 @@ def main():
x = value[0]
y = value[1]
z = value[2]
- _r = value[3]
+ r = value[3]
_p = value[4]
_y = value[5]
gripper = value[6]
- joints = l_arm_go_to_mixed_angles(reachy, x, y, z)
+ joints = l_arm_go_to_mixed_angles(reachy, x, y, z, np.rad2deg(r))
response_ik = len(joints) > 0
if response_ik:
joint_values.append((joints, gripper))


+ 17
- 5
node-hub/dora-reachy2/dora_reachy2/mobile_base.py View File

@@ -11,23 +11,35 @@ from reachy2_sdk import ReachySDK


def main():
"""TODO: Add docstring."""
- robot_ip = os.getenv("ROBOT_IP", "10.42.0.80")
+ robot_ip = os.getenv("ROBOT_IP", "127.0.0.1")

reachy = ReachySDK(robot_ip)

if reachy.mobile_base is not None:
reachy.mobile_base.turn_on()
reachy.mobile_base.reset_odometry()

node = Node()
for event in node:
if event["type"] == "INPUT":
if event["id"] == "action_base":
[x, y, _z, _rx, _ry, rz] = event["value"].to_numpy()
- reachy.mobile_base.rotate_by(np.rad2deg(rz))
- reachy.mobile_base.translate_by(x, y)
- time.sleep(0.5)
+ reachy.mobile_base.set_goal_speed(vx=x, vy=y, vtheta=np.rad2deg(rz) * 1.5)
+ reachy.mobile_base.send_speed_command()
+ elif event["id"] == "translate_base":
+ [x, y, _z, _rx, _ry, rz] = event["value"].to_numpy()
+ duration = float(event["metadata"].get("duration", 1))
+
+ if abs(y) <1:
+ reachy.mobile_base.translate_by(x, y, wait=True, )
+
node.send_output("response_base", pa.array([True]))
+ elif event["id"] == "action_whisper":
+ [x, y, _z, _rx, _ry, rz] = event["value"].to_numpy()
+ if x == 0. and y == 0.:
+ reachy.mobile_base.rotate_by(np.rad2deg(rz), wait=True)
+ else:
+ reachy.mobile_base.translate_by(x, y, wait=True)

if reachy.mobile_base is not None:
reachy.mobile_base.turn_off()




+ 19
- 31
node-hub/dora-reachy2/dora_reachy2/right_arm.py View File

@@ -9,60 +9,48 @@ from dora import Node
from reachy2_sdk import ReachySDK
from scipy.spatial.transform import Rotation

- ROBOT_IP = os.getenv("ROBOT_IP", "10.42.0.80")
+ ROBOT_IP = os.getenv("ROBOT_IP", "127.0.0.1")

- r_default_pose = [
- 38.058172640242475,
- 0.07798708660299236,
- 2.0084781702579564,
- -129.76629958820868,
- 4.428130313456095,
- -9.272674208719419,
- 354.280491569214,
- ]

- def r_arm_go_to_mixed_angles(reachy, x, y, z):
+ def r_arm_go_to_mixed_angles(reachy, x, y, z, roll):
"""TODO: Add docstring."""
- for theta in range(-80, -60, 10):
+ for theta in range(-46, -46, 10):
+ theta = -60

r = Rotation.from_euler("zyx", [0, theta, 0], degrees=True)
transform = np.eye(4)
transform[:3, :3] = r.as_matrix()
transform[:3, 3] = [x, y, z]

try:
- return reachy.r_arm.inverse_kinematics(transform)
+ joint=reachy.r_arm.inverse_kinematics(transform)
+ if roll:
+ joint[-1] = joint[-1] + 180
+ return joint
except ValueError:
continue

- for yaw in range(0, 90, 30):
+ for yaw in range(0, 30, 10):

## First try turning left
- pitch = -90
+ pitch = -60
r = Rotation.from_euler("ZYX", (yaw, 0, 0), degrees=True) * Rotation.from_euler(
"ZYX",
(0, pitch, 0),
degrees=True,
)
transform = np.eye(4)
transform[:3, :3] = r.as_matrix()
transform[:3, 3] = [x, y, z]

try:
- return reachy.r_arm.inverse_kinematics(transform)
- except ValueError:
- pass
-
- try:
- return reachy.r_arm.inverse_kinematics(transform)
+ joint=reachy.r_arm.inverse_kinematics(transform)
+ if roll:
+ joint[-1] = joint[-1] + 180
+ return joint

except ValueError:
- continue
-
- # Fallback to default pose if we need the arm to be within x and z limits.
- if x < 0.3 and z > -0.2:
- return r_default_pose
+ pass

print("Right arm: No solution found for x, y, z: ", x, y, z)
return []
@@ -117,11 +105,11 @@ def main():
x = value[0]
y = value[1]
z = value[2]
- _r = value[3]
+ r = value[3]
_p = value[4]
_y = value[5]
gripper = value[6]
- joints = r_arm_go_to_mixed_angles(reachy, x, y, z)
+ joints = r_arm_go_to_mixed_angles(reachy, x, y, z, np.rad2deg(r))
response_ik = len(joints) > 0
if response_ik:
joint_values.append((joints, gripper))


+ 1
- 0
node-hub/dora-rerun/Cargo.toml View File

@@ -28,6 +28,7 @@ pyo3 = { workspace = true, features = [
"generate-import-lib",
], optional = true }
bytemuck = "1.20.0"
+ rand = "0.9.1"

[lib]


+ 49
- 12
node-hub/dora-rerun/src/lib.rs View File

@@ -3,17 +3,19 @@
use std::{collections::HashMap, env::VarError, path::Path};

use dora_node_api::{
- arrow::array::{Array, Float32Array, Float64Array, StringArray, UInt16Array, UInt8Array},
- arrow::{array::AsArray, datatypes::Float32Type},
+ arrow::{
+ array::{Array, AsArray, Float64Array, StringArray, UInt16Array, UInt8Array},
+ datatypes::Float32Type,
+ },
dora_core::config::DataId,
- DoraNode, Event, Parameter,
+ into_vec, DoraNode, Event, Parameter,
};
- use eyre::{eyre, Context, ContextCompat, Result};
+ use eyre::{eyre, Context, Result};

use rerun::{
components::ImageBuffer,
external::{log::warn, re_types::ArrowBuffer},
- ImageFormat, Points3D, SpawnOptions,
+ ImageFormat, Points2D, Points3D, SpawnOptions,
};
pub mod boxes2d;
pub mod series;
@@ -98,7 +100,7 @@ pub fn lib_main() -> Result<()> {
.unwrap_or("0.0".to_string())
.parse::<f32>()
.unwrap();
+ let mut color_cache: HashMap<DataId, rerun::Color> = HashMap::new();
while let Some(event) = events.recv() {
if let Event::Input { id, data, metadata } = event {
if id.as_str().contains("image") {
@@ -315,12 +317,21 @@ pub fn lib_main() -> Result<()> {
continue;
};
mask_cache.insert(id.clone(), masks.clone());
- } else if id.as_str().contains("jointstate") {
- let buffer: &Float32Array = data
- .as_any()
- .downcast_ref()
- .context("jointstate is not float32")?;
- let mut positions: Vec<f32> = buffer.values().to_vec();
+ } else if id.as_str().contains("jointstate") || id.as_str().contains("pose") {
let encoding = if let Some(Parameter::String(encoding)) =
metadata.parameters.get("encoding")
{
encoding
} else {
"jointstate"
};
if encoding != "jointstate" {
warn!("Got unexpected encoding: {} on position pose", encoding);
continue;
}
// Convert to Vec<f32>
let mut positions: Vec<f32> =
into_vec(&data).context("Could not parse jointstate as vec32")?;


// Match file name
let mut id = id.as_str().replace("jointstate_", "");
@@ -344,6 +355,32 @@
}
} else if id.as_str().contains("series") {
update_series(&rec, id, data).context("could not plot series")?;
} else if id.as_str().contains("points2d") {
// Get color or assign random color in cache
let color = color_cache.get(&id);
let color = if let Some(color) = color {
color.clone()
} else {
let color = rerun::Color::from_rgb(200, 80, rand::random::<u8>());

color_cache.insert(id.clone(), color.clone());
color
};
let dataid = id;

// get a random color
if let Ok(buffer) = into_vec::<f32>(&data) {
let mut points = vec![];
let mut colors = vec![];
buffer.chunks(2).for_each(|chunk| {
points.push((chunk[0], chunk[1]));
colors.push(color);
});
let points = Points2D::new(points);

rec.log(dataid.as_str(), &points.with_colors(colors))
.context("could not log points")?;
}
} else {
println!("Could not find handler for {}", id);
}


+ 35
- 24
node-hub/dora-rerun/src/urdf.rs View File

@@ -1,8 +1,8 @@
- use std::collections::HashMap;
+ use std::{collections::HashMap, path::PathBuf};

use eyre::{Context, ContextCompat, Result};
- use k::Chain;
- use rerun::{components::RotationAxisAngle, Angle, RecordingStream, Rotation3D, Vec3D};
+ use k::{nalgebra::Quaternion, Chain, Translation3, UnitQuaternion};
+ use rerun::{RecordingStream, Vec3D};
pub struct MyIntersperse<T, I> {
iterator: I,
sep: T,
@@ -56,14 +56,20 @@ fn get_entity_path(link: &k::Node<f32>, urdf_path: &str) -> String {
pub fn init_urdf(rec: &RecordingStream) -> Result<HashMap<String, Chain<f32>>> {
// Get all env variable that end with urdf
let urdfs = std::env::vars()
- .filter(|(key, _)| key.ends_with("_urdf"))
+ .filter(|(key, _)| key.ends_with("_urdf") || key.ends_with("_URDF"))
.collect::<Vec<_>>();
let mut chains = HashMap::new();
for (key, urdf_path) in urdfs {
- let path = key.replace("_urdf", ".urdf");
+ let path = key.replace("_urdf", ".urdf").replace("_URDF", ".urdf");
let chain = k::Chain::<f32>::from_urdf_file(&urdf_path).context("Could not load URDF")?;

let transform = key.replace("_urdf", "_transform");

if PathBuf::from(&urdf_path).file_name() != PathBuf::from(&path).file_name() {
return Err(eyre::eyre!(
"URDF path should be the same as the key without _urdf or _URDF. Got {} instead of {}", urdf_path, path
));
}
if let Err(err) = rec.log_file_from_path(&urdf_path, None, false) {
println!("Could not log file: {}. Errored with {}", urdf_path, err);
println!("Make sure to install urdf loader with:");
@@ -71,23 +77,29 @@ pub fn init_urdf(rec: &RecordingStream) -> Result<HashMap<String, Chain<f32>>> {
"pip install git+https://github.com/rerun-io/rerun-loader-python-example-urdf.git"
)
};

// Get transform by replacing URDF_ with TRANSFORM_
if let Ok(transform) = std::env::var(transform) {
let transform = transform
.split(' ')
.map(|x| x.parse::<f32>().unwrap())
.collect::<Vec<f32>>();
- rec.log(
- path.clone(),
- &rerun::Transform3D::from_translation_rotation(
- [transform[0], transform[1], transform[2]],
- Rotation3D::AxisAngle(RotationAxisAngle::new(
- [0., 0., 0.],
- Angle::from_degrees(0.0),
- )),
- ),
- )
- .unwrap();

let mut pose = chain.origin();

if transform.len() == 7 {
let quaternion =
Quaternion::new(transform[3], transform[4], transform[5], transform[6])
.normalize(); // Example quaternion
let rot = UnitQuaternion::from_quaternion(quaternion);
pose.append_rotation_mut(&rot);
}
pose.append_translation_mut(&Translation3::new(
transform[0],
transform[1],
transform[2],
));
chain.set_origin(pose);
chains.insert(path, chain);
}
}
@@ -126,14 +138,13 @@ pub fn update_visualization(
let trans = link_to_parent_mat.column(3);
let trans = trans.as_slice();
let quat = link_to_parent.rotation.quaternion();
- rec.log(
- entity_path,
- &rerun::Transform3D::from_translation_rotation(
- Vec3D::new(trans[0], trans[1], trans[2]),
- rerun::Quaternion::from([quat.i, quat.j, quat.k, quat.w]),
- ),
- )
- .context("Could not log transform")?;
+ let point_transform = rerun::Transform3D::from_translation_rotation(
+ Vec3D::new(trans[0], trans[1], trans[2]),
+ rerun::Quaternion::from([quat.i, quat.j, quat.k, quat.w]),
+ );
+ rec.log(entity_path.clone(), &point_transform)
+ .context("Could not log transform")?;
}

Ok(())
}

+ 12
- 0
node-hub/dora-rustypot/Cargo.toml View File

@@ -0,0 +1,12 @@
[package]
name = "dora-rustypot"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
dora-node-api = { workspace = true }
eyre = "0.6.12"
rustypot = { git = "https://github.com/pollen-robotics/rustypot", branch = "next-release-1.0" }
serialport = { version = "4.7.1", default-features = false }

+ 79
- 0
node-hub/dora-rustypot/src/main.rs View File

@@ -0,0 +1,79 @@
use dora_node_api::dora_core::config::DataId;
use dora_node_api::{into_vec, DoraNode, Event, IntoArrow, Parameter};
use eyre::{Context, Result};
use rustypot::servo::feetech::sts3215::Sts3215Controller;
use std::collections::BTreeMap;
use std::time::Duration;

fn main() -> Result<()> {
let (mut node, mut events) = DoraNode::init_from_env()?;
let serialportname: String = std::env::var("PORT").context("Serial port name not provided")?;
let baudrate: u32 = std::env::var("BAUDRATE")
.unwrap_or_else(|_| "1000000".to_string())
.parse()
.context("Invalid baudrate")?;
let ids = std::env::var("IDS")
.unwrap_or_else(|_| "1,2,3,4,5,6".to_string())
.split(&[',', ' '][..])
.map(|s| s.parse::<u8>().unwrap())
.collect::<Vec<u8>>();

let serial_port = serialport::new(serialportname, baudrate)
.timeout(Duration::from_millis(1000))
.open()?;

let mut c = Sts3215Controller::new()
.with_protocol_v1()
.with_serial_port(serial_port);

if let Ok(torque) = std::env::var("TORQUE") {
let truthies = vec![true; ids.len()];

c.write_torque_enable(&ids, &truthies)
.expect("could not enable torque");

if let Ok(torque_limit) = torque.parse::<u16>() {
let limits = vec![torque_limit; ids.len()];
c.write_torque_limit(&ids, &limits)
.expect("could not enable torque");
}
} else {
let falsies = vec![false; ids.len()];
c.write_torque_enable(&ids, &falsies)
.expect("could not enable torque");
}

while let Some(event) = events.recv() {
match event {
Event::Input {
id,
metadata: _,
data,
} => match id.as_str() {
"tick" => {
if let Ok(joints) = c.read_present_position(&ids) {
let mut parameter = BTreeMap::new();
parameter.insert(
"encoding".to_string(),
Parameter::String("jointstate".to_string()),
);
node.send_output(
DataId::from("pose".to_string()),
parameter,
joints.into_arrow(),
)
.unwrap();
};
}
"pose" => {
let data: Vec<f64> = into_vec(&data).expect("could not cast values");
c.write_goal_position(&ids, &data).unwrap();
}
other => eprintln!("Received input `{other}`"),
},
_ => {}
}
}

Ok(())
}

+ 5
- 1
node-hub/feetech-client/README.md View File

@@ -1,5 +1,9 @@
## FeetechClient for SCS/STS motors

+ > [!WARNING]
+ > As of 02.05.2025, this node is no longer supported; it has been replaced by dora-rustypot, which offers higher
+ > performance and can handle more servomotors.

This node is a client for the Feetech motors. It is based on the Dynamixel SDK and is used to control the motors. It
is a Python node that communicates with the motors via the USB port.


@@ -97,4 +101,4 @@ the goal current for the motor at the beginning, null if you don't want to set i


## License

This library is licensed under the [Apache License 2.0](../../LICENSE).

+ 7
- 2
node-hub/feetech-client/feetech_client/bus.py View File

@@ -1,4 +1,5 @@
"""TODO: Add docstring."""

import enum
from typing import Union


@@ -150,7 +151,9 @@ class FeetechBus:
]

values = [
- np.uint32(32767 - value.as_py()) if value < 0 else np.uint32(value.as_py())
+ np.uint32(32767 - value.as_py())
+ if value.as_py() < 0
+ else np.uint32(value.as_py())
for value in data.field("values")
]


@@ -243,7 +246,9 @@ class FeetechBus:
values = pa.array(
[
self.group_readers[group_key].getData(
- idx, packet_address, packet_bytes_size,
+ idx,
+ packet_address,
+ packet_bytes_size,
)
for idx in motor_ids
],


+ 200
- 0
node-hub/feetech-client/feetech_client/configure.py View File

@@ -0,0 +1,200 @@
"""Module for configuring and setting up the SO100 robot hardware.

This module provides functionality for initializing and configuring the SO100 robot's
servo motors and other hardware components.

The program will:
1. Disable all torque motors of the provided SO100.
2. Ask the user to move the SO100 to position 1 (see CONFIGURING.md for more details).
3. Record the position of the SO100.
4. Ask the user to move the SO100 to position 2 (see CONFIGURING.md for more details).
5. Record the position of the SO100.
6. Calculate the offset and inverted mode of the SO100.
7. Let the user verify in real time that the SO100 is working properly.

It will also enable all appropriate operating modes for the SO100.
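Example invocation (a sketch; the serial port is an assumption):
    feetech-configure --port /dev/ttyACM0 --left
or, without the console-script entry point:
    python -m feetech_client.configure --port /dev/ttyACM0 --left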
"""

import argparse
import json
import time

import pyarrow as pa
from pwm_position_control.functions import construct_control_table
from pwm_position_control.tables import (
construct_logical_to_pwm_conversion_table_arrow,
construct_pwm_to_logical_conversion_table_arrow,
)
from pwm_position_control.transform import pwm_to_logical_arrow, wrap_joints_and_values

from .bus import FeetechBus, OperatingMode, TorqueMode

FULL_ARM = pa.array(
[
"shoulder_pan",
"shoulder_lift",
"elbow_flex",
"wrist_flex",
"wrist_roll",
"gripper",
],
type=pa.string(),
)

ARM_WITHOUT_GRIPPER = pa.array(
["shoulder_pan", "shoulder_lift", "elbow_flex", "wrist_flex", "wrist_roll"],
type=pa.string(),
)

GRIPPER = pa.array(["gripper"], type=pa.string())


def pause():
"""Pause execution and wait for user input."""
input("Press Enter to continue...")


def configure_servos(bus: FeetechBus):
"""Configure the servos with default settings.

Args:
bus: The FeetechBus instance to configure.

"""
bus.write_torque_enable(
wrap_joints_and_values(FULL_ARM, [TorqueMode.DISABLED.value] * 6),
)

bus.write_operating_mode(
wrap_joints_and_values(FULL_ARM, [OperatingMode.ONE_TURN.value] * 6),
)

bus.write_max_angle_limit(
wrap_joints_and_values(FULL_ARM, [pa.scalar(0, pa.uint32())] * 6),
)

bus.write_min_angle_limit(
wrap_joints_and_values(FULL_ARM, [pa.scalar(0, pa.uint32())] * 6),
)


def main():
    """Run the servo configuration process."""
    parser = argparse.ArgumentParser(
        description="SO100 Auto Configure: This program is used to automatically configure the Low Cost Robot (SO100) "
        "for the user.",
    )

    parser.add_argument(
        "--port",
        type=str,
        required=True,
        help="The port of the SO100.",
    )
    parser.add_argument(
        "--right",
        action="store_true",
        help="If the SO100 is on the right side of the user.",
    )
    parser.add_argument(
        "--left",
        action="store_true",
        help="If the SO100 is on the left side of the user.",
    )

    args = parser.parse_args()

    if args.right and args.left:
        raise ValueError("You cannot specify both --right and --left.")

    targets = (
        wrap_joints_and_values(FULL_ARM, [0, -90, 90, 0, -90, 0]),
        wrap_joints_and_values(FULL_ARM, [90, 0, 0, 90, 0, -90]),
    )

    arm = FeetechBus(
        args.port,
        {
            "shoulder_pan": (1, "st3215"),
            "shoulder_lift": (2, "st3215"),
            "elbow_flex": (3, "st3215"),
            "wrist_flex": (4, "st3215"),
            "wrist_roll": (5, "st3215"),
            "gripper": (6, "st3215"),
        },
    )

    configure_servos(arm)

    print("Please move the SO100 to the first position.")
    pause()
    pwm_position_1 = arm.read_position(FULL_ARM)
    print(pwm_position_1)

    print("Please move the SO100 to the second position.")
    pause()
    pwm_position_2 = arm.read_position(FULL_ARM)
    print(pwm_position_2)

    print("Configuration completed.")

    pwm_positions = (pwm_position_1, pwm_position_2)

    pwm_to_logical_conversion_table = construct_pwm_to_logical_conversion_table_arrow(
        pwm_positions,
        targets,
    )
    logical_to_pwm_conversion_table = construct_logical_to_pwm_conversion_table_arrow(
        pwm_positions,
        targets,
    )

    control_table_json = {}
    for i in range(len(FULL_ARM)):
        control_table_json[FULL_ARM[i].as_py()] = {
            "id": i + 1,
            "model": "sts3215",
            "torque": True,
            "pwm_to_logical": pwm_to_logical_conversion_table[FULL_ARM[i].as_py()],
            "logical_to_pwm": logical_to_pwm_conversion_table[FULL_ARM[i].as_py()],
        }

    side = "left" if args.left else "right"
    path = (
        input(
            f"Please enter the path of the configuration file (default is follower.{side}.json): ",
        )
        or f"follower.{side}.json"
    )

    with open(path, "w") as file:
        json.dump(control_table_json, file)

    control_table = construct_control_table(
        pwm_to_logical_conversion_table,
        logical_to_pwm_conversion_table,
    )

    while True:
        try:
            pwm_position = arm.read_position(FULL_ARM)
            logical_position = pwm_to_logical_arrow(
                pwm_position,
                control_table,
                ranged=True,
            ).field("values")

            print(f"Logical Position: {logical_position}")

        except ConnectionError:
            print(
                "Connection error occurred. Please check the connection and try again.",
            )

        time.sleep(0.5)


if __name__ == "__main__":
    main()
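For readers unfamiliar with the calibration step above: the two recorded poses are enough to fit, per joint, a linear map between raw PWM counts and logical degrees, which is what the pwm-to-logical conversion tables encode. The snippet below is only a minimal, self-contained sketch of that idea; fit_linear_map is a hypothetical helper, not part of the pwm-position-control API.

def fit_linear_map(pwm_a: int, pwm_b: int, logical_a: float, logical_b: float):
    # Fit logical = scale * pwm + offset from two (pwm, logical) samples.
    scale = (logical_b - logical_a) / (pwm_b - pwm_a)
    offset = logical_a - scale * pwm_a
    return scale, offset

# Example: a joint that reads 1024 counts at 0 degrees and 2048 counts at 90 degrees.
scale, offset = fit_linear_map(1024, 2048, 0.0, 90.0)
assert abs(scale * 1536 + offset - 45.0) < 1e-6  # halfway in counts -> 45 degrees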

+10 -11 node-hub/feetech-client/feetech_client/main.py

@@ -4,6 +4,7 @@ import argparse
import json
import os

import numpy as np
import pyarrow as pa
from dora import Node

@@ -65,9 +66,16 @@ class Client:
def pull_position(self, node, metadata):
"""TODO: Add docstring."""
try:
struct = self.bus.read_position(self.config["joints"])
metadata["encoding"] = "jointstate"
node.send_output(
"position",
self.bus.read_position(self.config["joints"]),
pa.array(
np.deg2rad(
((struct.flatten()[1].to_numpy() - 2048) / 4096.0) * 360
),
type=pa.float32(),
),
metadata,
)

@@ -166,21 +174,12 @@ def main():
"torque": wrap_joints_and_values(
pa.array(config.keys(), pa.string()),
pa.array(
[
(
TorqueMode.ENABLED.value
if config[joint]["torque"]
else TorqueMode.DISABLED.value
)
for joint in joints
],
[(TorqueMode.DISABLED.value) for joint in joints],
type=pa.uint32(),
),
),
}

print("Feetech Client Configuration: ", bus, flush=True)

client = Client(bus)
client.run()
client.close()
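The new pull_position path above publishes joint angles in radians instead of raw servo counts. A standalone sketch of that conversion, assuming the 4096-count encoder centred on 2048 that the diff implies:

import numpy as np

def counts_to_radians(counts: np.ndarray) -> np.ndarray:
    # Re-centre on 2048, scale one full turn (4096 counts) to 360 degrees, then convert to radians.
    return np.deg2rad(((counts - 2048) / 4096.0) * 360)

# Example: 3072 counts is a quarter turn past centre, i.e. roughly +1.5708 rad.
print(counts_to_radians(np.array([2048, 3072], dtype=np.float32)))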


+9 -5 node-hub/feetech-client/pyproject.toml

@@ -1,16 +1,17 @@
[project]
name = "feetech-client"
version = "0.1"
authors = [{ name = "Hennzau", email = "<dev@enzo-le-van.fr>"}]
authors = [{ name = "Hennzau", email = "<dev@enzo-le-van.fr>" }]
description = "Dora Node client for feetech motors."
license = { text = "MIT" }
readme = "README.md"
requires-python = ">=3.9"

dependencies = [
"dora-rs == 0.3.5",
"numpy <= 2.0.0",
"feetech-servo-sdk == 1.0.0",
"dora-rs == 0.3.11",
"numpy <= 2.0.0",
"feetech-servo-sdk == 1.0.0",
"pwm-position-control",
]

[dependency-groups]
@@ -18,7 +19,7 @@ dev = ["pytest >=8.1.1", "ruff >=0.9.1"]

[project.scripts]
feetech-client = "feetech_client.main:main"
feetech-configure = "feetech_client.configure:main"
[build-system]
requires = ["poetry-core>=1.8.0"]
build-backend = "poetry.core.masonry.api"
@@ -34,3 +35,6 @@ extend-select = [
"N", # Ruff's N rule
"I", # Ruff's I rule
]

[tool.uv.sources]
pwm-position-control = { git = "https://github.com/Hennzau/pwm-position-control" }
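With the added project script, installing the package exposes the calibration tool above as a second console command next to feetech-client; something like feetech-configure --port /dev/ttyACM0 --left (the port path here is only an example) walks through the two poses and writes the follower.left.json control table shown being assembled in configure.py.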

+93 -90 node-hub/feetech-client/uv.lock

@@ -1,5 +1,4 @@
version = 1
revision = 1
requires-python = ">=3.9"

[[package]]
@@ -31,6 +30,17 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/de/d8/a6bfeb961b8a15c8e73a51a839482783154a2f8927fef267d57d039043dc/dora_rs-0.3.5-cp37-abi3-win_amd64.whl", hash = "sha256:88b4fe5e5569562fcdb3817abb89532f4abca913e8bd02e4ec228833716cbd09", size = 6129593 },
]

[[package]]
name = "dynamixel-sdk"
version = "3.7.31"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyserial" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/e8/2c65d11d312fdb1a2308563d32f63f93c39123bcfb4909d947e0b294c794/dynamixel_sdk-3.7.31-py3-none-any.whl", hash = "sha256:74e8c112ca6b0b869b196dd8c6a44ffd5dd5c1a3cb9fe2030e9933922406b466", size = 23664 },
]

[[package]]
name = "exceptiongroup"
version = "1.2.2"
@@ -48,6 +58,7 @@ dependencies = [
{ name = "dora-rs" },
{ name = "feetech-servo-sdk" },
{ name = "numpy" },
{ name = "pwm-position-control" },
]

[package.dev-dependencies]
@@ -61,6 +72,7 @@ requires-dist = [
{ name = "dora-rs", specifier = "==0.3.5" },
{ name = "feetech-servo-sdk", specifier = "==1.0.0" },
{ name = "numpy", specifier = "<=2.0.0" },
{ name = "pwm-position-control", git = "https://github.com/Hennzau/pwm-position-control" },
]

[package.metadata.requires-dev]
@@ -89,54 +101,45 @@ wheels = [

[[package]]
name = "numpy"
version = "2.0.0"
version = "1.26.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/05/35/fb1ada118002df3fe91b5c3b28bc0d90f879b881a5d8f68b1f9b79c44bfe/numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864", size = 18326228 }
sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3a/83/24dafa898f172e198a1c164eb01675bbcbf5895ac8f9b1f8078ea5c2fdb5/numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f", size = 21214540 },
{ url = "https://files.pythonhosted.org/packages/b6/8f/780b1719bee25794115b23dafd022aa4a835002077df58d4234ca6a23143/numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2", size = 13307901 },
{ url = "https://files.pythonhosted.org/packages/3b/61/e1e77694c4ed929c8edebde7d2ac30dbf3ed452c1988b633569d3d7ff271/numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238", size = 5238781 },
{ url = "https://files.pythonhosted.org/packages/fc/1f/34b58ba54b5f202728083b5007d4b27dfcfd0edc616addadb0b35c7817d7/numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514", size = 6882511 },
{ url = "https://files.pythonhosted.org/packages/e1/5f/e51e3ebdaad1bccffdf9ba4b979c8b2fe2bd376d10bf9e9b59e1c6972a1a/numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196", size = 13904765 },
{ url = "https://files.pythonhosted.org/packages/d6/a8/6a2419c40c7b6f7cb4ef52c532c88e55490c4fa92885964757d507adddce/numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1", size = 19282097 },
{ url = "https://files.pythonhosted.org/packages/52/d3/74989fffc21c74fba73eb05591cf3a56aaa135ee2427826217487028abd0/numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc", size = 19699985 },
{ url = "https://files.pythonhosted.org/packages/23/8a/a5cac659347f916cfaf2343eba577e98c83edd1ad6ada5586018961bf667/numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787", size = 14406309 },
{ url = "https://files.pythonhosted.org/packages/8b/c4/858aadfd1f3f2f815c03be62556115f43796b805943755a9aef5b6b29b04/numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98", size = 6358424 },
{ url = "https://files.pythonhosted.org/packages/9c/de/7d17991e0683f84bcfefcf4e3f43da6b37155b9e6a0429942494f044a7ef/numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b", size = 16507217 },
{ url = "https://files.pythonhosted.org/packages/58/52/a1aea658c7134ea0977542fc4d1aa6f1f9876c6a14ffeecd9394d839bc16/numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5", size = 21218342 },
{ url = "https://files.pythonhosted.org/packages/77/4d/ba4a60298c55478b34f13c97a0ac2cf8d225320322976252a250ed04040a/numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289", size = 13272871 },
{ url = "https://files.pythonhosted.org/packages/01/4a/611a907421d8098d5edc8c2b10c3583796ee8da4156f8f7de52c2f4c9d90/numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609", size = 5237037 },
{ url = "https://files.pythonhosted.org/packages/4f/c1/42d1789f1dff7b65f2d3237eb88db258a5a7fdfb981b895509887c92838d/numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871", size = 6886342 },
{ url = "https://files.pythonhosted.org/packages/cd/37/595f27a95ff976e8086bc4be1ede21ed24ca4bc127588da59197a65d066f/numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4", size = 13913798 },
{ url = "https://files.pythonhosted.org/packages/d1/27/2a7bd6855dc717aeec5f553073a3c426b9c816126555f8e616392eab856b/numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581", size = 19292279 },
{ url = "https://files.pythonhosted.org/packages/1b/54/966a3f5a93d709672ad851f6db52461c0584bab52f2230cf76be482302c6/numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995", size = 19709770 },
{ url = "https://files.pythonhosted.org/packages/cc/8b/9340ac45b6cd8bb92a03f797dbe9b7949f5b3789482e1d824cbebc80fda7/numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f", size = 14417906 },
{ url = "https://files.pythonhosted.org/packages/fa/46/614507d78ca8ce1567ac2c3bf7a79bfd413d6fc96dc6b415abaeb3734c0a/numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f", size = 6357084 },
{ url = "https://files.pythonhosted.org/packages/9b/0f/022ca4783b6e6239a53b988a4d315d67f9ae7126227fb2255054a558bd72/numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c", size = 16511678 },
{ url = "https://files.pythonhosted.org/packages/b7/c8/899826a2d5c94f607f5e4a6f1a0e8b07c8fea3a5b674c5706115b8aad9bb/numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f", size = 20944792 },
{ url = "https://files.pythonhosted.org/packages/fe/ec/8ae7750d33565769c8bb7ba925d4e73ecb2de6cd8eaa6fd527fbd52797ee/numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85", size = 13042186 },
{ url = "https://files.pythonhosted.org/packages/3f/ab/1dc9f176d3084a2546cf76eb213dc61586d015ef59b3b17947b0e40038af/numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2", size = 4977729 },
{ url = "https://files.pythonhosted.org/packages/a0/97/61ed64cedc1b94a7939e3ab3db587822320d90a77bef70fcb586ea7c1931/numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e", size = 6610230 },
{ url = "https://files.pythonhosted.org/packages/bb/31/1f050169270d51ef0346d4c84c7df1c45af16ea304ed5f7151584788d32e/numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2", size = 13619789 },
{ url = "https://files.pythonhosted.org/packages/28/95/b56fc6b2abe37c03923b50415df483cf93e09e7438872280a5486131d804/numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a", size = 18993635 },
{ url = "https://files.pythonhosted.org/packages/df/16/4c165a5194fc70e4a131f8db463e6baf34e0d191ed35d40a161ee4c885d4/numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95", size = 19408219 },
{ url = "https://files.pythonhosted.org/packages/00/4c/440bad868bd3aff4fe4e293175a20da70cddff8674b3654eb2f112868ccf/numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9", size = 14101574 },
{ url = "https://files.pythonhosted.org/packages/26/18/49f1e851f4157198c50f67ea3462797283aa36dd4b0c24b15f63e8118481/numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54", size = 6060205 },
{ url = "https://files.pythonhosted.org/packages/ad/9c/4a93b8e395b755c53628573d75d7b21985d9a0f416e978d637084ccc8ec3/numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df", size = 16208660 },
{ url = "https://files.pythonhosted.org/packages/79/56/fb78389e7a1b1d0aa20dd0cbda5110d68f5df77b0a704180f0959b4f8ad1/numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de", size = 21219203 },
{ url = "https://files.pythonhosted.org/packages/c6/ae/cc990cc3e9a211365391c193805496e7c7df93854d577e6a03d8a2319a12/numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb", size = 13281472 },
{ url = "https://files.pythonhosted.org/packages/95/ed/3a23463e2608b54af1fbd3649cd403e81b82993685d2a21006291b879122/numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f", size = 5245754 },
{ url = "https://files.pythonhosted.org/packages/d6/a1/8e8f40820ffe78ea09233b58c0f8719707b738ef36efbdc34377989b7ea5/numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86", size = 6887414 },
{ url = "https://files.pythonhosted.org/packages/3b/89/abc57eebba1da98f615c7cb5d5b04bc105f00bda34d27048772d1be5a9fb/numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a", size = 13910703 },
{ url = "https://files.pythonhosted.org/packages/87/d3/74e627205462a170f39e7d7ddd2b4166a0d8ab163377592c7f4fa935cc8c/numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d", size = 19285329 },
{ url = "https://files.pythonhosted.org/packages/c8/2e/14e7d5dd9930993797e95121176acbc3ffc1bb0ccbd2f8f7be36285ebde0/numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4", size = 19705286 },
{ url = "https://files.pythonhosted.org/packages/82/2d/f89a5cce068cd178c52e9fdc10fc227966d9da0cce368610775e75111d24/numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44", size = 14409686 },
{ url = "https://files.pythonhosted.org/packages/51/e7/8ab01e44772d376efd3e1f48df618c0f6ed6aeac5e2242387f0c21a77ff7/numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275", size = 6363194 },
{ url = "https://files.pythonhosted.org/packages/6a/1e/1d76829f03b7ac9c90e2b158f06b69cddf9a06b96667dd7e2d96acdc0593/numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65", size = 16513565 },
{ url = "https://files.pythonhosted.org/packages/9a/e0/97d246e03f9597e7275dc2f0b24f6845fbb5380ef0fac330cb1b087229f8/numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6", size = 21076646 },
{ url = "https://files.pythonhosted.org/packages/18/66/10c93572d97b410f71ad9b59b20f2a23dcdd871f025bd5376a732b408520/numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a", size = 6743807 },
{ url = "https://files.pythonhosted.org/packages/80/1a/354ad1a6627dbac1d4167591db51ce59ed972064bfb9979f9a37a7782900/numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad", size = 19089752 },
{ url = "https://files.pythonhosted.org/packages/5f/9f/fe311331410759da4d441d6d08dd54b80065f4946374e817611f4f9c527f/numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9", size = 16431128 },
{ url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 },
{ url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 },
{ url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 },
{ url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 },
{ url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 },
{ url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 },
{ url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 },
{ url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 },
{ url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 },
{ url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 },
{ url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 },
{ url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 },
{ url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 },
{ url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 },
{ url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 },
{ url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 },
{ url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 },
{ url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 },
{ url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 },
{ url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 },
{ url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 },
{ url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 },
{ url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 },
{ url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 },
{ url = "https://files.pythonhosted.org/packages/7d/24/ce71dc08f06534269f66e73c04f5709ee024a1afe92a7b6e1d73f158e1f8/numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", size = 20636301 },
{ url = "https://files.pythonhosted.org/packages/ae/8c/ab03a7c25741f9ebc92684a20125fbc9fc1b8e1e700beb9197d750fdff88/numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", size = 13971216 },
{ url = "https://files.pythonhosted.org/packages/6d/64/c3bcdf822269421d85fe0d64ba972003f9bb4aa9a419da64b86856c9961f/numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", size = 14226281 },
{ url = "https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", size = 18249516 },
{ url = "https://files.pythonhosted.org/packages/43/12/01a563fc44c07095996d0129b8899daf89e4742146f7044cdbdb3101c57f/numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", size = 13882132 },
{ url = "https://files.pythonhosted.org/packages/16/ee/9df80b06680aaa23fc6c31211387e0db349e0e36d6a63ba3bd78c5acdf11/numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", size = 18084181 },
{ url = "https://files.pythonhosted.org/packages/28/7d/4b92e2fe20b214ffca36107f1a3e75ef4c488430e64de2d9af5db3a4637d/numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", size = 5976360 },
{ url = "https://files.pythonhosted.org/packages/b5/42/054082bd8220bbf6f297f982f0a8f5479fcbc55c8b511d928df07b965869/numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", size = 15814633 },
{ url = "https://files.pythonhosted.org/packages/3f/72/3df6c1c06fc83d9cfe381cccb4be2532bbd38bf93fbc9fad087b6687f1c0/numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", size = 20455961 },
{ url = "https://files.pythonhosted.org/packages/8e/02/570545bac308b58ffb21adda0f4e220ba716fb658a63c151daecc3293350/numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", size = 18061071 },
{ url = "https://files.pythonhosted.org/packages/f4/5f/fafd8c51235f60d49f7a88e2275e13971e90555b67da52dd6416caec32fe/numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", size = 15709730 },
]

[[package]]
@@ -157,53 +160,53 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
]

[[package]]
name = "pwm-position-control"
version = "0.1"
source = { git = "https://github.com/Hennzau/pwm-position-control#91d4e2ece8d72cbb8037ffb408e3ef3abe6b470d" }
dependencies = [
{ name = "dynamixel-sdk" },
{ name = "numpy" },
{ name = "pyarrow" },
]

[[package]]
name = "pyarrow"
version = "19.0.1"
version = "17.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7f/09/a9046344212690f0632b9c709f9bf18506522feb333c894d0de81d62341a/pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e", size = 1129437 }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/27/4e/ea6d43f324169f8aec0e57569443a38bab4b398d09769ca64f7b4d467de3/pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28", size = 1112479 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/36/01/b23b514d86b839956238d3f8ef206fd2728eee87ff1b8ce150a5678d9721/pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69", size = 30688914 },
{ url = "https://files.pythonhosted.org/packages/c6/68/218ff7cf4a0652a933e5f2ed11274f724dd43b9813cb18dd72c0a35226a2/pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec", size = 32102866 },
{ url = "https://files.pythonhosted.org/packages/98/01/c295050d183014f4a2eb796d7d2bbfa04b6cccde7258bb68aacf6f18779b/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89", size = 41147682 },
{ url = "https://files.pythonhosted.org/packages/40/17/a6c3db0b5f3678f33bbb552d2acbc16def67f89a72955b67b0109af23eb0/pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a", size = 42179192 },
{ url = "https://files.pythonhosted.org/packages/cf/75/c7c8e599300d8cebb6cb339014800e1c720c9db2a3fcb66aa64ec84bac72/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a", size = 40517272 },
{ url = "https://files.pythonhosted.org/packages/ef/c9/68ab123ee1528699c4d5055f645ecd1dd68ff93e4699527249d02f55afeb/pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608", size = 42069036 },
{ url = "https://files.pythonhosted.org/packages/54/e3/d5cfd7654084e6c0d9c3ce949e5d9e0ccad569ae1e2d5a68a3ec03b2be89/pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866", size = 25277951 },
{ url = "https://files.pythonhosted.org/packages/a0/55/f1a8d838ec07fe3ca53edbe76f782df7b9aafd4417080eebf0b42aab0c52/pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90", size = 30713987 },
{ url = "https://files.pythonhosted.org/packages/13/12/428861540bb54c98a140ae858a11f71d041ef9e501e6b7eb965ca7909505/pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00", size = 32135613 },
{ url = "https://files.pythonhosted.org/packages/2f/8a/23d7cc5ae2066c6c736bce1db8ea7bc9ac3ef97ac7e1c1667706c764d2d9/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae", size = 41149147 },
{ url = "https://files.pythonhosted.org/packages/a2/7a/845d151bb81a892dfb368bf11db584cf8b216963ccce40a5cf50a2492a18/pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5", size = 42178045 },
{ url = "https://files.pythonhosted.org/packages/a7/31/e7282d79a70816132cf6cae7e378adfccce9ae10352d21c2fecf9d9756dd/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3", size = 40532998 },
{ url = "https://files.pythonhosted.org/packages/b8/82/20f3c290d6e705e2ee9c1fa1d5a0869365ee477e1788073d8b548da8b64c/pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6", size = 42084055 },
{ url = "https://files.pythonhosted.org/packages/ff/77/e62aebd343238863f2c9f080ad2ef6ace25c919c6ab383436b5b81cbeef7/pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466", size = 25283133 },
{ url = "https://files.pythonhosted.org/packages/78/b4/94e828704b050e723f67d67c3535cf7076c7432cd4cf046e4bb3b96a9c9d/pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b", size = 30670749 },
{ url = "https://files.pythonhosted.org/packages/7e/3b/4692965e04bb1df55e2c314c4296f1eb12b4f3052d4cf43d29e076aedf66/pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294", size = 32128007 },
{ url = "https://files.pythonhosted.org/packages/22/f7/2239af706252c6582a5635c35caa17cb4d401cd74a87821ef702e3888957/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14", size = 41144566 },
{ url = "https://files.pythonhosted.org/packages/fb/e3/c9661b2b2849cfefddd9fd65b64e093594b231b472de08ff658f76c732b2/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34", size = 42202991 },
{ url = "https://files.pythonhosted.org/packages/fe/4f/a2c0ed309167ef436674782dfee4a124570ba64299c551e38d3fdaf0a17b/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6", size = 40507986 },
{ url = "https://files.pythonhosted.org/packages/27/2e/29bb28a7102a6f71026a9d70d1d61df926887e36ec797f2e6acfd2dd3867/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832", size = 42087026 },
{ url = "https://files.pythonhosted.org/packages/16/33/2a67c0f783251106aeeee516f4806161e7b481f7d744d0d643d2f30230a5/pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960", size = 25250108 },
{ url = "https://files.pythonhosted.org/packages/2b/8d/275c58d4b00781bd36579501a259eacc5c6dfb369be4ddeb672ceb551d2d/pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c", size = 30653552 },
{ url = "https://files.pythonhosted.org/packages/a0/9e/e6aca5cc4ef0c7aec5f8db93feb0bde08dbad8c56b9014216205d271101b/pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae", size = 32103413 },
{ url = "https://files.pythonhosted.org/packages/6a/fa/a7033f66e5d4f1308c7eb0dfcd2ccd70f881724eb6fd1776657fdf65458f/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4", size = 41134869 },
{ url = "https://files.pythonhosted.org/packages/2d/92/34d2569be8e7abdc9d145c98dc410db0071ac579b92ebc30da35f500d630/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2", size = 42192626 },
{ url = "https://files.pythonhosted.org/packages/0a/1f/80c617b1084fc833804dc3309aa9d8daacd46f9ec8d736df733f15aebe2c/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6", size = 40496708 },
{ url = "https://files.pythonhosted.org/packages/e6/90/83698fcecf939a611c8d9a78e38e7fed7792dcc4317e29e72cf8135526fb/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136", size = 42075728 },
{ url = "https://files.pythonhosted.org/packages/40/49/2325f5c9e7a1c125c01ba0c509d400b152c972a47958768e4e35e04d13d8/pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef", size = 25242568 },
{ url = "https://files.pythonhosted.org/packages/3f/72/135088d995a759d4d916ec4824cb19e066585b4909ebad4ab196177aa825/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0", size = 30702371 },
{ url = "https://files.pythonhosted.org/packages/2e/01/00beeebd33d6bac701f20816a29d2018eba463616bbc07397fdf99ac4ce3/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9", size = 32116046 },
{ url = "https://files.pythonhosted.org/packages/1f/c9/23b1ea718dfe967cbd986d16cf2a31fe59d015874258baae16d7ea0ccabc/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3", size = 41091183 },
{ url = "https://files.pythonhosted.org/packages/3a/d4/b4a3aa781a2c715520aa8ab4fe2e7fa49d33a1d4e71c8fc6ab7b5de7a3f8/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6", size = 42171896 },
{ url = "https://files.pythonhosted.org/packages/23/1b/716d4cd5a3cbc387c6e6745d2704c4b46654ba2668260d25c402626c5ddb/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a", size = 40464851 },
{ url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 },
{ url = "https://files.pythonhosted.org/packages/16/26/0ec396ebe98adefaffc0fff8e0dc14c8912e61093226284cf4b76faffd22/pyarrow-19.0.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b9766a47a9cb56fefe95cb27f535038b5a195707a08bf61b180e642324963b46", size = 30701112 },
{ url = "https://files.pythonhosted.org/packages/ba/10/c35d96686bf7f13e55bb87f06fe06e7d95533c271ef7f9a5a76e26b16fc2/pyarrow-19.0.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:6c5941c1aac89a6c2f2b16cd64fe76bcdb94b2b1e99ca6459de4e6f07638d755", size = 32117180 },
{ url = "https://files.pythonhosted.org/packages/8c/0d/81881a55302b6847ea2ea187517faa039c219d80b55050904e354c2eddde/pyarrow-19.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd44d66093a239358d07c42a91eebf5015aa54fccba959db899f932218ac9cc8", size = 41161334 },
{ url = "https://files.pythonhosted.org/packages/af/17/ea60a07ec6f6bb0740f11715e0d22ab8fdfcc94bc729832321f498370d75/pyarrow-19.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335d170e050bcc7da867a1ed8ffb8b44c57aaa6e0843b156a501298657b1e972", size = 42190375 },
{ url = "https://files.pythonhosted.org/packages/f2/87/4ef05a088b18082cde4950bdfca752dd31effb3ec201b8026e4816d0f3fa/pyarrow-19.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:1c7556165bd38cf0cd992df2636f8bcdd2d4b26916c6b7e646101aff3c16f76f", size = 40530649 },
{ url = "https://files.pythonhosted.org/packages/59/1e/9fb9a66a64eae4ff332a8f149d803d8c6c556714803d20d54ed2e9524a3b/pyarrow-19.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:699799f9c80bebcf1da0983ba86d7f289c5a2a5c04b945e2f2bcf7e874a91911", size = 42081576 },
{ url = "https://files.pythonhosted.org/packages/1b/ee/c110d8da8bdde8e832ccf1ff90be747cb684874e2dc8acf26840058b0c32/pyarrow-19.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8464c9fbe6d94a7fe1599e7e8965f350fd233532868232ab2596a71586c5a429", size = 25465593 },
{ url = "https://files.pythonhosted.org/packages/39/5d/78d4b040bc5ff2fc6c3d03e80fca396b742f6c125b8af06bcf7427f931bc/pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07", size = 28994846 },
{ url = "https://files.pythonhosted.org/packages/3b/73/8ed168db7642e91180330e4ea9f3ff8bab404678f00d32d7df0871a4933b/pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655", size = 27165908 },
{ url = "https://files.pythonhosted.org/packages/81/36/e78c24be99242063f6d0590ef68c857ea07bdea470242c361e9a15bd57a4/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545", size = 39264209 },
{ url = "https://files.pythonhosted.org/packages/18/4c/3db637d7578f683b0a8fb8999b436bdbedd6e3517bd4f90c70853cf3ad20/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2", size = 39862883 },
{ url = "https://files.pythonhosted.org/packages/81/3c/0580626896c842614a523e66b351181ed5bb14e5dfc263cd68cea2c46d90/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8", size = 38723009 },
{ url = "https://files.pythonhosted.org/packages/ee/fb/c1b47f0ada36d856a352da261a44d7344d8f22e2f7db3945f8c3b81be5dd/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047", size = 39855626 },
{ url = "https://files.pythonhosted.org/packages/19/09/b0a02908180a25d57312ab5919069c39fddf30602568980419f4b02393f6/pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087", size = 25147242 },
{ url = "https://files.pythonhosted.org/packages/f9/46/ce89f87c2936f5bb9d879473b9663ce7a4b1f4359acc2f0eb39865eaa1af/pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977", size = 29028748 },
{ url = "https://files.pythonhosted.org/packages/8d/8e/ce2e9b2146de422f6638333c01903140e9ada244a2a477918a368306c64c/pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3", size = 27190965 },
{ url = "https://files.pythonhosted.org/packages/3b/c8/5675719570eb1acd809481c6d64e2136ffb340bc387f4ca62dce79516cea/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15", size = 39269081 },
{ url = "https://files.pythonhosted.org/packages/5e/78/3931194f16ab681ebb87ad252e7b8d2c8b23dad49706cadc865dff4a1dd3/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597", size = 39864921 },
{ url = "https://files.pythonhosted.org/packages/d8/81/69b6606093363f55a2a574c018901c40952d4e902e670656d18213c71ad7/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420", size = 38740798 },
{ url = "https://files.pythonhosted.org/packages/4c/21/9ca93b84b92ef927814cb7ba37f0774a484c849d58f0b692b16af8eebcfb/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4", size = 39871877 },
{ url = "https://files.pythonhosted.org/packages/30/d1/63a7c248432c71c7d3ee803e706590a0b81ce1a8d2b2ae49677774b813bb/pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03", size = 25151089 },
{ url = "https://files.pythonhosted.org/packages/d4/62/ce6ac1275a432b4a27c55fe96c58147f111d8ba1ad800a112d31859fae2f/pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22", size = 29019418 },
{ url = "https://files.pythonhosted.org/packages/8e/0a/dbd0c134e7a0c30bea439675cc120012337202e5fac7163ba839aa3691d2/pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053", size = 27152197 },
{ url = "https://files.pythonhosted.org/packages/cb/05/3f4a16498349db79090767620d6dc23c1ec0c658a668d61d76b87706c65d/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a", size = 39263026 },
{ url = "https://files.pythonhosted.org/packages/c2/0c/ea2107236740be8fa0e0d4a293a095c9f43546a2465bb7df34eee9126b09/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc", size = 39880798 },
{ url = "https://files.pythonhosted.org/packages/f6/b0/b9164a8bc495083c10c281cc65064553ec87b7537d6f742a89d5953a2a3e/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a", size = 38715172 },
{ url = "https://files.pythonhosted.org/packages/f1/c4/9625418a1413005e486c006e56675334929fad864347c5ae7c1b2e7fe639/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b", size = 39874508 },
{ url = "https://files.pythonhosted.org/packages/ae/49/baafe2a964f663413be3bd1cf5c45ed98c5e42e804e2328e18f4570027c1/pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7", size = 25099235 },
{ url = "https://files.pythonhosted.org/packages/43/e0/a898096d35be240aa61fb2d54db58b86d664b10e1e51256f9300f47565e8/pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb", size = 29007881 },
{ url = "https://files.pythonhosted.org/packages/59/22/f7d14907ed0697b5dd488d393129f2738629fa5bcba863e00931b7975946/pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df", size = 27178117 },
{ url = "https://files.pythonhosted.org/packages/bf/ee/661211feac0ed48467b1d5c57298c91403809ec3ab78b1d175e1d6ad03cf/pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687", size = 39273896 },
{ url = "https://files.pythonhosted.org/packages/af/61/bcd9b58e38ead6ad42b9ed00da33a3f862bc1d445e3d3164799c25550ac2/pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b", size = 39875438 },
{ url = "https://files.pythonhosted.org/packages/75/63/29d1bfcc57af73cde3fc3baccab2f37548de512dbe0ab294b033cd203516/pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5", size = 38735092 },
{ url = "https://files.pythonhosted.org/packages/39/f4/90258b4de753df7cc61cefb0312f8abcf226672e96cc64996e66afce817a/pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda", size = 39867610 },
{ url = "https://files.pythonhosted.org/packages/e7/f6/b75d4816c32f1618ed31a005ee635dd1d91d8164495d94f2ea092f594661/pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204", size = 25148611 },
]

[[package]]