diff --git a/examples/so100-remote/README.md b/examples/so100-remote/README.md
new file mode 100644
index 00000000..f277a11b
--- /dev/null
+++ b/examples/so100-remote/README.md
@@ -0,0 +1,98 @@
+# SO100 and SO101 Remote Example
+
+## Hardware requirements
+
+- RealSense camera
+- SO100 or SO101 robotic arm
+
+## Download the 3D model of the SO100
+
+```bash
+[ -f "$HOME/Downloads/so100_urdf.zip" ] || (wget -O "$HOME/Downloads/so100_urdf.zip" https://huggingface.co/datasets/haixuantao/urdfs/resolve/main/so100/so100_urdf.zip && unzip -o "$HOME/Downloads/so100_urdf.zip" -d "$HOME/Downloads/so100_urdf")
+```
+
+## To get started
+
+```bash
+uv venv --seed
+dora build no_torque.yml --uv
+```
+
+## Make sure both the RealSense camera and the robotic arm are connected
+
+On Linux, you can check the arm connection with:
+
+```bash
+ls /dev/ttyACM*
+```
+
+This should show something like:
+
+```bash
+/dev/ttyACM0
+```
+
+Make sure you have read/write access to the serial device with:
+
+```bash
+sudo chmod 777 /dev/ttyACM0
+```
+
+On Linux, make sure the camera is properly connected and check with:
+
+```bash
+ls /dev/video*
+```
+
+The result should look similar to this:
+
+```bash
+/dev/video0 /dev/video2 /dev/video4 /dev/video6 /dev/video8
+/dev/video1 /dev/video3 /dev/video5 /dev/video7 /dev/video9
+```
+
+## To run the no-torque demo:
+
+```bash
+dora run no_torque.yml --uv
+```
+
+If the virtual robot arm appears in the wrong place, you can reposition it by adjusting the `so100_transform` environment variable.
+
+## To run the qwenvl demo:
+
+```bash
+dora run qwenvl.yml --uv
+```
+
+## To run the qwenvl remote demo:
+
+On the remote machine:
+
+```bash
+dora coordinator &
+dora daemon --machine-id gpu
+```
+
+Then, on the local machine:
+
+```bash
+dora daemon --coordinator-addr <COORDINATOR_IP>
+dora start qwenvl-remote.yml --uv --coordinator-addr <COORDINATOR_IP>
+```
+
+## To run the qwenvl compression demo:
+
+On the remote machine:
+
+```bash
+dora coordinator &
+dora daemon --machine-id gpu
+```
+
+Then, on the local machine:
+
+```bash
+dora daemon --coordinator-addr <COORDINATOR_IP>
+dora start qwenvl-compression.yml --uv --coordinator-addr <COORDINATOR_IP>
+```
diff --git a/examples/so100-remote/no_torque.yml b/examples/so100-remote/no_torque.yml
index a3ca088a..e8af5218 100644
--- a/examples/so100-remote/no_torque.yml
+++ b/examples/so100-remote/no_torque.yml
@@ -30,7 +30,7 @@ nodes:
     env:
       # Link to your installation of so100-urdf.
       # https://huggingface.co/datasets/haixuantao/urdfs/resolve/main/so100/so100_urdf.zip
-      URDF_PATH: /home/xavier/Downloads/so100_urdf/so100.urdf
+      URDF_PATH: $HOME/Downloads/so100_urdf/so100.urdf
       END_EFFECTOR_LINK: "Moving Jaw"
       TRANSFORM: -0.2 -0.01 -0.57 0.7 0 0 0.7

@@ -38,13 +38,12 @@
     build: pip install -e ../../node-hub/dora-rerun
     path: dora-rerun
     inputs:
-      series_fk: pytorch-kinematics/pose
       jointstate_so100: so100/pose
       camera/image: camera/image
       camera/depth: camera/depth
     env:
       # Link to your installation of so100-urdf.
       # https://huggingface.co/datasets/haixuantao/urdfs/resolve/main/so100/so100_urdf.zip
-      so100_urdf: /home/xavier/Downloads/so100_urdf/so100.urdf
+      so100_urdf: $HOME/Downloads/so100_urdf/so100.urdf
       so100_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       CAMERA_PITCH: -3.1415
diff --git a/examples/so100-remote/qwenvl-compression.yml b/examples/so100-remote/qwenvl-compression.yml
index b903f57d..8ea873cc 100644
--- a/examples/so100-remote/qwenvl-compression.yml
+++ b/examples/so100-remote/qwenvl-compression.yml
@@ -84,7 +84,7 @@ nodes:
       camera/boxes2d: parse_bbox/bbox
       camera/masks: sam2/masks
     env:
-      so100_urdf: /home/xavier/Downloads/so100_urdf/so100.urdf
+      so100_urdf: $HOME/Downloads/so100_urdf/so100.urdf
       so100_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       CAMERA_PITCH: -3.1415

diff --git a/examples/so100-remote/qwenvl-remote.yml b/examples/so100-remote/qwenvl-remote.yml
index 5aaa32d9..74fa317b 100644
--- a/examples/so100-remote/qwenvl-remote.yml
+++ b/examples/so100-remote/qwenvl-remote.yml
@@ -56,7 +56,7 @@ nodes:
     env:
       # Link to your installation of so100-urdf.
       # https://huggingface.co/datasets/haixuantao/urdfs/resolve/main/so100/so100_urdf.zip
-      so100_urdf: /home/xavier/Downloads/so100_urdf/so100.urdf
+      so100_urdf: $HOME/Downloads/so100_urdf/so100.urdf
       so100_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       so100_inference_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       CAMERA_PITCH: -3.1415
diff --git a/examples/so100-remote/qwenvl.yml b/examples/so100-remote/qwenvl.yml
index 37b8a985..ebbfe0bb 100644
--- a/examples/so100-remote/qwenvl.yml
+++ b/examples/so100-remote/qwenvl.yml
@@ -36,7 +36,7 @@ nodes:
     env:
       # Link to your installation of so100-urdf.
       # https://huggingface.co/datasets/haixuantao/urdfs/resolve/main/so100/so100_urdf.zip
-      URDF_PATH: /home/xavier/Downloads/so100_urdf/so100.urdf
+      URDF_PATH: $HOME/Downloads/so100_urdf/so100.urdf
       END_EFFECTOR_LINK: "Moving Jaw"
       TRANSFORM: -0.2 -0.01 -0.57 0.7 0 0 0.7

@@ -55,7 +55,7 @@ nodes:
       camera/boxes2d: parse_bbox/bbox
       camera/masks: sam2/masks
     env:
-      so100_urdf: /home/xavier/Downloads/so100_urdf/so100.urdf
+      so100_urdf: $HOME/Downloads/so100_urdf/so100.urdf
       so100_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       so100_inference_transform: -0.2 -0.01 -0.57 0.7 0 0 0.7
       CAMERA_PITCH: -3.1415
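A note on the transform values that appear throughout these dataflows: the seven numbers in `TRANSFORM`, `so100_transform`, and `so100_inference_transform` look like a pose, i.e. an x y z translation followed by a rotation, but the exact layout and rotation convention are not documented in these files, so treat that reading as an assumption. The README says a misplaced virtual arm can be moved via `so100_transform`; a minimal sketch of such a tweak, assuming the first three numbers are the translation, is to edit the dora-rerun node's env in no_torque.yml:

```yaml
# Hypothetical adjustment sketch, not part of the patch above.
# Assumed value layout: x y z translation followed by a rotation;
# only the first (x) component is changed here.
env:
  so100_urdf: $HOME/Downloads/so100_urdf/so100.urdf
  so100_transform: -0.25 -0.01 -0.57 0.7 0 0 0.7  # arm nudged slightly along x
  CAMERA_PITCH: -3.1415
```

After editing, re-run `dora run no_torque.yml --uv` to check the new placement.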