Update the documentation to the new daemon format (tags/v0.2.0-rc)
@@ -31,7 +31,7 @@ For linux
wget https://github.com/dora-rs/dora/releases/download/<version>/dora-<version>-x86_64-Linux.zip
unzip dora-<version>-x86_64-Linux.zip
python3 -m pip install dora-rs==<version>
PATH=$PATH:$(pwd):$(pwd)/iceoryx
PATH=$PATH:$(pwd)
dora --help
```
@@ -119,7 +119,7 @@ nodes:
Composability as:
- [x] `YAML` declarative programming
- [x] language-agnostic:
- [x] polyglot:
  - [x] Rust
  - [x] C
  - [x] C++
@@ -128,10 +128,11 @@ Composability as:
Low latency as:
- [x] written in <i>...Cough...blazingly fast ...Cough...</i> Rust.
- [x] PubSub communication with [`iceoryx`](https://iceoryx.io/v1.0.1/)
- [x] PubSub communication with shared memory!
- [ ] Zero-copy on read!
Distributed as:
- [x] PubSub communication with [`zenoh`](https://github.com/eclipse-zenoh/zenoh)
- [ ] PubSub communication between machines with [`zenoh`](https://github.com/eclipse-zenoh/zenoh)
- [x] Distributed telemetry with [`opentelemetry`](https://github.com/open-telemetry/opentelemetry-rust)
---
@@ -4,45 +4,10 @@
The operator API is a framework for you to implement. The implemented operator will be managed by `dora`. This framework enables us to make optimisations and provide advanced features.
The operator definition is composed of 3 functions: `dora_init_operator`, which initialises the operator and its context; `dora_drop_operator`, which frees its memory; and `dora_on_input`, which runs the operator's logic when an input is received.
The operator definition is composed of 3 functions: `dora_init_operator`, which initialises the operator and its context; `dora_drop_operator`, which frees its memory; and `dora_on_event`, which runs the operator's logic when an input is received.
```c
int dora_init_operator(void **operator_context)
{
    // allocate a single byte to store a counter
    // (the operator context pointer can be used to keep arbitrary data between calls)
    void *context = malloc(1);
    char *context_char = (char *)context;
    *context_char = 0;
    *operator_context = context;
    return 0;
}
void dora_drop_operator(void *operator_context)
{
    free(operator_context);
}
int dora_on_input(
    const char *id_start,
    size_t id_len,
    const char *data_start,
    size_t data_len,
    const int (*output_fn_raw)(const char *id_start,
                               size_t id_len,
                               const char *data_start,
                               size_t data_len,
                               const void *output_context),
    void *output_context,
    const void *operator_context)
{
    // handle the input ...
    // (sending outputs is possible using `output_fn_raw`)
    // (the `operator_context` is the pointer created in `dora_init_operator`, i.e., a counter in our case)
    return 0; // returning 0 keeps the operator running
}
{{#include ../../examples/c-dataflow/operator.c:0:29}}
```
### Try it out!
@@ -70,9 +35,9 @@ The custom node API allow you to integrate `dora` into your application. It allo
void *dora_context = init_dora_context_from_env();
```
#### `dora_next_input`
#### `dora_next_event`
`dora_next_input` waits for the next input. To extract the input ID and data, use `read_dora_input_id` and `read_dora_input_data` on the returned pointer.
`dora_next_event` waits for the next event (e.g. an input). Use `read_dora_event_type` to read the event's type. Inputs are of type `DoraEventType_Input`. To extract the ID and data of an input event, use `read_dora_input_id` and `read_dora_input_data` on the returned pointer. It is safe to ignore any events and handle only the events that are relevant to the node.
```c
void *input = dora_next_input(dora_context);
```
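To make the new event-based flow more concrete, here is a minimal, hypothetical event-loop sketch. It only uses the calls named on this page (`dora_next_event`, `read_dora_event_type`, `read_dora_input_id`, `read_dora_input_data`); the header name, the `NULL`-means-done check, and the exact `read_dora_input_id` signature (assumed to mirror the `read_dora_input_data` call shown in the next hunk) are assumptions, so treat it as a sketch rather than a reference.
```c
#include <stddef.h>
#include <stdio.h>
#include "node_api.h" // assumed name of the dora C node API header, as used by the C example

// Hypothetical event loop: wait for events, handle inputs, ignore everything else.
void event_loop(void *dora_context)
{
    for (;;)
    {
        void *event = dora_next_event(dora_context);
        if (event == NULL)
        {
            break; // assumption: no event means there is nothing left to process
        }
        if (read_dora_event_type(event) == DoraEventType_Input)
        {
            char *id = NULL;
            size_t id_len = 0;
            read_dora_input_id(event, &id, &id_len); // signature assumed by analogy with read_dora_input_data

            char *data = NULL;
            size_t data_len = 0;
            read_dora_input_data(event, &data, &data_len);

            printf("received input `%.*s` (%zu bytes)\n", (int)id_len, id, data_len);
        }
        // other event types are simply ignored here, which the description above says is safe
        // (depending on the API version, the event may also need to be released; see the C example)
    }
}
```
Sending outputs from inside such a loop is done with `dora_send_output`, shown in the next snippet.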
@@ -94,8 +59,8 @@ read_dora_input_data(input, &data, &data_len);
```c
char out_id[] = "tick";
char out_data[] = {0, 0, 0};
dora_send_output(dora_context, out_id, strlen(out_id), &out_data, sizeof out_data);
char out_data[50];
dora_send_output(dora_context, out_id, strlen(out_id), out_data, out_data_len);
```
### Try it out!
@@ -7,16 +7,15 @@ Dataflows are specified through a YAML file. This section presents our current d
Dataflows are specified through the following format:
```yaml
communication:
  zenoh:
    prefix: /example-python-no-webcam-dataflow
nodes:
  - id: foo
    # ... (see below)
  - id: bar
    # ... (see below)
deployment:
  # (not specified yet, these fields are just examples)
  zenoh_routers:
    - 127.0.0.1
  kubernetes:
```
### Inputs and Outputs
@@ -30,29 +29,34 @@ Input operands are specified using the <name>: <operator>/<output> syntax, where
Nodes are defined using the following format:
```yaml
- id: some-unique-id
  name: Human-Readable Node Name
  description: An optional description of the node's purpose.
  # EITHER:
  operators:
    - id: operator-1
      # ... (see below)
    - id: operator-2
      # ... (see below)
  # OR:
  custom:
    run: path/to/timestamp
    env:
      - ENVIRONMENT_VARIABLE_1: true
    working-directory: some/path
    inputs:
      input_1: operator_2/output_4
      input_2: custom_node_2/output_4
    outputs:
      - output_1
nodes:
  - id: some-unique-id
    # For nodes with multiple operators
    operators:
      - id: operator-1
        # ... (see below)
      - id: operator-2
        # ... (see below)
  - id: some-unique-id-2
    custom:
      source: path/to/timestamp
      env:
        - ENVIRONMENT_VARIABLE_1: true
      working-directory: some/path
      inputs:
        input_1: operator_2/output_4
        input_2: custom_node_2/output_4
      outputs:
        - output_1
  # Unique operator
  - id: some-unique-id-3
    operator:
      # ... (see below)
```
Nodes must provide either an `operators` field or a `custom` field, but not both. Nodes with an `operators` field run a dora runtime process, which runs and manages the specified operators. Nodes with a `custom` field run a custom executable.
@@ -116,24 +120,7 @@ The mandatory `communication` key specifies how dora nodes and operators should
Zenoh is quite flexible and can be easily scaled to distributed deployment. It does not require any extra setup since it supports peer-to-peer communication without an external broker. The drawback of zenoh is that it is still in an early stage of development, so it might still have reliability and performance issues.
_Note:_ Dora currently only supports local deployments, so interacting with remote nodes/operators is not possible yet.
- **[Iceoryx](https://iceoryx.io/):** The Eclipse iceoryx™ project provides an IPC middleware based on shared memory. It is very fast, but it only supports local communication. To use iceoryx as the communication backend, set the `communication` field to the following:
```yaml
communication:
  iceoryx:
    app_name_prefix: dora-iceoryx-example
```
The `app_name_prefix` defines a prefix for the _application name_ that the dataflow will use. An additional UUID will be added to that prefix to ensure that the application name remains unique even if multiple instances of the same dataflow are running.
In order to use iceoryx, you need to start its broker daemon called [_RouDi_](https://iceoryx.io/v2.0.2/getting-started/overview/#roudi). Its executable name is `iox-roudi`. There are two ways to obtain it:
- Follow the [iceoryx installation chapter](https://iceoryx.io/v2.0.2/getting-started/installation/)
- Clone the `dora-rs` project and build its iceoryx example using `cargo build --example iceoryx`. After building, you can find the `iox-roudi` executable inside the `target` directory using the following command: `find target -type f -wholename "*/iox-roudi"`.
Run the `iox-roudi` executable to start the iceoryx broker daemon. Afterwards, you should be able to run your dataflow.
## TODO: Integration with ROS 1/2
To integrate dora-rs operators with ROS1 or ROS2 operators, we plan to provide special _bridge operators_. These operators act as a sink in one dataflow framework and push all messages to a different dataflow framework, where they act as a source.
@@ -119,5 +119,5 @@ Let's write the graph definition so that the nodes know who to communicate with.
- Run the `dataflow`:
```bash
dora-coordinator --run-dataflow dataflow.yml dora-runtime
dora-daemon --run-dataflow dataflow.yml
```
@@ -11,7 +11,7 @@ Install `dora` binaries from GitHub releases:
wget https://github.com/dora-rs/dora/releases/download/<version>/dora-<version>-x86_64-Linux.zip
unzip dora-<version>-x86_64-Linux.zip
python3 -m pip install dora-rs==<version> ## For Python API
PATH=$PATH:$(pwd):$(pwd)/iceoryx
PATH=$PATH:$(pwd)
dora --help
```
@@ -21,12 +21,6 @@ Build it using:
```bash
git clone https://github.com/dora-rs/dora.git
cd dora
cargo build -p dora-coordinator -p dora-runtime --release
cargo build --all --release
PATH=$PATH:$(pwd)/target/release
```
If you want to use `Iceoryx`, add `iox-roudi` to the path.
You can find `iox-roudi` with:
```bash
find target -type f -wholename "*/iceoryx-install/bin/iox-roudi"
```
@@ -4,22 +4,24 @@
By using `dora`, you can define robotic applications as a graph of nodes that can be easily swapped and replaced. Those nodes can be shared and implemented in different languages such as Rust, Python or C. `dora` will then connect those nodes and try to provide as many features as possible to facilitate the dataflow.
## ✨ Features that we want to provide
## ✨ Features
Composability as:
- [x] `YAML` declarative programming
- [x] language-agnostic:
- [x] polyglot:
  - [x] Rust
  - [x] C
  - [x] C++
  - [x] Python
- [ ] Isolated operators and nodes that can be reused.
- [x] Isolated operators and custom nodes that can be reused.
Low latency as:
- [x] written in <i>...Cough...blazingly fast ...Cough...</i> Rust.
- [ ] Minimal abstraction, close to the metal.
- [x] PubSub communication with shared memory!
- [ ] Zero-copy on read!
Distributed as:
- [x] PubSub communication with [`zenoh`](https://github.com/eclipse-zenoh/zenoh)
- [ ] PubSub communication between machines with [`zenoh`](https://github.com/eclipse-zenoh/zenoh)
- [x] Distributed telemetry with [`opentelemetry`](https://github.com/open-telemetry/opentelemetry-rust)
@@ -4,15 +4,10 @@
The operator API is a framework for you to implement. The implemented operator will be managed by `dora`. This framework enables us to make optimisations and provide advanced features. It is the recommended way of using `dora`.
An operator requires an `on_input` method and must return a `DoraStatus` of 0 or 1, depending on whether it needs to continue or stop.
An operator requires an `on_event` method and must return a `DoraStatus`, depending on whether it needs to continue or stop.
```python
class Operator:
    def on_input(
        self,
        dora_input: dict,
        send_output: Callable[[str, bytes], None],
    ) -> DoraStatus:
{{#include ../../examples/python-operator-dataflow/object_detection.py:0:25}}
```
> For Python, we recommend allocating operators on a single runtime. A runtime shares the same GIL across several operators, making those operators run almost sequentially. See: [https://docs.rs/pyo3/latest/pyo3/marker/struct.Python.html#deadlocks](https://docs.rs/pyo3/latest/pyo3/marker/struct.Python.html#deadlocks)
@@ -4,25 +4,10 @@
The operator API is a framework for you to implement. The implemented operator will be managed by `dora`. This framework enables us to make optimisations and provide advanced features. It is the recommended way of using `dora`.
An operator must be registered and must implement the `DoraOperator` trait. The trait is composed of an `on_input` method that defines the behaviour of the operator when there is an input.
An operator must be registered and must implement the `DoraOperator` trait. The trait is composed of an `on_event` method that defines the behaviour of the operator when there is an event, such as receiving an input.
```rust
use dora_operator_api::{register_operator, DoraOperator, DoraOutputSender, DoraStatus};
register_operator!(ExampleOperator);
#[derive(Debug, Default)]
struct ExampleOperator {
    time: Option<String>,
}
impl DoraOperator for ExampleOperator {
    fn on_input(
        &mut self,
        id: &str,
        data: &[u8],
        output_sender: &mut DoraOutputSender,
    ) -> Result<DoraStatus, ()> {
{{#include ../../examples/rust-dataflow/operator/src/lib.rs:0:17}}
```
### Try it out!
@@ -63,23 +48,30 @@ The custom node API allow you to integrate `dora` into your application. It allo
`DoraNode::init_from_env()` initiates a node from environment variables set by `dora-coordinator`.
```rust
let node = DoraNode::init_from_env().await?;
let (mut node, mut events) = DoraNode::init_from_env()?;
```
#### `.inputs()`
#### `.recv()`
`.inputs()` gives you a stream of inputs that you can access by calling `next()` on the stream.
`.recv()` waits for the next event on the event stream.
```rust
let mut inputs = node.inputs().await?;
let event = events.recv();
```
#### `.send_output(output_id, data)`
#### `.send_output(...)`
`send_output` sends data from the node.
`send_output` sends data from the node to the other nodes.
It takes a closure as an argument to enable zero-copy on send.
```rust
node.send_output(&data_id, data.as_bytes()).await?;
node.send_output(
    &data_id,
    metadata.parameters,
    data.len(),
    |out| {
        out.copy_from_slice(data);
    })?;
```
### Try it out!
@@ -53,5 +53,5 @@ For a manual build, follow these steps:
- Start the `dora-coordinator`, passing the paths to the dataflow file and the `dora-runtime` as arguments:
```
../../target/release/dora-coordinator --run-dataflow dataflow.yml ../../target/release/dora-runtime
../../target/release/dora-daemon --run-dataflow dataflow.yml ../../target/release/dora-runtime
```
@@ -65,5 +65,5 @@ For a manual build, follow these steps:
- Start the `dora-coordinator`, passing the paths to the dataflow file and the `dora-runtime` as arguments:
```
../../target/release/dora-coordinator --run-dataflow dataflow.yml ../../target/release/dora-runtime
../../target/release/dora-daemon --run-dataflow dataflow.yml ../../target/release/dora-runtime
```
@@ -16,18 +16,10 @@ The [`dataflow.yml`](./dataflow.yml) defines a simple dataflow graph with the fo
cargo run --example python-dataflow
```
## Installation
To install, you should run the `install.sh` script.
```bash
install.sh
```
## Run the dataflow as a standalone
- Start the `dora-coordinator`, passing the paths to the dataflow file and the `dora-runtime` as arguments:
- Start the `dora-daemon`:
```
../../target/release/dora-coordinator --run-dataflow dataflow.yml ../../target/release/dora-runtime
../../target/release/dora-daemon --run-dataflow dataflow.yml
```
@@ -26,8 +26,8 @@ install.sh
## Run the dataflow as a standalone
- Start the `dora-coordinator`, passing the paths to the dataflow file and the `dora-runtime` as arguments:
- Start the `dora-daemon`:
```
../../target/release/dora-coordinator --run-dataflow dataflow.yml ../../target/release/dora-runtime
../../target/release/dora-daemon --run-dataflow dataflow.yml
```
@@ -4,8 +4,8 @@
import time
import urllib.request
import cv2
import numpy as np
from dora import Node
print("Hello from no_webcam.py")
@@ -1,18 +1,14 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from enum import Enum
from typing import Callable
import cv2
import numpy as np
import torch
class DoraStatus(Enum):
    CONTINUE = 0
    STOP = 1
from dora import DoraStatus
class Operator:
    """
@@ -1,22 +1,20 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
from enum import Enum
from typing import Callable
import cv2
import numpy as np
from utils import LABELS
from dora import DoraStatus
CI = os.environ.get("CI")
font = cv2.FONT_HERSHEY_SIMPLEX
class DoraStatus(Enum):
    CONTINUE = 0
    STOP = 1
class Operator:
    """
    Plot image and bounding box
@@ -63,7 +61,11 @@ class Operator:
        self.bboxs = np.reshape(bboxs, (-1, 6))
        self.bounding_box_messages += 1
        print("received " + str(self.bounding_box_messages) + " bounding boxes")
        print(
            "received "
            + str(self.bounding_box_messages)
            + " bounding boxes"
        )
        for bbox in self.bboxs:
            [