@@ -48,7 +48,7 @@ members = [
 ]
 
 [workspace.package]
-edition = "2021"
+edition = "2024"
 rust-version = "1.85.0"
 # Make sure to also bump `apis/node/python/__init__.py` version.
 version = "0.3.12"
@@ -62,7 +62,7 @@ mod ros2 {
     pub fn generate() -> PathBuf {
         use rust_format::Formatter;
         let paths = ament_prefix_paths();
-        let generated = dora_ros2_bridge_msg_gen::gen(paths.as_slice(), true);
+        let generated = dora_ros2_bridge_msg_gen::generate(paths.as_slice(), true);
         let generated_string = rust_format::PrettyPlease::default()
             .format_tokens(generated)
             .unwrap();
@@ -1,7 +1,7 @@
 #![deny(unsafe_op_in_unsafe_fn)]
 use arrow_array::UInt8Array;
-use dora_node_api::{arrow::array::AsArray, DoraNode, Event, EventStream};
+use dora_node_api::{DoraNode, Event, EventStream, arrow::array::AsArray};
 use eyre::Context;
 use std::{ffi::c_void, ptr, slice};
@@ -21,7 +21,7 @@ struct DoraContext {
 /// needed, use the [`free_dora_context`] function.
 ///
 /// On error, a null pointer is returned.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub extern "C" fn init_dora_context_from_env() -> *mut c_void {
     let context = || {
         let (node, events) = DoraNode::init_from_env()?;
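The `#[unsafe(no_mangle)]` spelling above (and in the hunks below) is the edition 2024 form: `no_mangle`, `export_name`, and `link_section` are now unsafe attributes and must be wrapped in `unsafe(...)`. A minimal standalone sketch, not taken from this repository:

// Hypothetical example: edition 2024 requires the unsafe-attribute syntax.
#[unsafe(no_mangle)]
pub extern "C" fn answer() -> i32 {
    42
}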
@@ -47,7 +47,7 @@ pub extern "C" fn init_dora_context_from_env() -> *mut c_void {
 /// Only pointers created through [`init_dora_context_from_env`] are allowed
 /// as arguments. Each context pointer must be freed exactly once. After
 /// freeing, the pointer must not be used anymore.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn free_dora_context(context: *mut c_void) {
     let context: Box<DoraContext> = unsafe { Box::from_raw(context.cast()) };
     // drop all fields except for `node`
@@ -71,7 +71,7 @@ pub unsafe extern "C" fn free_dora_context(context: *mut c_void) {
 /// The `context` argument must be a dora context created through
 /// [`init_dora_context_from_env`]. The context must be still valid, i.e., not
 /// freed yet.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn dora_next_event(context: *mut c_void) -> *mut c_void {
     let context: &mut DoraContext = unsafe { &mut *context.cast() };
     match context.events.recv() {
@@ -87,7 +87,7 @@ pub unsafe extern "C" fn dora_next_event(context: *mut c_void) -> *mut c_void {
 /// The `event` argument must be a dora event received through
 /// [`dora_next_event`]. The event must be still valid, i.e., not
 /// freed yet.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn read_dora_event_type(event: *const ()) -> EventType {
     let event: &Event = unsafe { &*event.cast() };
     match event {
@@ -125,7 +125,7 @@ pub enum EventType {
 ///
 /// - Note: `Out_ptr` is not a null-terminated string. The length of the string
 ///   is given by `out_len`.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn read_dora_input_id(
     event: *const (),
     out_ptr: *mut *const u8,
@@ -165,7 +165,7 @@ pub unsafe extern "C" fn read_dora_input_id(
 /// freed yet. The returned `out_ptr` must not be used after
 /// freeing the `event`, since it points directly into the event's
 /// memory.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn read_dora_input_data(
     event: *const (),
     out_ptr: *mut *const u8,
@@ -203,7 +203,7 @@ pub unsafe extern "C" fn read_dora_input_data(
 /// ## Safety
 ///
 /// Return `0` if the given event is not an input event.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn read_dora_input_timestamp(event: *const ()) -> core::ffi::c_ulonglong {
     let event: &Event = unsafe { &*event.cast() };
     match event {
@@ -221,7 +221,7 @@ pub unsafe extern "C" fn read_dora_input_timestamp(event: *const ()) -> core::ff
 /// freeing, the pointer and all derived pointers must not be used anymore.
 /// This also applies to the `read_dora_event_*` functions, which return
 /// pointers into the original event structure.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn free_dora_event(event: *mut c_void) {
     let _: Box<Event> = unsafe { Box::from_raw(event.cast()) };
 }
@@ -241,7 +241,7 @@ pub unsafe extern "C" fn free_dora_event(event: *mut c_void) {
 ///   UTF8-encoded string.
 /// - The `data_ptr` and `data_len` fields must be the start pointer and length
 ///   a byte array.
-#[no_mangle]
+#[unsafe(no_mangle)]
 pub unsafe extern "C" fn dora_send_output(
     context: *mut c_void,
     id_ptr: *const u8,
@@ -26,7 +26,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> {
         .map_err(|e| syn::Error::new(e.span(), "expected type as argument"))?;
     let init = quote! {
-        #[no_mangle]
+        #[unsafe(no_mangle)]
         pub unsafe extern "C" fn dora_init_operator() -> dora_operator_api::types::DoraInitResult {
             dora_operator_api::raw::dora_init_operator::<#operator_ty>()
         }
@@ -37,7 +37,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> {
     };
     let drop = quote! {
-        #[no_mangle]
+        #[unsafe(no_mangle)]
         pub unsafe extern "C" fn dora_drop_operator(operator_context: *mut std::ffi::c_void)
             -> dora_operator_api::types::DoraResult
         {
@@ -50,7 +50,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> {
     };
     let on_event = quote! {
-        #[no_mangle]
+        #[unsafe(no_mangle)]
         pub unsafe extern "C" fn dora_on_event(
             event: &mut dora_operator_api::types::RawEvent,
             send_output: &dora_operator_api::types::SendOutput,
@@ -24,7 +24,7 @@ use uuid::Uuid;
 pub(crate) async fn control_events(
     control_listen_addr: SocketAddr,
     tasks: &FuturesUnordered<JoinHandle<()>>,
-) -> eyre::Result<impl Stream<Item = Event>> {
+) -> eyre::Result<impl Stream<Item = Event> + use<>> {
     let (tx, rx) = mpsc::channel(10);
     let (finish_tx, mut finish_rx) = mpsc::channel(1);
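The `+ use<>` bound above is edition 2024 precise capturing: return-position `impl Trait` now captures all in-scope generic parameters and lifetimes by default, and an empty `use<>` opts out, so the returned stream is not treated as borrowing from the function's arguments. A minimal sketch with hypothetical names:

// Hypothetical example: without `use<>`, the opaque return type would be
// assumed to capture the lifetime of `limit` under edition 2024 rules.
fn counter(limit: &usize) -> impl Iterator<Item = usize> + use<> {
    0..*limit
}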
@@ -5,13 +5,14 @@ use dora_core::{
     build::{self, BuildInfo, GitManager, PrevGitSource},
     config::{DataId, Input, InputMapping, NodeId, NodeRunConfig, OperatorId},
     descriptor::{
-        read_as_descriptor, CoreNodeKind, Descriptor, DescriptorExt, ResolvedNode, RuntimeNode,
-        DYNAMIC_SOURCE,
+        CoreNodeKind, DYNAMIC_SOURCE, Descriptor, DescriptorExt, ResolvedNode, RuntimeNode,
+        read_as_descriptor,
     },
     topics::LOCALHOST,
     uhlc::{self, HLC},
 };
 use dora_message::{
+    BuildId, DataflowId, SessionId,
     common::{
         DaemonId, DataMessage, DropToken, GitSource, LogLevel, NodeError, NodeErrorCause,
         NodeExitStatus,
@@ -26,11 +27,10 @@ use dora_message::{
     descriptor::NodeSource,
     metadata::{self, ArrowTypeInfo},
     node_to_daemon::{DynamicNodeEvent, Timestamped},
-    BuildId, DataflowId, SessionId,
 };
-use dora_node_api::{arrow::datatypes::DataType, Parameter};
-use eyre::{bail, eyre, Context, ContextCompat, Result};
-use futures::{future, stream, FutureExt, TryFutureExt};
+use dora_node_api::{Parameter, arrow::datatypes::DataType};
+use eyre::{Context, ContextCompat, Result, bail, eyre};
+use futures::{FutureExt, TryFutureExt, future, stream};
 use futures_concurrency::stream::Merge;
 use local_listener::DynamicNodeEventWrapper;
 use log::{DaemonLogger, DataflowLogger, Logger};
@@ -59,7 +59,7 @@ use tokio::{
         oneshot::{self, Sender},
     },
 };
-use tokio_stream::{wrappers::ReceiverStream, Stream, StreamExt};
+use tokio_stream::{Stream, StreamExt, wrappers::ReceiverStream};
 use tracing::{error, warn};
 use uuid::{NoContext, Timestamp, Uuid};
@@ -168,7 +168,7 @@ impl Daemon {
             Some(coordinator_addr),
             daemon_id,
             None,
-            clock,
+            clock.clone(),
             Some(remote_daemon_events_tx),
             Default::default(),
             log_destination,
@@ -342,7 +342,9 @@ impl Daemon {
         )
         .unwrap();
         if cfg!(target_os = "macos") {
-            warn!("disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file");
+            warn!(
+                "disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file"
+            );
             zenoh_config
                 .insert_json5("scouting/multicast", r#"{ enabled: false }"#)
                 .unwrap();
@@ -374,7 +376,9 @@ impl Daemon {
         )
         .unwrap();
         if cfg!(target_os = "macos") {
-            warn!("disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file");
+            warn!(
+                "disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file"
+            );
             zenoh_config
                 .insert_json5("scouting/multicast", r#"{ enabled: false }"#)
                 .unwrap();
@@ -524,7 +528,9 @@ impl Daemon {
                 if let Some(dataflow) = self.running.get_mut(&dataflow_id) {
                     dataflow.running_nodes.insert(node_id, running_node);
                 } else {
-                    tracing::error!("failed to handle SpawnNodeResult: no running dataflow with ID {dataflow_id}");
+                    tracing::error!(
+                        "failed to handle SpawnNodeResult: no running dataflow with ID {dataflow_id}"
+                    );
                 }
             }
             Err(error) => {
@@ -1000,7 +1006,7 @@ impl Daemon {
         dataflow_descriptor: Descriptor,
         local_nodes: BTreeSet<NodeId>,
         uv: bool,
-    ) -> eyre::Result<impl Future<Output = eyre::Result<BuildInfo>>> {
+    ) -> eyre::Result<impl Future<Output = eyre::Result<BuildInfo>> + use<>> {
         let builder = build::Builder {
             session_id,
             base_working_dir,
@@ -1095,7 +1101,7 @@ impl Daemon {
         dataflow_descriptor: Descriptor,
         spawn_nodes: BTreeSet<NodeId>,
         uv: bool,
-    ) -> eyre::Result<impl Future<Output = eyre::Result<()>>> {
+    ) -> eyre::Result<impl Future<Output = eyre::Result<()>> + use<>> {
         let mut logger = self
             .logger
             .for_dataflow(dataflow_id)
@@ -1255,18 +1261,20 @@ impl Daemon {
                     let finished_or_next =
                         futures::future::select(finished, subscriber.recv_async());
                     match finished_or_next.await {
-                        future::Either::Left((finished, _)) => {
-                            match finished {
-                                Err(broadcast::error::RecvError::Closed) => {
-                                    tracing::debug!("dataflow finished, breaking from zenoh subscribe task");
-                                    break;
-                                }
-                                other => {
-                                    tracing::warn!("unexpected return value of dataflow finished_rx channel: {other:?}");
-                                    break;
-                                }
-                            }
-                        }
+                        future::Either::Left((finished, _)) => match finished {
+                            Err(broadcast::error::RecvError::Closed) => {
+                                tracing::debug!(
+                                    "dataflow finished, breaking from zenoh subscribe task"
+                                );
+                                break;
+                            }
+                            other => {
+                                tracing::warn!(
+                                    "unexpected return value of dataflow finished_rx channel: {other:?}"
+                                );
+                                break;
+                            }
+                        },
                         future::Either::Right((sample, f)) => {
                             finished = f;
                             let event = sample.map_err(|e| eyre!(e)).and_then(|s| {
@@ -1,7 +1,8 @@
 use crate::{
+    CoreNodeKindExt, DoraEvent, Event, OutputId, RunningNode,
     log::{self, NodeLogger},
     node_communication::spawn_listener_loop,
-    node_inputs, CoreNodeKindExt, DoraEvent, Event, OutputId, RunningNode,
+    node_inputs,
 };
 use aligned_vec::{AVec, ConstAlign};
 use crossbeam::queue::ArrayQueue;
@@ -9,26 +10,26 @@ use dora_arrow_convert::IntoArrow;
 use dora_core::{
     config::DataId,
     descriptor::{
-        resolve_path, source_is_url, Descriptor, OperatorDefinition, OperatorSource, PythonSource,
-        ResolvedNode, ResolvedNodeExt, DYNAMIC_SOURCE, SHELL_SOURCE,
+        DYNAMIC_SOURCE, Descriptor, OperatorDefinition, OperatorSource, PythonSource, ResolvedNode,
+        ResolvedNodeExt, SHELL_SOURCE, resolve_path, source_is_url,
     },
     get_python_path,
     uhlc::HLC,
 };
 use dora_download::download_file;
 use dora_message::{
+    DataflowId,
     common::{LogLevel, LogMessage},
     daemon_to_coordinator::{DataMessage, NodeExitStatus, Timestamped},
     daemon_to_node::{NodeConfig, RuntimeConfig},
     id::NodeId,
-    DataflowId,
 };
 use dora_node_api::{
+    Metadata,
     arrow::array::ArrayData,
     arrow_utils::{copy_array_into_sample, required_data_size},
-    Metadata,
 };
-use eyre::{bail, ContextCompat, WrapErr};
+use eyre::{ContextCompat, WrapErr, bail};
 use std::{
     future::Future,
     path::{Path, PathBuf},
@@ -59,7 +60,7 @@ impl Spawner {
         node_working_dir: PathBuf,
         node_stderr_most_recent: Arc<ArrayQueue<String>>,
         logger: &mut NodeLogger<'_>,
-    ) -> eyre::Result<impl Future<Output = eyre::Result<PreparedNode>>> {
+    ) -> eyre::Result<impl Future<Output = eyre::Result<PreparedNode>> + use<>> {
         let dataflow_id = self.dataflow_id;
         let node_id = node.id.clone();
         logger
@@ -221,9 +222,9 @@ impl Spawner {
                 cmd.arg("run");
                 cmd.arg("python");
                 tracing::info!(
-                        "spawning: uv run python -uc import dora; dora.start_runtime() # {}",
-                        node.id
-                    );
+                    "spawning: uv run python -uc import dora; dora.start_runtime() # {}",
+                    node.id
+                );
                 cmd
             } else {
                 let python = get_python_path()
@@ -362,7 +363,7 @@ impl PreparedNode {
                 return Ok(RunningNode {
                     pid: None,
                     node_config: self.node_config,
-                })
+                });
             }
         };
@@ -1,7 +1,7 @@
 [package]
 name = "aloha-client"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -1,7 +1,7 @@
 [package]
 name = "aloha-teleop"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -5,10 +5,10 @@ use std::{collections::BTreeMap, future::Future, path::PathBuf};
 use crate::descriptor::ResolvedNode;
 use dora_message::{
+    SessionId,
     common::{GitSource, LogLevel},
     descriptor::{CoreNodeKind, EnvValue},
     id::NodeId,
-    SessionId,
 };
 use eyre::Context;
@@ -27,14 +27,17 @@ pub struct Builder {
 }
 
 impl Builder {
-    pub async fn build_node(
+    pub async fn build_node<L>(
         self,
         node: ResolvedNode,
         git: Option<GitSource>,
         prev_git: Option<PrevGitSource>,
-        mut logger: impl BuildLogger,
+        mut logger: L,
         git_manager: &mut GitManager,
-    ) -> eyre::Result<impl Future<Output = eyre::Result<BuiltNode>>> {
+    ) -> eyre::Result<impl Future<Output = eyre::Result<BuiltNode>> + use<L>>
+    where
+        L: BuildLogger,
+    {
         let prepared_git = if let Some(GitSource { repo, commit_hash }) = git {
             let target_dir = self.base_working_dir.join("git");
             let git_folder = git_manager.choose_clone_dir(
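The signature change above follows from the same precise-capturing rule: an argument-position `impl Trait` has no name that a `use<...>` list could mention, so `impl BuildLogger` is promoted to the named parameter `L`, letting the returned future declare `use<L>` as its only capture. A sketch with hypothetical names:

// Hypothetical example: `L` must be a named generic to appear in `use<L>`.
fn with_logger<L: Fn(&str)>(logger: L) -> impl FnOnce() + use<L> {
    move || logger("done")
}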
@@ -6,14 +6,14 @@ use crate::{
 use dora_message::{
     config::{Input, InputMapping, UserInputMapping},
-    descriptor::{CoreNodeKind, OperatorSource, ResolvedNode, DYNAMIC_SOURCE, SHELL_SOURCE},
+    descriptor::{CoreNodeKind, DYNAMIC_SOURCE, OperatorSource, ResolvedNode, SHELL_SOURCE},
     id::{DataId, NodeId, OperatorId},
 };
-use eyre::{bail, eyre, Context};
+use eyre::{Context, bail, eyre};
 use std::{collections::BTreeMap, path::Path, process::Command};
 use tracing::info;
 
-use super::{resolve_path, Descriptor, DescriptorExt};
+use super::{Descriptor, DescriptorExt, resolve_path};
 
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 
 pub fn check_dataflow(
@@ -49,7 +49,7 @@ pub fn check_dataflow(
                 info!("skipping path check for node with build command");
             } else {
                 resolve_path(source, working_dir).wrap_err_with(|| {
-                    format!("Could not find source path `{source}`")
+                    format!("Could not find source path `{}`", source)
                 })?;
             };
         }
@@ -145,9 +145,13 @@ impl ResolvedNodeExt for ResolvedNode {
                 .filter(|op| op.config.send_stdout_as.is_some())
                 .count();
             if count == 1 && n.operators.len() > 1 {
-                tracing::warn!("All stdout from all operators of a runtime are going to be sent in the selected `send_stdout_as` operator.")
+                tracing::warn!(
+                    "All stdout from all operators of a runtime are going to be sent in the selected `send_stdout_as` operator."
+                )
             } else if count > 1 {
-                return Err(eyre!("More than one `send_stdout_as` entries for a runtime node. Please only use one `send_stdout_as` per runtime."));
+                return Err(eyre!(
+                    "More than one `send_stdout_as` entries for a runtime node. Please only use one `send_stdout_as` per runtime."
+                ));
             }
             Ok(n.operators.iter().find_map(|op| {
                 op.config
@@ -8,7 +8,7 @@ fn main() {}
 fn main() {
     use rust_format::Formatter;
     let paths = ament_prefix_paths();
-    let generated = dora_ros2_bridge_msg_gen::gen(paths.as_slice(), false);
+    let generated = dora_ros2_bridge_msg_gen::generate(paths.as_slice(), false);
     let generated_string = rust_format::PrettyPlease::default()
         .format_tokens(generated)
         .unwrap();
@@ -17,7 +17,7 @@ pub mod types;
 pub use crate::parser::get_packages;
 
 #[allow(clippy::cognitive_complexity)]
-pub fn gen<P>(paths: &[P], create_cxx_bridge: bool) -> proc_macro2::TokenStream
+pub fn generate<P>(paths: &[P], create_cxx_bridge: bool) -> proc_macro2::TokenStream
 where
     P: AsRef<Path>,
 {
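The rename from `gen` to `generate` here (and at the call sites above) sidesteps a keyword clash: `gen` is reserved in Rust 2024, so keeping the old name would require raw-identifier syntax at the definition and at every call site. A sketch with a hypothetical function:

// Hypothetical example: in edition 2024, an item named `gen` needs `r#gen`.
pub fn r#gen(seed: u64) -> u64 {
    seed.wrapping_add(1)
}

fn caller() -> u64 {
    r#gen(41)
}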
@@ -381,7 +381,7 @@ impl Message {
         (def, imp)
     }
 
-    pub fn alias_token_stream(&self, package_name: &Ident) -> impl ToTokens {
+    pub fn alias_token_stream(&self, package_name: &Ident) -> impl ToTokens + use<> {
         let cxx_name = format_ident!("{}", self.name);
         let struct_raw_name = format_ident!("{package_name}__{}", self.name);
@@ -394,11 +394,11 @@ impl Message {
         }
     }
 
-    pub fn token_stream(&self) -> impl ToTokens {
+    pub fn token_stream(&self) -> impl ToTokens + use<> {
         self.token_stream_args(false)
     }
 
-    pub fn token_stream_args(&self, gen_cxx_bridge: bool) -> impl ToTokens {
+    pub fn token_stream_args(&self, gen_cxx_bridge: bool) -> impl ToTokens + use<> {
         let rust_type = format_ident!("{}", self.name);
         let raw_type = format_ident!("{}_Raw", self.name);
         let raw_ref_type = format_ident!("{}_RawRef", self.name);
@@ -228,7 +228,7 @@ impl Service {
         (def, imp)
     }
 
-    pub fn token_stream_with_mod(&self) -> impl ToTokens {
+    pub fn token_stream_with_mod(&self) -> impl ToTokens + use<> {
         let mod_name = format_ident!("_{}", self.name.to_snake_case());
         let inner = self.token_stream();
         quote! {
@@ -239,7 +239,7 @@ impl Service {
         }
     }
 
-    pub fn token_stream(&self) -> impl ToTokens {
+    pub fn token_stream(&self) -> impl ToTokens + use<> {
         let srv_type = format_ident!("{}", self.name);
         let req_type = format_ident!("{}_Request", self.name);
         let res_type = format_ident!("{}_Response", self.name);
@@ -34,7 +34,7 @@ where
     type Target = Vec<T::Target>;
 
     unsafe fn to_rust(&self) -> Self::Target {
-        self.iter().map(|v| v.to_rust()).collect()
+        self.iter().map(|v| unsafe { v.to_rust() }).collect()
     }
 }
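The explicit `unsafe { ... }` blocks added in this and the following FFI hunks come from `unsafe_op_in_unsafe_fn`: edition 2024 warns (and the `#![deny(unsafe_op_in_unsafe_fn)]` seen in the C API file above errors) when an unsafe operation relies on the enclosing `unsafe fn` instead of its own block. A minimal sketch with a hypothetical function:

// Hypothetical example: the raw-pointer dereference must be wrapped explicitly,
// even though the surrounding function is already `unsafe`.
unsafe fn first_byte(ptr: *const u8) -> u8 {
    unsafe { *ptr }
}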
@@ -117,7 +117,7 @@ where
     } else {
         let mut new_vec = vec
             .iter()
-            .map(|v| FFIFromRust::from_rust(v))
+            .map(|v| unsafe { FFIFromRust::from_rust(v) })
             .collect::<Vec<_>>();
         new_vec.shrink_to_fit();
         assert_eq!(new_vec.len(), new_vec.capacity());
@@ -91,7 +91,7 @@ impl FFIString {
         if self.is_empty() {
             Ok("")
         } else {
-            CStr::from_ptr(self.data).to_str()
+            unsafe { CStr::from_ptr(self.data).to_str() }
         }
     }
 }
@@ -100,7 +100,7 @@ impl FFIToRust for FFIString {
     type Target = String;
 
     unsafe fn to_rust(&self) -> Self::Target {
-        self.to_str().expect("CStr::to_str failed").to_string()
+        unsafe { self.to_str().expect("CStr::to_str failed").to_string() }
     }
 }
@@ -174,7 +174,7 @@ impl FFIToRust for FFIWString {
         if self.is_empty() {
             Self::Target::new()
         } else {
-            U16String(U16CStr::from_ptr_str(self.data).to_ustring())
+            U16String(unsafe { U16CStr::from_ptr_str(self.data).to_ustring() })
         }
     }
 }
@@ -8,11 +8,11 @@ pub trait MessageT: Default + Send + Sync {
     type RawRef: FFIFromRust<From = Self>;
 
     unsafe fn from_raw(from: &Self::Raw) -> Self {
-        from.to_rust()
+        unsafe{ from.to_rust() }
     }
 
     unsafe fn to_raw_ref(&self) -> Self::RawRef {
-        Self::RawRef::from_rust(self)
+        unsafe {Self::RawRef::from_rust(self)}
     }
 }
@@ -84,7 +84,9 @@ where
     unsafe fn to_rust(&self) -> <Self as FFIToRust>::Target {
         self.iter()
-            .map(|v| v.to_rust())
+            .map(|v| unsafe {
+                v.to_rust()
+            })
             .collect::<Vec<_>>()
             .try_into()
             .unwrap()
@@ -105,7 +107,7 @@ where
     unsafe fn from_rust(from: &Self::From) -> Self {
         from.iter()
-            .map(|v| FFIFromRust::from_rust(v))
+            .map(|v| unsafe { FFIFromRust::from_rust(v) })
             .collect::<Vec<_>>()
             .try_into()
             .unwrap()
@@ -18,7 +18,9 @@ pub struct ShmemServer<T, U> {
 impl<T, U> ShmemServer<T, U> {
     pub unsafe fn new(memory: Shmem) -> eyre::Result<Self> {
         Ok(Self {
-            channel: ShmemChannel::new_server(memory)?,
+            channel: unsafe {
+                ShmemChannel::new_server(memory)?
+            },
             reply_expected: false,
             phantom: PhantomData,
         })
@@ -57,7 +59,9 @@ pub struct ShmemClient<T, U> {
 impl<T, U> ShmemClient<T, U> {
     pub unsafe fn new(memory: Shmem, timeout: Option<Duration>) -> eyre::Result<Self> {
         Ok(Self {
-            channel: ShmemChannel::new_client(memory)?,
+            channel: unsafe {
+                ShmemChannel::new_client(memory)?
+            },
             timeout,
             phantom: PhantomData,
         })
@@ -1,7 +1,7 @@
 [package]
 name = "dora-mistral-rs"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html