| @@ -48,7 +48,7 @@ members = [ | |||
| ] | |||
| [workspace.package] | |||
| edition = "2021" | |||
| edition = "2024" | |||
| rust-version = "1.85.0" | |||
| # Make sure to also bump `apis/node/python/__init__.py` version. | |||
| version = "0.3.12" | |||
| @@ -62,7 +62,7 @@ mod ros2 { | |||
| pub fn generate() -> PathBuf { | |||
| use rust_format::Formatter; | |||
| let paths = ament_prefix_paths(); | |||
| let generated = dora_ros2_bridge_msg_gen::gen(paths.as_slice(), true); | |||
| let generated = dora_ros2_bridge_msg_gen::generate(paths.as_slice(), true); | |||
| let generated_string = rust_format::PrettyPlease::default() | |||
| .format_tokens(generated) | |||
| .unwrap(); | |||
| @@ -1,16 +1,15 @@ | |||
| use std::{any::Any, vec}; | |||
| use dora_node_api::{ | |||
| self, | |||
| self, Event, EventStream, | |||
| arrow::array::{AsArray, UInt8Array}, | |||
| merged::{MergeExternal, MergedEvent}, | |||
| Event, EventStream, | |||
| }; | |||
| use eyre::bail; | |||
| #[cfg(feature = "ros2-bridge")] | |||
| use dora_ros2_bridge::{_core, ros2_client}; | |||
| use futures_lite::{stream, Stream, StreamExt}; | |||
| use futures_lite::{Stream, StreamExt, stream}; | |||
| #[cxx::bridge] | |||
| #[allow(clippy::needless_lifetimes)] | |||
| @@ -2,7 +2,7 @@ | |||
| #![warn(unsafe_op_in_unsafe_fn)] | |||
| use dora_operator_api::{ | |||
| self, register_operator, DoraOperator, DoraOutputSender, DoraStatus, Event, IntoArrow, | |||
| self, DoraOperator, DoraOutputSender, DoraStatus, Event, IntoArrow, register_operator, | |||
| }; | |||
| use ffi::DoraSendOutputResult; | |||
| @@ -1,7 +1,7 @@ | |||
| #![deny(unsafe_op_in_unsafe_fn)] | |||
| use arrow_array::UInt8Array; | |||
| use dora_node_api::{arrow::array::AsArray, DoraNode, Event, EventStream}; | |||
| use dora_node_api::{DoraNode, Event, EventStream, arrow::array::AsArray}; | |||
| use eyre::Context; | |||
| use std::{ffi::c_void, ptr, slice}; | |||
| @@ -21,7 +21,7 @@ struct DoraContext { | |||
| /// needed, use the [`free_dora_context`] function. | |||
| /// | |||
| /// On error, a null pointer is returned. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub extern "C" fn init_dora_context_from_env() -> *mut c_void { | |||
| let context = || { | |||
| let (node, events) = DoraNode::init_from_env()?; | |||
| @@ -47,7 +47,7 @@ pub extern "C" fn init_dora_context_from_env() -> *mut c_void { | |||
| /// Only pointers created through [`init_dora_context_from_env`] are allowed | |||
| /// as arguments. Each context pointer must be freed exactly once. After | |||
| /// freeing, the pointer must not be used anymore. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn free_dora_context(context: *mut c_void) { | |||
| let context: Box<DoraContext> = unsafe { Box::from_raw(context.cast()) }; | |||
| // drop all fields except for `node` | |||
| @@ -71,7 +71,7 @@ pub unsafe extern "C" fn free_dora_context(context: *mut c_void) { | |||
| /// The `context` argument must be a dora context created through | |||
| /// [`init_dora_context_from_env`]. The context must be still valid, i.e., not | |||
| /// freed yet. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn dora_next_event(context: *mut c_void) -> *mut c_void { | |||
| let context: &mut DoraContext = unsafe { &mut *context.cast() }; | |||
| match context.events.recv() { | |||
| @@ -87,7 +87,7 @@ pub unsafe extern "C" fn dora_next_event(context: *mut c_void) -> *mut c_void { | |||
| /// The `event` argument must be a dora event received through | |||
| /// [`dora_next_event`]. The event must be still valid, i.e., not | |||
| /// freed yet. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn read_dora_event_type(event: *const ()) -> EventType { | |||
| let event: &Event = unsafe { &*event.cast() }; | |||
| match event { | |||
| @@ -125,7 +125,7 @@ pub enum EventType { | |||
| /// | |||
| /// - Note: `out_ptr` is not a null-terminated string. The length of the string | |||
| /// is given by `out_len`. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn read_dora_input_id( | |||
| event: *const (), | |||
| out_ptr: *mut *const u8, | |||
| @@ -165,7 +165,7 @@ pub unsafe extern "C" fn read_dora_input_id( | |||
| /// freed yet. The returned `out_ptr` must not be used after | |||
| /// freeing the `event`, since it points directly into the event's | |||
| /// memory. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn read_dora_input_data( | |||
| event: *const (), | |||
| out_ptr: *mut *const u8, | |||
| @@ -203,7 +203,7 @@ pub unsafe extern "C" fn read_dora_input_data( | |||
| /// ## Safety | |||
| /// | |||
| /// Return `0` if the given event is not an input event. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn read_dora_input_timestamp(event: *const ()) -> core::ffi::c_ulonglong { | |||
| let event: &Event = unsafe { &*event.cast() }; | |||
| match event { | |||
| @@ -221,7 +221,7 @@ pub unsafe extern "C" fn read_dora_input_timestamp(event: *const ()) -> core::ff | |||
| /// freeing, the pointer and all derived pointers must not be used anymore. | |||
| /// This also applies to the `read_dora_event_*` functions, which return | |||
| /// pointers into the original event structure. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn free_dora_event(event: *mut c_void) { | |||
| let _: Box<Event> = unsafe { Box::from_raw(event.cast()) }; | |||
| } | |||
| @@ -241,7 +241,7 @@ pub unsafe extern "C" fn free_dora_event(event: *mut c_void) { | |||
| /// UTF8-encoded string. | |||
| /// - The `data_ptr` and `data_len` fields must be the start pointer and length | |||
| /// of a byte array. | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn dora_send_output( | |||
| context: *mut c_void, | |||
| id_ptr: *const u8, | |||
| @@ -11,7 +11,7 @@ use dora_node_api::dora_core::config::NodeId; | |||
| use dora_node_api::dora_core::descriptor::source_is_url; | |||
| use dora_node_api::merged::{MergeExternalSend, MergedEvent}; | |||
| use dora_node_api::{DataflowId, DoraNode, EventStream}; | |||
| use dora_operator_api_python::{pydict_to_metadata, DelayedCleanup, NodeCleanupHandle, PyEvent}; | |||
| use dora_operator_api_python::{DelayedCleanup, NodeCleanupHandle, PyEvent, pydict_to_metadata}; | |||
| use dora_ros2_bridge_python::Ros2Subscription; | |||
| use eyre::Context; | |||
| use futures::{Stream, StreamExt}; | |||
| @@ -5,8 +5,8 @@ use std::{ | |||
| use arrow::pyarrow::ToPyArrow; | |||
| use dora_node_api::{ | |||
| merged::{MergeExternalSend, MergedEvent}, | |||
| DoraNode, Event, EventStream, Metadata, MetadataParameters, Parameter, StopCause, | |||
| merged::{MergeExternalSend, MergedEvent}, | |||
| }; | |||
| use eyre::{Context, Result}; | |||
| use futures::{Stream, StreamExt}; | |||
| @@ -282,7 +282,7 @@ mod tests { | |||
| use aligned_vec::{AVec, ConstAlign}; | |||
| use arrow::{ | |||
| array::{ | |||
| ArrayData, ArrayRef, BooleanArray, Float64Array, Int32Array, Int64Array, Int8Array, | |||
| ArrayData, ArrayRef, BooleanArray, Float64Array, Int8Array, Int32Array, Int64Array, | |||
| ListArray, StructArray, | |||
| }, | |||
| buffer::Buffer, | |||
| @@ -1,10 +1,10 @@ | |||
| use dora_core::{config::NodeId, uhlc::Timestamp}; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| daemon_to_node::DaemonReply, | |||
| node_to_daemon::{DaemonRequest, NodeRegisterRequest, Timestamped}, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{bail, eyre, Context}; | |||
| use eyre::{Context, bail, eyre}; | |||
| use shared_memory_server::{ShmemClient, ShmemConf}; | |||
| #[cfg(unix)] | |||
| use std::os::unix::net::UnixStream; | |||
| @@ -2,7 +2,7 @@ use dora_message::{ | |||
| daemon_to_node::DaemonReply, | |||
| node_to_daemon::{DaemonRequest, Timestamped}, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use std::{ | |||
| io::{Read, Write}, | |||
| net::TcpStream, | |||
| @@ -42,21 +42,20 @@ fn receive_reply( | |||
| connection: &mut TcpStream, | |||
| serializer: Serializer, | |||
| ) -> eyre::Result<Option<DaemonReply>> { | |||
| let raw = match tcp_receive(connection) { | |||
| Ok(raw) => raw, | |||
| Err(err) => match err.kind() { | |||
| std::io::ErrorKind::UnexpectedEof | std::io::ErrorKind::ConnectionAborted => { | |||
| return Ok(None) | |||
| } | |||
| other => { | |||
| return Err(err).with_context(|| { | |||
| let raw = | |||
| match tcp_receive(connection) { | |||
| Ok(raw) => raw, | |||
| Err(err) => match err.kind() { | |||
| std::io::ErrorKind::UnexpectedEof | std::io::ErrorKind::ConnectionAborted => { | |||
| return Ok(None); | |||
| } | |||
| other => return Err(err).with_context(|| { | |||
| format!( | |||
| "unexpected I/O error (kind {other:?}) while trying to receive DaemonReply" | |||
| ) | |||
| }) | |||
| } | |||
| }, | |||
| }; | |||
| }), | |||
| }, | |||
| }; | |||
| match serializer { | |||
| Serializer::Bincode => bincode::deserialize(&raw) | |||
| .wrap_err("failed to deserialize DaemonReply") | |||
| @@ -2,7 +2,7 @@ use dora_message::{ | |||
| daemon_to_node::DaemonReply, | |||
| node_to_daemon::{DaemonRequest, Timestamped}, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use std::{ | |||
| io::{Read, Write}, | |||
| os::unix::net::UnixStream, | |||
| @@ -42,21 +42,20 @@ fn receive_reply( | |||
| connection: &mut UnixStream, | |||
| serializer: Serializer, | |||
| ) -> eyre::Result<Option<DaemonReply>> { | |||
| let raw = match stream_receive(connection) { | |||
| Ok(raw) => raw, | |||
| Err(err) => match err.kind() { | |||
| std::io::ErrorKind::UnexpectedEof | std::io::ErrorKind::ConnectionAborted => { | |||
| return Ok(None) | |||
| } | |||
| other => { | |||
| return Err(err).with_context(|| { | |||
| let raw = | |||
| match stream_receive(connection) { | |||
| Ok(raw) => raw, | |||
| Err(err) => match err.kind() { | |||
| std::io::ErrorKind::UnexpectedEof | std::io::ErrorKind::ConnectionAborted => { | |||
| return Ok(None); | |||
| } | |||
| other => return Err(err).with_context(|| { | |||
| format!( | |||
| "unexpected I/O error (kind {other:?}) while trying to receive DaemonReply" | |||
| ) | |||
| }) | |||
| } | |||
| }, | |||
| }; | |||
| }), | |||
| }, | |||
| }; | |||
| match serializer { | |||
| Serializer::Bincode => bincode::deserialize(&raw) | |||
| .wrap_err("failed to deserialize DaemonReply") | |||
| @@ -6,18 +6,18 @@ use std::{ | |||
| }; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| daemon_to_node::{DaemonCommunication, DaemonReply, DataMessage, NodeEvent}, | |||
| id::DataId, | |||
| node_to_daemon::{DaemonRequest, Timestamped}, | |||
| DataflowId, | |||
| }; | |||
| pub use event::{Event, StopCause}; | |||
| use futures::{ | |||
| future::{select, Either}, | |||
| Stream, StreamExt, | |||
| future::{Either, select}, | |||
| }; | |||
| use futures_timer::Delay; | |||
| use scheduler::{Scheduler, NON_INPUT_EVENT}; | |||
| use scheduler::{NON_INPUT_EVENT, Scheduler}; | |||
| use self::thread::{EventItem, EventStreamThreadHandle}; | |||
| use crate::{ | |||
| @@ -28,7 +28,7 @@ use dora_core::{ | |||
| config::{Input, NodeId}, | |||
| uhlc, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| pub use scheduler::Scheduler as EventScheduler; | |||
| @@ -6,7 +6,7 @@ use dora_message::{ | |||
| daemon_to_node::{DaemonReply, NodeEvent}, | |||
| node_to_daemon::{DaemonRequest, DropToken, Timestamped}, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use flume::RecvTimeoutError; | |||
| use std::{ | |||
| sync::Arc, | |||
| @@ -84,13 +84,13 @@ pub use arrow; | |||
| pub use dora_arrow_convert::*; | |||
| pub use dora_core::{self, uhlc}; | |||
| pub use dora_message::{ | |||
| metadata::{Metadata, MetadataParameters, Parameter}, | |||
| DataflowId, | |||
| metadata::{Metadata, MetadataParameters, Parameter}, | |||
| }; | |||
| pub use event_stream::{merged, Event, EventScheduler, EventStream, StopCause}; | |||
| pub use event_stream::{Event, EventScheduler, EventStream, StopCause, merged}; | |||
| pub use flume::Receiver; | |||
| pub use futures; | |||
| pub use node::{arrow_utils, DataSample, DoraNode, ZERO_COPY_THRESHOLD}; | |||
| pub use node::{DataSample, DoraNode, ZERO_COPY_THRESHOLD, arrow_utils}; | |||
| mod daemon_connection; | |||
| mod event_stream; | |||
| @@ -6,12 +6,12 @@ use dora_core::{ | |||
| uhlc::HLC, | |||
| }; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| daemon_to_node::{DaemonCommunication, DaemonReply}, | |||
| metadata::Metadata, | |||
| node_to_daemon::{DaemonRequest, DataMessage, Timestamped}, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{bail, eyre, Context}; | |||
| use eyre::{Context, bail, eyre}; | |||
| pub(crate) struct ControlChannel { | |||
| channel: DaemonChannel, | |||
| @@ -3,11 +3,11 @@ use std::{sync::Arc, time::Duration}; | |||
| use crate::daemon_connection::DaemonChannel; | |||
| use dora_core::{config::NodeId, uhlc}; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| daemon_to_node::{DaemonCommunication, DaemonReply, NodeDropEvent}, | |||
| node_to_daemon::{DaemonRequest, DropToken, Timestamped}, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use flume::RecvTimeoutError; | |||
| pub struct DropStream { | |||
| @@ -1,4 +1,4 @@ | |||
| use crate::{daemon_connection::DaemonChannel, EventStream}; | |||
| use crate::{EventStream, daemon_connection::DaemonChannel}; | |||
| use self::{ | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| @@ -16,12 +16,12 @@ use dora_core::{ | |||
| }; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| daemon_to_node::{DaemonReply, NodeConfig}, | |||
| metadata::{ArrowTypeInfo, Metadata, MetadataParameters}, | |||
| node_to_daemon::{DaemonRequest, DataMessage, DropToken, Timestamped}, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use shared_memory_extended::{Shmem, ShmemConf}; | |||
| use std::{ | |||
| collections::{BTreeSet, HashMap, VecDeque}, | |||
| @@ -157,7 +157,9 @@ impl DoraNode { | |||
| /// [`init_from_node_id`][Self::init_from_node_id]. | |||
| pub fn init_flexible(node_id: NodeId) -> eyre::Result<(Self, EventStream)> { | |||
| if std::env::var("DORA_NODE_CONFIG").is_ok() { | |||
| info!("Skipping {node_id} specified within the node initialization in favor of `DORA_NODE_CONFIG` specified by `dora start`"); | |||
| info!( | |||
| "Skipping {node_id} specified within the node initialization in favor of `DORA_NODE_CONFIG` specified by `dora start`" | |||
| ); | |||
| Self::init_from_env() | |||
| } else { | |||
| Self::init_from_node_id(node_id) | |||
| @@ -26,7 +26,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> { | |||
| .map_err(|e| syn::Error::new(e.span(), "expected type as argument"))?; | |||
| let init = quote! { | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn dora_init_operator() -> dora_operator_api::types::DoraInitResult { | |||
| dora_operator_api::raw::dora_init_operator::<#operator_ty>() | |||
| } | |||
| @@ -37,7 +37,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> { | |||
| }; | |||
| let drop = quote! { | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn dora_drop_operator(operator_context: *mut std::ffi::c_void) | |||
| -> dora_operator_api::types::DoraResult | |||
| { | |||
| @@ -50,7 +50,7 @@ fn register_operator_impl(item: &TokenStream2) -> syn::Result<TokenStream2> { | |||
| }; | |||
| let on_event = quote! { | |||
| #[no_mangle] | |||
| #[unsafe(no_mangle)] | |||
| pub unsafe extern "C" fn dora_on_event( | |||
| event: &mut dora_operator_api::types::RawEvent, | |||
| send_output: &dora_operator_api::types::SendOutput, | |||
| @@ -23,8 +23,8 @@ pub use dora_operator_api_macros::register_operator; | |||
| pub use dora_operator_api_types as types; | |||
| pub use types::DoraStatus; | |||
| use types::{ | |||
| arrow::{self, array::Array}, | |||
| Metadata, Output, SendOutput, | |||
| arrow::{self, array::Array}, | |||
| }; | |||
| pub mod raw; | |||
| @@ -1,6 +1,6 @@ | |||
| use crate::{DoraOperator, DoraOutputSender, DoraStatus, Event}; | |||
| use dora_operator_api_types::{ | |||
| arrow, DoraInitResult, DoraResult, OnEventResult, RawEvent, SendOutput, | |||
| DoraInitResult, DoraResult, OnEventResult, RawEvent, SendOutput, arrow, | |||
| }; | |||
| use std::ffi::c_void; | |||
| @@ -1,13 +1,13 @@ | |||
| use communication_layer_request_reply::{TcpConnection, TcpRequestReplyConnection}; | |||
| use dora_core::descriptor::Descriptor; | |||
| use dora_message::{ | |||
| BuildId, | |||
| cli_to_coordinator::ControlRequest, | |||
| common::{GitSource, LogMessage}, | |||
| coordinator_to_cli::ControlRequestReply, | |||
| id::NodeId, | |||
| BuildId, | |||
| }; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| collections::BTreeMap, | |||
| net::{SocketAddr, TcpStream}, | |||
| @@ -52,11 +52,11 @@ use dora_core::{ | |||
| descriptor::{CoreNodeKind, CustomNode, Descriptor, DescriptorExt}, | |||
| topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST}, | |||
| }; | |||
| use dora_message::{descriptor::NodeSource, BuildId}; | |||
| use dora_message::{BuildId, descriptor::NodeSource}; | |||
| use eyre::Context; | |||
| use std::{collections::BTreeMap, net::IpAddr}; | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::{ | |||
| common::{connect_to_coordinator, local_working_dir, resolve_dataflow}, | |||
| session::DataflowSession, | |||
| @@ -1,10 +1,10 @@ | |||
| use super::{default_tracing, Executable}; | |||
| use crate::{common::connect_to_coordinator, LOCALHOST}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::{LOCALHOST, common::connect_to_coordinator}; | |||
| use communication_layer_request_reply::TcpRequestReplyConnection; | |||
| use dora_core::descriptor::DescriptorExt; | |||
| use dora_core::{descriptor::Descriptor, topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT}; | |||
| use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply}; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| io::{IsTerminal, Write}, | |||
| net::SocketAddr, | |||
| @@ -1,4 +1,4 @@ | |||
| use super::{default_tracing, up, Executable}; | |||
| use super::{Executable, default_tracing, up}; | |||
| use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST}; | |||
| use std::net::IpAddr; | |||
| use std::path::PathBuf; | |||
| @@ -1,9 +1,9 @@ | |||
| use std::io::Write; | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::{ | |||
| common::{connect_to_coordinator, query_running_dataflows}, | |||
| LOCALHOST, | |||
| common::{connect_to_coordinator, query_running_dataflows}, | |||
| }; | |||
| use clap::Args; | |||
| use communication_layer_request_reply::TcpRequestReplyConnection; | |||
| @@ -1,11 +1,11 @@ | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::common::{connect_to_coordinator, query_running_dataflows}; | |||
| use bat::{Input, PrettyPrinter}; | |||
| use clap::Args; | |||
| use communication_layer_request_reply::TcpRequestReplyConnection; | |||
| use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST}; | |||
| use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply}; | |||
| use eyre::{bail, Context, Result}; | |||
| use eyre::{Context, Result, bail}; | |||
| use uuid::Uuid; | |||
| #[derive(Debug, Args)] | |||
| @@ -80,9 +80,11 @@ pub fn logs( | |||
| .grid(false) | |||
| .line_numbers(false) | |||
| .paging_mode(bat::PagingMode::QuitIfOneScreen) | |||
| .inputs(vec![Input::from_bytes(&logs) | |||
| .name("Logs") | |||
| .title(format!("Logs from {node}.").as_str())]) | |||
| .inputs(vec![ | |||
| Input::from_bytes(&logs) | |||
| .name("Logs") | |||
| .title(format!("Logs from {node}.").as_str()), | |||
| ]) | |||
| .print() | |||
| .wrap_err("Something went wrong with viewing log file")?; | |||
| @@ -1,6 +1,6 @@ | |||
| use clap::Args; | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| #[derive(Debug, Args)] | |||
| /// Generate a new project or node. Choose the language from Rust, Python, C, or C++. | |||
| @@ -11,7 +11,7 @@ use crate::{ | |||
| output::print_log_message, | |||
| session::DataflowSession, | |||
| }; | |||
| use dora_daemon::{flume, Daemon, LogDestination}; | |||
| use dora_daemon::{Daemon, LogDestination, flume}; | |||
| use dora_tracing::TracingBuilder; | |||
| use eyre::Context; | |||
| use tokio::runtime::Builder; | |||
| @@ -1,6 +1,6 @@ | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use clap::Subcommand; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| #[derive(Debug, Subcommand)] | |||
| /// Dora CLI self-management commands | |||
| @@ -1,5 +1,5 @@ | |||
| use communication_layer_request_reply::{TcpConnection, TcpRequestReplyConnection}; | |||
| use dora_core::descriptor::{resolve_path, CoreNodeKind, Descriptor, DescriptorExt}; | |||
| use dora_core::descriptor::{CoreNodeKind, Descriptor, DescriptorExt, resolve_path}; | |||
| use dora_message::cli_to_coordinator::ControlRequest; | |||
| use dora_message::common::LogMessage; | |||
| use dora_message::coordinator_to_cli::ControlRequestReply; | |||
| @@ -2,7 +2,7 @@ | |||
| //! | |||
| //! The `dora start` command does not run any build commands, nor update git dependencies or similar. Use `dora build` for that. | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::{ | |||
| command::start::attach::attach_dataflow, | |||
| common::{connect_to_coordinator, local_working_dir, resolve_dataflow}, | |||
| @@ -17,7 +17,7 @@ use dora_core::{ | |||
| use dora_message::{ | |||
| cli_to_coordinator::ControlRequest, common::LogMessage, coordinator_to_cli::ControlRequestReply, | |||
| }; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| net::{IpAddr, SocketAddr, TcpStream}, | |||
| path::PathBuf, | |||
| @@ -1,11 +1,11 @@ | |||
| use super::{default_tracing, Executable}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::common::{connect_to_coordinator, handle_dataflow_result, query_running_dataflows}; | |||
| use communication_layer_request_reply::TcpRequestReplyConnection; | |||
| use dora_core::topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, LOCALHOST}; | |||
| use dora_message::cli_to_coordinator::ControlRequest; | |||
| use dora_message::coordinator_to_cli::ControlRequestReply; | |||
| use duration_str::parse; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::net::IpAddr; | |||
| use std::time::Duration; | |||
| use uuid::Uuid; | |||
| @@ -1,9 +1,9 @@ | |||
| use super::check::daemon_running; | |||
| use super::{default_tracing, Executable}; | |||
| use crate::{common::connect_to_coordinator, LOCALHOST}; | |||
| use super::{Executable, default_tracing}; | |||
| use crate::{LOCALHOST, common::connect_to_coordinator}; | |||
| use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT; | |||
| use dora_message::{cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply}; | |||
| use eyre::{bail, Context, ContextCompat}; | |||
| use eyre::{Context, ContextCompat, bail}; | |||
| use std::path::PathBuf; | |||
| use std::{fs, net::SocketAddr, path::Path, process::Command, time::Duration}; | |||
| @@ -1,12 +1,12 @@ | |||
| use crate::formatting::FormatDataflowError; | |||
| use communication_layer_request_reply::{RequestReplyLayer, TcpLayer, TcpRequestReplyConnection}; | |||
| use dora_core::descriptor::{source_is_url, Descriptor}; | |||
| use dora_core::descriptor::{Descriptor, source_is_url}; | |||
| use dora_download::download_file; | |||
| use dora_message::{ | |||
| cli_to_coordinator::ControlRequest, | |||
| coordinator_to_cli::{ControlRequestReply, DataflowList, DataflowResult}, | |||
| }; | |||
| use eyre::{bail, Context, ContextCompat}; | |||
| use eyre::{Context, ContextCompat, bail}; | |||
| use std::{ | |||
| env::current_dir, | |||
| net::SocketAddr, | |||
| @@ -65,9 +65,9 @@ pub fn lib_main(args: Args) { | |||
| use clap::Parser; | |||
| #[cfg(feature = "python")] | |||
| use pyo3::{ | |||
| pyfunction, pymodule, | |||
| Bound, PyResult, Python, pyfunction, pymodule, | |||
| types::{PyModule, PyModuleMethods}, | |||
| wrap_pyfunction, Bound, PyResult, Python, | |||
| wrap_pyfunction, | |||
| }; | |||
| #[cfg(feature = "python")] | |||
| @@ -4,7 +4,7 @@ use std::{ | |||
| }; | |||
| use dora_core::build::BuildInfo; | |||
| use dora_message::{common::GitSource, id::NodeId, BuildId, SessionId}; | |||
| use dora_message::{BuildId, SessionId, common::GitSource, id::NodeId}; | |||
| use eyre::{Context, ContextCompat}; | |||
| #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] | |||
| @@ -33,7 +33,9 @@ impl DataflowSession { | |||
| if let Ok(parsed) = deserialize(&session_file) { | |||
| return Ok(parsed); | |||
| } else { | |||
| tracing::warn!("failed to read dataflow session file, regenerating (you might need to run `dora build` again)"); | |||
| tracing::warn!( | |||
| "failed to read dataflow session file, regenerating (you might need to run `dora build` again)" | |||
| ); | |||
| } | |||
| } | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_node_api_c::HEADER_NODE_API; | |||
| use eyre::{bail, Context, ContextCompat}; | |||
| use eyre::{Context, ContextCompat, bail}; | |||
| use std::{ | |||
| fs, | |||
| path::{Path, PathBuf}, | |||
| @@ -1,4 +1,4 @@ | |||
| use eyre::{bail, Context, ContextCompat}; | |||
| use eyre::{Context, ContextCompat, bail}; | |||
| use std::{ | |||
| fs, | |||
| path::{Path, PathBuf}, | |||
| @@ -1,4 +1,4 @@ | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| fs, | |||
| path::{Path, PathBuf}, | |||
| @@ -1,4 +1,4 @@ | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| fs, | |||
| path::{Path, PathBuf}, | |||
| @@ -1,15 +1,15 @@ | |||
| use crate::{ | |||
| tcp_utils::{tcp_receive, tcp_send}, | |||
| Event, | |||
| tcp_utils::{tcp_receive, tcp_send}, | |||
| }; | |||
| use dora_message::{ | |||
| cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply, BuildId, | |||
| BuildId, cli_to_coordinator::ControlRequest, coordinator_to_cli::ControlRequestReply, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use futures::{ | |||
| FutureExt, Stream, StreamExt, | |||
| future::{self, Either}, | |||
| stream::FuturesUnordered, | |||
| FutureExt, Stream, StreamExt, | |||
| }; | |||
| use futures_concurrency::future::Race; | |||
| use std::{io::ErrorKind, net::SocketAddr}; | |||
| @@ -24,7 +24,7 @@ use uuid::Uuid; | |||
| pub(crate) async fn control_events( | |||
| control_listen_addr: SocketAddr, | |||
| tasks: &FuturesUnordered<JoinHandle<()>>, | |||
| ) -> eyre::Result<impl Stream<Item = Event>> { | |||
| ) -> eyre::Result<impl Stream<Item = Event> + use<>> { | |||
| let (tx, rx) = mpsc::channel(10); | |||
| let (finish_tx, mut finish_rx) = mpsc::channel(1); | |||
| @@ -9,6 +9,7 @@ use dora_core::{ | |||
| uhlc::{self, HLC}, | |||
| }; | |||
| use dora_message::{ | |||
| BuildId, DataflowId, SessionId, | |||
| cli_to_coordinator::ControlRequest, | |||
| common::{DaemonId, GitSource}, | |||
| coordinator_to_cli::{ | |||
| @@ -20,10 +21,9 @@ use dora_message::{ | |||
| }, | |||
| daemon_to_coordinator::{DaemonCoordinatorReply, DataflowDaemonResult}, | |||
| descriptor::{Descriptor, ResolvedNode}, | |||
| BuildId, DataflowId, SessionId, | |||
| }; | |||
| use eyre::{bail, eyre, ContextCompat, Result, WrapErr}; | |||
| use futures::{future::join_all, stream::FuturesUnordered, Future, Stream, StreamExt}; | |||
| use eyre::{ContextCompat, Result, WrapErr, bail, eyre}; | |||
| use futures::{Future, Stream, StreamExt, future::join_all, stream::FuturesUnordered}; | |||
| use futures_concurrency::stream::Merge; | |||
| use itertools::Itertools; | |||
| use log_subscriber::LogSubscriber; | |||
| @@ -122,7 +122,9 @@ fn resolve_name( | |||
| Ok(*uuid) | |||
| } else { | |||
| // TODO: Index the archived dataflows in order to return logs based on the index. | |||
| bail!("multiple archived dataflows found with name `{name}`, Please provide the UUID instead."); | |||
| bail!( | |||
| "multiple archived dataflows found with name `{name}`, Please provide the UUID instead." | |||
| ); | |||
| } | |||
| } else if let [uuid] = uuids.as_slice() { | |||
| Ok(*uuid) | |||
| @@ -285,7 +287,9 @@ async fn start_inner( | |||
| ); | |||
| } | |||
| Err(err) => { | |||
| tracing::warn!("failed to register daemon connection for daemon `{daemon_id}`: {err}"); | |||
| tracing::warn!( | |||
| "failed to register daemon connection for daemon `{daemon_id}`: {err}" | |||
| ); | |||
| } | |||
| } | |||
| } | |||
| @@ -341,7 +345,9 @@ async fn start_inner( | |||
| } | |||
| } | |||
| DataflowEvent::DataflowFinishedOnDaemon { daemon_id, result } => { | |||
| tracing::debug!("coordinator received DataflowFinishedOnDaemon ({daemon_id:?}, result: {result:?})"); | |||
| tracing::debug!( | |||
| "coordinator received DataflowFinishedOnDaemon ({daemon_id:?}, result: {result:?})" | |||
| ); | |||
| match running_dataflows.entry(uuid) { | |||
| std::collections::hash_map::Entry::Occupied(mut entry) => { | |||
| let dataflow = entry.get_mut(); | |||
| @@ -474,7 +480,9 @@ async fn start_inner( | |||
| .values() | |||
| .any(|d: &RunningDataflow| d.name.as_deref() == Some(name)) | |||
| { | |||
| bail!("there is already a running dataflow with name `{name}`"); | |||
| bail!( | |||
| "there is already a running dataflow with name `{name}`" | |||
| ); | |||
| } | |||
| } | |||
| let dataflow = start_dataflow( | |||
| @@ -875,7 +883,9 @@ async fn start_inner( | |||
| } | |||
| } | |||
| None => { | |||
| tracing::warn!("received DataflowSpawnResult, but no matching dataflow in `running_dataflows` map"); | |||
| tracing::warn!( | |||
| "received DataflowSpawnResult, but no matching dataflow in `running_dataflows` map" | |||
| ); | |||
| } | |||
| }, | |||
| Event::DataflowSpawnResult { | |||
| @@ -901,7 +911,9 @@ async fn start_inner( | |||
| }; | |||
| } | |||
| None => { | |||
| tracing::warn!("received DataflowSpawnResult, but no matching dataflow in `running_dataflows` map"); | |||
| tracing::warn!( | |||
| "received DataflowSpawnResult, but no matching dataflow in `running_dataflows` map" | |||
| ); | |||
| } | |||
| }, | |||
| } | |||
| @@ -1,4 +1,4 @@ | |||
| use crate::{tcp_utils::tcp_receive, DaemonRequest, DataflowEvent, Event}; | |||
| use crate::{DaemonRequest, DataflowEvent, Event, tcp_utils::tcp_receive}; | |||
| use dora_core::uhlc::HLC; | |||
| use dora_message::daemon_to_coordinator::{CoordinatorRequest, DaemonEvent, Timestamped}; | |||
| use eyre::Context; | |||
| @@ -12,7 +12,7 @@ pub async fn create_listener(bind: SocketAddr) -> eyre::Result<TcpListener> { | |||
| let socket = match TcpListener::bind(bind).await { | |||
| Ok(socket) => socket, | |||
| Err(err) => { | |||
| return Err(eyre::Report::new(err).wrap_err("failed to create local TCP listener")) | |||
| return Err(eyre::Report::new(err).wrap_err("failed to create local TCP listener")); | |||
| } | |||
| }; | |||
| Ok(socket) | |||
| @@ -1,18 +1,18 @@ | |||
| use crate::{ | |||
| tcp_utils::{tcp_receive, tcp_send}, | |||
| DaemonConnections, | |||
| tcp_utils::{tcp_receive, tcp_send}, | |||
| }; | |||
| use dora_core::{descriptor::DescriptorExt, uhlc::HLC}; | |||
| use dora_message::{ | |||
| BuildId, SessionId, | |||
| common::DaemonId, | |||
| coordinator_to_daemon::{DaemonCoordinatorEvent, SpawnDataflowNodes, Timestamped}, | |||
| daemon_to_coordinator::DaemonCoordinatorReply, | |||
| descriptor::{Descriptor, ResolvedNode}, | |||
| id::NodeId, | |||
| BuildId, SessionId, | |||
| }; | |||
| use eyre::{bail, eyre, ContextCompat, WrapErr}; | |||
| use eyre::{ContextCompat, WrapErr, bail, eyre}; | |||
| use itertools::Itertools; | |||
| use std::{ | |||
| collections::{BTreeMap, BTreeSet}, | |||
| @@ -1,6 +1,6 @@ | |||
| use crate::{ | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| DaemonCoordinatorEvent, | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| }; | |||
| use dora_core::uhlc::HLC; | |||
| use dora_message::{ | |||
| @@ -8,14 +8,14 @@ use dora_message::{ | |||
| coordinator_to_daemon::RegisterResult, | |||
| daemon_to_coordinator::{CoordinatorRequest, DaemonCoordinatorReply, DaemonRegisterRequest}, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use std::{io::ErrorKind, net::SocketAddr, time::Duration}; | |||
| use tokio::{ | |||
| net::TcpStream, | |||
| sync::{mpsc, oneshot}, | |||
| time::sleep, | |||
| }; | |||
| use tokio_stream::{wrappers::ReceiverStream, Stream}; | |||
| use tokio_stream::{Stream, wrappers::ReceiverStream}; | |||
| use tracing::warn; | |||
| const DAEMON_COORDINATOR_RETRY_INTERVAL: std::time::Duration = Duration::from_secs(1); | |||
| @@ -37,7 +37,9 @@ pub async fn register( | |||
| .wrap_err("failed to connect to dora-coordinator") | |||
| { | |||
| Err(err) => { | |||
| warn!("Could not connect to: {addr}, with error: {err}. Retring in {DAEMON_COORDINATOR_RETRY_INTERVAL:#?}.."); | |||
| warn!( | |||
| "Could not connect to: {addr}, with error: {err}. Retring in {DAEMON_COORDINATOR_RETRY_INTERVAL:#?}.." | |||
| ); | |||
| sleep(DAEMON_COORDINATOR_RETRY_INTERVAL).await; | |||
| } | |||
| Ok(stream) => { | |||
| @@ -5,13 +5,14 @@ use dora_core::{ | |||
| build::{self, BuildInfo, GitManager, PrevGitSource}, | |||
| config::{DataId, Input, InputMapping, NodeId, NodeRunConfig, OperatorId}, | |||
| descriptor::{ | |||
| read_as_descriptor, CoreNodeKind, Descriptor, DescriptorExt, ResolvedNode, RuntimeNode, | |||
| DYNAMIC_SOURCE, | |||
| CoreNodeKind, DYNAMIC_SOURCE, Descriptor, DescriptorExt, ResolvedNode, RuntimeNode, | |||
| read_as_descriptor, | |||
| }, | |||
| topics::LOCALHOST, | |||
| uhlc::{self, HLC}, | |||
| }; | |||
| use dora_message::{ | |||
| BuildId, DataflowId, SessionId, | |||
| common::{ | |||
| DaemonId, DataMessage, DropToken, GitSource, LogLevel, NodeError, NodeErrorCause, | |||
| NodeExitStatus, | |||
| @@ -26,11 +27,10 @@ use dora_message::{ | |||
| descriptor::NodeSource, | |||
| metadata::{self, ArrowTypeInfo}, | |||
| node_to_daemon::{DynamicNodeEvent, Timestamped}, | |||
| BuildId, DataflowId, SessionId, | |||
| }; | |||
| use dora_node_api::{arrow::datatypes::DataType, Parameter}; | |||
| use eyre::{bail, eyre, Context, ContextCompat, Result}; | |||
| use futures::{future, stream, FutureExt, TryFutureExt}; | |||
| use dora_node_api::{Parameter, arrow::datatypes::DataType}; | |||
| use eyre::{Context, ContextCompat, Result, bail, eyre}; | |||
| use futures::{FutureExt, TryFutureExt, future, stream}; | |||
| use futures_concurrency::stream::Merge; | |||
| use local_listener::DynamicNodeEventWrapper; | |||
| use log::{DaemonLogger, DataflowLogger, Logger}; | |||
| @@ -59,7 +59,7 @@ use tokio::{ | |||
| oneshot::{self, Sender}, | |||
| }, | |||
| }; | |||
| use tokio_stream::{wrappers::ReceiverStream, Stream, StreamExt}; | |||
| use tokio_stream::{Stream, StreamExt, wrappers::ReceiverStream}; | |||
| use tracing::{error, warn}; | |||
| use uuid::{NoContext, Timestamp, Uuid}; | |||
| @@ -168,7 +168,7 @@ impl Daemon { | |||
| Some(coordinator_addr), | |||
| daemon_id, | |||
| None, | |||
| clock, | |||
| clock.clone(), | |||
| Some(remote_daemon_events_tx), | |||
| Default::default(), | |||
| log_destination, | |||
| @@ -342,7 +342,9 @@ impl Daemon { | |||
| ) | |||
| .unwrap(); | |||
| if cfg!(target_os = "macos") { | |||
| warn!("disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file"); | |||
| warn!( | |||
| "disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file" | |||
| ); | |||
| zenoh_config | |||
| .insert_json5("scouting/multicast", r#"{ enabled: false }"#) | |||
| .unwrap(); | |||
| @@ -374,7 +376,9 @@ impl Daemon { | |||
| ) | |||
| .unwrap(); | |||
| if cfg!(target_os = "macos") { | |||
| warn!("disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file"); | |||
| warn!( | |||
| "disabling multicast on macos systems. Enable it with the ZENOH_CONFIG env variable or file" | |||
| ); | |||
| zenoh_config | |||
| .insert_json5("scouting/multicast", r#"{ enabled: false }"#) | |||
| .unwrap(); | |||
| @@ -524,7 +528,9 @@ impl Daemon { | |||
| if let Some(dataflow) = self.running.get_mut(&dataflow_id) { | |||
| dataflow.running_nodes.insert(node_id, running_node); | |||
| } else { | |||
| tracing::error!("failed to handle SpawnNodeResult: no running dataflow with ID {dataflow_id}"); | |||
| tracing::error!( | |||
| "failed to handle SpawnNodeResult: no running dataflow with ID {dataflow_id}" | |||
| ); | |||
| } | |||
| } | |||
| Err(error) => { | |||
| @@ -1000,7 +1006,7 @@ impl Daemon { | |||
| dataflow_descriptor: Descriptor, | |||
| local_nodes: BTreeSet<NodeId>, | |||
| uv: bool, | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<BuildInfo>>> { | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<BuildInfo>> + use<>> { | |||
| let builder = build::Builder { | |||
| session_id, | |||
| base_working_dir, | |||
| @@ -1095,7 +1101,7 @@ impl Daemon { | |||
| dataflow_descriptor: Descriptor, | |||
| spawn_nodes: BTreeSet<NodeId>, | |||
| uv: bool, | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<()>>> { | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<()>> + use<>> { | |||
| let mut logger = self | |||
| .logger | |||
| .for_dataflow(dataflow_id) | |||
| @@ -1270,18 +1276,20 @@ impl Daemon { | |||
| let finished_or_next = | |||
| futures::future::select(finished, subscriber.recv_async()); | |||
| match finished_or_next.await { | |||
| future::Either::Left((finished, _)) => { | |||
| match finished { | |||
| Err(broadcast::error::RecvError::Closed) => { | |||
| tracing::debug!("dataflow finished, breaking from zenoh subscribe task"); | |||
| break; | |||
| } | |||
| other => { | |||
| tracing::warn!("unexpected return value of dataflow finished_rx channel: {other:?}"); | |||
| break; | |||
| } | |||
| future::Either::Left((finished, _)) => match finished { | |||
| Err(broadcast::error::RecvError::Closed) => { | |||
| tracing::debug!( | |||
| "dataflow finished, breaking from zenoh subscribe task" | |||
| ); | |||
| break; | |||
| } | |||
| } | |||
| other => { | |||
| tracing::warn!( | |||
| "unexpected return value of dataflow finished_rx channel: {other:?}" | |||
| ); | |||
| break; | |||
| } | |||
| }, | |||
| future::Either::Right((sample, f)) => { | |||
| finished = f; | |||
| let event = sample.map_err(|e| eyre!(e)).and_then(|s| { | |||
| @@ -137,7 +137,7 @@ async fn receive_message( | |||
| | ErrorKind::ConnectionReset => return Ok(None), | |||
| _other => { | |||
| return Err(err) | |||
| .context("unexpected I/O error while trying to receive DaemonRequest") | |||
| .context("unexpected I/O error while trying to receive DaemonRequest"); | |||
| } | |||
| }, | |||
| }; | |||
| @@ -10,9 +10,9 @@ use dora_core::{ | |||
| uhlc, | |||
| }; | |||
| use dora_message::{ | |||
| BuildId, | |||
| common::{DaemonId, LogLevel, LogMessage, Timestamped}, | |||
| daemon_to_coordinator::{CoordinatorRequest, DaemonEvent}, | |||
| BuildId, | |||
| }; | |||
| use eyre::Context; | |||
| use flume::Sender; | |||
| @@ -5,13 +5,13 @@ use dora_core::{ | |||
| uhlc, | |||
| }; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| common::{DropToken, Timestamped}, | |||
| daemon_to_node::{DaemonCommunication, DaemonReply, NodeDropEvent, NodeEvent}, | |||
| node_to_daemon::DaemonRequest, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use futures::{future, task, Future}; | |||
| use eyre::{Context, eyre}; | |||
| use futures::{Future, future, task}; | |||
| use shared_memory_server::{ShmemConf, ShmemServer}; | |||
| use std::{ | |||
| collections::{BTreeMap, VecDeque}, | |||
| @@ -50,7 +50,7 @@ pub async fn spawn_listener_loop( | |||
| Err(err) => { | |||
| return Err( | |||
| eyre::Report::new(err).wrap_err("failed to create local TCP listener") | |||
| ) | |||
| ); | |||
| } | |||
| }; | |||
| let socket_addr = socket | |||
| @@ -157,7 +157,7 @@ pub async fn spawn_listener_loop( | |||
| Ok(socket) => socket, | |||
| Err(err) => { | |||
| return Err(eyre::Report::new(err) | |||
| .wrap_err("failed to create local Unix domain socket")) | |||
| .wrap_err("failed to create local Unix domain socket")); | |||
| } | |||
| }; | |||
| @@ -2,8 +2,8 @@ use std::{collections::BTreeMap, io::ErrorKind, sync::Arc}; | |||
| use super::{Connection, Listener}; | |||
| use crate::{ | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| Event, | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| }; | |||
| use dora_core::{config::DataId, uhlc::HLC}; | |||
| use dora_message::{ | |||
| @@ -70,7 +70,7 @@ impl Connection for TcpConnection { | |||
| | ErrorKind::ConnectionReset => return Ok(None), | |||
| _other => { | |||
| return Err(err) | |||
| .context("unexpected I/O error while trying to receive DaemonRequest") | |||
| .context("unexpected I/O error while trying to receive DaemonRequest"); | |||
| } | |||
| }, | |||
| }; | |||
| @@ -11,8 +11,8 @@ use tokio::{ | |||
| }; | |||
| use crate::{ | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| Event, | |||
| socket_stream_utils::{socket_stream_receive, socket_stream_send}, | |||
| }; | |||
| use super::{Connection, Listener}; | |||
| @@ -68,7 +68,7 @@ impl Connection for UnixConnection { | |||
| | ErrorKind::ConnectionReset => return Ok(None), | |||
| _other => { | |||
| return Err(err) | |||
| .context("unexpected I/O error while trying to receive DaemonRequest") | |||
| .context("unexpected I/O error while trying to receive DaemonRequest"); | |||
| } | |||
| }, | |||
| }; | |||
| @@ -2,18 +2,18 @@ use std::collections::{BTreeSet, HashMap, HashSet}; | |||
| use dora_core::{ | |||
| config::NodeId, | |||
| uhlc::{Timestamp, HLC}, | |||
| uhlc::{HLC, Timestamp}, | |||
| }; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| common::DaemonId, | |||
| daemon_to_coordinator::{CoordinatorRequest, DaemonEvent, LogLevel, LogMessage, Timestamped}, | |||
| daemon_to_node::DaemonReply, | |||
| DataflowId, | |||
| }; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use tokio::{net::TcpStream, sync::oneshot}; | |||
| use crate::{log::DataflowLogger, socket_stream_utils::socket_stream_send, CascadingErrorCauses}; | |||
| use crate::{CascadingErrorCauses, log::DataflowLogger, socket_stream_utils::socket_stream_send}; | |||
| pub struct PendingNodes { | |||
| dataflow_id: DataflowId, | |||
| @@ -1,7 +1,8 @@ | |||
| use crate::{ | |||
| CoreNodeKindExt, DoraEvent, Event, OutputId, RunningNode, | |||
| log::{self, NodeLogger}, | |||
| node_communication::spawn_listener_loop, | |||
| node_inputs, CoreNodeKindExt, DoraEvent, Event, OutputId, RunningNode, | |||
| node_inputs, | |||
| }; | |||
| use aligned_vec::{AVec, ConstAlign}; | |||
| use crossbeam::queue::ArrayQueue; | |||
| @@ -9,26 +10,26 @@ use dora_arrow_convert::IntoArrow; | |||
| use dora_core::{ | |||
| config::DataId, | |||
| descriptor::{ | |||
| resolve_path, source_is_url, Descriptor, OperatorDefinition, OperatorSource, PythonSource, | |||
| ResolvedNode, ResolvedNodeExt, DYNAMIC_SOURCE, SHELL_SOURCE, | |||
| DYNAMIC_SOURCE, Descriptor, OperatorDefinition, OperatorSource, PythonSource, ResolvedNode, | |||
| ResolvedNodeExt, SHELL_SOURCE, resolve_path, source_is_url, | |||
| }, | |||
| get_python_path, | |||
| uhlc::HLC, | |||
| }; | |||
| use dora_download::download_file; | |||
| use dora_message::{ | |||
| DataflowId, | |||
| common::{LogLevel, LogMessage}, | |||
| daemon_to_coordinator::{DataMessage, NodeExitStatus, Timestamped}, | |||
| daemon_to_node::{NodeConfig, RuntimeConfig}, | |||
| id::NodeId, | |||
| DataflowId, | |||
| }; | |||
| use dora_node_api::{ | |||
| Metadata, | |||
| arrow::array::ArrayData, | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| Metadata, | |||
| }; | |||
| use eyre::{bail, ContextCompat, WrapErr}; | |||
| use eyre::{ContextCompat, WrapErr, bail}; | |||
| use std::{ | |||
| future::Future, | |||
| path::{Path, PathBuf}, | |||
| @@ -59,7 +60,7 @@ impl Spawner { | |||
| node_working_dir: PathBuf, | |||
| node_stderr_most_recent: Arc<ArrayQueue<String>>, | |||
| logger: &mut NodeLogger<'_>, | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<PreparedNode>>> { | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<PreparedNode>> + use<>> { | |||
| let dataflow_id = self.dataflow_id; | |||
| let node_id = node.id.clone(); | |||
| logger | |||
| @@ -221,9 +222,9 @@ impl Spawner { | |||
| cmd.arg("run"); | |||
| cmd.arg("python"); | |||
| tracing::info!( | |||
| "spawning: uv run python -uc import dora; dora.start_runtime() # {}", | |||
| node.id | |||
| ); | |||
| "spawning: uv run python -uc import dora; dora.start_runtime() # {}", | |||
| node.id | |||
| ); | |||
| cmd | |||
| } else { | |||
| let python = get_python_path() | |||
| @@ -379,7 +380,7 @@ impl PreparedNode { | |||
| return Ok(RunningNode { | |||
| pid: None, | |||
| node_config: self.node_config, | |||
| }) | |||
| }); | |||
| } | |||
| }; | |||
| @@ -8,10 +8,10 @@ use dora_message::daemon_to_node::{NodeConfig, RuntimeConfig}; | |||
| use dora_metrics::run_metrics_monitor; | |||
| use dora_node_api::{DoraNode, Event}; | |||
| use dora_tracing::TracingBuilder; | |||
| use eyre::{bail, Context, Result}; | |||
| use eyre::{Context, Result, bail}; | |||
| use futures::{Stream, StreamExt}; | |||
| use futures_concurrency::stream::Merge; | |||
| use operator::{run_operator, OperatorEvent, StopReason}; | |||
| use operator::{OperatorEvent, StopReason, run_operator}; | |||
| use std::{ | |||
| collections::{BTreeMap, BTreeSet, HashMap}, | |||
| @@ -211,7 +211,9 @@ async fn run( | |||
| OperatorEvent::AllocateOutputSample { len, sample: tx } => { | |||
| let sample = node.allocate_data_sample(len); | |||
| if tx.send(sample).is_err() { | |||
| tracing::warn!("output sample requested, but operator {operator_id} exited already"); | |||
| tracing::warn!( | |||
| "output sample requested, but operator {operator_id} exited already" | |||
| ); | |||
| } | |||
| } | |||
| OperatorEvent::Output { | |||
| @@ -309,7 +311,10 @@ async fn run( | |||
| open_inputs.remove(&input_id); | |||
| if open_inputs.is_empty() { | |||
| // all inputs of the node were closed -> close its event channel | |||
| tracing::trace!("all inputs of operator {}/{operator_id} were closed -> closing event channel", node.id()); | |||
| tracing::trace!( | |||
| "all inputs of operator {}/{operator_id} were closed -> closing event channel", | |||
| node.id() | |||
| ); | |||
| open_operator_inputs.remove(&operator_id); | |||
| operator_channels.remove(&operator_id); | |||
| } | |||
| @@ -1,8 +1,8 @@ | |||
| use dora_core::config::DataId; | |||
| use dora_node_api::Event; | |||
| use futures::{ | |||
| future::{self, FusedFuture}, | |||
| FutureExt, | |||
| future::{self, FusedFuture}, | |||
| }; | |||
| use std::collections::{BTreeMap, VecDeque}; | |||
| @@ -3,21 +3,20 @@ | |||
| use super::{OperatorEvent, StopReason}; | |||
| use dora_core::{ | |||
| config::{NodeId, OperatorId}, | |||
| descriptor::{source_is_url, Descriptor, PythonSource}, | |||
| descriptor::{Descriptor, PythonSource, source_is_url}, | |||
| }; | |||
| use dora_download::download_file; | |||
| use dora_node_api::{merged::MergedEvent, Event, Parameter}; | |||
| use dora_node_api::{Event, Parameter, merged::MergedEvent}; | |||
| use dora_operator_api_python::PyEvent; | |||
| use dora_operator_api_types::DoraStatus; | |||
| use eyre::{bail, eyre, Context, Result}; | |||
| use eyre::{Context, Result, bail, eyre}; | |||
| use pyo3::ffi::c_str; | |||
| use pyo3::{ | |||
| pyclass, | |||
| Py, PyAny, Python, pyclass, | |||
| types::{IntoPyDict, PyAnyMethods, PyDict, PyDictMethods, PyTracebackMethods}, | |||
| Py, PyAny, Python, | |||
| }; | |||
| use std::{ | |||
| panic::{catch_unwind, AssertUnwindSafe}, | |||
| panic::{AssertUnwindSafe, catch_unwind}, | |||
| path::Path, | |||
| }; | |||
| use tokio::sync::{mpsc::Sender, oneshot}; | |||
| @@ -295,16 +294,15 @@ mod callback_impl { | |||
| use dora_core::metadata::ArrowTypeInfoExt; | |||
| use dora_message::metadata::ArrowTypeInfo; | |||
| use dora_node_api::{ | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| ZERO_COPY_THRESHOLD, | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| }; | |||
| use dora_operator_api_python::pydict_to_metadata; | |||
| use dora_tracing::telemetry::deserialize_context; | |||
| use eyre::{eyre, Context, Result}; | |||
| use eyre::{Context, Result, eyre}; | |||
| use pyo3::{ | |||
| pymethods, | |||
| Bound, PyObject, Python, pymethods, | |||
| types::{PyBytes, PyBytesMethods, PyDict}, | |||
| Bound, PyObject, Python, | |||
| }; | |||
| use tokio::sync::oneshot; | |||
| use tracing::{field, span}; | |||
| @@ -7,19 +7,19 @@ use dora_core::{ | |||
| }; | |||
| use dora_download::download_file; | |||
| use dora_node_api::{ | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| Event, Parameter, | |||
| arrow_utils::{copy_array_into_sample, required_data_size}, | |||
| }; | |||
| use dora_operator_api_types::{ | |||
| safer_ffi::closure::ArcDynFn1, DoraDropOperator, DoraInitOperator, DoraInitResult, DoraOnEvent, | |||
| DoraResult, DoraStatus, Metadata, OnEventResult, Output, SendOutput, | |||
| DoraDropOperator, DoraInitOperator, DoraInitResult, DoraOnEvent, DoraResult, DoraStatus, | |||
| Metadata, OnEventResult, Output, SendOutput, safer_ffi::closure::ArcDynFn1, | |||
| }; | |||
| use eyre::{bail, eyre, Context, Result}; | |||
| use eyre::{Context, Result, bail, eyre}; | |||
| use libloading::Symbol; | |||
| use std::{ | |||
| collections::BTreeMap, | |||
| ffi::c_void, | |||
| panic::{catch_unwind, AssertUnwindSafe}, | |||
| panic::{AssertUnwindSafe, catch_unwind}, | |||
| path::Path, | |||
| sync::Arc, | |||
| }; | |||
| @@ -1,7 +1,7 @@ | |||
| [package] | |||
| name = "aloha-client" | |||
| version = "0.1.0" | |||
| edition = "2021" | |||
| edition = "2024" | |||
| # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html | |||
| @@ -1,7 +1,7 @@ | |||
| [package] | |||
| name = "aloha-teleop" | |||
| version = "0.1.0" | |||
| edition = "2021" | |||
| edition = "2024" | |||
| # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html | |||
| @@ -1,4 +1,4 @@ | |||
| use dora_node_api::{self, dora_core::config::DataId, DoraNode}; | |||
| use dora_node_api::{self, DoraNode, dora_core::config::DataId}; | |||
| use eyre::Context; | |||
| use rand::RngCore; | |||
| use std::time::Duration; | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{env::consts::EXE_SUFFIX, path::Path, process::Command}; | |||
| struct ArrowConfig { | |||
| @@ -72,18 +72,20 @@ async fn main() -> eyre::Result<()> { | |||
| fn find_arrow_config() -> eyre::Result<ArrowConfig> { | |||
| let output = Command::new("pkg-config") | |||
| .args(&["--cflags", "arrow"]) | |||
| .args(["--cflags", "arrow"]) | |||
| .output() | |||
| .wrap_err("Failed to run pkg-config. Make sure Arrow C++ is installed")?; | |||
| if !output.status.success() { | |||
| bail!("Arrow C++ not found via pkg-config. Make sure it's installed and in your PKG_CONFIG_PATH"); | |||
| bail!( | |||
| "Arrow C++ not found via pkg-config. Make sure it's installed and in your PKG_CONFIG_PATH" | |||
| ); | |||
| } | |||
| let cflags = String::from_utf8(output.stdout)?.trim().to_string(); | |||
| let output = Command::new("pkg-config") | |||
| .args(&["--libs", "arrow"]) | |||
| .args(["--libs", "arrow"]) | |||
| .output() | |||
| .wrap_err("Failed to get Arrow library flags")?; | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| env::consts::{DLL_PREFIX, DLL_SUFFIX, EXE_SUFFIX}, | |||
| path::Path, | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{env::consts::EXE_SUFFIX, path::Path}; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| env::consts::{DLL_PREFIX, DLL_SUFFIX, EXE_SUFFIX}, | |||
| path::Path, | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,4 +1,4 @@ | |||
| use dora_node_api::{self, dora_core::config::DataId, DoraNode, Event, IntoArrow}; | |||
| use dora_node_api::{self, DoraNode, Event, IntoArrow, dora_core::config::DataId}; | |||
| fn main() -> eyre::Result<()> { | |||
| println!("hello"); | |||
| @@ -1,7 +1,7 @@ | |||
| #![warn(unsafe_op_in_unsafe_fn)] | |||
| use dora_operator_api::{ | |||
| register_operator, DoraOperator, DoraOutputSender, DoraStatus, Event, IntoArrow, | |||
| DoraOperator, DoraOutputSender, DoraStatus, Event, IntoArrow, register_operator, | |||
| }; | |||
| register_operator!(ExampleOperator); | |||
| @@ -1,7 +1,7 @@ | |||
| use dora_cli::session::DataflowSession; | |||
| use dora_coordinator::{ControlEvent, Event}; | |||
| use dora_core::{ | |||
| descriptor::{read_as_descriptor, DescriptorExt}, | |||
| descriptor::{DescriptorExt, read_as_descriptor}, | |||
| topics::{DORA_COORDINATOR_PORT_CONTROL_DEFAULT, DORA_COORDINATOR_PORT_DEFAULT}, | |||
| }; | |||
| use dora_message::{ | |||
| @@ -10,7 +10,7 @@ use dora_message::{ | |||
| coordinator_to_cli::{ControlRequestReply, DataflowIdAndName}, | |||
| }; | |||
| use dora_tracing::TracingBuilder; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::{ | |||
| collections::BTreeSet, | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_node_api::{self, DoraNode, Event}; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| fn main() -> eyre::Result<()> { | |||
| let (_node, mut events) = DoraNode::init_from_env()?; | |||
| @@ -16,7 +16,9 @@ fn main() -> eyre::Result<()> { | |||
| TryFrom::try_from(&data).context("expected string message")?; | |||
| println!("sink received message: {received_string}"); | |||
| if !received_string.starts_with("operator received random value ") { | |||
| bail!("unexpected message format (should start with 'operator received random value')") | |||
| bail!( | |||
| "unexpected message format (should start with 'operator received random value')" | |||
| ) | |||
| } | |||
| if !received_string.ends_with(" ticks") { | |||
| bail!("unexpected message format (should end with 'ticks')") | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,4 +1,4 @@ | |||
| use dora_node_api::{self, dora_core::config::DataId, DoraNode, Event, IntoArrow}; | |||
| use dora_node_api::{self, DoraNode, Event, IntoArrow, dora_core::config::DataId}; | |||
| fn main() -> eyre::Result<()> { | |||
| println!("hello"); | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_node_api::{self, dora_core::config::NodeId, DoraNode, Event}; | |||
| use eyre::{bail, Context}; | |||
| use dora_node_api::{self, DoraNode, Event, dora_core::config::NodeId}; | |||
| use eyre::{Context, bail}; | |||
| fn main() -> eyre::Result<()> { | |||
| let (_node, mut events) = | |||
| @@ -17,7 +17,9 @@ fn main() -> eyre::Result<()> { | |||
| TryFrom::try_from(&data).context("expected string message")?; | |||
| println!("sink received message: {received_string}"); | |||
| if !received_string.starts_with("operator received random value ") { | |||
| bail!("unexpected message format (should start with 'operator received random value')") | |||
| bail!( | |||
| "unexpected message format (should start with 'operator received random value')" | |||
| ) | |||
| } | |||
| if !received_string.ends_with(" ticks") { | |||
| bail!("unexpected message format (should end with 'ticks')") | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_node_api::{self, DoraNode, Event}; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| fn main() -> eyre::Result<()> { | |||
| let (_node, mut events) = DoraNode::init_from_env()?; | |||
| @@ -16,7 +16,9 @@ fn main() -> eyre::Result<()> { | |||
| TryFrom::try_from(&data).context("expected string message")?; | |||
| println!("sink received message: {received_string}"); | |||
| if !received_string.starts_with("operator received random value ") { | |||
| bail!("unexpected message format (should start with 'operator received random value')") | |||
| bail!( | |||
| "unexpected message format (should start with 'operator received random value')" | |||
| ) | |||
| } | |||
| if !received_string.ends_with(" ticks") { | |||
| bail!("unexpected message format (should end with 'ticks')") | |||
| @@ -1,4 +1,4 @@ | |||
| use dora_node_api::{self, dora_core::config::DataId, DoraNode, Event, IntoArrow}; | |||
| use dora_node_api::{self, DoraNode, Event, IntoArrow, dora_core::config::DataId}; | |||
| use eyre::Context; | |||
| fn main() -> eyre::Result<()> { | |||
| @@ -1,10 +1,9 @@ | |||
| use std::time::Duration; | |||
| use dora_node_api::{ | |||
| self, | |||
| self, DoraNode, Event, | |||
| dora_core::config::DataId, | |||
| merged::{MergeExternal, MergedEvent}, | |||
| DoraNode, Event, | |||
| }; | |||
| use dora_ros2_bridge::{ | |||
| messages::{ | |||
| @@ -12,10 +11,10 @@ use dora_ros2_bridge::{ | |||
| geometry_msgs::msg::{Twist, Vector3}, | |||
| turtlesim::msg::Pose, | |||
| }, | |||
| ros2_client::{self, ros2, NodeOptions}, | |||
| ros2_client::{self, NodeOptions, ros2}, | |||
| rustdds::{self, policy}, | |||
| }; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| use futures::task::SpawnExt; | |||
| fn main() -> eyre::Result<()> { | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, Context}; | |||
| use eyre::{Context, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -1,6 +1,6 @@ | |||
| use dora_core::{get_uv_path, run}; | |||
| use dora_tracing::set_up_tracing; | |||
| use eyre::{bail, WrapErr}; | |||
| use eyre::{WrapErr, bail}; | |||
| use std::path::Path; | |||
| #[tokio::main] | |||
| @@ -274,7 +274,7 @@ where | |||
| #[cfg(test)] | |||
| mod tests { | |||
| use arrow::array::{make_array, PrimitiveArray}; | |||
| use arrow::array::{PrimitiveArray, make_array}; | |||
| use crate::ArrowData; | |||
| @@ -1,8 +1,8 @@ | |||
| use crate::IntoArrow; | |||
| use arrow::array::{PrimitiveArray, StringArray, TimestampNanosecondArray}; | |||
| use arrow::datatypes::{ | |||
| ArrowPrimitiveType, ArrowTimestampType, Float16Type, Float32Type, Float64Type, Int16Type, | |||
| Int32Type, Int64Type, Int8Type, UInt16Type, UInt32Type, UInt64Type, UInt8Type, | |||
| ArrowPrimitiveType, ArrowTimestampType, Float16Type, Float32Type, Float64Type, Int8Type, | |||
| Int16Type, Int32Type, Int64Type, UInt8Type, UInt16Type, UInt32Type, UInt64Type, | |||
| }; | |||
| use chrono::{NaiveDate, NaiveDateTime, NaiveTime}; | |||
| use half::f16; | |||
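The hunk above shows the other half of version sorting: `Int8Type` now precedes `Int16Type` and `Int32Type`, because embedded digit runs are compared as numbers rather than character by character. A toy comparator (an illustration of the rule, not rustfmt's actual implementation) makes the ordering concrete:

```rust
// Split an identifier into alternating non-digit / digit chunks and compare
// digit chunks numerically; everything else falls back to plain ASCII order.
fn version_key(s: &str) -> Vec<(Option<u128>, String)> {
    let mut key = Vec::new();
    let mut chars = s.chars().peekable();
    while let Some(&c) = chars.peek() {
        let digits = c.is_ascii_digit();
        let mut chunk = String::new();
        while let Some(&c) = chars.peek() {
            if c.is_ascii_digit() != digits {
                break;
            }
            chunk.push(c);
            chars.next();
        }
        let num = digits.then(|| chunk.parse().unwrap());
        key.push((num, chunk));
    }
    key
}

fn main() {
    let mut names = ["Int16Type", "Int8Type", "UInt8Type", "Int64Type", "Int32Type"];
    names.sort_by_key(|n| version_key(n));
    // Prints ["Int8Type", "Int16Type", "Int32Type", "Int64Type", "UInt8Type"]
    println!("{names:?}");
}
```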
| @@ -3,11 +3,11 @@ | |||
| #![warn(missing_docs)] | |||
| use arrow::array::{ | |||
| Array, Float32Array, Float64Array, Int16Array, Int32Array, Int64Array, Int8Array, UInt16Array, | |||
| UInt32Array, UInt8Array, | |||
| Array, Float32Array, Float64Array, Int8Array, Int16Array, Int32Array, Int64Array, UInt8Array, | |||
| UInt16Array, UInt32Array, | |||
| }; | |||
| use arrow::datatypes::DataType; | |||
| use eyre::{eyre, ContextCompat, Result}; | |||
| use eyre::{ContextCompat, Result, eyre}; | |||
| use num::NumCast; | |||
| use std::ops::{Deref, DerefMut}; | |||
| @@ -4,7 +4,7 @@ use super::{CommunicationLayer, Publisher, Subscriber}; | |||
| use crate::{BoxError, ReceivedSample}; | |||
| use std::{borrow::Cow, sync::Arc, time::Duration}; | |||
| use zenoh::{ | |||
| prelude::{sync::SyncResolve, Config, Priority, SessionDeclarations, SplitBuffer}, | |||
| prelude::{Config, Priority, SessionDeclarations, SplitBuffer, sync::SyncResolve}, | |||
| publication::CongestionControl, | |||
| }; | |||
| @@ -28,10 +28,10 @@ pub trait RequestReplyLayer: Send + Sync { | |||
| Item = Result< | |||
| Box< | |||
| dyn ListenConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| >, | |||
| Self::Error, | |||
| >, | |||
| @@ -47,10 +47,10 @@ pub trait RequestReplyLayer: Send + Sync { | |||
| ) -> Result< | |||
| Box< | |||
| dyn RequestReplyConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| >, | |||
| Self::Error, | |||
| >; | |||
| @@ -37,10 +37,10 @@ impl RequestReplyLayer for TcpLayer { | |||
| Item = Result< | |||
| Box< | |||
| dyn crate::ListenConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| >, | |||
| Self::Error, | |||
| >, | |||
| @@ -56,10 +56,10 @@ impl RequestReplyLayer for TcpLayer { | |||
| r.map(|stream| { | |||
| let connection: Box< | |||
| dyn ListenConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| > = Box::new(TcpConnection { stream }); | |||
| connection | |||
| }) | |||
| @@ -74,20 +74,20 @@ impl RequestReplyLayer for TcpLayer { | |||
| ) -> Result< | |||
| Box< | |||
| dyn crate::RequestReplyConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| >, | |||
| Self::Error, | |||
| > { | |||
| TcpStream::connect(addr).map(|s| { | |||
| let connection: Box< | |||
| dyn RequestReplyConnection< | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| RequestData = Self::RequestData, | |||
| ReplyData = Self::ReplyData, | |||
| Error = Self::Error, | |||
| >, | |||
| > = Box::new(TcpConnection { stream: s }); | |||
| connection | |||
| }) | |||
| @@ -6,7 +6,7 @@ use std::{ | |||
| }; | |||
| use dora_message::descriptor::EnvValue; | |||
| use eyre::{eyre, Context}; | |||
| use eyre::{Context, eyre}; | |||
| pub fn run_build_command( | |||
| build: &str, | |||
| @@ -1,6 +1,6 @@ | |||
| use crate::build::{BuildLogger, PrevGitSource}; | |||
| use dora_message::{common::LogLevel, DataflowId, SessionId}; | |||
| use eyre::{bail, ContextCompat, WrapErr}; | |||
| use dora_message::{DataflowId, SessionId, common::LogLevel}; | |||
| use eyre::{ContextCompat, WrapErr, bail}; | |||
| use git2::FetchOptions; | |||
| use itertools::Itertools; | |||
| use std::{ | |||
| @@ -5,10 +5,10 @@ use std::{collections::BTreeMap, future::Future, path::PathBuf}; | |||
| use crate::descriptor::ResolvedNode; | |||
| use dora_message::{ | |||
| SessionId, | |||
| common::{GitSource, LogLevel}, | |||
| descriptor::{CoreNodeKind, EnvValue}, | |||
| id::NodeId, | |||
| SessionId, | |||
| }; | |||
| use eyre::Context; | |||
| @@ -27,14 +27,17 @@ pub struct Builder { | |||
| } | |||
| impl Builder { | |||
| pub async fn build_node( | |||
| pub async fn build_node<L>( | |||
| self, | |||
| node: ResolvedNode, | |||
| git: Option<GitSource>, | |||
| prev_git: Option<PrevGitSource>, | |||
| mut logger: impl BuildLogger, | |||
| mut logger: L, | |||
| git_manager: &mut GitManager, | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<BuiltNode>>> { | |||
| ) -> eyre::Result<impl Future<Output = eyre::Result<BuiltNode>> + use<L>> | |||
| where | |||
| L: BuildLogger, | |||
| { | |||
| let prepared_git = if let Some(GitSource { repo, commit_hash }) = git { | |||
| let target_dir = self.base_working_dir.join("git"); | |||
| let git_folder = git_manager.choose_clone_dir( | |||
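The `build_node` hunk is an edition-2024 change of its own rather than a reformat: in Rust 2024, a return-position `impl Trait` captures all in-scope generic lifetimes by default, so the returned future would also hold the `&mut GitManager` borrow. The `+ use<L>` bound restores an explicit capture list, and because an argument-position `impl BuildLogger` parameter cannot be named in `use<..>`, the argument becomes a named generic `L`. A standalone sketch of the syntax (the types below are made up, not the dora builder API):

```rust
use std::future::Future;

// Hypothetical stand-ins for the real builder types.
trait Logger {
    fn log(&mut self, msg: &str);
}
struct Registry {
    built: u32,
}

// Without `use<L>`, an edition-2024 RPIT would also capture the anonymous
// lifetime of `&mut Registry`, keeping it borrowed while the returned future
// is alive. Listing only `L` excludes that lifetime from the capture set.
fn build_task<L>(mut logger: L, registry: &mut Registry) -> impl Future<Output = u32> + use<L>
where
    L: Logger,
{
    registry.built += 1; // the borrow is only used before returning
    let id = registry.built;
    async move {
        logger.log("building node");
        id
    }
}

struct PrintLogger;
impl Logger for PrintLogger {
    fn log(&mut self, msg: &str) {
        println!("[build] {msg}");
    }
}

fn main() {
    let mut registry = Registry { built: 0 };
    let fut = build_task(PrintLogger, &mut registry);
    // `registry` is usable again here because the future did not capture the borrow.
    registry.built += 0;
    let _ = fut; // a real caller would await this on some executor
}
```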
| @@ -3,7 +3,7 @@ use dora_message::{ | |||
| descriptor::{GitRepoRev, NodeSource}, | |||
| id::{DataId, NodeId, OperatorId}, | |||
| }; | |||
| use eyre::{bail, Context, OptionExt, Result}; | |||
| use eyre::{Context, OptionExt, Result, bail}; | |||
| use std::{ | |||
| collections::{BTreeMap, HashMap}, | |||
| env::consts::EXE_EXTENSION, | |||
| @@ -14,9 +14,9 @@ use tokio::process::Command; | |||
| // reexport for compatibility | |||
| pub use dora_message::descriptor::{ | |||
| CoreNodeKind, CustomNode, Descriptor, Node, OperatorConfig, OperatorDefinition, OperatorSource, | |||
| PythonSource, ResolvedNode, RuntimeNode, SingleOperatorDefinition, DYNAMIC_SOURCE, | |||
| SHELL_SOURCE, | |||
| CoreNodeKind, CustomNode, DYNAMIC_SOURCE, Descriptor, Node, OperatorConfig, OperatorDefinition, | |||
| OperatorSource, PythonSource, ResolvedNode, RuntimeNode, SHELL_SOURCE, | |||
| SingleOperatorDefinition, | |||
| }; | |||
| pub use validate::ResolvedNodeExt; | |||
| pub use visualize::collect_dora_timers; | |||
| @@ -165,7 +165,9 @@ fn node_kind_mut(node: &mut Node) -> eyre::Result<NodeKindMut> { | |||
| (None, Some(tag), None) => Some(GitRepoRev::Tag(tag.clone())), | |||
| (None, None, Some(rev)) => Some(GitRepoRev::Rev(rev.clone())), | |||
| other @ (_, _, _) => { | |||
| eyre::bail!("only one of `branch`, `tag`, and `rev` are allowed (got {other:?})") | |||
| eyre::bail!( | |||
| "only one of `branch`, `tag`, and `rev` are allowed (got {other:?})" | |||
| ) | |||
| } | |||
| }; | |||
| NodeSource::GitBranch { | |||
| @@ -6,14 +6,14 @@ use crate::{ | |||
| use dora_message::{ | |||
| config::{Input, InputMapping, UserInputMapping}, | |||
| descriptor::{CoreNodeKind, OperatorSource, ResolvedNode, DYNAMIC_SOURCE, SHELL_SOURCE}, | |||
| descriptor::{CoreNodeKind, DYNAMIC_SOURCE, OperatorSource, ResolvedNode, SHELL_SOURCE}, | |||
| id::{DataId, NodeId, OperatorId}, | |||
| }; | |||
| use eyre::{bail, eyre, Context}; | |||
| use eyre::{Context, bail, eyre}; | |||
| use std::{collections::BTreeMap, path::Path, process::Command}; | |||
| use tracing::info; | |||
| use super::{resolve_path, Descriptor, DescriptorExt}; | |||
| use super::{Descriptor, DescriptorExt, resolve_path}; | |||
| const VERSION: &str = env!("CARGO_PKG_VERSION"); | |||
| pub fn check_dataflow( | |||
| @@ -145,9 +145,13 @@ impl ResolvedNodeExt for ResolvedNode { | |||
| .filter(|op| op.config.send_stdout_as.is_some()) | |||
| .count(); | |||
| if count == 1 && n.operators.len() > 1 { | |||
| tracing::warn!("All stdout from all operators of a runtime are going to be sent in the selected `send_stdout_as` operator.") | |||
| tracing::warn!( | |||
| "All stdout from all operators of a runtime are going to be sent in the selected `send_stdout_as` operator." | |||
| ) | |||
| } else if count > 1 { | |||
| return Err(eyre!("More than one `send_stdout_as` entries for a runtime node. Please only use one `send_stdout_as` per runtime.")); | |||
| return Err(eyre!( | |||
| "More than one `send_stdout_as` entries for a runtime node. Please only use one `send_stdout_as` per runtime." | |||
| )); | |||
| } | |||
| Ok(n.operators.iter().find_map(|op| { | |||
| op.config | |||
| @@ -1,5 +1,5 @@ | |||
| use dora_message::{ | |||
| config::{format_duration, Input, InputMapping, UserInputMapping}, | |||
| config::{Input, InputMapping, UserInputMapping, format_duration}, | |||
| descriptor::{CoreNodeKind, OperatorDefinition}, | |||
| id::{DataId, NodeId}, | |||
| }; | |||
| @@ -1,4 +1,4 @@ | |||
| use eyre::{bail, eyre, Context}; | |||
| use eyre::{Context, bail, eyre}; | |||
| use std::{ | |||
| env::consts::{DLL_PREFIX, DLL_SUFFIX}, | |||
| ffi::OsStr, | |||
| @@ -8,7 +8,7 @@ fn main() {} | |||
| fn main() { | |||
| use rust_format::Formatter; | |||
| let paths = ament_prefix_paths(); | |||
| let generated = dora_ros2_bridge_msg_gen::gen(paths.as_slice(), false); | |||
| let generated = dora_ros2_bridge_msg_gen::generate(paths.as_slice(), false); | |||
| let generated_string = rust_format::PrettyPlease::default() | |||
| .format_tokens(generated) | |||
| .unwrap(); | |||
| @@ -17,7 +17,7 @@ pub mod types; | |||
| pub use crate::parser::get_packages; | |||
| #[allow(clippy::cognitive_complexity)] | |||
| pub fn gen<P>(paths: &[P], create_cxx_bridge: bool) -> proc_macro2::TokenStream | |||
| pub fn generate<P>(paths: &[P], create_cxx_bridge: bool) -> proc_macro2::TokenStream | |||
| where | |||
| P: AsRef<Path>, | |||
| { | |||
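The `gen` → `generate` rename in the two hunks above is another 2024-edition requirement: `gen` is a reserved keyword in Rust 2024 (held for future generator blocks), so it can no longer be used as a plain function name. A small sketch of the two options (the function below is purely illustrative, not the msg-gen API):

```rust
// Renaming, as this PR does, is the clean fix; the escape hatch is a raw
// identifier, which keeps the old name spelled as `r#gen`.
fn generate(seed: u64) -> u64 {
    // hypothetical placeholder body
    seed.wrapping_mul(6364136223846793005).wrapping_add(1442695040888963407)
}

// Still allowed in edition 2024, because raw identifiers bypass keywords:
fn r#gen(seed: u64) -> u64 {
    generate(seed)
}

fn main() {
    assert_eq!(generate(42), r#gen(42));
    println!("ok");
}
```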
| @@ -61,7 +61,7 @@ mod test { | |||
| use std::path::PathBuf; | |||
| use super::*; | |||
| use crate::types::{primitives::*, sequences::*, MemberType}; | |||
| use crate::types::{MemberType, primitives::*, sequences::*}; | |||
| fn parse_action_def(srv_name: &str) -> Result<Action> { | |||
| let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) | |||