@@ -11,7 +11,7 @@ fn required_data_size_inner(array: &ArrayData, next_offset: &mut usize) {
     for (buffer, spec) in array.buffers().iter().zip(&layout.buffers) {
         // consider alignment padding
         if let BufferSpec::FixedWidth { alignment, .. } = spec {
-            *next_offset = (*next_offset + alignment - 1) / alignment * alignment;
+            *next_offset = (*next_offset).div_ceil(*alignment) * alignment;
         }
         *next_offset += buffer.len();
     }
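
This hunk and the copy_array_into_sample_inner hunk below make the same change: the manual `(offset + alignment - 1) / alignment` round-up becomes `usize::div_ceil` (stable since Rust 1.73). A minimal standalone sketch, not part of the patch, checking that the two forms align offsets identically for any non-zero alignment:

// Hypothetical check: the manual round-up idiom and `div_ceil`
// produce the same aligned offset.
fn align_up_manual(offset: usize, alignment: usize) -> usize {
    (offset + alignment - 1) / alignment * alignment
}

fn align_up_div_ceil(offset: usize, alignment: usize) -> usize {
    offset.div_ceil(alignment) * alignment
}

fn main() {
    for alignment in [1usize, 2, 4, 8, 64] {
        for offset in 0..256 {
            assert_eq!(
                align_up_manual(offset, alignment),
                align_up_div_ceil(offset, alignment),
            );
        }
    }
    // e.g. offset 13 rounded up to an 8-byte boundary is 16 in both forms
    assert_eq!(align_up_div_ceil(13, 8), 16);
}
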
@@ -42,7 +42,7 @@ fn copy_array_into_sample_inner(
         );
         // add alignment padding
         if let BufferSpec::FixedWidth { alignment, .. } = spec {
-            *next_offset = (*next_offset + alignment - 1) / alignment * alignment;
+            *next_offset = (*next_offset).div_ceil(*alignment) * alignment;
         }
         target_buffer[*next_offset..][..len].copy_from_slice(buffer.as_slice());
@@ -44,6 +44,7 @@ mod drop_stream;
 pub const ZERO_COPY_THRESHOLD: usize = 4096;
+#[allow(dead_code)]
 enum TokioRuntime {
     Runtime(Runtime),
     Handle(Handle),
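
The added #[allow(dead_code)] suppresses rustc's dead-code lint on the enum; the assumption here is that some variant (or the enum itself) goes unconstructed under certain feature or build combinations. A tiny illustrative sketch of the lint behavior, using stand-in types rather than the real tokio Runtime/Handle:

// Without the attribute, rustc warns that `Borrowed` is never constructed;
// the allow covers the enum, its variants, and their fields.
#[allow(dead_code)]
enum RuntimeKind {
    Owned(String),
    Borrowed(String),
}

fn main() {
    let _runtime = RuntimeKind::Owned("tokio".to_string());
}
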
@@ -54,7 +54,7 @@ pub fn check_dataflow(
                };
            }
        },
-        dora_message::descriptor::NodeSource::GitBranch { repo, rev } => {
+        dora_message::descriptor::NodeSource::GitBranch { .. } => {
            info!("skipping check for node with git source");
        }
    },
@@ -6,7 +6,7 @@ use std::collections::HashMap;
 struct MetadataMap<'a>(HashMap<&'a str, &'a str>);
-impl<'a> Extractor for MetadataMap<'a> {
+impl Extractor for MetadataMap<'_> {
     /// Get a value for a key from the MetadataMap. If the value can't be converted to &str, returns None
     fn get(&self, key: &str) -> Option<&str> {
         self.0.get(key).cloned()
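
This follows clippy's needless_lifetimes guidance: the impl never names the lifetime in its body, so the anonymous lifetime `'_` can replace the explicit parameter. A minimal sketch with a simplified stand-in trait (assumption: the real Extractor comes from the OpenTelemetry propagation API):

use std::collections::HashMap;

// Simplified stand-in for the extractor trait used in the patch.
trait Extractor {
    fn get(&self, key: &str) -> Option<&str>;
}

struct MetadataMap<'a>(HashMap<&'a str, &'a str>);

// Elided form: no `impl<'a> ... for MetadataMap<'a>` needed because
// the lifetime is never referenced inside the impl body.
impl Extractor for MetadataMap<'_> {
    fn get(&self, key: &str) -> Option<&str> {
        self.0.get(key).cloned()
    }
}

fn main() {
    let mut inner = HashMap::new();
    inner.insert("traceparent", "00-abc-def-01");
    let map = MetadataMap(inner);
    assert_eq!(map.get("traceparent"), Some("00-abc-def-01"));
}
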
@@ -96,7 +96,7 @@ async fn main() -> eyre::Result<()> {
                )
                .context("failed to send dora output")?;
-                reply_channels.push_back((reply, 0 as u64, request.model));
+                reply_channels.push_back((reply, 0_u64, request.model));
            }
        },
        dora_node_api::merged::MergedEvent::Dora(event) => match event {
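
A suffixed literal states the integer type directly instead of casting an unsuffixed (i32) literal, which is what clippy flags here as an unnecessary cast. A tiny illustration:

fn main() {
    // Casting an unsuffixed literal vs. writing the suffix directly:
    // same value, but the suffix avoids the redundant `as` cast.
    let cast = 0 as u64;
    let suffixed = 0_u64;
    assert_eq!(cast, suffixed);
}
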
@@ -112,7 +112,7 @@ async fn main() -> eyre::Result<()> {
                let data = data.as_string::<i32>();
                let string = data.iter().fold("".to_string(), |mut acc, s| {
                    if let Some(s) = s {
-                        acc.push_str("\n");
+                        acc.push('\n');
                        acc.push_str(s);
                    }
                    acc
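
Pushing a single character with push(char) instead of push_str("\n") is clippy's single_char_add_str suggestion; the fold's behavior is unchanged. A standalone sketch of the same fold over optional strings, using plain Option values in place of the Arrow string column:

fn main() {
    // Stand-in for the Arrow column: an iterator of Option<&str>.
    let data = [Some("hello"), None, Some("world")];

    let string = data.iter().fold("".to_string(), |mut acc, s| {
        if let Some(s) = s {
            acc.push('\n'); // single char: `push`, not `push_str("\n")`
            acc.push_str(s);
        }
        acc
    });

    assert_eq!(string, "\nhello\nworld");
}
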
@@ -164,6 +164,7 @@ async fn main() -> eyre::Result<()> {
     Ok(())
 }
+#[allow(clippy::large_enum_variant)]
 enum ServerEvent {
     Result(eyre::Result<()>),
     ChatCompletionRequest {
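
clippy::large_enum_variant warns when one variant is much larger than the rest, since every value of the enum is sized for the largest variant; the patch allows the lint rather than boxing the payload. A hypothetical sketch of both options, with illustrative sizes rather than the real ServerEvent fields:

// Illustrative only: a small variant next to a large inline payload.
#[allow(dead_code)]
#[allow(clippy::large_enum_variant)]
enum EventInline {
    Done,
    Request { payload: [u8; 1024] },
}

// The alternative clippy suggests: box the large payload so the enum
// stays small regardless of which variant is held.
#[allow(dead_code)]
enum EventBoxed {
    Done,
    Request { payload: Box<[u8; 1024]> },
}

fn main() {
    assert!(std::mem::size_of::<EventInline>() >= 1024);
    assert!(std::mem::size_of::<EventBoxed>() <= 16);
}
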