
change how mapping works

pull/1096/head
rozgo · 5 months ago · commit 2ea6695a5a

1 changed file with 393 additions and 368 deletions:
  node-hub/dora-rerun/src/lib.rs

@@ -4,7 +4,7 @@ use std::{collections::HashMap, env::VarError, path::Path};

 use dora_node_api::{
     arrow::{
-        array::{Array, AsArray, Float32Array, Float64Array, StringArray, UInt8Array},
+        array::{Array, AsArray, Float32Array, StringArray, UInt8Array},
         datatypes::Float32Type,
     },
     dora_core::config::DataId,
@@ -23,21 +23,6 @@ pub mod urdf;
 use series::update_series;
 use urdf::{init_urdf, update_visualization};
 
-static KEYS: &[&str] = &[
-    "image",
-    "depth",
-    "text",
-    "boxes2d",
-    "boxes3d",
-    "masks",
-    "jointstate",
-    "pose",
-    "series",
-    "points3d",
-    "points2d",
-    "lines3d",
-];
-
 pub fn lib_main() -> Result<()> {
     // rerun `serve()` requires to have a running Tokio runtime in the current context.
     let rt = tokio::runtime::Builder::new_current_thread()
@@ -115,392 +100,432 @@ pub fn lib_main() -> Result<()> {

     while let Some(event) = events.recv() {
         if let Event::Input { id, data, metadata } = event {
-            // Check if the id contains more than one key
-            if KEYS
-                .iter()
-                .filter(|&&key| id.as_str().contains(key))
-                .count()
-                > 1
-            {
-                bail!(
-                    "Event id `{}` contains more than one visualization keyword: {:?}, please only use one of them.",
-                    id,
-                    KEYS.iter()
-                        .filter(|&&key| id.as_str().contains(key))
-                        .collect::<Vec<_>>()
-                );
-            }
-
+            let primitive = if let Some(Parameter::String(primitive)) = metadata.parameters.get("primitive") {
+                primitive.clone()
+            } else {
+                bail!("No visualization primitive specified in metadata for input {}", id);
+            };
+
+            match primitive.as_str() {
-            if id.as_str().contains("image") {
-                let height =
-                    if let Some(Parameter::Integer(height)) = metadata.parameters.get("height") {
-                        height
-                    } else {
-                        &480
-                    };
-                let width =
-                    if let Some(Parameter::Integer(width)) = metadata.parameters.get("width") {
-                        width
-                    } else {
-                        &640
-                    };
-                let encoding = if let Some(Parameter::String(encoding)) =
-                    metadata.parameters.get("encoding")
-                {
-                    encoding
-                } else {
-                    "bgr8"
-                };
-
-                if encoding == "bgr8" {
-                    let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
-                    let buffer: &[u8] = buffer.values();
-
-                    // Transpose values from BGR to RGB
-                    let buffer: Vec<u8> =
-                        buffer.chunks(3).flat_map(|x| [x[2], x[1], x[0]]).collect();
-                    image_cache.insert(id.clone(), buffer.clone());
-                    let image_buffer = ImageBuffer::from(buffer);
-                    // let tensordata = ImageBuffer(buffer);
-
-                    let image = rerun::Image::new(
-                        image_buffer,
-                        ImageFormat::rgb8([*width as u32, *height as u32]),
-                    );
-                    rec.log(id.as_str(), &image)
-                        .context("could not log image")?;
-                } else if encoding == "rgb8" {
-                    let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
-                    image_cache.insert(id.clone(), buffer.values().to_vec());
-                    let buffer: &[u8] = buffer.values();
-                    let image_buffer = ImageBuffer::from(buffer);
-
-                    let image = rerun::Image::new(
-                        image_buffer,
-                        ImageFormat::rgb8([*width as u32, *height as u32]),
-                    );
-                    rec.log(id.as_str(), &image)
-                        .context("could not log image")?;
-                } else if ["jpeg", "png", "avif"].contains(&encoding) {
-                    let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
-                    let buffer: &[u8] = buffer.values();
-
-                    let image = rerun::EncodedImage::from_file_contents(buffer.to_vec());
-                    rec.log(id.as_str(), &image)
-                        .context("could not log image")?;
-                };
+                "image" => {
+                    let height =
+                        if let Some(Parameter::Integer(height)) = metadata.parameters.get("height") {
+                            height
+                        } else {
+                            &480
+                        };
+                    let width =
+                        if let Some(Parameter::Integer(width)) = metadata.parameters.get("width") {
+                            width
+                        } else {
+                            &640
+                        };
+                    let encoding = if let Some(Parameter::String(encoding)) =
+                        metadata.parameters.get("encoding")
+                    {
+                        encoding
+                    } else {
+                        "bgr8"
+                    };
+
+                    if encoding == "bgr8" {
+                        let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
+                        let buffer: &[u8] = buffer.values();
+
+                        // Transpose values from BGR to RGB
+                        let buffer: Vec<u8> =
+                            buffer.chunks(3).flat_map(|x| [x[2], x[1], x[0]]).collect();
+                        image_cache.insert(id.clone(), buffer.clone());
+                        let image_buffer = ImageBuffer::from(buffer);
+                        // let tensordata = ImageBuffer(buffer);
+
+                        let image = rerun::Image::new(
+                            image_buffer,
+                            ImageFormat::rgb8([*width as u32, *height as u32]),
+                        );
+                        rec.log(id.as_str(), &image)
+                            .context("could not log image")?;
+                    } else if encoding == "rgb8" {
+                        let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
+                        image_cache.insert(id.clone(), buffer.values().to_vec());
+                        let buffer: &[u8] = buffer.values();
+                        let image_buffer = ImageBuffer::from(buffer);
+
+                        let image = rerun::Image::new(
+                            image_buffer,
+                            ImageFormat::rgb8([*width as u32, *height as u32]),
+                        );
+                        rec.log(id.as_str(), &image)
+                            .context("could not log image")?;
+                    } else if ["jpeg", "png", "avif"].contains(&encoding) {
+                        let buffer: &UInt8Array = data.as_any().downcast_ref().unwrap();
+                        let buffer: &[u8] = buffer.values();
+
+                        let image = rerun::EncodedImage::from_file_contents(buffer.to_vec());
+                        rec.log(id.as_str(), &image)
+                            .context("could not log image")?;
+                    };
+                }
-            } else if id.as_str().contains("depth") {
-                let width =
-                    if let Some(Parameter::Integer(width)) = metadata.parameters.get("width") {
-                        *width as usize
-                    } else {
-                        640
-                    };
-                let height =
-                    if let Some(Parameter::Integer(height)) = metadata.parameters.get("height") {
-                        *height as usize
-                    } else {
-                        480
-                    };
-
-                // Check if we have camera metadata for pinhole camera setup
-                let has_camera_metadata = metadata.parameters.contains_key("camera_position")
-                    && metadata.parameters.contains_key("camera_orientation")
-                    && metadata.parameters.contains_key("focal");
-
-                if has_camera_metadata {
-                    // Extract camera parameters
-                    let focal_length = if let Some(Parameter::ListFloat(focals)) =
-                        metadata.parameters.get("focal")
-                    {
-                        (focals[0] as f32, focals[1] as f32)
-                    } else {
-                        (605.0, 605.0)
-                    };
-
-                    let principal_point = if let Some(Parameter::ListFloat(pp)) =
-                        metadata.parameters.get("principal_point")
-                    {
-                        (pp[0] as f32, pp[1] as f32)
-                    } else {
-                        (width as f32 / 2.0, height as f32 / 2.0)
-                    };
-
-                    let camera_position = if let Some(Parameter::ListFloat(pos)) =
-                        metadata.parameters.get("camera_position")
-                    {
-                        rerun::Vec3D::new(pos[0] as f32, pos[1] as f32, pos[2] as f32)
-                    } else {
-                        rerun::Vec3D::new(0.0, 0.0, 0.0)
-                    };
-
-                    let camera_orientation = if let Some(Parameter::ListFloat(quat)) =
-                        metadata.parameters.get("camera_orientation")
-                    {
-                        rerun::Quaternion::from_xyzw([
-                            quat[0] as f32,
-                            quat[1] as f32,
-                            quat[2] as f32,
-                            quat[3] as f32,
-                        ])
-                    } else {
-                        rerun::Quaternion::from_xyzw([0.0, 0.0, 0.0, 1.0])
-                    };
-
-                    // Create entity path for the camera
-                    let camera_entity = format!("{}/camera", id.as_str());
-
-                    // Log camera transform
-                    let camera_transform = rerun::Transform3D::from_translation_rotation(
-                        camera_position,
-                        camera_orientation,
-                    );
-                    rec.log(camera_entity.as_str(), &camera_transform)
-                        .context("could not log camera transform")?;
-
-                    // Log pinhole camera with RDF coordinates (matching original implementation)
-                    let pinhole = rerun::Pinhole::from_focal_length_and_resolution(
-                        focal_length,
-                        (width as f32, height as f32),
-                    )
-                    .with_camera_xyz(rerun::components::ViewCoordinates::RDF)
-                    .with_resolution((width as f32, height as f32))
-                    .with_principal_point(principal_point);
-
-                    rec.log(camera_entity.as_str(), &pinhole)
-                        .context("could not log pinhole camera")?;
-
-                    // Convert depth data to DepthImage
-                    match data.data_type() {
-                        dora_node_api::arrow::datatypes::DataType::Float32 => {
-                            let buffer: &Float32Array = data.as_any().downcast_ref().unwrap();
-                            let depth_values: Vec<f32> = buffer.values().to_vec();
-
-                            let depth_image = rerun::external::ndarray::Array::from_shape_vec(
-                                (height, width),
-                                depth_values,
-                            )
-                            .context("Failed to create depth array")?;
-
-                            let depth_entity = format!("{}/depth_image", camera_entity);
-                            rec.log(
-                                depth_entity.as_str(),
-                                &rerun::DepthImage::try_from(depth_image)
-                                    .context("Failed to create depth image")?
-                                    .with_meter(1.0),
-                            )
-                            .context("could not log depth image")?;
-                        }
-                        _ => {
-                            return Err(eyre!(
-                                "Depth data must be Float32Array, got {}. Please convert depth values to Float32 before sending.",
-                                data.data_type()
-                            ));
-                        }
-                    }
-                } else {
-                    // No camera metadata - just log a warning and skip 3D reconstruction
-                    warn!("Depth data received without camera metadata (position, orientation, focal). Skipping 3D reconstruction.");
-                    warn!("To enable proper 3D reconstruction, ensure the depth data includes camera_position, camera_orientation, and focal metadata.");
-                }
+                "depth" => {
+                    let width =
+                        if let Some(Parameter::Integer(width)) = metadata.parameters.get("width") {
+                            *width as usize
+                        } else {
+                            640
+                        };
+                    let height =
+                        if let Some(Parameter::Integer(height)) = metadata.parameters.get("height") {
+                            *height as usize
+                        } else {
+                            480
+                        };
+
+                    // Check if we have camera metadata for pinhole camera setup
+                    let has_camera_metadata = metadata.parameters.contains_key("camera_position")
+                        && metadata.parameters.contains_key("camera_orientation")
+                        && metadata.parameters.contains_key("focal");
+
+                    if has_camera_metadata {
+                        // Extract camera parameters
+                        let focal_length = if let Some(Parameter::ListFloat(focals)) =
+                            metadata.parameters.get("focal")
+                        {
+                            (focals[0] as f32, focals[1] as f32)
+                        } else {
+                            (605.0, 605.0)
+                        };
+
+                        let principal_point = if let Some(Parameter::ListFloat(pp)) =
+                            metadata.parameters.get("principal_point")
+                        {
+                            (pp[0] as f32, pp[1] as f32)
+                        } else {
+                            (width as f32 / 2.0, height as f32 / 2.0)
+                        };
+
+                        let camera_position = if let Some(Parameter::ListFloat(pos)) =
+                            metadata.parameters.get("camera_position")
+                        {
+                            rerun::Vec3D::new(pos[0] as f32, pos[1] as f32, pos[2] as f32)
+                        } else {
+                            rerun::Vec3D::new(0.0, 0.0, 0.0)
+                        };
+
+                        let camera_orientation = if let Some(Parameter::ListFloat(quat)) =
+                            metadata.parameters.get("camera_orientation")
+                        {
+                            rerun::Quaternion::from_xyzw([
+                                quat[0] as f32,
+                                quat[1] as f32,
+                                quat[2] as f32,
+                                quat[3] as f32,
+                            ])
+                        } else {
+                            rerun::Quaternion::from_xyzw([0.0, 0.0, 0.0, 1.0])
+                        };
+
+                        // Use the depth ID as parent entity for camera components
+                        let camera_entity = id.as_str();
+
+                        // Log camera transform
+                        let camera_transform = rerun::Transform3D::from_translation_rotation(
+                            camera_position,
+                            camera_orientation,
+                        );
+                        rec.log(camera_entity, &camera_transform)
+                            .context("could not log camera transform")?;
+
+                        // Log pinhole camera
+                        let pinhole = rerun::Pinhole::from_focal_length_and_resolution(
+                            focal_length,
+                            (width as f32, height as f32),
+                        )
+                        .with_camera_xyz(rerun::components::ViewCoordinates::RDF)
+                        .with_resolution((width as f32, height as f32))
+                        .with_principal_point(principal_point);
+
+                        rec.log(camera_entity, &pinhole)
+                            .context("could not log pinhole camera")?;
+
+                        // Convert depth data to DepthImage
+                        match data.data_type() {
+                            dora_node_api::arrow::datatypes::DataType::Float32 => {
+                                let buffer: &Float32Array = data.as_any().downcast_ref().unwrap();
+                                let depth_values: Vec<f32> = buffer.values().to_vec();
+
+                                let depth_image = rerun::external::ndarray::Array::from_shape_vec(
+                                    (height, width),
+                                    depth_values,
+                                )
+                                .context("Failed to create depth array")?;
+
+                                // Log depth image as a child entity
+                                let depth_entity = format!("{}/raw", camera_entity);
+                                rec.log(
+                                    depth_entity.as_str(),
+                                    &rerun::DepthImage::try_from(depth_image)
+                                        .context("Failed to create depth image")?
+                                        .with_meter(1.0),
+                                )
+                                .context("could not log depth image")?;
+                            }
+                            _ => {
+                                return Err(eyre!(
+                                    "Depth data must be Float32Array, got {}. Please convert depth values to Float32 before sending.",
+                                    data.data_type()
+                                ));
+                            }
+                        }
+                    } else {
+                        // No camera metadata - just log a warning and skip 3D reconstruction
+                        warn!("Depth data received without camera metadata (position, orientation, focal). Skipping 3D reconstruction.");
+                        warn!("To enable proper 3D reconstruction, ensure the depth data includes camera_position, camera_orientation, and focal metadata.");
+                    }
+                }
-            } else if id.as_str().contains("text") {
-                let buffer: StringArray = data.to_data().into();
-                buffer.iter().try_for_each(|string| -> Result<()> {
-                    if let Some(str) = string {
-                        let chars = str.chars().collect::<Vec<_>>();
-                        let mut new_string = vec![];
-                        for char in chars {
-                            // Check if the character is a Chinese character
-                            if char.is_ascii() || char.is_control() {
-                                new_string.push(char);
-                                continue;
-                            }
-                            // If it is a Chinese character, replace it with its pinyin
-                            if let Some(pinyin) = char.to_pinyin() {
-                                for char in pinyin.with_tone().chars() {
-                                    new_string.push(char);
-                                }
-                                new_string.push(' ');
-                            }
-                        }
-                        let pinyined_str = new_string.iter().collect::<String>();
-                        rec.log(id.as_str(), &rerun::TextLog::new(pinyined_str))
-                            .wrap_err("Could not log text")
-                    } else {
-                        Ok(())
-                    }
-                })?;
+                "text" => {
+                    let buffer: StringArray = data.to_data().into();
+                    buffer.iter().try_for_each(|string| -> Result<()> {
+                        if let Some(str) = string {
+                            let chars = str.chars().collect::<Vec<_>>();
+                            let mut new_string = vec![];
+                            for char in chars {
+                                // Check if the character is a Chinese character
+                                if char.is_ascii() || char.is_control() {
+                                    new_string.push(char);
+                                    continue;
+                                }
+                                // If it is a Chinese character, replace it with its pinyin
+                                if let Some(pinyin) = char.to_pinyin() {
+                                    for char in pinyin.with_tone().chars() {
+                                        new_string.push(char);
+                                    }
+                                    new_string.push(' ');
+                                }
+                            }
+                            let pinyined_str = new_string.iter().collect::<String>();
+                            rec.log(id.as_str(), &rerun::TextLog::new(pinyined_str))
+                                .wrap_err("Could not log text")
+                        } else {
+                            Ok(())
+                        }
+                    })?;
+                }
-            } else if id.as_str().contains("boxes2d") {
-                boxes2d::update_boxes2d(&rec, id, data, metadata).context("update boxes 2d")?;
-            } else if id.as_str().contains("boxes3d") {
-                boxes3d::update_boxes3d(&rec, id, data, metadata).context("update boxes 3d")?;
+                "boxes2d" => {
+                    boxes2d::update_boxes2d(&rec, id, data, metadata).context("update boxes 2d")?;
+                }
+                "boxes3d" => {
+                    boxes3d::update_boxes3d(&rec, id, data, metadata).context("update boxes 3d")?;
+                }
-            } else if id.as_str().contains("masks") {
-                let masks = if let Some(data) = data.as_primitive_opt::<Float32Type>() {
-                    let data = data
-                        .iter()
-                        .map(|x| if let Some(x) = x { x > 0. } else { false })
-                        .collect::<Vec<_>>();
-                    data
-                } else if let Some(data) = data.as_boolean_opt() {
-                    let data = data
-                        .iter()
-                        .map(|x| x.unwrap_or_default())
-                        .collect::<Vec<_>>();
-                    data
-                } else {
-                    println!("Got unexpected data type: {}", data.data_type());
-                    continue;
-                };
-                mask_cache.insert(id.clone(), masks.clone());
+                "masks" => {
+                    let masks = if let Some(data) = data.as_primitive_opt::<Float32Type>() {
+                        let data = data
+                            .iter()
+                            .map(|x| if let Some(x) = x { x > 0. } else { false })
+                            .collect::<Vec<_>>();
+                        data
+                    } else if let Some(data) = data.as_boolean_opt() {
+                        let data = data
+                            .iter()
+                            .map(|x| x.unwrap_or_default())
+                            .collect::<Vec<_>>();
+                        data
+                    } else {
+                        println!("Got unexpected data type: {}", data.data_type());
+                        continue;
+                    };
+                    mask_cache.insert(id.clone(), masks.clone());
+                }
-            } else if id.as_str().contains("jointstate") || id.as_str().contains("pose") {
-                let encoding = if let Some(Parameter::String(encoding)) =
-                    metadata.parameters.get("encoding")
-                {
-                    encoding
-                } else {
-                    "jointstate"
-                };
-                if encoding != "jointstate" {
-                    warn!("Got unexpected encoding: {encoding} on position pose");
-                    continue;
-                }
-                // Convert to Vec<f32>
-                let mut positions: Vec<f32> =
-                    into_vec(&data).context("Could not parse jointstate as vec32")?;
-
-                // Match file name
-                let mut id = id.as_str().replace("jointstate_", "");
-                id.push_str(".urdf");
-
-                if let Some(chain) = chains.get(&id) {
-                    let dof = chain.dof();
-
-                    // Truncate or pad positions to match the chain's dof
-                    if dof < positions.len() {
-                        positions.truncate(dof);
-                    } else {
-                        #[allow(clippy::same_item_push)]
-                        for _ in 0..(dof - positions.len()) {
-                            positions.push(0.);
-                        }
-                    }
-
-                    update_visualization(&rec, chain, &id, &positions)?;
-                } else {
-                    println!("Could not find chain for {id}. You may not have set its");
-                }
+                "jointstate" => {
+                    let encoding = if let Some(Parameter::String(encoding)) =
+                        metadata.parameters.get("encoding")
+                    {
+                        encoding
+                    } else {
+                        "jointstate"
+                    };
+                    if encoding != "jointstate" {
+                        warn!("Got unexpected encoding: {encoding} on position pose");
+                        continue;
+                    }
+                    // Convert to Vec<f32>
+                    let mut positions: Vec<f32> =
+                        into_vec(&data).context("Could not parse jointstate as vec32")?;
+
+                    // Match file name
+                    let mut urdf_id = id.as_str().replace("jointstate_", "");
+                    urdf_id.push_str(".urdf");
+
+                    if let Some(chain) = chains.get(&urdf_id) {
+                        let dof = chain.dof();
+
+                        // Truncate or pad positions to match the chain's dof
+                        if dof < positions.len() {
+                            positions.truncate(dof);
+                        } else {
+                            #[allow(clippy::same_item_push)]
+                            for _ in 0..(dof - positions.len()) {
+                                positions.push(0.);
+                            }
+                        }
+
+                        update_visualization(&rec, chain, &urdf_id, &positions)?;
+                    } else {
+                        println!("Could not find chain for {urdf_id}. You may not have set its");
+                    }
+                }
+                "pose" => {
+                    let encoding = if let Some(Parameter::String(encoding)) =
+                        metadata.parameters.get("encoding")
+                    {
+                        encoding
+                    } else {
+                        "jointstate"
+                    };
+                    if encoding != "jointstate" {
+                        warn!("Got unexpected encoding: {encoding} on position pose");
+                        continue;
+                    }
+                    // Convert to Vec<f32>
+                    let mut positions: Vec<f32> =
+                        into_vec(&data).context("Could not parse jointstate as vec32")?;
+
+                    // Match file name
+                    let mut urdf_id = id.as_str().replace("pose_", "");
+                    urdf_id.push_str(".urdf");
+
+                    if let Some(chain) = chains.get(&urdf_id) {
+                        let dof = chain.dof();
+
+                        // Truncate or pad positions to match the chain's dof
+                        if dof < positions.len() {
+                            positions.truncate(dof);
+                        } else {
+                            #[allow(clippy::same_item_push)]
+                            for _ in 0..(dof - positions.len()) {
+                                positions.push(0.);
+                            }
+                        }
+
+                        update_visualization(&rec, chain, &urdf_id, &positions)?;
+                    } else {
+                        println!("Could not find chain for {urdf_id}. You may not have set its");
+                    }
+                }
-            } else if id.as_str().contains("series") {
-                update_series(&rec, id, data).context("could not plot series")?;
+                "series" => {
+                    update_series(&rec, id, data).context("could not plot series")?;
+                }
-            } else if id.as_str().contains("points3d") {
-                // Get color from metadata
-                let color = if let Some(Parameter::ListInt(rgb)) = metadata.parameters.get("color")
-                {
-                    if rgb.len() >= 3 {
-                        rerun::Color::from_rgb(rgb[0] as u8, rgb[1] as u8, rgb[2] as u8)
-                    } else {
-                        rerun::Color::from_rgb(128, 128, 128) // Default gray
-                    }
-                } else {
-                    rerun::Color::from_rgb(128, 128, 128) // Default gray
-                };
-
-                let dataid = id;
-
-                // Get radii from metadata as array
-                let radii = if let Some(Parameter::ListFloat(radii_list)) =
-                    metadata.parameters.get("radii")
-                {
-                    radii_list.iter().map(|&r| r as f32).collect::<Vec<f32>>()
-                } else {
-                    vec![0.01] // Default 1cm radius
-                };
-
-                if let Ok(buffer) = into_vec::<f32>(&data) {
-                    let mut points = vec![];
-                    let mut colors = vec![];
-                    let num_points = buffer.len() / 3;
-                    buffer.chunks(3).for_each(|chunk| {
-                        points.push((chunk[0], chunk[1], chunk[2]));
-                        colors.push(color);
-                    });
-
-                    // Expand single radius to all points if needed
-                    let radii_vec = if radii.len() == num_points {
-                        radii
-                    } else if radii.len() == 1 {
-                        vec![radii[0]; num_points]
-                    } else {
-                        vec![0.01; num_points] // Default 1cm radius
-                    };
-
-                    let points = Points3D::new(points).with_radii(radii_vec);
-
-                    rec.log(dataid.as_str(), &points.with_colors(colors))
-                        .context("could not log points")?;
-                }
+                "points3d" => {
+                    // Get color from metadata
+                    let color = if let Some(Parameter::ListInt(rgb)) = metadata.parameters.get("color")
+                    {
+                        if rgb.len() >= 3 {
+                            rerun::Color::from_rgb(rgb[0] as u8, rgb[1] as u8, rgb[2] as u8)
+                        } else {
+                            rerun::Color::from_rgb(128, 128, 128) // Default gray
+                        }
+                    } else {
+                        rerun::Color::from_rgb(128, 128, 128) // Default gray
+                    };
+
+                    let dataid = id;
+
+                    // Get radii from metadata as array
+                    let radii = if let Some(Parameter::ListFloat(radii_list)) =
+                        metadata.parameters.get("radii")
+                    {
+                        radii_list.iter().map(|&r| r as f32).collect::<Vec<f32>>()
+                    } else {
+                        vec![0.01] // Default 1cm radius
+                    };
+
+                    if let Ok(buffer) = into_vec::<f32>(&data) {
+                        let mut points = vec![];
+                        let mut colors = vec![];
+                        let num_points = buffer.len() / 3;
+                        buffer.chunks(3).for_each(|chunk| {
+                            points.push((chunk[0], chunk[1], chunk[2]));
+                            colors.push(color);
+                        });
+
+                        // Expand single radius to all points if needed
+                        let radii_vec = if radii.len() == num_points {
+                            radii
+                        } else if radii.len() == 1 {
+                            vec![radii[0]; num_points]
+                        } else {
+                            vec![0.01; num_points] // Default 1cm radius
+                        };
+
+                        let points = Points3D::new(points).with_radii(radii_vec);
+
+                        rec.log(dataid.as_str(), &points.with_colors(colors))
+                            .context("could not log points")?;
+                    }
+                }
-            } else if id.as_str().contains("points2d") {
-                // Get color or assign random color in cache
-                let color = color_cache.get(&id);
-                let color = if let Some(color) = color {
-                    *color
-                } else {
-                    let color =
-                        rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());
-
-                    color_cache.insert(id.clone(), color);
-                    color
-                };
-                let dataid = id;
-
-                // get a random color
-                if let Ok(buffer) = into_vec::<f32>(&data) {
-                    let mut points = vec![];
-                    let mut colors = vec![];
-                    buffer.chunks(2).for_each(|chunk| {
-                        points.push((chunk[0], chunk[1]));
-                        colors.push(color);
-                    });
-                    let points = Points2D::new(points);
-
-                    rec.log(dataid.as_str(), &points.with_colors(colors))
-                        .context("could not log points")?;
-                }
+                "points2d" => {
+                    // Get color or assign random color in cache
+                    let color = color_cache.get(&id);
+                    let color = if let Some(color) = color {
+                        *color
+                    } else {
+                        let color =
+                            rerun::Color::from_rgb(rand::random::<u8>(), 180, rand::random::<u8>());
+
+                        color_cache.insert(id.clone(), color);
+                        color
+                    };
+                    let dataid = id;
+
+                    // get a random color
+                    if let Ok(buffer) = into_vec::<f32>(&data) {
+                        let mut points = vec![];
+                        let mut colors = vec![];
+                        buffer.chunks(2).for_each(|chunk| {
+                            points.push((chunk[0], chunk[1]));
+                            colors.push(color);
+                        });
+                        let points = Points2D::new(points);
+
+                        rec.log(dataid.as_str(), &points.with_colors(colors))
+                            .context("could not log points")?;
+                    }
+                }
-            } else if id.as_str().contains("lines3d") {
-                // Get color from metadata
-                let color = if let Some(Parameter::ListInt(rgb)) = metadata.parameters.get("color")
-                {
-                    if rgb.len() >= 3 {
-                        rerun::Color::from_rgb(rgb[0] as u8, rgb[1] as u8, rgb[2] as u8)
-                    } else {
-                        rerun::Color::from_rgb(0, 255, 0) // Default green
-                    }
-                } else {
-                    rerun::Color::from_rgb(0, 255, 0) // Default green
-                };
-
-                // Get radius for line thickness
-                let radius = if let Some(Parameter::Float(r)) = metadata.parameters.get("radius") {
-                    *r as f32
-                } else {
-                    0.01 // Default radius
-                };
-
-                if let Ok(buffer) = into_vec::<f32>(&data) {
-                    let mut line_points = vec![];
-                    buffer.chunks(3).for_each(|chunk| {
-                        line_points.push((chunk[0], chunk[1], chunk[2]));
-                    });
-
-                    rec.log(
-                        id.as_str(),
-                        &rerun::LineStrips3D::new([line_points])
-                            .with_colors([color])
-                            .with_radii([radius]),
-                    )
-                    .context("could not log line strips")?;
-                }
+                "lines3d" => {
+                    // Get color from metadata
+                    let color = if let Some(Parameter::ListInt(rgb)) = metadata.parameters.get("color")
+                    {
+                        if rgb.len() >= 3 {
+                            rerun::Color::from_rgb(rgb[0] as u8, rgb[1] as u8, rgb[2] as u8)
+                        } else {
+                            rerun::Color::from_rgb(0, 255, 0) // Default green
+                        }
+                    } else {
+                        rerun::Color::from_rgb(0, 255, 0) // Default green
+                    };
+
+                    // Get radius for line thickness
+                    let radius = if let Some(Parameter::Float(r)) = metadata.parameters.get("radius") {
+                        *r as f32
+                    } else {
+                        0.01 // Default radius
+                    };
+
+                    if let Ok(buffer) = into_vec::<f32>(&data) {
+                        let mut line_points = vec![];
+                        buffer.chunks(3).for_each(|chunk| {
+                            line_points.push((chunk[0], chunk[1], chunk[2]));
+                        });
+
+                        rec.log(
+                            id.as_str(),
+                            &rerun::LineStrips3D::new([line_points])
+                                .with_colors([color])
+                                .with_radii([radius]),
+                        )
+                        .context("could not log line strips")?;
+                    }
+                }
-            } else {
-                println!("Could not find handler for {id}");
-            }
+                _ => bail!("Unknown visualization primitive: {}", primitive),
+            }
         }
     }
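
Reviewer note: with this change, dora-rerun no longer infers the visualization type from substrings of the input id; every input must carry a `primitive` entry in its metadata, and inputs without one now fail with `bail!` instead of falling through to the old "Could not find handler" branch. Below is a minimal, hypothetical sketch (not part of this diff) of how an upstream node might populate that metadata map; it assumes the parameters map is an ordinary `BTreeMap<String, Parameter>` and that `Parameter` is the same enum matched in lib.rs above, and it does not show the actual send API.

use std::collections::BTreeMap;

use dora_node_api::Parameter; // assumed import path; same enum as used in lib.rs above

// Hypothetical helper: builds metadata parameters so that dora-rerun
// selects the "points3d" arm of the new `match primitive.as_str()`.
fn points3d_parameters() -> BTreeMap<String, Parameter> {
    let mut parameters = BTreeMap::new();
    // Required by the new dispatch: names the visualization primitive.
    parameters.insert(
        "primitive".to_string(),
        Parameter::String("points3d".to_string()),
    );
    // Optional extras read by the "points3d" arm.
    parameters.insert("color".to_string(), Parameter::ListInt(vec![0, 255, 0]));
    parameters.insert("radii".to_string(), Parameter::ListFloat(vec![0.02]));
    parameters
}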

