* remove Vec creation in byte conversions

* par par

* opti with byte similar

* clippy

* intro docs

* translations

* idiomatic

* fix par docs

* fix rosrust conv

* fix msg conv
stelzo 2024-05-13 17:40:19 +02:00 committed by GitHub
parent 055ed2349a
commit b3b137a425
13 changed files with 1493 additions and 550 deletions

View File

@ -51,4 +51,7 @@ rayon = ["dep:rayon"]
derive = ["dep:rpcl2_derive", "dep:type-layout"] derive = ["dep:rpcl2_derive", "dep:type-layout"]
nalgebra = ["dep:nalgebra"] nalgebra = ["dep:nalgebra"]
default = ["rayon", "derive", "nalgebra"] default = ["derive"]
[package.metadata.docs.rs]
rustdoc-args = ["--generate-link-to-definition"]

View File

@ -1,22 +1,20 @@
## !! Note !! ## !! Note !!
This library is currently in development for v1.0.0, for the documentation of v0.4.0 on crates.io, visit the [docs](https://docs.rs/ros_pointcloud2/0.4.0/ros_pointcloud2/). This library is currently in development for v1.0.0, for the documentation of v0.4.0 on crates.io, visit the [docs](https://docs.rs/ros_pointcloud2/0.4.0/ros_pointcloud2/).
<p align="center"> <p align="center">
<h3 align="center">ROS PointCloud2</h3> <h3 align="center">ROS PointCloud2</h3>
<p align="center">A complete and versatile abstraction of PointCloud2.</p> <p align="center">A PointCloud2 message conversion library.</p>
<p align="center"><a href="https://crates.io/crates/ros_pointcloud2"><img src="https://img.shields.io/crates/v/ros_pointcloud2.svg" alt=""></a> <a href="https://github.com/stelzo/ros_pointcloud2/tree/main/tests"><img src="https://github.com/stelzo/ros_pointcloud2/actions/workflows/tests.yml/badge.svg" alt=""></a> <p align="center"><a href="https://crates.io/crates/ros_pointcloud2"><img src="https://img.shields.io/crates/v/ros_pointcloud2.svg" alt=""></a> <a href="https://github.com/stelzo/ros_pointcloud2/tree/main/tests"><img src="https://github.com/stelzo/ros_pointcloud2/actions/workflows/tests.yml/badge.svg" alt=""></a>
</p> </p>
</p> </p>
To keep the crate a general purpose library for the problem, it uses its own type for the message `PointCloud2Msg`. ros_pointcloud2 uses its own type for the message `PointCloud2Msg` to keep the library framework-agnostic. ROS1 and ROS2 are supported with feature flags.
ROS1 and ROS2 is supported with feature flags.
Get started with the example below, check out the other use cases in the `examples` folder or see the [Documentation](https://docs.rs/ros_pointcloud2/1.0.0/ros_pointcloud2/) for a complete guide. Get started with the example below, check out the other use cases in the `examples` folder or see the [Documentation](https://docs.rs/ros_pointcloud2/1.0.0/ros_pointcloud2/) for a complete guide.
## Quickstart ## Quickstart
The following example uses the iterator APIs. For memory-bound pipelines, you may also try the `try_from_vec` or `try_into_vec` functions by enabling the `derive` feature.
```rust ```rust
use ros_pointcloud2::prelude::*; use ros_pointcloud2::prelude::*;
@ -27,7 +25,7 @@ let cloud_points = vec![
]; ];
// Give the Vec or anything that implements `IntoIterator`. // Give the Vec or anything that implements `IntoIterator`.
let in_msg = PointCloud2Msg::try_from_iter(cloud_points).unwrap(); let in_msg = PointCloud2Msg::try_from_vec(cloud_points).unwrap();
// Convert the ROS crate message type, we will use r2r here. // Convert the ROS crate message type, we will use r2r here.
// let msg: r2r::sensor_msgs::msg::PointCloud2 = in_msg.into(); // let msg: r2r::sensor_msgs::msg::PointCloud2 = in_msg.into();
@ -36,7 +34,7 @@ let in_msg = PointCloud2Msg::try_from_iter(cloud_points).unwrap();
// let in_msg: PointCloud2Msg = msg.into(); // let in_msg: PointCloud2Msg = msg.into();
let new_pcl = in_msg.try_into_iter().unwrap() let new_pcl = in_msg.try_into_iter().unwrap()
.map(|point: PointXYZ| { // Define the type of point here. .map(|point: PointXYZ| { // Define the info you want to have from the Msg.
// Some logic here ... // Some logic here ...
point point
@ -84,7 +82,15 @@ Also, indicate the following dependencies to your linker inside the `package.xml
<depend>builtin_interfaces</depend> <depend>builtin_interfaces</depend>
``` ```
Please open an issue or PR if you want to see support for other crates. Please open an issue or PR if you need other integrations.
## Performance
The library offers a speed-up compared to Point Cloud Library (PCL) conversions, but the specific factor depends heavily on the use case and system.
`vec` conversions are on average ~7.5x faster than PCL, while the single-core `_iter` APIs are around 2x faster.
Parallelization with `_par_iter` shows a 10.5x speed-up compared to an OpenMP-accelerated PCL pipeline.
The full benchmarks are publicly available in this [repository](https://github.com/stelzo/ros_pcl_conv_bench).
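For orientation, here is a minimal sketch (not part of the benchmark suite) of the three API flavors compared above. It assumes the `derive` and `rayon` features are enabled and that the prelude brings the parallel iterator traits into scope.
```rust
use ros_pointcloud2::prelude::*;

let cloud: Vec<PointXYZ> = vec![PointXYZ::new(1.0, 2.0, 3.0)];

// Copy-based `vec` conversion (requires the `derive` feature).
let msg = PointCloud2Msg::try_from_vec(cloud.clone()).unwrap();
let _out: Vec<PointXYZ> = msg.try_into_vec().unwrap();

// Single-core iterator conversion.
let msg = PointCloud2Msg::try_from_iter(cloud.clone()).unwrap();
let _out: Vec<PointXYZ> = msg.try_into_iter().unwrap().collect();

// Parallel conversion (requires the `rayon` feature).
let msg = PointCloud2Msg::try_from_par_iter(cloud.into_par_iter()).unwrap();
let _out: Vec<PointXYZ> = msg.try_into_par_iter().unwrap().collect();
```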
## License ## License

View File

@ -3,32 +3,174 @@ use ros_pointcloud2::prelude::*;
use rand::Rng; use rand::Rng;
pub fn generate_random_pointcloud(num_points: usize, min: f32, max: f32) -> Vec<PointXYZ> { pub type PointXYZB = PointXYZINormal;
pub fn distance_to_origin(point: &PointXYZ) -> f32 {
((point.x.powi(2)) + (point.y.powi(2)) + (point.z.powi(2))).sqrt()
}
pub fn dot_product(point1: &PointXYZ, point2: &PointXYZ) -> f32 {
point1.x * point2.x + point1.y * point2.y + point1.z * point2.z
}
pub fn cross_product(point1: &PointXYZ, point2: &PointXYZ) -> PointXYZ {
PointXYZ {
x: point1.y * point2.z - point1.z * point2.y,
y: point1.z * point2.x - point1.x * point2.z,
z: point1.x * point2.y - point1.y * point2.x,
}
}
pub fn scalar_multiply(point: &PointXYZ, scalar: f32) -> PointXYZ {
PointXYZ {
x: point.x * scalar,
y: point.y * scalar,
z: point.z * scalar,
}
}
pub fn magnitude_squared(point: &PointXYZ) -> f32 {
(point.x.powi(2)) + (point.y.powi(2)) + (point.z.powi(2))
}
pub fn reflection_through_plane(
point: &PointXYZ,
normal: &PointXYZ,
point_on_plane: &PointXYZ,
) -> PointXYZ {
PointXYZ {
x: point.x
- 2.0
* ((point.x - point_on_plane.x) * normal.x
+ (point.y - point_on_plane.y) * normal.y
+ (point.z - point_on_plane.z) * normal.z),
y: point.y
- 2.0
* ((point.x - point_on_plane.x) * normal.x
+ (point.y - point_on_plane.y) * normal.y
+ (point.z - point_on_plane.z) * normal.z),
z: point.z
- 2.0
* ((point.x - point_on_plane.x) * normal.x
+ (point.y - point_on_plane.y) * normal.y
+ (point.z - point_on_plane.z) * normal.z),
}
}
pub fn rotation_about_x(point: &PointXYZ, angle: f32) -> PointXYZ {
let c = f32::cos(angle);
let s = f32::sin(angle);
PointXYZ {
x: point.x,
y: point.y * c - point.z * s,
z: point.y * s + point.z * c,
}
}
pub fn closest_point_on_line(
point: &PointXYZ,
line_point: &PointXYZ,
line_direction: &PointXYZ,
) -> PointXYZ {
PointXYZ {
x: line_point.x
+ (line_point.x - point.x) * ((line_point.x - point.x).powi(2))
/ ((line_direction.x * 2.0).powi(2))
+ (line_direction.y * 2.0) * (point.z - line_point.z)
/ ((line_direction.z * 2.0).powi(2)),
y: line_point.y
+ (line_point.y - point.y) * ((line_point.y - point.y).powi(2))
/ ((line_direction.y * 2.0).powi(2))
+ (line_direction.x * 2.0) * (point.x - line_point.x)
/ ((line_direction.x * 2.0).powi(2)),
z: line_point.z
+ (line_point.z - point.z) * ((line_point.z - point.z).powi(2))
/ ((line_direction.z * 2.0).powi(2))
+ (line_direction.y * 2.0) * (point.y - line_point.y)
/ ((line_direction.y * 2.0).powi(2)),
}
}
fn minus(point1: &PointXYZ, point2: &PointXYZ) -> PointXYZ {
PointXYZ {
x: point1.x - point2.x,
y: point1.y - point2.y,
z: point1.z - point2.z,
}
}
pub fn generate_random_pointcloud(num_points: usize, min: f32, max: f32) -> Vec<PointXYZB> {
let mut rng = rand::thread_rng(); let mut rng = rand::thread_rng();
let mut pointcloud = Vec::with_capacity(num_points); let mut pointcloud = Vec::with_capacity(num_points);
for _ in 0..num_points { for _ in 0..num_points {
let point = PointXYZ { let point = PointXYZB {
x: rng.gen_range(min..max), x: rng.gen_range(min..max),
y: rng.gen_range(min..max), y: rng.gen_range(min..max),
z: rng.gen_range(min..max), z: rng.gen_range(min..max),
..Default::default()
}; };
pointcloud.push(point); pointcloud.push(point);
} }
pointcloud pointcloud
} }
pub fn heavy_computing(point: &PointXYZ, iterations: u32) -> f32 {
let mut result = distance_to_origin(point);
for _ in 0..iterations {
result += dot_product(
point,
&PointXYZ {
x: 1.0,
y: 2.0,
z: 3.0,
},
);
result += cross_product(
point,
&PointXYZ {
x: 4.0,
y: 5.0,
z: 6.0,
},
)
.x;
result = result + (result * 10.0).sqrt();
let reflected_point = reflection_through_plane(
point,
&PointXYZ {
x: 7.0,
y: 8.0,
z: 9.0,
},
&PointXYZ {
x: 3.0,
y: 4.0,
z: 5.0,
},
);
let rotated_point = rotation_about_x(
&PointXYZ {
x: 10.0,
y: 11.0,
z: 12.0,
},
std::f32::consts::PI / 2.0,
);
result += magnitude_squared(&minus(&reflected_point, &rotated_point));
}
result
}
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
fn roundtrip_vec(cloud: Vec<PointXYZ>) -> bool { fn roundtrip_vec(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len(); let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_vec(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_vec(cloud).unwrap();
let total = internal_msg let total: Vec<PointXYZ> = internal_msg.try_into_vec().unwrap();
.try_into_iter()
.unwrap()
.collect::<Vec<PointXYZ>>();
orig_len == total.len() orig_len == total.len()
} }
fn roundtrip(cloud: Vec<PointXYZ>) -> bool { fn roundtrip(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len(); let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg let total = internal_msg
@ -39,15 +181,13 @@ fn roundtrip(cloud: Vec<PointXYZ>) -> bool {
} }
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
fn roundtrip_filter_vec(cloud: Vec<PointXYZ>) -> bool { fn roundtrip_filter_vec(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len(); let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_vec(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_vec(cloud).unwrap();
let total = internal_msg let total = internal_msg
.try_into_iter() .try_into_iter()
.unwrap() .unwrap()
.filter(|point: &PointXYZ| { .filter(|point: &PointXYZ| distance_to_origin(point) < 69.9)
(point.x.powi(2) + point.y.powi(2) + point.z.powi(2)).sqrt() < 1.9
})
.fold(PointXYZ::default(), |acc, point| PointXYZ { .fold(PointXYZ::default(), |acc, point| PointXYZ {
x: acc.x + point.x, x: acc.x + point.x,
y: acc.y + point.y, y: acc.y + point.y,
@ -56,15 +196,13 @@ fn roundtrip_filter_vec(cloud: Vec<PointXYZ>) -> bool {
orig_len == total.x as usize orig_len == total.x as usize
} }
fn roundtrip_filter(cloud: Vec<PointXYZ>) -> bool { fn roundtrip_filter(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len(); let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg let total = internal_msg
.try_into_iter() .try_into_iter()
.unwrap() .unwrap()
.filter(|point: &PointXYZ| { .filter(|point: &PointXYZ| distance_to_origin(point) < 69.9)
(point.x.powi(2) + point.y.powi(2) + point.z.powi(2)).sqrt() < 1.9
})
.fold(PointXYZ::default(), |acc, point| PointXYZ { .fold(PointXYZ::default(), |acc, point| PointXYZ {
x: acc.x + point.x, x: acc.x + point.x,
y: acc.y + point.y, y: acc.y + point.y,
@ -73,8 +211,52 @@ fn roundtrip_filter(cloud: Vec<PointXYZ>) -> bool {
orig_len == total.x as usize orig_len == total.x as usize
} }
fn roundtrip_computing(cloud: Vec<PointXYZB>) -> bool {
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg
.try_into_iter()
.unwrap()
.map(|point: PointXYZ| heavy_computing(&point, 100))
.sum::<f32>();
total > 0.0
}
#[cfg(feature = "rayon")] #[cfg(feature = "rayon")]
fn roundtrip_par(cloud: Vec<PointXYZ>) -> bool { fn roundtrip_computing_par(cloud: Vec<PointXYZB>) -> bool {
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg
.try_into_par_iter()
.unwrap()
.map(|point: PointXYZ| heavy_computing(&point, 100))
.sum::<f32>();
total > 0.0
}
#[cfg(feature = "rayon")]
fn roundtrip_computing_par_par(cloud: Vec<PointXYZB>) -> bool {
let internal_msg = PointCloud2Msg::try_from_par_iter(cloud.into_par_iter()).unwrap();
let total = internal_msg
.try_into_par_iter()
.unwrap()
.map(|point: PointXYZ| heavy_computing(&point, 100))
.sum::<f32>();
total > 0.0
}
#[cfg(feature = "derive")]
fn roundtrip_computing_vec(cloud: Vec<PointXYZB>) -> bool {
let internal_msg = PointCloud2Msg::try_from_vec(cloud).unwrap();
let total: f32 = internal_msg
.try_into_vec()
.unwrap()
.into_iter()
.map(|point: PointXYZ| heavy_computing(&point, 100))
.sum();
total > 0.0
}
#[cfg(feature = "rayon")]
fn roundtrip_par(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len(); let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg let total = internal_msg
@ -85,15 +267,40 @@ fn roundtrip_par(cloud: Vec<PointXYZ>) -> bool {
} }
#[cfg(feature = "rayon")] #[cfg(feature = "rayon")]
fn roundtrip_filter_par(cloud: Vec<PointXYZ>) -> bool { fn roundtrip_par_par(cloud: Vec<PointXYZB>) -> bool {
let orig_len = cloud.len();
let internal_msg = PointCloud2Msg::try_from_par_iter(cloud.into_par_iter()).unwrap();
let total = internal_msg
.try_into_par_iter()
.unwrap()
.collect::<Vec<PointXYZ>>();
orig_len == total.len()
}
#[cfg(feature = "rayon")]
fn roundtrip_filter_par(cloud: Vec<PointXYZB>) -> bool {
let orig_len: usize = cloud.len(); let orig_len: usize = cloud.len();
let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap(); let internal_msg = PointCloud2Msg::try_from_iter(cloud).unwrap();
let total = internal_msg let total = internal_msg
.try_into_par_iter() .try_into_par_iter()
.unwrap() .unwrap()
.filter(|point: &PointXYZ| { .filter(|point: &PointXYZ| distance_to_origin(point) < 69.9)
(point.x.powi(2) + point.y.powi(2) + point.z.powi(2)).sqrt() < 1.9 .reduce(PointXYZ::default, |acc, point| PointXYZ {
}) x: acc.x + point.x,
y: acc.y + point.y,
z: acc.z + point.z,
});
orig_len == total.x as usize
}
#[cfg(feature = "rayon")]
fn roundtrip_filter_par_par(cloud: Vec<PointXYZB>) -> bool {
let orig_len: usize = cloud.len();
let internal_msg = PointCloud2Msg::try_from_par_iter(cloud.into_par_iter()).unwrap();
let total = internal_msg
.try_into_par_iter()
.unwrap()
.filter(|point: &PointXYZ| distance_to_origin(point) < 69.9)
.reduce(PointXYZ::default, |acc, point| PointXYZ { .reduce(PointXYZ::default, |acc, point| PointXYZ {
x: acc.x + point.x, x: acc.x + point.x,
y: acc.y + point.y, y: acc.y + point.y,
@ -103,61 +310,385 @@ fn roundtrip_filter_par(cloud: Vec<PointXYZ>) -> bool {
} }
fn roundtrip_benchmark(c: &mut Criterion) { fn roundtrip_benchmark(c: &mut Criterion) {
let cloud_points_16k = generate_random_pointcloud(16_000, f32::MIN / 2.0, f32::MAX / 2.0);
let cloud_points_60k = generate_random_pointcloud(60_000, f32::MIN / 2.0, f32::MAX / 2.0);
let cloud_points_120k = generate_random_pointcloud(120_000, f32::MIN / 2.0, f32::MAX / 2.0);
let cloud_points_500k = generate_random_pointcloud(500_000, f32::MIN / 2.0, f32::MAX / 2.0); let cloud_points_500k = generate_random_pointcloud(500_000, f32::MIN / 2.0, f32::MAX / 2.0);
let cloud_points_1_5m = generate_random_pointcloud(1_500_000, f32::MIN / 2.0, f32::MAX / 2.0); let cloud_points_1_5m = generate_random_pointcloud(1_500_000, f32::MIN / 2.0, f32::MAX / 2.0);
c.bench_function("roundtrip 500k", |b| { // 16k points (Velodyne with 16 beams)
// Moving memory
c.bench_function("16k iter", |b| {
b.iter(|| {
black_box(roundtrip(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k iter_par", |b| {
b.iter(|| {
black_box(roundtrip_par(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k iter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_par_par(cloud_points_16k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("16k vec", |b| {
b.iter(|| {
black_box(roundtrip_vec(cloud_points_16k.clone()));
})
});
// Simple distance filter
c.bench_function("16k iter_filter", |b| {
b.iter(|| {
roundtrip_filter(black_box(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k filter_par", |b| {
b.iter(|| {
roundtrip_filter_par(black_box(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k filter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_filter_par_par(cloud_points_16k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("16k vec_filter", |b| {
b.iter(|| {
roundtrip_filter_vec(black_box(cloud_points_16k.clone()));
})
});
// Heavy computing
c.bench_function("16k iter_compute", |b| {
b.iter(|| {
roundtrip_computing(black_box(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k iter_compute_par", |b| {
b.iter(|| {
roundtrip_computing_par(black_box(cloud_points_16k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("16k iter_compute_par_par", |b| {
b.iter(|| {
roundtrip_computing_par_par(black_box(cloud_points_16k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("16k vec_compute", |b| {
b.iter(|| {
roundtrip_computing_vec(black_box(cloud_points_16k.clone()));
})
});
// 60k points (Ouster with 64 beams)
// Moving memory
c.bench_function("60k iter", |b| {
b.iter(|| {
black_box(roundtrip(cloud_points_60k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("60k iter_par", |b| {
b.iter(|| {
black_box(roundtrip_par(cloud_points_60k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("60k iter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_par_par(cloud_points_60k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("60k vec", |b| {
b.iter(|| {
black_box(roundtrip_vec(cloud_points_60k.clone()));
})
});
// 120k points (Ouster with 128 beams)
// Moving memory
c.bench_function("120k iter", |b| {
b.iter(|| {
black_box(roundtrip(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k iter_par", |b| {
b.iter(|| {
black_box(roundtrip_par(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k iter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_par_par(cloud_points_120k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("120k vec", |b| {
b.iter(|| {
black_box(roundtrip_vec(cloud_points_120k.clone()));
})
});
// Simple distance filter
c.bench_function("120k iter_filter", |b| {
b.iter(|| {
roundtrip_filter(black_box(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k filter_par", |b| {
b.iter(|| {
roundtrip_filter_par(black_box(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k filter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_filter_par_par(cloud_points_120k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("120k vec_filter", |b| {
b.iter(|| {
roundtrip_filter_vec(black_box(cloud_points_120k.clone()));
})
});
// Heavy computing
c.bench_function("120k iter_compute", |b| {
b.iter(|| {
roundtrip_computing(black_box(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k iter_compute_par", |b| {
b.iter(|| {
roundtrip_computing_par(black_box(cloud_points_120k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("120k iter_compute_par_par", |b| {
b.iter(|| {
roundtrip_computing_par_par(black_box(cloud_points_120k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("120k vec_compute", |b| {
b.iter(|| {
roundtrip_computing_vec(black_box(cloud_points_120k.clone()));
})
});
// 500k points (just to show how it scales)
// Moving memory
c.bench_function("500k iter", |b| {
b.iter(|| { b.iter(|| {
black_box(roundtrip(cloud_points_500k.clone())); black_box(roundtrip(cloud_points_500k.clone()));
}) })
}); });
#[cfg(feature = "rayon")] #[cfg(feature = "rayon")]
c.bench_function("roundtrip_par 500k", |b| { c.bench_function("500k iter_par", |b| {
b.iter(|| { b.iter(|| {
black_box(roundtrip_par(cloud_points_500k.clone())); black_box(roundtrip_par(cloud_points_500k.clone()));
}) })
}); });
#[cfg(feature = "rayon")]
c.bench_function("500k iter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_par_par(cloud_points_500k.clone()));
})
});
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
c.bench_function("roundtrip_vec 500k", |b| { c.bench_function("500k vec", |b| {
b.iter(|| { b.iter(|| {
black_box(roundtrip_vec(cloud_points_500k.clone())); black_box(roundtrip_vec(cloud_points_500k.clone()));
}) })
}); });
c.bench_function("roundtrip_filter 500k", |b| { // Simple distance filter
c.bench_function("500k iter_filter", |b| {
b.iter(|| { b.iter(|| {
roundtrip_filter(black_box(cloud_points_500k.clone())); roundtrip_filter(black_box(cloud_points_500k.clone()));
}) })
}); });
#[cfg(feature = "rayon")] #[cfg(feature = "rayon")]
c.bench_function("roundtrip_filter_par 500k", |b| { c.bench_function("500k filter_par", |b| {
b.iter(|| { b.iter(|| {
roundtrip_filter_par(black_box(cloud_points_500k.clone())); roundtrip_filter_par(black_box(cloud_points_500k.clone()));
}) })
}); });
c.bench_function("roundtrip_filter 1.5m", |b| { #[cfg(feature = "rayon")]
c.bench_function("500k filter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_filter_par_par(cloud_points_500k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("500k vec_filter", |b| {
b.iter(|| {
roundtrip_filter_vec(black_box(cloud_points_500k.clone()));
})
});
// Heavy computing
c.bench_function("500k iter_compute", |b| {
b.iter(|| {
roundtrip_computing(black_box(cloud_points_500k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("500k iter_compute_par", |b| {
b.iter(|| {
roundtrip_computing_par(black_box(cloud_points_500k.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("500k iter_compute_par_par", |b| {
b.iter(|| {
roundtrip_computing_par_par(black_box(cloud_points_500k.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("500k vec_compute", |b| {
b.iter(|| {
roundtrip_computing_vec(black_box(cloud_points_500k.clone()));
})
});
// 1.5m points (scale of small localmaps in SLAM)
// Moving memory
c.bench_function("1.5m iter", |b| {
b.iter(|| {
black_box(roundtrip(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("1.5m iter_par", |b| {
b.iter(|| {
black_box(roundtrip_par(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("1.5m iter_par_par", |b| {
b.iter(|| {
black_box(roundtrip_par_par(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("1.5m vec", |b| {
b.iter(|| {
black_box(roundtrip_vec(cloud_points_1_5m.clone()));
})
});
// Simple distance filter
c.bench_function("1.5m iter_filter", |b| {
b.iter(|| { b.iter(|| {
roundtrip_filter(black_box(cloud_points_1_5m.clone())); roundtrip_filter(black_box(cloud_points_1_5m.clone()));
}) })
}); });
#[cfg(feature = "rayon")] #[cfg(feature = "rayon")]
c.bench_function("roundtrip_filter_par 1.5m", |b| { c.bench_function("1.5m iter_par_filter", |b| {
b.iter(|| { b.iter(|| {
black_box(roundtrip_filter_par(cloud_points_1_5m.clone())); black_box(roundtrip_filter_par(cloud_points_1_5m.clone()));
}) })
}); });
#[cfg(feature = "rayon")]
c.bench_function("1.5m iter_par_par_filter", |b| {
b.iter(|| {
black_box(roundtrip_filter_par_par(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
c.bench_function("roundtrip_filter_vec 1.5m", |b| { c.bench_function("1.5m vec_filter", |b| {
b.iter(|| { b.iter(|| {
roundtrip_filter_vec(black_box(cloud_points_1_5m.clone())); roundtrip_filter_vec(black_box(cloud_points_1_5m.clone()));
}) })
}); });
// Heavy computing
c.bench_function("1.5m iter_compute", |b| {
b.iter(|| {
roundtrip_computing(black_box(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("1.5m iter_compute_par", |b| {
b.iter(|| {
roundtrip_computing_par(black_box(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "rayon")]
c.bench_function("1.5m iter_compute_par_par", |b| {
b.iter(|| {
roundtrip_computing_par_par(black_box(cloud_points_1_5m.clone()));
})
});
#[cfg(feature = "derive")]
c.bench_function("1.5m vec_compute", |b| {
b.iter(|| {
roundtrip_computing_vec(black_box(cloud_points_1_5m.clone()));
})
});
} }
criterion_group!(benches, roundtrip_benchmark); criterion_group!(benches, roundtrip_benchmark);

View File

@ -17,7 +17,8 @@ enum Label {
Car, Car,
} }
// Define a custom point with an enum (not supported by PointCloud2) // Define a custom point with an enum.
// This is normally not supported by PointCloud2 but we will show the library how to handle it.
#[derive(Debug, PartialEq, Clone, Default)] #[derive(Debug, PartialEq, Clone, Default)]
struct CustomPoint { struct CustomPoint {
x: f32, x: f32,
@ -27,7 +28,7 @@ struct CustomPoint {
my_custom_label: Label, my_custom_label: Label,
} }
// For our convenience // Some convenience functions to convert between the enum and u8.
impl From<Label> for u8 { impl From<Label> for u8 {
fn from(label: Label) -> Self { fn from(label: Label) -> Self {
match label { match label {
@ -49,45 +50,53 @@ impl From<u8> for Label {
} }
} }
// We implement the PointConvertible trait (needed for every custom point) impl CustomPoint {
fn new(x: f32, y: f32, z: f32, intensity: f32, my_custom_label: Label) -> Self {
Self {
x,
y,
z,
intensity,
my_custom_label,
}
}
}
// A ros_pointcloud2::Point takes as generic arguments: // We implement the PointConvertible trait (needed for every custom point).
// - coordinate type // RPCL2Point is the internal representation. It takes the number of fields as a generic argument.
// - dimension (xyz = 3)
// - metadata dimension (intensity + my_custom_label = 2)
impl From<CustomPoint> for RPCL2Point<5> { impl From<CustomPoint> for RPCL2Point<5> {
fn from(point: CustomPoint) -> Self { fn from(point: CustomPoint) -> Self {
RPCL2Point { [
fields: [ point.x.into(),
point.x.into(), point.y.into(),
point.y.into(), point.z.into(),
point.z.into(), point.intensity.into(),
point.intensity.into(), u8::from(point.my_custom_label).into(),
u8::from(point.my_custom_label).into(), ]
], .into()
}
} }
} }
impl From<RPCL2Point<5>> for CustomPoint { impl From<RPCL2Point<5>> for CustomPoint {
fn from(point: RPCL2Point<5>) -> Self { fn from(point: RPCL2Point<5>) -> Self {
Self { Self::new(
x: point.fields[0].get(), point[0].get(),
y: point.fields[1].get(), point[1].get(),
z: point.fields[2].get(), point[2].get(),
intensity: point.fields[3].get(), point[3].get(),
my_custom_label: point.fields[4].get(), // decoding the label from u8 point[4].get(),
} )
} }
} }
// Define how we want to name the fields in the message.
impl Fields<5> for CustomPoint { impl Fields<5> for CustomPoint {
fn field_names_ordered() -> [&'static str; 5] { fn field_names_ordered() -> [&'static str; 5] {
["x", "y", "z", "intensity", "my_custom_label"] // how we want to name the metadata in the message ["x", "y", "z", "intensity", "my_custom_label"]
} }
} }
// We implemented everything that is needed so we declare it as a PointConvertible // We implemented everything that is needed for PointConvertible, so we declare it as done.
#[cfg(not(feature = "derive"))] #[cfg(not(feature = "derive"))]
impl PointConvertible<5> for CustomPoint {} impl PointConvertible<5> for CustomPoint {}
@ -95,26 +104,28 @@ impl PointConvertible<5> for CustomPoint {}
// You don't need to do this if your CustomPoint has a field that is already supported by PointCloud2. // You don't need to do this if your CustomPoint has a field that is already supported by PointCloud2.
impl GetFieldDatatype for Label { impl GetFieldDatatype for Label {
fn field_datatype() -> FieldDatatype { fn field_datatype() -> FieldDatatype {
FieldDatatype::U8 // we want to encode the label as u8 FieldDatatype::U8 // Declare that we want to use u8 as the datatype for the label.
} }
} }
// Again, you don't need this with only supported field types. // Again, you don't need this with only supported field types.
// Technically, PointCloud2 supports big and little endian even though it is rarely used. // u8 -> Label
// 'be' stands for big endian and 'le' for little endian.
impl FromBytes for Label { impl FromBytes for Label {
// u8 -> Label // Technically, PointCloud2 supports big and little endian even though it is rarely used.
fn from_be_bytes(bytes: &[u8]) -> Self { // 'be' stands for big endian and 'le' for little endian.
fn from_be_bytes(bytes: PointDataBuffer) -> Self {
u8::from_be_bytes([bytes[0]]).into() u8::from_be_bytes([bytes[0]]).into()
} }
fn from_le_bytes(bytes: &[u8]) -> Self { fn from_le_bytes(bytes: PointDataBuffer) -> Self {
u8::from_le_bytes([bytes[0]]).into() u8::from_le_bytes([bytes[0]]).into()
} }
}
// Label -> u8 // Label -> u8
fn bytes(label: &Self) -> Vec<u8> { impl From<Label> for PointDataBuffer {
u8::bytes(&u8::from(*label)) fn from(label: Label) -> Self {
[u8::from(label)].into()
} }
} }

View File

@ -7,6 +7,6 @@ edition = "2021"
proc-macro = true proc-macro = true
[dependencies] [dependencies]
syn = "2" syn = "1"
quote = "1" quote = "1"
proc-macro2 = "1" proc-macro2 = "1"

View File

@ -8,53 +8,91 @@ use syn::{parse_macro_input, DeriveInput};
fn get_allowed_types() -> HashMap<&'static str, usize> { fn get_allowed_types() -> HashMap<&'static str, usize> {
let mut allowed_datatypes = HashMap::<&'static str, usize>::new(); let mut allowed_datatypes = HashMap::<&'static str, usize>::new();
allowed_datatypes.insert("f32", 4); allowed_datatypes.insert("f32", std::mem::size_of::<f32>());
allowed_datatypes.insert("f64", 8); allowed_datatypes.insert("f64", std::mem::size_of::<f64>());
allowed_datatypes.insert("i32", 4); allowed_datatypes.insert("i32", std::mem::size_of::<i32>());
allowed_datatypes.insert("u8", 1); allowed_datatypes.insert("u8", std::mem::size_of::<u8>());
allowed_datatypes.insert("u16", 2); allowed_datatypes.insert("u16", std::mem::size_of::<u16>());
allowed_datatypes.insert("u32", 4); allowed_datatypes.insert("u32", std::mem::size_of::<u32>());
allowed_datatypes.insert("i8", 1); allowed_datatypes.insert("i8", std::mem::size_of::<i8>());
allowed_datatypes.insert("i16", 2); allowed_datatypes.insert("i16", std::mem::size_of::<i16>());
allowed_datatypes allowed_datatypes
} }
/// Derive macro for the `Fields` trait. // Given a field, get the value of the `rpcl2` renaming attribute like
/// // #[rpcl2(name = "new_name")]
/// Given the ordering from the source code of your struct, this macro will generate an array of field names. fn get_ros_fields_attribute(attrs: &[syn::Attribute]) -> Option<syn::Lit> {
#[proc_macro_derive(RosFields)] for attr in attrs {
pub fn ros_point_fields_derive(input: TokenStream) -> TokenStream { if attr.path.is_ident("rpcl2") {
let input = parse_macro_input!(input as DeriveInput); let meta = attr.parse_meta().unwrap();
let name = input.clone().ident; if let syn::Meta::List(meta_list) = meta {
for nested_meta in meta_list.nested {
let fields = match input.data { if let syn::NestedMeta::Meta(meta) = nested_meta {
syn::Data::Struct(ref data) => data.fields.clone(), if let syn::Meta::NameValue(meta_name_value) = meta {
_ => return syn::Error::new_spanned(input, "Only structs are supported").to_compile_error().into(), if meta_name_value.path.is_ident("name") {
}; return Some(meta_name_value.lit);
}
let allowed_datatypes = get_allowed_types(); }
}
if fields.is_empty() { }
return syn::Error::new_spanned(input, "No fields found").to_compile_error().into(); }
}
for field in fields.iter() {
let ty = field.ty.to_token_stream().to_string();
if !allowed_datatypes.contains_key(&ty.as_str()) {
return syn::Error::new_spanned(field, "Field type not allowed").to_compile_error().into();
} }
} }
None
}
let field_len_token: usize = fields.len(); fn struct_field_rename_array(input: &DeriveInput) -> Vec<String> {
let fields = match input.data {
let field_names = fields.iter().map(|field| { syn::Data::Struct(ref data) => match data.fields {
let field_name = field.ident.as_ref().unwrap(); syn::Fields::Named(ref fields) => &fields.named,
quote! { stringify!(#field_name) } _ => panic!("StructNames can only be derived for structs with named fields"),
}).collect::<Vec<_>>(); },
_ => panic!("StructNames can only be derived for structs"),
};
let field_impl = quote! { fields
impl Fields<#field_len_token> for #name { .iter()
fn field_names_ordered() -> [&'static str; #field_len_token] { .map(|field| {
let field_name = field.ident.as_ref().unwrap();
let ros_fields_attr = get_ros_fields_attribute(&field.attrs);
match ros_fields_attr {
Some(ros_fields) => match ros_fields {
syn::Lit::Str(lit_str) => {
let val = lit_str.value();
if val.is_empty() {
panic!("Empty string literals are not allowed for the rpcl2 attribute");
}
val
}
_ => {
panic!("Only string literals are allowed for the rpcl2 attribute")
}
},
None => String::from(field_name.to_token_stream().to_string()),
}
})
.collect()
}
/// This macro will implement the `Fields` trait for your struct so you can use your point for the PointCloud2 conversion.
///
/// Use the rename attribute if your struct field name should be different from the ROS field name.
#[proc_macro_derive(Fields, attributes(rpcl2))]
pub fn ros_point_fields_derive(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let struct_name = &input.ident;
let field_names = struct_field_rename_array(&input)
.into_iter()
.map(|field_name| {
quote! { #field_name }
});
let field_names_len = field_names.len();
let expanded = quote! {
impl Fields<#field_names_len> for #struct_name {
fn field_names_ordered() -> [&'static str; #field_names_len] {
[ [
#(#field_names,)* #(#field_names,)*
] ]
@ -62,7 +100,8 @@ pub fn ros_point_fields_derive(input: TokenStream) -> TokenStream {
} }
}; };
TokenStream::from(field_impl) // Return the generated implementation
expanded.into()
} }
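A hypothetical usage sketch of this derive (the struct and field names are invented for illustration; it assumes the macro is re-exported through `ros_pointcloud2::prelude` as shown in prelude.rs below):
```rust
use ros_pointcloud2::prelude::*;

#[derive(Fields)]
struct MyLabeledPoint {
    x: f32,
    y: f32,
    z: f32,
    #[rpcl2(name = "intensity")] // rename the ROS field while keeping a short Rust name
    i: f32,
}

// The derive generates the ordered field names used for the PointCloud2 layout.
assert_eq!(
    MyLabeledPoint::field_names_ordered(),
    ["x", "y", "z", "intensity"]
);
```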
/// This macro will fully implement the `PointConvertible` trait for your struct so you can use your point for the PointCloud2 conversion. /// This macro will fully implement the `PointConvertible` trait for your struct so you can use your point for the PointCloud2 conversion.
@ -70,35 +109,44 @@ pub fn ros_point_fields_derive(input: TokenStream) -> TokenStream {
/// Note that the repr(C) attribute is required for the struct to work efficiently with C++ PCL. /// Note that the repr(C) attribute is required for the struct to work efficiently with C++ PCL.
/// With Rust layout optimizations, the struct might not work with the PCL library but the message still conforms to the specification of PointCloud2. /// With Rust layout optimizations, the struct might not work with the PCL library but the message still conforms to the specification of PointCloud2.
/// Furthermore, Rust layout can lead to smaller messages to be sent over the network. /// Furthermore, Rust layout can lead to smaller messages to be sent over the network.
#[proc_macro_derive(RosFull)] #[proc_macro_derive(PointConvertible)]
pub fn ros_point_derive(input: TokenStream) -> TokenStream { pub fn ros_point_derive(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput); let input = parse_macro_input!(input as DeriveInput);
let name = input.clone().ident; let name = input.clone().ident;
let fields = match input.data { let fields = match input.data {
syn::Data::Struct(ref data) => data.fields.clone(), syn::Data::Struct(ref data) => data.fields.clone(),
_ => return syn::Error::new_spanned(input, "Only structs are supported").to_compile_error().into(), _ => {
return syn::Error::new_spanned(input, "Only structs are supported")
.to_compile_error()
.into()
}
}; };
let allowed_datatypes = get_allowed_types(); let allowed_datatypes = get_allowed_types();
if fields.is_empty() { if fields.is_empty() {
return syn::Error::new_spanned(input, "No fields found").to_compile_error().into(); return syn::Error::new_spanned(input, "No fields found")
.to_compile_error()
.into();
} }
for field in fields.iter() { for field in fields.iter() {
let ty = field.ty.to_token_stream().to_string(); let ty = field.ty.to_token_stream().to_string();
if !allowed_datatypes.contains_key(&ty.as_str()) { if !allowed_datatypes.contains_key(&ty.as_str()) {
return syn::Error::new_spanned(field, "Field type not allowed").to_compile_error().into(); return syn::Error::new_spanned(field, "Field type not allowed")
.to_compile_error()
.into();
} }
} }
let field_len_token: usize = fields.len(); let field_len_token: usize = fields.len();
let field_names = fields.iter().map(|field| { let field_names = struct_field_rename_array(&input)
let field_name = field.ident.as_ref().unwrap(); .into_iter()
quote! { stringify!(#field_name) } .map(|field_name| {
}).collect::<Vec<_>>(); quote! { #field_name }
});
let field_impl = quote! { let field_impl = quote! {
impl ros_pointcloud2::Fields<#field_len_token> for #name { impl ros_pointcloud2::Fields<#field_len_token> for #name {
@ -110,12 +158,16 @@ pub fn ros_point_derive(input: TokenStream) -> TokenStream {
} }
}; };
let field_names_get = fields.iter().enumerate().map(|(idx, f)| { let field_names_get = fields
let field_name = f.ident.as_ref().unwrap(); .iter()
quote! { #field_name: point.fields[#idx].get() } .enumerate()
}).collect::<Vec<_>>(); .map(|(idx, f)| {
let field_name = f.ident.as_ref().unwrap();
quote! { #field_name: point[#idx].get() }
})
.collect::<Vec<_>>();
let from_my_point = quote! { let from_my_point = quote! {
impl From<ros_pointcloud2::RPCL2Point<#field_len_token>> for #name { impl From<ros_pointcloud2::RPCL2Point<#field_len_token>> for #name {
fn from(point: ros_pointcloud2::RPCL2Point<#field_len_token>) -> Self { fn from(point: ros_pointcloud2::RPCL2Point<#field_len_token>) -> Self {
Self { Self {
@ -125,17 +177,18 @@ pub fn ros_point_derive(input: TokenStream) -> TokenStream {
} }
}; };
let field_names_into = fields.iter().map(|f| { let field_names_into = fields
let field_name = f.ident.as_ref().unwrap(); .iter()
quote! { point.#field_name.into() } .map(|f| {
}).collect::<Vec<_>>(); let field_name = f.ident.as_ref().unwrap();
quote! { point.#field_name.into() }
})
.collect::<Vec<_>>();
let from_custom_point = quote! { let from_custom_point = quote! {
impl From<#name> for ros_pointcloud2::RPCL2Point<#field_len_token> { impl From<#name> for ros_pointcloud2::RPCL2Point<#field_len_token> {
fn from(point: #name) -> Self { fn from(point: #name) -> Self {
ros_pointcloud2::RPCL2Point { [ #(#field_names_into,)* ].into()
fields: [ #(#field_names_into,)* ]
}
} }
} }
}; };
@ -152,4 +205,4 @@ pub fn ros_point_derive(input: TokenStream) -> TokenStream {
}); });
TokenStream::from(out) TokenStream::from(out)
} }
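A hypothetical end-to-end sketch of the `PointConvertible` derive, mirroring the integration tests further below; the point type and values are invented, and it assumes `try_from_vec`/`try_into_vec` accept any derived point under the `derive` feature:
```rust
use ros_pointcloud2::prelude::*;

#[derive(Debug, PartialEq, Clone, Default, PointConvertible, TypeLayout)]
#[repr(C)] // required so the layout can match C++ PCL expectations
struct MyPointXYZI {
    x: f32,
    y: f32,
    z: f32,
    intensity: f32,
}

let cloud = vec![MyPointXYZI { x: 1.0, y: 2.0, z: 3.0, intensity: 0.5 }];
let msg = PointCloud2Msg::try_from_vec(cloud.clone()).unwrap();
let restored: Vec<MyPointXYZI> = msg.try_into_vec().unwrap();
assert_eq!(cloud, restored);
```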

View File

@ -1,3 +1,5 @@
use std::str::FromStr;
use crate::*; use crate::*;
/// Datatypes from the [PointField message](http://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/PointField.html). /// Datatypes from the [PointField message](http://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/PointField.html).
@ -13,9 +15,8 @@ pub enum FieldDatatype {
I8, I8,
I16, I16,
/// While RGB is not officially supported by ROS, it is used in practice as a packed f32. /// While RGB is not officially supported by ROS, it is used in the tooling as a packed f32.
/// To make it easier to work with and avoid packing code, the /// To make it easy to work with and avoid packing code, the [`ros_pointcloud2::points::RGB`] union is supported here and handled like a f32.
/// [`ros_pointcloud2::points::RGB`] union is supported here and handled like a f32.
RGB, RGB,
} }
@ -35,11 +36,11 @@ impl FieldDatatype {
} }
} }
impl TryFrom<String> for FieldDatatype { impl FromStr for FieldDatatype {
type Error = MsgConversionError; type Err = MsgConversionError;
fn try_from(value: String) -> Result<Self, Self::Error> { fn from_str(s: &str) -> Result<Self, Self::Err> {
match value.to_lowercase().as_str() { match s {
"f32" => Ok(FieldDatatype::F32), "f32" => Ok(FieldDatatype::F32),
"f64" => Ok(FieldDatatype::F64), "f64" => Ok(FieldDatatype::F64),
"i32" => Ok(FieldDatatype::I32), "i32" => Ok(FieldDatatype::I32),
@ -49,7 +50,7 @@ impl TryFrom<String> for FieldDatatype {
"i8" => Ok(FieldDatatype::I8), "i8" => Ok(FieldDatatype::I8),
"i16" => Ok(FieldDatatype::I16), "i16" => Ok(FieldDatatype::I16),
"rgb" => Ok(FieldDatatype::RGB), "rgb" => Ok(FieldDatatype::RGB),
_ => Err(MsgConversionError::UnsupportedFieldType(value)), _ => Err(MsgConversionError::UnsupportedFieldType(s.into())),
} }
} }
} }
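A short illustrative snippet for the new `FromStr` implementation (an assumption-laden sketch: it presumes `FieldDatatype` is reachable through the prelude and that the lowercase names listed above are the only accepted spellings):
```rust
use std::str::FromStr;
use ros_pointcloud2::prelude::*;

// Known lowercase names parse to the matching variant.
match FieldDatatype::from_str("f32") {
    Ok(datatype) => assert!(matches!(datatype, FieldDatatype::F32)),
    Err(_) => unreachable!("f32 is listed as a supported field type"),
}

// Unknown names are rejected with an UnsupportedFieldType error.
assert!(FieldDatatype::from_str("bool").is_err());
```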
@ -148,6 +149,14 @@ impl From<FieldDatatype> for u8 {
} }
} }
impl TryFrom<&ros_types::PointFieldMsg> for FieldDatatype {
type Error = MsgConversionError;
fn try_from(value: &ros_types::PointFieldMsg) -> Result<Self, Self::Error> {
Self::try_from(value.datatype)
}
}
/// Matching field names from each data point. /// Matching field names from each data point.
/// Always make sure to use the same order as in your conversion implementation to have a correct mapping. /// Always make sure to use the same order as in your conversion implementation to have a correct mapping.
/// ///
@ -175,190 +184,219 @@ pub trait Fields<const N: usize> {
fn field_names_ordered() -> [&'static str; N]; fn field_names_ordered() -> [&'static str; N];
} }
pub struct PointDataBuffer([u8; 8]);
impl std::ops::Index<usize> for PointDataBuffer {
type Output = u8;
fn index(&self, index: usize) -> &Self::Output {
&self.0[index]
}
}
impl PointDataBuffer {
pub fn new(data: [u8; 8]) -> Self {
Self(data)
}
pub fn as_slice(&self) -> &[u8] {
&self.0
}
pub fn raw(self) -> [u8; 8] {
self.0
}
pub fn from_slice(data: &[u8]) -> Self {
let mut buffer = [0; 8];
data.iter().enumerate().for_each(|(i, &v)| buffer[i] = v);
Self(buffer)
}
}
impl From<&[u8]> for PointDataBuffer {
fn from(data: &[u8]) -> Self {
Self::from_slice(data)
}
}
impl<const N: usize> From<[u8; N]> for PointDataBuffer {
fn from(data: [u8; N]) -> Self {
Self::from(data.as_slice())
}
}
impl From<i8> for PointDataBuffer {
fn from(x: i8) -> Self {
x.to_le_bytes().into()
}
}
impl From<i16> for PointDataBuffer {
fn from(x: i16) -> Self {
x.to_le_bytes().into()
}
}
impl From<u16> for PointDataBuffer {
fn from(x: u16) -> Self {
x.to_le_bytes().into()
}
}
impl From<i32> for PointDataBuffer {
fn from(x: i32) -> Self {
x.to_le_bytes().into()
}
}
impl From<u32> for PointDataBuffer {
fn from(x: u32) -> Self {
x.to_le_bytes().into()
}
}
impl From<f32> for PointDataBuffer {
fn from(x: f32) -> Self {
x.to_le_bytes().into()
}
}
impl From<f64> for PointDataBuffer {
fn from(x: f64) -> Self {
x.to_le_bytes().into()
}
}
impl From<u8> for PointDataBuffer {
fn from(x: u8) -> Self {
x.to_le_bytes().into()
}
}
impl From<points::RGB> for PointDataBuffer {
fn from(x: points::RGB) -> Self {
x.raw().to_le_bytes().into()
}
}
/// This trait is used to convert a byte slice to a primitive type. /// This trait is used to convert a byte slice to a primitive type.
/// All PointField types are supported. /// All PointField types are supported.
pub trait FromBytes: Default + Sized + Copy + GetFieldDatatype { pub trait FromBytes: Default + Sized + Copy + GetFieldDatatype + Into<PointDataBuffer> {
fn from_be_bytes(bytes: &[u8]) -> Self; fn from_be_bytes(bytes: PointDataBuffer) -> Self;
fn from_le_bytes(bytes: &[u8]) -> Self; fn from_le_bytes(bytes: PointDataBuffer) -> Self;
fn bytes(_: &Self) -> Vec<u8>;
} }
impl FromBytes for i8 { impl FromBytes for i8 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0]]) Self::from_be_bytes([bytes[0]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0]]) Self::from_le_bytes([bytes[0]])
} }
#[inline]
fn bytes(x: &i8) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for i16 { impl FromBytes for i16 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0], bytes[1]]) Self::from_be_bytes([bytes[0], bytes[1]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0], bytes[1]]) Self::from_le_bytes([bytes[0], bytes[1]])
} }
#[inline]
fn bytes(x: &i16) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for u16 { impl FromBytes for u16 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0], bytes[1]]) Self::from_be_bytes([bytes[0], bytes[1]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0], bytes[1]]) Self::from_le_bytes([bytes[0], bytes[1]])
} }
#[inline]
fn bytes(x: &u16) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for u32 { impl FromBytes for u32 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
#[inline]
fn bytes(x: &u32) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for f32 { impl FromBytes for f32 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
#[inline]
fn bytes(x: &f32) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for points::RGB { impl FromBytes for points::RGB {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::new_from_packed_f32(f32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])) Self::new_from_packed_f32(f32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::new_from_packed_f32(f32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])) Self::new_from_packed_f32(f32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
} }
#[inline]
fn bytes(x: &points::RGB) -> Vec<u8> {
Vec::from(x.raw().to_le_bytes())
}
} }
impl FromBytes for i32 { impl FromBytes for i32 {
#[inline] #[inline]
fn from_be_bytes(bytes: &[u8]) -> Self { fn from_be_bytes(bytes: PointDataBuffer) -> Self {
Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
fn from_le_bytes(bytes: &[u8]) -> Self { fn from_le_bytes(bytes: PointDataBuffer) -> Self {
Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) Self::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]])
} }
fn bytes(x: &i32) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for f64 { impl FromBytes for f64 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([ Self::from_be_bytes([
bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
]) ])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([ Self::from_le_bytes([
bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7],
]) ])
} }
#[inline]
fn bytes(x: &f64) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
impl FromBytes for u8 { impl FromBytes for u8 {
#[inline] fn from_be_bytes(bytes: PointDataBuffer) -> Self {
fn from_be_bytes(bytes: &[u8]) -> Self {
Self::from_be_bytes([bytes[0]]) Self::from_be_bytes([bytes[0]])
} }
#[inline] fn from_le_bytes(bytes: PointDataBuffer) -> Self {
fn from_le_bytes(bytes: &[u8]) -> Self {
Self::from_le_bytes([bytes[0]]) Self::from_le_bytes([bytes[0]])
} }
#[inline]
fn bytes(x: &u8) -> Vec<u8> {
Vec::from(x.to_le_bytes())
}
} }
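To illustrate the Vec-free byte conversion introduced here, a small hedged sketch (assumes `PointDataBuffer` and `FromBytes` are exported through the prelude, as in prelude.rs below):
```rust
use ros_pointcloud2::prelude::*;

// Encoding now fills a fixed 8-byte stack buffer instead of allocating a Vec<u8>.
let buffer: PointDataBuffer = 42.5f32.into();

// Decoding goes through the FromBytes trait, which takes the buffer by value.
let restored = <f32 as FromBytes>::from_le_bytes(buffer);
assert_eq!(restored, 42.5f32);
```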
#[derive(Default, Clone, Debug, PartialEq, Copy)] pub enum ByteSimilarity {
pub enum Endianness { Equal,
Big, Overlapping,
Different,
}
#[derive(Default, Clone, Debug, PartialEq, Copy)]
pub enum Endian {
Big,
#[default] #[default]
Little, Little,
} }
#[cfg(test)] #[derive(Default, Clone, Debug, PartialEq, Copy)]
mod tests { pub enum Denseness {
use super::*; #[default]
Dense,
#[test] Sparse,
fn from_bytes() {
i8::bytes(&1);
u8::bytes(&1);
i16::bytes(&1);
u16::bytes(&1);
i32::bytes(&1);
u32::bytes(&1);
f32::bytes(&1.0);
f64::bytes(&1.0);
}
} }

View File

@ -1,6 +1,6 @@
use crate::{ use crate::{
convert::{Endianness, FieldDatatype}, convert::{Endian, FieldDatatype},
Fields, MsgConversionError, PointCloud2Msg, PointConvertible, PointMeta, RPCL2Point, Fields, MsgConversionError, PointCloud2Msg, PointConvertible, PointData, RPCL2Point,
}; };
/// The PointCloudIterator provides an iterator abstraction of the PointCloud2Msg. /// The PointCloudIterator provides an iterator abstraction of the PointCloud2Msg.
@ -162,7 +162,7 @@ struct ByteBufferView<const N: usize> {
point_step_size: usize, point_step_size: usize,
offsets: [usize; N], offsets: [usize; N],
meta: Vec<(String, FieldDatatype)>, meta: Vec<(String, FieldDatatype)>,
endianness: Endianness, endian: Endian,
} }
impl<const N: usize> ByteBufferView<N> { impl<const N: usize> ByteBufferView<N> {
@ -173,7 +173,7 @@ impl<const N: usize> ByteBufferView<N> {
end_point_idx: usize, end_point_idx: usize,
offsets: [usize; N], offsets: [usize; N],
meta: Vec<(String, FieldDatatype)>, meta: Vec<(String, FieldDatatype)>,
endianness: Endianness, endian: Endian,
) -> Self { ) -> Self {
Self { Self {
data: std::sync::Arc::<[u8]>::from(data), data: std::sync::Arc::<[u8]>::from(data),
@ -182,7 +182,7 @@ impl<const N: usize> ByteBufferView<N> {
point_step_size, point_step_size,
offsets, offsets,
meta, meta,
endianness, endian,
} }
} }
@ -196,16 +196,16 @@ impl<const N: usize> ByteBufferView<N> {
let offset = (self.start_point_idx + idx) * self.point_step_size; let offset = (self.start_point_idx + idx) * self.point_step_size;
// TODO memcpy entire point at once, then extract fields? // TODO memcpy entire point at once, then extract fields?
let mut meta = [PointMeta::default(); N]; let mut meta = [PointData::default(); N];
meta.iter_mut() meta.iter_mut()
.zip(self.offsets.iter()) .zip(self.offsets.iter())
.zip(self.meta.iter()) .zip(self.meta.iter())
.for_each(|((p_meta, in_point_offset), (_, meta_type))| { .for_each(|((p_meta, in_point_offset), (_, meta_type))| {
*p_meta = PointMeta::from_buffer( *p_meta = PointData::from_buffer(
&self.data, &self.data,
offset + in_point_offset, offset + in_point_offset,
*meta_type, *meta_type,
self.endianness, self.endian,
); );
}); });
@ -221,7 +221,7 @@ impl<const N: usize> ByteBufferView<N> {
point_step_size: self.point_step_size, point_step_size: self.point_step_size,
offsets: self.offsets, offsets: self.offsets,
meta: self.meta.clone(), meta: self.meta.clone(),
endianness: self.endianness, endian: self.endian,
} }
} }
@ -268,7 +268,7 @@ where
.into_iter() .into_iter()
.map(|(name, _)| (*name).to_owned()) .map(|(name, _)| (*name).to_owned())
.collect::<Vec<String>>(); .collect::<Vec<String>>();
return Err(MsgConversionError::FieldNotFound(names_not_found)); return Err(MsgConversionError::FieldsNotFound(names_not_found));
} }
let ordered_fieldnames = C::field_names_ordered(); let ordered_fieldnames = C::field_names_ordered();
@ -303,14 +303,8 @@ where
*meta_offset = offset; *meta_offset = offset;
}); });
let endian = if cloud.is_bigendian {
Endianness::Big
} else {
Endianness::Little
};
let point_step_size = cloud.point_step as usize; let point_step_size = cloud.point_step as usize;
let cloud_length = cloud.width as usize * cloud.height as usize; let cloud_length = cloud.dimensions.width as usize * cloud.dimensions.height as usize;
if point_step_size * cloud_length != cloud.data.len() { if point_step_size * cloud_length != cloud.data.len() {
return Err(MsgConversionError::DataLengthMismatch); return Err(MsgConversionError::DataLengthMismatch);
} }
@ -323,7 +317,7 @@ where
return Err(MsgConversionError::DataLengthMismatch); return Err(MsgConversionError::DataLengthMismatch);
} }
let cloud_length = cloud.width as usize * cloud.height as usize; let cloud_length = cloud.dimensions.width as usize * cloud.dimensions.height as usize;
let data = ByteBufferView::new( let data = ByteBufferView::new(
cloud.data, cloud.data,
@ -332,7 +326,7 @@ where
cloud_length - 1, cloud_length - 1,
offsets, offsets,
meta, meta,
endian, cloud.endian,
); );
Ok(Self { Ok(Self {

File diff suppressed because it is too large

View File

@ -149,11 +149,7 @@ impl Fields<3> for PointXYZ {
impl From<RPCL2Point<3>> for PointXYZ { impl From<RPCL2Point<3>> for PointXYZ {
fn from(point: RPCL2Point<3>) -> Self { fn from(point: RPCL2Point<3>) -> Self {
Self::new( Self::new(point[0].get(), point[1].get(), point[2].get())
point.fields[0].get(),
point.fields[1].get(),
point.fields[2].get(),
)
} }
} }
@ -200,10 +196,10 @@ impl Fields<4> for PointXYZI {
impl From<RPCL2Point<4>> for PointXYZI { impl From<RPCL2Point<4>> for PointXYZI {
fn from(point: RPCL2Point<4>) -> Self { fn from(point: RPCL2Point<4>) -> Self {
Self::new( Self::new(
point.fields[0].get(), point[0].get(),
point.fields[1].get(), point[1].get(),
point.fields[2].get(), point[2].get(),
point.fields[3].get(), point[3].get(),
) )
} }
} }
@ -257,10 +253,10 @@ impl Fields<4> for PointXYZL {
impl From<RPCL2Point<4>> for PointXYZL { impl From<RPCL2Point<4>> for PointXYZL {
fn from(point: RPCL2Point<4>) -> Self { fn from(point: RPCL2Point<4>) -> Self {
Self::new( Self::new(
point.fields[0].get(), point[0].get(),
point.fields[1].get(), point[1].get(),
point.fields[2].get(), point[2].get(),
point.fields[3].get(), point[3].get(),
) )
} }
} }
@ -327,10 +323,10 @@ impl Fields<4> for PointXYZRGB {
impl From<RPCL2Point<4>> for PointXYZRGB { impl From<RPCL2Point<4>> for PointXYZRGB {
fn from(point: RPCL2Point<4>) -> Self { fn from(point: RPCL2Point<4>) -> Self {
Self { Self {
x: point.fields[0].get(), x: point[0].get(),
y: point.fields[1].get(), y: point[1].get(),
z: point.fields[2].get(), z: point[2].get(),
rgb: point.fields[3].get(), rgb: point[3].get(),
} }
} }
} }
@ -399,11 +395,11 @@ impl Fields<5> for PointXYZRGBA {
impl From<RPCL2Point<5>> for PointXYZRGBA { impl From<RPCL2Point<5>> for PointXYZRGBA {
fn from(point: RPCL2Point<5>) -> Self { fn from(point: RPCL2Point<5>) -> Self {
Self { Self {
x: point.fields[0].get(), x: point[0].get(),
y: point.fields[1].get(), y: point[1].get(),
z: point.fields[2].get(), z: point[2].get(),
rgb: point.fields[3].get::<f32>().into(), rgb: point[3].get::<f32>().into(),
a: point.fields[4].get(), a: point[4].get(),
} }
} }
} }
@ -489,13 +485,13 @@ impl Fields<7> for PointXYZRGBNormal {
impl From<RPCL2Point<7>> for PointXYZRGBNormal { impl From<RPCL2Point<7>> for PointXYZRGBNormal {
fn from(point: RPCL2Point<7>) -> Self { fn from(point: RPCL2Point<7>) -> Self {
Self { Self {
x: point.fields[0].get(), x: point[0].get(),
y: point.fields[1].get(), y: point[1].get(),
z: point.fields[2].get(), z: point[2].get(),
rgb: point.fields[3].get::<f32>().into(), rgb: point[3].get::<f32>().into(),
normal_x: point.fields[4].get(), normal_x: point[4].get(),
normal_y: point.fields[5].get(), normal_y: point[5].get(),
normal_z: point.fields[6].get(), normal_z: point[6].get(),
} }
} }
} }
@ -571,13 +567,13 @@ impl Fields<7> for PointXYZINormal {
impl From<RPCL2Point<7>> for PointXYZINormal { impl From<RPCL2Point<7>> for PointXYZINormal {
fn from(point: RPCL2Point<7>) -> Self { fn from(point: RPCL2Point<7>) -> Self {
Self::new( Self::new(
point.fields[0].get(), point[0].get(),
point.fields[1].get(), point[1].get(),
point.fields[2].get(), point[2].get(),
point.fields[3].get(), point[3].get(),
point.fields[4].get(), point[4].get(),
point.fields[5].get(), point[5].get(),
point.fields[6].get(), point[6].get(),
) )
} }
} }
@ -654,11 +650,11 @@ impl Fields<5> for PointXYZRGBL {
impl From<RPCL2Point<5>> for PointXYZRGBL { impl From<RPCL2Point<5>> for PointXYZRGBL {
fn from(point: RPCL2Point<5>) -> Self { fn from(point: RPCL2Point<5>) -> Self {
Self { Self {
x: point.fields[0].get(), x: point[0].get(),
y: point.fields[1].get(), y: point[1].get(),
z: point.fields[2].get(), z: point[2].get(),
rgb: point.fields[3].get::<f32>().into(), rgb: point[3].get::<f32>().into(),
label: point.fields[4].get(), label: point[4].get(),
} }
} }
} }
@ -722,12 +718,12 @@ impl Fields<6> for PointXYZNormal {
impl From<RPCL2Point<6>> for PointXYZNormal { impl From<RPCL2Point<6>> for PointXYZNormal {
fn from(point: RPCL2Point<6>) -> Self { fn from(point: RPCL2Point<6>) -> Self {
Self::new( Self::new(
point.fields[0].get(), point[0].get(),
point.fields[1].get(), point[1].get(),
point.fields[2].get(), point[2].get(),
point.fields[3].get(), point[3].get(),
point.fields[4].get(), point[4].get(),
point.fields[5].get(), point[5].get(),
) )
} }
} }

View File

@ -12,4 +12,4 @@ pub use type_layout::TypeLayout;
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
pub use rpcl2_derive::*; pub use rpcl2_derive::*;
pub use crate::convert::{FieldDatatype, FromBytes, GetFieldDatatype}; pub use crate::convert::{FieldDatatype, FromBytes, GetFieldDatatype, PointDataBuffer};

View File

@ -46,8 +46,10 @@ impl From<r2r::sensor_msgs::msg::PointCloud2> for crate::PointCloud2Msg {
}, },
frame_id: msg.header.frame_id, frame_id: msg.header.frame_id,
}, },
height: msg.height, dimensions: crate::CloudDimensions {
width: msg.width, width: msg.width,
height: msg.height,
},
fields: msg fields: msg
.fields .fields
.into_iter() .into_iter()
@ -58,11 +60,19 @@ impl From<r2r::sensor_msgs::msg::PointCloud2> for crate::PointCloud2Msg {
count: field.count, count: field.count,
}) })
.collect(), .collect(),
is_bigendian: msg.is_bigendian, endian: if msg.is_bigendian {
crate::Endian::Big
} else {
crate::Endian::Little
},
point_step: msg.point_step, point_step: msg.point_step,
row_step: msg.row_step, row_step: msg.row_step,
data: msg.data, data: msg.data,
is_dense: msg.is_dense, dense: if msg.is_dense {
crate::convert::Denseness::Dense
} else {
crate::convert::Denseness::Sparse
},
} }
} }
} }
@ -78,8 +88,8 @@ impl From<crate::PointCloud2Msg> for r2r::sensor_msgs::msg::PointCloud2 {
}, },
frame_id: msg.header.frame_id, frame_id: msg.header.frame_id,
}, },
height: msg.height, height: msg.dimensions.height,
width: msg.width, width: msg.dimensions.width,
fields: msg fields: msg
.fields .fields
.into_iter() .into_iter()
@ -90,11 +100,17 @@ impl From<crate::PointCloud2Msg> for r2r::sensor_msgs::msg::PointCloud2 {
count: field.count, count: field.count,
}) })
.collect(), .collect(),
is_bigendian: msg.is_bigendian, is_bigendian: match msg.endian {
crate::Endian::Big => true,
crate::Endian::Little => false,
},
point_step: msg.point_step, point_step: msg.point_step,
row_step: msg.row_step, row_step: msg.row_step,
data: msg.data, data: msg.data,
is_dense: msg.is_dense, is_dense: match msg.dense {
crate::convert::Denseness::Dense => true,
crate::convert::Denseness::Sparse => false,
},
} }
} }
} }
@ -111,8 +127,10 @@ impl From<rosrust_msg::sensor_msgs::PointCloud2> for crate::PointCloud2Msg {
}, },
frame_id: msg.header.frame_id, frame_id: msg.header.frame_id,
}, },
height: msg.height, dimensions: crate::CloudDimensions {
width: msg.width, width: msg.width,
height: msg.height,
},
fields: msg fields: msg
.fields .fields
.into_iter() .into_iter()
@ -123,11 +141,19 @@ impl From<rosrust_msg::sensor_msgs::PointCloud2> for crate::PointCloud2Msg {
count: field.count, count: field.count,
}) })
.collect(), .collect(),
is_bigendian: msg.is_bigendian, endian: if msg.is_bigendian {
crate::Endian::Big
} else {
crate::Endian::Little
},
point_step: msg.point_step, point_step: msg.point_step,
row_step: msg.row_step, row_step: msg.row_step,
data: msg.data, data: msg.data,
is_dense: msg.is_dense, dense: if msg.is_dense {
crate::convert::Denseness::Dense
} else {
crate::convert::Denseness::Sparse
},
} }
} }
} }
@ -144,8 +170,8 @@ impl From<crate::PointCloud2Msg> for rosrust_msg::sensor_msgs::PointCloud2 {
}, },
frame_id: msg.header.frame_id, frame_id: msg.header.frame_id,
}, },
height: msg.height, height: msg.dimensions.height,
width: msg.width, width: msg.dimensions.width,
fields: msg fields: msg
.fields .fields
.into_iter() .into_iter()
@ -156,11 +182,19 @@ impl From<crate::PointCloud2Msg> for rosrust_msg::sensor_msgs::PointCloud2 {
count: field.count, count: field.count,
}) })
.collect(), .collect(),
is_bigendian: msg.is_bigendian, is_bigendian: msg.endian == crate::Endian::Big,
point_step: msg.point_step, point_step: msg.point_step,
row_step: msg.row_step, row_step: msg.row_step,
data: msg.data, data: msg.data,
is_dense: msg.is_dense, is_dense: msg.dense == crate::convert::Denseness::Dense,
} }
} }
} }

View File

@ -78,7 +78,7 @@ fn write_cloud_from_vec() {
#[test] #[test]
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
fn custom_xyz_f32() { fn custom_xyz_f32() {
#[derive(Debug, PartialEq, Clone, Default, RosFull, TypeLayout)] #[derive(Debug, PartialEq, Clone, Default, PointConvertible, TypeLayout)]
#[repr(C)] #[repr(C)]
struct CustomPoint { struct CustomPoint {
x: f32, x: f32,
@ -138,7 +138,7 @@ fn custom_xyzi_f32() {
}, },
]; ];
#[derive(Debug, PartialEq, Clone, Default, RosFull, TypeLayout)] #[derive(Debug, PartialEq, Clone, Default, PointConvertible, TypeLayout)]
#[repr(C)] #[repr(C)]
struct CustomPointXYZI { struct CustomPointXYZI {
x: f32, x: f32,
@ -153,7 +153,7 @@ fn custom_xyzi_f32() {
#[test] #[test]
#[cfg(feature = "derive")] #[cfg(feature = "derive")]
fn custom_rgba_f32() { fn custom_rgba_f32() {
#[derive(Debug, PartialEq, Clone, Default, RosFull, TypeLayout)] #[derive(Debug, PartialEq, Clone, Default, PointConvertible, TypeLayout)]
#[repr(C)] #[repr(C)]
struct CustomPoint { struct CustomPoint {
x: f32, x: f32,
@ -392,9 +392,9 @@ fn write_less_than_available() {
impl From<RPCL2Point<3>> for CustomPoint { impl From<RPCL2Point<3>> for CustomPoint {
fn from(point: RPCL2Point<3>) -> Self { fn from(point: RPCL2Point<3>) -> Self {
Self { Self {
x: point.fields[0].get(), x: point[0].get(),
y: point.fields[1].get(), y: point[1].get(),
z: point.fields[2].get(), z: point[2].get(),
dummy: 0.0, dummy: 0.0,
} }
} }