PointCloud
HORUS provides a pool-backed RAII PointCloud type for LiDAR and 3D sensing workloads. Point cloud data lives in shared memory and is transported zero-copy between nodes — only a lightweight descriptor is transmitted through topics.
Creating a PointCloud
use horus::prelude::*;
// XYZ point cloud: 10,000 points, 3 fields per point (x, y, z), float32
let mut cloud = PointCloud::from_xyz(&points)?; // 10_000 points
// XYZI point cloud (with intensity): 4 fields per point
let mut cloud_i = PointCloud::from_xyzi(&points)?; // 50_000 points
// XYZRGB point cloud (with color): 6 fields per point
let mut cloud_rgb = PointCloud::from_xyzrgb(&points)?; // 20_000 points, 6 fields
Writing Point Data
use horus::prelude::*;
let mut cloud = PointCloud::from_xyz(&points)?; // 3 points
// Copy raw bytes into the cloud
let points: Vec<f32> = vec![
1.0, 2.0, 3.0, // point 0
4.0, 5.0, 6.0, // point 1
7.0, 8.0, 9.0, // point 2
];
cloud.copy_from_f32(&points);
// Or write directly via mutable data access
let data: &mut [u8] = cloud.data_mut();
// ... fill data ...
Reading Points
use horus::prelude::*;
let cloud = PointCloud::from_xyz(&points)?; // 10_000 points
// Extract all XYZ coordinates (F32 clouds only)
if let Some(points) = cloud.extract_xyz() {
for p in &points[..5] {
println!("({:.2}, {:.2}, {:.2})", p[0], p[1], p[2]);
}
}
// Access a single point as raw bytes
if let Some(point_bytes) = cloud.point_at(0) {
let floats = cloud.point_as_f32(0);
println!("Point 0: x={}, y={}, z={}", floats[0], floats[1], floats[2]);
}
// Zero-copy access to the entire buffer
let raw: &[u8] = cloud.data();
Metadata and Properties
use horus::prelude::*;
let mut cloud = PointCloud::from_xyzi(&points)?; // 10_000 points
// Set frame and timestamp (method chaining)
cloud.set_frame_id("velodyne_top")
.set_timestamp_ns(1_700_000_000_000_000_000);
// Read back
println!("Frame: {}", cloud.frame_id());
println!("Timestamp: {} ns", cloud.timestamp_ns());
// Point layout queries
println!("Points: {}", cloud.point_count()); // 10000
println!("Fields/point: {}", cloud.fields_per_point()); // 4
println!("Is XYZ: {}", cloud.is_xyz()); // false (4 fields)
println!("Has intensity: {}", cloud.has_intensity()); // true
println!("Has color: {}", cloud.has_color()); // false
// Type info
println!("Dtype: {:?}", cloud.dtype()); // F32
println!("Total bytes: {}", cloud.nbytes()); // 10000 * 4 * 4
println!("Is CPU: {}", cloud.is_cpu()); // true
Sending and Receiving via Topic
Only a lightweight descriptor is transmitted through topics. The point data stays in shared memory — true zero-copy IPC.
use horus::prelude::*;
// Publisher
let pub_topic: Topic<PointCloud> = Topic::new("lidar.points")?;
let mut cloud = PointCloud::from_xyzi(&points)?; // 64_000 points
cloud.set_frame_id("velodyne_top");
// ... fill point data from sensor driver ...
pub_topic.send(cloud);
// Subscriber
let sub_topic: Topic<PointCloud> = Topic::new("lidar.points")?;
if let Some(cloud) = sub_topic.recv() {
println!("Received {} points from '{}'",
cloud.point_count(), cloud.frame_id());
if let Some(xyz) = cloud.extract_xyz() {
let closest = xyz.iter()
.map(|p| (p[0]*p[0] + p[1]*p[1] + p[2]*p[2]).sqrt())
.fold(f32::INFINITY, f32::min);
println!("Closest point: {:.2}m", closest);
}
}
LiDAR Processing Pipeline
A complete node that receives raw LiDAR scans, filters ground points, and publishes the result:
use horus::prelude::*;
struct LidarFilterNode {
raw_sub: Topic<PointCloud>,
filtered_pub: Topic<PointCloud>,
ground_threshold: f32,
}
impl Node for LidarFilterNode {
fn name(&self) -> &str { "LidarFilter" }
fn tick(&mut self) {
if let Some(raw) = self.raw_sub.recv() {
if let Some(points) = raw.extract_xyz() {
// Filter out ground points (z below threshold)
let non_ground: Vec<f32> = points.iter()
.filter(|p| p[2] > self.ground_threshold)
.flat_map(|p| p.iter().copied())
.collect();
let num_points = non_ground.len() / 3;
if let Ok(mut filtered) = PointCloud::new(
num_points as u32, 3, TensorDtype::F32
) {
filtered.copy_from_f32(&non_ground)
.set_frame_id(raw.frame_id())
.set_timestamp_ns(raw.timestamp_ns());
self.filtered_pub.send(filtered);
}
}
}
}
}
API Reference
Constructors
from_xyz
pub fn from_xyz(points: &[[f32; 3]]) -> Result<Self>
Create a point cloud from XYZ coordinate arrays. Recommended for most LiDAR and depth camera data.
Parameters:
points: &[[f32; 3]] — Array of [x, y, z] coordinates in meters. Coordinate frame: right-hand, Z-up (horus convention).
Returns: Result<PointCloud> — Err(MemoryError::PoolExhausted) if tensor pool is full.
Memory: Data is copied into the SHM tensor pool. Subsequent Topic::send() is zero-copy.
let points = vec![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
let cloud = PointCloud::from_xyz(&points)?;
assert_eq!(cloud.point_count(), 2);
assert!(cloud.is_xyz());
from_xyzi
pub fn from_xyzi(points: &[[f32; 4]]) -> Result<Self>
Create from XYZI arrays (with intensity). The intensity range is sensor-dependent — commonly normalized reflectance in 0.0-1.0, or raw values in 0-255 (as used by the `PointXYZI` struct).
Parameters:
points: &[[f32; 4]] — Array of [x, y, z, intensity]. Units: meters + unitless intensity (sensor-dependent range).
from_xyzrgb
pub fn from_xyzrgb(points: &[[f32; 6]]) -> Result<Self>
Create from XYZRGB arrays. Color values are 0.0-255.0 as floats.
Parameters:
points: &[[f32; 6]] — Array of [x, y, z, r, g, b]. Units: meters + 0-255 color.
| Constructor | Fields/Point | Use Case |
|---|---|---|
from_xyz() | 3 | LiDAR, depth cameras |
from_xyzi() | 4 | LiDAR with reflectance |
from_xyzrgb() | 6 | RGB-D cameras, colored reconstructions |
point_at
pub fn point_at(&self, idx: u64) -> Option<&[u8]>
Get raw bytes of the i-th point. Returns a slice of length fields_per_point * sizeof(f32).
Returns: None if index is out of bounds.
Example:
if let Some(bytes) = cloud.point_at(0) {
// For XYZ cloud: 12 bytes = 3 x f32
let x = f32::from_le_bytes(bytes[0..4].try_into().unwrap());
println!("First point X: {}", x);
}
extract_xyz
pub fn extract_xyz(&self) -> Option<Vec<[f32; 3]>>
Extract all XYZ coordinates as float arrays. Validates alignment and bounds. Only works for F32 dtype.
Returns: None if dtype is not F32 or data is misaligned.
Example:
if let Some(points) = cloud.extract_xyz() {
for p in &points {
println!("({:.2}, {:.2}, {:.2})", p[0], p[1], p[2]);
}
println!("{} points total", points.len());
}
Metadata
| Method | Returns | Description |
|---|---|---|
point_count() | u64 | Number of points in the cloud |
fields_per_point() | u32 | Fields per point (3=XYZ, 4=XYZI, 6=XYZRGB) |
is_xyz() | bool | True if this is a plain XYZ cloud (3 fields) |
has_intensity() | bool | True if cloud includes an intensity field (4+ fields) |
has_color() | bool | True if cloud includes color fields (6+ fields) |
Data Access (Zero-Copy)
| Method | Returns | Description |
|---|---|---|
data() | &[u8] | Immutable access to the raw point buffer |
data_mut() | &mut [u8] | Mutable access to the raw point buffer |
copy_from_f32(src) | &mut Self | Copy f32 values into the point buffer (chainable)
Frame and Timestamp
| Method | Returns | Description |
|---|---|---|
set_frame_id(id) | &mut Self | Set sensor/coordinate frame identifier (chainable) |
set_timestamp_ns(ts) | &mut Self | Set timestamp in nanoseconds (chainable) |
frame_id() | &str | Get the frame identifier |
timestamp_ns() | u64 | Get timestamp in nanoseconds |
Type Info
| Method | Returns | Description |
|---|---|---|
dtype() | TensorDtype | Data type of each field (e.g., F32, F64) |
nbytes() | u64 | Total size of the point buffer in bytes |
is_cpu() | bool | Whether data resides on CPU (shared memory) |
Point Types
Fixed-size point structs optimized for zero-copy transport.
PointXYZ
Basic 3D point (12 bytes).
use horus::prelude::*;
let p = PointXYZ::new(1.0, 2.0, 3.0);
println!("Distance from origin: {:.2}m", p.distance());
let q = PointXYZ::new(4.0, 6.0, 3.0);
println!("Distance between: {:.2}m", p.distance_to(&q));
| Field | Type | Description |
|---|---|---|
x | f32 | X coordinate (meters) |
y | f32 | Y coordinate (meters) |
z | f32 | Z coordinate (meters) |
| Method | Returns | Description |
|---|---|---|
new(x, y, z) | PointXYZ | Create a point |
distance() | f32 | Euclidean distance from the origin |
distance_to(other) | f32 | Euclidean distance to another point |
PointXYZI
3D point with intensity (16 bytes). Common for LiDAR sensors (Velodyne, Ouster, Livox).
use horus::prelude::*;
let p = PointXYZI::new(1.0, 2.0, 3.0, 128.0);
// Convert from PointXYZ (zero intensity)
let xyz = PointXYZ::new(1.0, 2.0, 3.0);
let with_intensity = PointXYZI::from_xyz(xyz);
// Convert back to PointXYZ (drop intensity)
let xyz_only = p.xyz();
| Field | Type | Description |
|---|---|---|
x, y, z | f32 | Coordinates (meters) |
intensity | f32 | Reflectance intensity (typically 0-255) |
| Method | Returns | Description |
|---|---|---|
new(x, y, z, intensity) | PointXYZI | Create a point with intensity |
from_xyz(xyz) | PointXYZI | Convert from PointXYZ (intensity = 0) |
xyz() | PointXYZ | Convert to PointXYZ (drop intensity) |
PointXYZRGB
3D point with RGB color (16 bytes). Common for RGB-D cameras like Intel RealSense.
use horus::prelude::*;
let p = PointXYZRGB::new(1.0, 2.0, 3.0, 255, 0, 0); // Red point
// Convert from PointXYZ (defaults to white)
let xyz = PointXYZ::new(1.0, 2.0, 3.0);
let colored = PointXYZRGB::from_xyz(xyz);
// Get packed RGB as u32 (0xRRGGBBAA)
let packed = p.rgb_packed();
// Convert back to PointXYZ (drop color)
let xyz_only = p.xyz();
| Field | Type | Description |
|---|---|---|
x, y, z | f32 | Coordinates (meters) |
r, g, b | u8 | Color components (0-255) |
a | u8 | Alpha/padding (255 default) |
| Method | Returns | Description |
|---|---|---|
new(x, y, z, r, g, b) | PointXYZRGB | Create a colored point |
from_xyz(xyz) | PointXYZRGB | Convert from PointXYZ (white, alpha 255) |
rgb_packed() | u32 | Get color as packed 0xRRGGBBAA |
xyz() | PointXYZ | Convert to PointXYZ (drop color) |
Python API (PyPointCloud)
Constructor and Factories
import horus
import numpy as np
# Create from scratch
cloud = horus.PointCloud(num_points=10000, fields=3, dtype="float32")
# Create from NumPy array (shape must be [N, fields])
arr = np.random.randn(10000, 3).astype(np.float32)
cloud = horus.PointCloud.from_numpy(arr)
# Create from PyTorch tensor
import torch
tensor = torch.randn(10000, 4)
cloud = horus.PointCloud.from_torch(tensor)
Conversions
# Convert to NumPy (zero-copy when possible)
arr = cloud.to_numpy() # shape: (N, fields)
# Convert to PyTorch tensor
tensor = cloud.to_torch() # shape: (N, fields)
# Convert to JAX array
jax_arr = cloud.to_jax() # shape: (N, fields)
Access and Properties
cloud = horus.PointCloud(num_points=10000, fields=4, dtype="float32")
# Properties
print(cloud.point_count) # 10000
print(cloud.fields_per_point) # 4
print(cloud.dtype) # "float32"
print(cloud.nbytes) # 160000
# Point access
coords = cloud.point_at(0) # [x, y, z, intensity] as list of floats
# Metadata
cloud.set_frame_id("velodyne_top")
cloud.set_timestamp_ns(1700000000000000000)
print(cloud.frame_id) # "velodyne_top"
print(cloud.timestamp_ns) # 1700000000000000000
# Layout queries
cloud.is_xyz() # False (4 fields)
cloud.has_intensity() # True
cloud.has_color() # False
Python LiDAR Pipeline
import horus
import numpy as np
sub = horus.Topic(horus.PointCloud, endpoint="lidar.points")
pub = horus.Topic(horus.PointCloud, endpoint="lidar.filtered")
while True:
cloud = sub.recv()
if cloud is None:
continue
# Convert to NumPy for processing
points = cloud.to_numpy() # (N, 3)
# Remove points below ground plane
mask = points[:, 2] > -0.3
filtered = points[mask]
# Publish filtered cloud
out = horus.PointCloud.from_numpy(filtered.astype(np.float32))
out.set_frame_id(cloud.frame_id)
out.set_timestamp_ns(cloud.timestamp_ns)
pub.send(out)
See Also
- Perception Messages - PointField, PointCloudHeader, DepthImage, landmarks, tracking
- Tensor Messages - TensorDtype, Device, auto-managed tensor pools
- Tensor API - Advanced pool management
- Sensor Messages - LaserScan for 2D LiDAR
- Vision Messages - Image, CameraInfo, Detection3D
- Python Memory Types — Python PointCloud API with NumPy/PyTorch zero-copy interop
- Python Perception Types — PointCloudBuffer, DetectionList, TrackedObject