Chunkified, deserialization-free Point Cloud visualizers #313

Open · wants to merge 8 commits into main
Changes from all commits
4 changes: 4 additions & 0 deletions Cargo.lock
@@ -4302,6 +4302,7 @@ dependencies = [
"ahash",
"anyhow",
"backtrace",
"bytemuck",
"criterion",
"crossbeam",
"document-features",
@@ -4919,7 +4920,9 @@ name = "re_space_view"
version = "0.18.0-alpha.1+dev"
dependencies = [
"ahash",
"bytemuck",
"egui",
"itertools 0.13.0",
"nohash-hasher",
"re_chunk_store",
"re_entity_db",
@@ -4999,6 +5002,7 @@ dependencies = [
"re_log_types",
"re_math",
"re_query",
"re_query2",
"re_renderer",
"re_space_view",
"re_tracing",
1 change: 1 addition & 0 deletions crates/store/re_chunk/Cargo.toml
@@ -55,6 +55,7 @@ arrow2 = { workspace = true, features = [
"compute_filter",
] }
backtrace.workspace = true
bytemuck.workspace = true
document-features.workspace = true
itertools.workspace = true
nohash-hasher.workspace = true
125 changes: 121 additions & 4 deletions crates/store/re_chunk/src/iter.rs
@@ -1,13 +1,16 @@
use std::sync::Arc;

use arrow2::{
    array::{Array as ArrowArray, PrimitiveArray},
    array::{
        Array as ArrowArray, FixedSizeListArray as ArrowFixedSizeListArray,
        PrimitiveArray as ArrowPrimitiveArray, Utf8Array as ArrowUtf8Array,
    },
    Either,
};
use itertools::izip;
use itertools::{izip, Itertools};

use re_log_types::{TimeInt, Timeline};
use re_types_core::{Component, ComponentName};
use re_types_core::{ArrowString, Component, ComponentName};

use crate::{Chunk, ChunkTimeline, RowId};

@@ -125,6 +128,7 @@ impl Chunk {
    /// See also:
    /// * [`Self::iter_component`].
    /// * [`Self::iter_primitive`].
    #[inline]
    pub fn iter_component_arrays(
        &self,
        component_name: &ComponentName,
@@ -143,6 +147,9 @@
    /// Use this when working with simple arrow datatypes and performance matters (e.g. scalars,
    /// points, etc).
    ///
    /// See also:
    /// * [`Self::iter_primitive_array`].
    /// * [`Self::iter_string`].
    /// * [`Self::iter_component_arrays`].
    /// * [`Self::iter_component`].
    #[inline]
@@ -157,7 +164,7 @@
        let Some(values) = list_array
            .values()
            .as_any()
            .downcast_ref::<PrimitiveArray<T>>()
            .downcast_ref::<ArrowPrimitiveArray<T>>()
        else {
            if cfg!(debug_assertions) {
                panic!("downcast failed for {component_name}, data discarded");
@@ -174,6 +181,116 @@
                .map(move |(idx, len)| &values[idx..idx + len]),
        )
    }

    /// Returns an iterator over the raw primitive arrays of a [`Chunk`], for a given component.
    ///
    /// This is a very fast path: the entire column will be downcasted at once, and then every
    /// component batch will be a slice reference into that global slice.
    /// Use this when working with simple arrow datatypes and performance matters (e.g. scalars,
    /// points, etc).
    ///
    /// See also:
    /// * [`Self::iter_primitive`].
    /// * [`Self::iter_string`].
    /// * [`Self::iter_component_arrays`].
    /// * [`Self::iter_component`].
    pub fn iter_primitive_array<const N: usize, T: arrow2::types::NativeType>(
        &self,
        component_name: &ComponentName,
    ) -> impl Iterator<Item = &[[T; N]]> + '_
    where
        [T; N]: bytemuck::Pod,
    {
        let Some(list_array) = self.components.get(component_name) else {
            return Either::Left(std::iter::empty());
        };

        let Some(fixed_size_list_array) = list_array
            .values()
            .as_any()
            .downcast_ref::<ArrowFixedSizeListArray>()
        else {
            if cfg!(debug_assertions) {
                panic!("downcast failed for {component_name}, data discarded");
            } else {
                re_log::error_once!("downcast failed for {component_name}, data discarded");
            }
            return Either::Left(std::iter::empty());
        };

        let Some(values) = fixed_size_list_array
            .values()
            .as_any()
            .downcast_ref::<ArrowPrimitiveArray<T>>()
        else {
            if cfg!(debug_assertions) {
                panic!("downcast failed for {component_name}, data discarded");
            } else {
                re_log::error_once!("downcast failed for {component_name}, data discarded");
            }
            return Either::Left(std::iter::empty());
        };

        let size = fixed_size_list_array.size();
        let values = values.values().as_slice();

        // NOTE: No need for validity checks here, `iter_offsets` already takes care of that.
        Either::Right(
            self.iter_component_offsets(component_name)
                .map(move |(idx, len)| {
                    bytemuck::cast_slice(&values[idx * size..idx * size + len * size])
                }),
        )
    }

    /// Returns an iterator over the raw strings of a [`Chunk`], for a given component.
    ///
    /// This is a very fast path: the entire column will be downcasted at once, and then every
    /// component batch will be a slice reference into that global slice.
    /// Use this when working with simple arrow datatypes and performance matters (e.g. labels).
    ///
    /// See also:
    /// * [`Self::iter_primitive`].
    /// * [`Self::iter_primitive_array`].
    /// * [`Self::iter_component_arrays`].
    /// * [`Self::iter_component`].
    pub fn iter_string(
        &self,
        component_name: &ComponentName,
    ) -> impl Iterator<Item = Vec<ArrowString>> + '_ {
        let Some(list_array) = self.components.get(component_name) else {
            return Either::Left(std::iter::empty());
        };

        let Some(utf8_array) = list_array
            .values()
            .as_any()
            .downcast_ref::<ArrowUtf8Array<i32>>()
        else {
            if cfg!(debug_assertions) {
                panic!("downcast failed for {component_name}, data discarded");
            } else {
                re_log::error_once!("downcast failed for {component_name}, data discarded");
            }
            return Either::Left(std::iter::empty());
        };

        let values = utf8_array.values();
        let offsets = utf8_array.offsets();
        let lengths = utf8_array.offsets().lengths().collect_vec();

        // NOTE: No need for validity checks here, `iter_offsets` already takes care of that.
        Either::Right(
            self.iter_component_offsets(component_name)
                .map(move |(idx, len)| {
                    let offsets = &offsets.as_slice()[idx..idx + len];
                    let lengths = &lengths.as_slice()[idx..idx + len];
                    izip!(offsets, lengths)
                        .map(|(&idx, &len)| ArrowString(values.clone().sliced(idx as _, len)))
                        .collect_vec()
                }),
        )
    }
}

// ---
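Note for reviewers: below is a minimal usage sketch of the new `Chunk::iter_primitive_array` accessor, assuming a chunk that holds `Position3D` components (stored in Arrow as a fixed-size list of 3 `f32`s, which is what the point-cloud visualizers rely on) and assuming `re_types` and `itertools` are available as they are in the viewer crates. The helper function and its name are illustrative, not part of this diff.

```rust
use itertools::izip;
use re_chunk::{Chunk, Timeline};
use re_types::{components::Position3D, Component as _, Loggable as _};

/// Hypothetical helper: tallies the points in a chunk without deserializing
/// them into intermediate `Vec<Position3D>`s.
fn count_points(chunk: &Chunk, timeline: &Timeline) -> usize {
    let component_name = Position3D::name();

    // `iter_primitive_array::<3, f32>` yields one `&[[f32; 3]]` per row: a
    // borrowed view straight into the chunk's Arrow buffer, no copies made.
    izip!(
        chunk.iter_component_indices(timeline, &component_name),
        chunk.iter_primitive_array::<3, f32>(&component_name)
    )
    .map(|((_data_time, _row_id), positions)| positions.len())
    .sum()
}
```

`iter_string` follows the same pattern but yields a `Vec<ArrowString>` per row, so only the per-row `Vec` is allocated while the string bytes stay in the underlying Utf8 buffer.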
2 changes: 2 additions & 0 deletions crates/viewer/re_space_view/Cargo.toml
@@ -35,5 +35,7 @@ re_viewer_context.workspace = true
re_viewport_blueprint.workspace = true

ahash.workspace = true
bytemuck.workspace = true
egui.workspace = true
itertools.workspace = true
nohash-hasher.workspace = true
2 changes: 1 addition & 1 deletion crates/viewer/re_space_view/src/lib.rs
@@ -24,7 +24,7 @@ pub use query2::{
pub use results_ext::{HybridLatestAtResults, HybridResults, RangeResultsExt};
pub use results_ext2::{
    HybridLatestAtResults as HybridLatestAtResults2, HybridResults as HybridResults2,
    RangeResultsExt as RangeResultsExt2,
    HybridResultsChunkIter, RangeResultsExt as RangeResultsExt2,
};
pub use screenshot::ScreenshotMode;
pub use view_property_ui::view_property_ui;
14 changes: 10 additions & 4 deletions crates/viewer/re_space_view/src/query2.rs
@@ -137,22 +137,28 @@ fn query_overrides<'a>(
            .resolved_component_overrides
            .get(component_name)
        {
            let current_query = match override_value.store_kind {
                re_log_types::StoreKind::Recording => ctx.current_query(),
                re_log_types::StoreKind::Blueprint => ctx.blueprint_query.clone(),
            };

            #[allow(clippy::match_same_arms)] // see @jleibs comment below
            let component_override_result = match override_value.store_kind {
                re_log_types::StoreKind::Recording => {
                    // TODO(jleibs): This probably is not right, but this code path is not used
                    // currently. This may want to use range_query instead depending on how
                    // component override data-references are resolved.
                    ctx.store_context.blueprint.query_caches2().latest_at(
                        ctx.store_context.blueprint.store(),
                        &ctx.current_query(),
                        &current_query,
                        &override_value.path,
                        [*component_name],
                    )
                }
                re_log_types::StoreKind::Blueprint => {
                    ctx.store_context.blueprint.query_caches2().latest_at(
                        ctx.store_context.blueprint.store(),
                        ctx.blueprint_query,
                        &current_query,
                        &override_value.path,
                        [*component_name],
                    )
@@ -168,10 +174,10 @@
            // This is extra tricky since the promise hasn't been resolved yet so we can't
            // actually look at the data.
            if let Some(value) = component_override_result.components.get(component_name) {
                let index = value.index(&ctx.current_query().timeline());
                let index = value.index(&current_query.timeline());

                // NOTE: This can never happen, but I'd rather it happens than an unwrap.
                debug_assert!(index.is_some());
                debug_assert!(index.is_some(), "{value:#?}");
                let index = index.unwrap_or((TimeInt::STATIC, RowId::ZERO));

                overrides.add(*component_name, index, value.clone());
80 changes: 80 additions & 0 deletions crates/viewer/re_space_view/src/results_ext2.rs
@@ -361,3 +361,83 @@ impl<'a> RangeResultsExt for HybridResults<'a> {
        }
    }
}

// ---

use re_chunk::{RowId, TimeInt, Timeline};
use re_chunk_store::external::{re_chunk, re_chunk::external::arrow2};

/// The iterator type backing [`HybridResults::iter_as`].
pub struct HybridResultsChunkIter<'a> {
    chunks: Cow<'a, [Chunk]>,
    timeline: Timeline,
    component_name: ComponentName,
}

impl<'a> HybridResultsChunkIter<'a> {
    /// Iterate as indexed primitives.
    ///
    /// See [`Chunk::iter_primitive`] for more information.
    pub fn primitive<T: arrow2::types::NativeType>(
        &'a self,
    ) -> impl Iterator<Item = ((TimeInt, RowId), &'a [T])> + 'a {
        self.chunks.iter().flat_map(move |chunk| {
            itertools::izip!(
                chunk.iter_component_indices(&self.timeline, &self.component_name),
                chunk.iter_primitive::<T>(&self.component_name)
            )
        })
    }

    /// Iterate as indexed primitive arrays.
    ///
    /// See [`Chunk::iter_primitive_array`] for more information.
    pub fn primitive_array<const N: usize, T: arrow2::types::NativeType>(
        &'a self,
    ) -> impl Iterator<Item = ((TimeInt, RowId), &'a [[T; N]])> + 'a
    where
        [T; N]: bytemuck::Pod,
    {
        self.chunks.iter().flat_map(move |chunk| {
            itertools::izip!(
                chunk.iter_component_indices(&self.timeline, &self.component_name),
                chunk.iter_primitive_array::<N, T>(&self.component_name)
            )
        })
    }

    /// Iterate as indexed UTF-8 strings.
    ///
    /// See [`Chunk::iter_string`] for more information.
    pub fn string(
        &'a self,
    ) -> impl Iterator<Item = ((TimeInt, RowId), Vec<re_types_core::ArrowString>)> + 'a {
        self.chunks.iter().flat_map(|chunk| {
            itertools::izip!(
                chunk.iter_component_indices(&self.timeline, &self.component_name),
                chunk.iter_string(&self.component_name)
            )
        })
    }
}

impl<'a> HybridResults<'a> {
    /// Returns a zero-copy iterator over all the results for the given `(timeline, component)` pair.
    ///
    /// Call one of the following methods on the returned [`HybridResultsChunkIter`]:
    /// * [`HybridResultsChunkIter::primitive`]
    /// * [`HybridResultsChunkIter::primitive_array`]
    /// * [`HybridResultsChunkIter::string`]
    pub fn iter_as(
        &'a self,
        timeline: Timeline,
        component_name: ComponentName,
    ) -> HybridResultsChunkIter<'a> {
        let chunks = self.get_optional_chunks(&component_name);
        HybridResultsChunkIter {
            chunks,
            timeline,
            component_name,
        }
    }
}
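To show how the pieces above fit together, here is a hedged sketch of a visualizer-side loop driving `iter_as` on the chunk-based results type (exported as `HybridResults2` from `re_space_view`, see the `lib.rs` hunk above). The function name, the packed-`u32` color representation, and the plain `izip!` pairing (which truncates to the shorter stream rather than applying the viewer's real clamped-join semantics) are illustrative assumptions, not part of this diff.

```rust
use itertools::izip;
use re_chunk::Timeline;
use re_space_view::HybridResults2 as HybridResults;
use re_types::{
    components::{Color, Position3D},
    Component as _, Loggable as _,
};

/// Illustrative only: walks query results one row at a time, reading positions
/// and colors as zero-copy slices into the underlying chunks.
fn process_points(results: &HybridResults<'_>, timeline: Timeline) {
    let all_positions = results.iter_as(timeline, Position3D::name());
    let all_colors = results.iter_as(timeline, Color::name());

    // Positions arrive as `&[[f32; 3]]`, colors as `&[u32]`; neither has been
    // deserialized into component structs along the way.
    for (((_time, _row_id), positions), (_, colors)) in izip!(
        all_positions.primitive_array::<3, f32>(),
        all_colors.primitive::<u32>()
    ) {
        // A real visualizer would hand these straight to the renderer's
        // point-cloud builder; here we only look at the batch sizes.
        let _ = (positions.len(), colors.len());
    }
}
```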
1 change: 1 addition & 0 deletions crates/viewer/re_space_view_spatial/Cargo.toml
@@ -28,6 +28,7 @@ re_log_types.workspace = true
re_log.workspace = true
re_math = { workspace = true, features = ["serde"] }
re_query.workspace = true
re_query2.workspace = true
re_renderer = { workspace = true, features = [
"import-gltf",
"import-obj",
4 changes: 2 additions & 2 deletions crates/viewer/re_space_view_spatial/src/visualizers/mod.rs
@@ -27,8 +27,8 @@ pub use segmentation_images::SegmentationImageVisualizer;
pub use transform3d_arrows::{add_axis_arrows, AxisLengthDetector, Transform3DArrowsVisualizer};
pub use utilities::{
    bounding_box_for_textured_rect, entity_iterator, process_labels_2d, process_labels_3d,
    textured_rect_from_image, textured_rect_from_tensor, SpatialViewVisualizerData, UiLabel,
    UiLabelTarget, MAX_NUM_LABELS_PER_ENTITY,
    process_labels_3d_2, textured_rect_from_image, textured_rect_from_tensor,
    SpatialViewVisualizerData, UiLabel, UiLabelTarget, MAX_NUM_LABELS_PER_ENTITY,
};

// ---