Commit: clippy (#2527)
* clippy

* clippy

* clippy

* clippy

* convert allow to expect and remove unused

* cargo fmt

* cleanup

* export sample

* clippy
PSeitz authored Oct 22, 2024
1 parent dca508b commit 21d0570
Showing 73 changed files with 87 additions and 91 deletions.
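
Most of the diff converts `#[allow(...)]` attributes to `#[expect(...)]`, which was stabilized in Rust 1.81. The sketch below is not code from this commit; it illustrates the difference with hypothetical lint sites:

// Hypothetical example: `#[allow]` silences a lint unconditionally, while
// `#[expect]` additionally emits `unfulfilled_lint_expectations` if the
// expected lint never fires, so stale suppressions surface on their own.

// Fulfilled under `cargo clippy`: eight arguments exceeds the default
// `too_many_arguments` threshold of seven.
#[expect(clippy::too_many_arguments)]
fn configure(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) -> u32 {
    [a, b, c, d, e, f, g, h].iter().map(|&x| u32::from(x)).sum()
}

// An `#[allow]` stays silent forever: if `unused_helper` is later used,
// nothing reminds you that the attribute has become dead weight.
#[allow(dead_code)]
fn unused_helper() {}

fn main() {
    println!("{}", configure(1, 2, 3, 4, 5, 6, 7, 8));
}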
bitpacker/src/filter_vec/mod.rs (0 additions & 1 deletion)
@@ -35,7 +35,6 @@ const IMPLS: [FilterImplPerInstructionSet; 2] = [
 const IMPLS: [FilterImplPerInstructionSet; 1] = [FilterImplPerInstructionSet::Scalar];
 
 impl FilterImplPerInstructionSet {
-    #[allow(unused_variables)]
     #[inline]
     fn from(code: u8) -> FilterImplPerInstructionSet {
         #[cfg(target_arch = "x86_64")]
columnar/src/block_accessor.rs (1 addition & 1 deletion)
@@ -66,7 +66,7 @@ impl<T: PartialOrd + Copy + std::fmt::Debug + Send + Sync + 'static + Default>
         &'a self,
         docs: &'a [u32],
         accessor: &Column<T>,
-    ) -> impl Iterator<Item = (DocId, T)> + '_ {
+    ) -> impl Iterator<Item = (DocId, T)> + 'a {
         if accessor.index.get_cardinality().is_full() {
             docs.iter().cloned().zip(self.val_cache.iter().cloned())
         } else {
(file name missing from the capture; 1 addition & 1 deletion)
@@ -82,7 +82,7 @@ impl<'a> SparseBlock<'a> {
     }
 
     #[inline]
-    #[allow(clippy::comparison_chain)]
+    #[expect(clippy::comparison_chain)]
     // Looks for the element in the block. Returns the positions if found.
     fn binary_search(&self, target: u16) -> Result<u16, u16> {
         let data = &self.0;
columnar/src/column_values/u128_based/mod.rs (1 addition & 1 deletion)
@@ -128,7 +128,7 @@ pub fn open_u128_as_compact_u64(mut bytes: OwnedBytes) -> io::Result<Arc<dyn Col
 }
 
 #[cfg(test)]
-pub mod tests {
+pub(crate) mod tests {
     use super::*;
     use crate::column_values::u64_based::{
         serialize_and_load_u64_based_column_values, serialize_u64_based_column_values,
columnar/src/columnar/writer/column_operation.rs (1 addition & 1 deletion)
@@ -122,7 +122,7 @@ impl<T> From<T> for ColumnOperation<T> {
 // In order to limit memory usage, and in order
 // to benefit from the stacker, we do this by serialization our data
 // as "Symbols".
-#[allow(clippy::from_over_into)]
+#[expect(clippy::from_over_into)]
 pub(super) trait SymbolValue: Clone + Copy {
     // Serializes the symbol into the given buffer.
     // Returns the number of bytes written into the buffer.

GitHub Actions / clippy annotation on line 125:
warning: this lint expectation is unfulfilled
   --> columnar/src/columnar/writer/column_operation.rs:125:10
    |
125 | #[expect(clippy::from_over_into)]
    |          ^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: `#[warn(unfulfilled_lint_expectations)]` on by default
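
The warning above is the new attribute working as intended: clippy reports the expectation as unfulfilled because `from_over_into` never actually fires at that site, which marks the old `#[allow]` as stale and safe to delete. A minimal reproduction (hypothetical code, not from this repository):

// Expecting a lint at a site where it cannot trigger yields
// `unfulfilled_lint_expectations` under `cargo clippy`.
#[expect(clippy::from_over_into)] // warns: nothing here implements `Into`
struct Plain;

fn main() {
    let _ = Plain;
}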
columnar/src/columnar/writer/mod.rs (1 addition & 1 deletion)
@@ -392,7 +392,7 @@ impl ColumnarWriter {
 
     // Serialize [Dictionary, Column, dictionary num bytes U32::LE]
     // Column: [Column Index, Column Values, column index num bytes U32::LE]
-    #[allow(clippy::too_many_arguments)]
+    #[expect(clippy::too_many_arguments)]
     fn serialize_bytes_or_str_column(
         cardinality: Cardinality,
         num_docs: RowId,
common/src/lib.rs (2 additions & 2 deletions)
@@ -130,7 +130,7 @@ pub fn replace_in_place(needle: u8, replacement: u8, bytes: &mut [u8]) {
 }
 
 #[cfg(test)]
-pub mod test {
+pub(crate) mod test {
 
     use proptest::prelude::*;
 
@@ -144,7 +144,7 @@ pub mod test {
         assert_eq!(u64_to_f64(f64_to_u64(val)), val);
     }
 
-    pub fn fixed_size_test<O: BinarySerializable + FixedSize + Default>() {
+    pub(crate) fn fixed_size_test<O: BinarySerializable + FixedSize + Default>() {
         let mut buffer = Vec::new();
         O::default().serialize(&mut buffer).unwrap();
         assert_eq!(buffer.len(), O::SIZE_IN_BYTES);
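
Several `#[cfg(test)]` modules and test helpers in this commit drop from `pub` to `pub(crate)`. A sketch of the idea, in a hypothetical lib.rs (the rationale is assumed; the commit message only says "clippy"):

// Hypothetical sketch: the helper is only compiled for tests, so `pub`
// grants no real visibility beyond the crate; `pub(crate)` says so.
#[cfg(test)]
pub(crate) mod test_support {
    pub(crate) fn roundtrip(val: u64) -> u64 {
        val
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn helper_is_reachable_within_the_crate() {
        assert_eq!(super::test_support::roundtrip(7), 7);
    }
}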
query-grammar/src/infallible.rs (0 additions & 1 deletion)
@@ -111,7 +111,6 @@ where F: nom::Parser<I, (O, ErrorList), Infallible> {
         Err(Err::Incomplete(needed)) => Err(Err::Incomplete(needed)),
         // old versions don't understand this is uninhabited and need the empty match to help,
         // newer versions warn because this arm is unreachable (which it is indeed).
-        #[allow(unreachable_patterns)]
         Err(Err::Error(val)) | Err(Err::Failure(val)) => match val {},
     }
 }
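
The deleted attribute guarded a `match` on an uninhabited error type. Per the comment in the source, old compilers needed the empty match while newer ones warn that the arm is unreachable, so the suppression no longer pays its way. A standalone sketch of the pattern, using `std::convert::Infallible` (assuming the repository's own `Infallible` error type behaves the same way):

use std::convert::Infallible;

// An uninhabited type has no values, so a match with zero arms is
// exhaustive; the compiler accepts `match never {}` as proof that the
// `Err` branch can never be reached at runtime.
fn absorb(res: Result<u32, Infallible>) -> u32 {
    match res {
        Ok(v) => v,
        Err(never) => match never {},
    }
}

fn main() {
    assert_eq!(absorb(Ok(7)), 7);
}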
query-grammar/src/query_grammar.rs (1 addition & 1 deletion)
@@ -767,7 +767,7 @@ fn occur_leaf(inp: &str) -> IResult<&str, (Option<Occur>, UserInputAst)> {
     tuple((fallible(occur_symbol), boosted_leaf))(inp)
 }
 
-#[allow(clippy::type_complexity)]
+#[expect(clippy::type_complexity)]
 fn operand_occur_leaf_infallible(
     inp: &str,
 ) -> JResult<&str, (Option<BinaryOperand>, Option<Occur>, Option<UserInputAst>)> {
src/aggregation/agg_result.rs (2 additions & 1 deletion)
@@ -1,4 +1,5 @@
 //! Contains the final aggregation tree.
+//!
 //! This tree can be converted via the `into()` method from `IntermediateAggregationResults`.
 //! This conversion computes the final result. For example: The intermediate result contains
 //! intermediate average results, which is the sum and the number of values. The actual average is
@@ -187,7 +188,7 @@
 }
 
 impl<T> BucketEntries<T> {
-    fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = &T> + 'a> {
+    fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = &'a T> + 'a> {
         match self {
            BucketEntries::Vec(vec) => Box::new(vec.iter()),
            BucketEntries::HashMap(map) => Box::new(map.values()),
src/aggregation/bucket/histogram/date_histogram.rs (1 addition & 1 deletion)
@@ -244,7 +244,7 @@ fn parse_into_milliseconds(input: &str) -> Result<i64, AggregationError> {
 }
 
 #[cfg(test)]
-pub mod tests {
+pub(crate) mod tests {
     use pretty_assertions::assert_eq;
 
     use super::*;
src/aggregation/bucket/range.rs (1 addition & 0 deletions)
@@ -16,6 +16,7 @@ use crate::aggregation::*;
 use crate::TantivyError;
 
 /// Provide user-defined buckets to aggregate on.
+///
 /// Two special buckets will automatically be created to cover the whole range of values.
 /// The provided buckets have to be continuous.
 /// During the aggregation, the values extracted from the fast_field `field` will be checked
src/aggregation/mod.rs (2 additions & 2 deletions)
@@ -180,7 +180,7 @@ pub(crate) fn deserialize_option_f64<'de, D>(deserializer: D) -> Result<Option<f
 where D: Deserializer<'de> {
     struct StringOrFloatVisitor;
 
-    impl<'de> Visitor<'de> for StringOrFloatVisitor {
+    impl Visitor<'_> for StringOrFloatVisitor {
         type Value = Option<f64>;
 
         fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -226,7 +226,7 @@ pub(crate) fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
 where D: Deserializer<'de> {
     struct StringOrFloatVisitor;
 
-    impl<'de> Visitor<'de> for StringOrFloatVisitor {
+    impl Visitor<'_> for StringOrFloatVisitor {
         type Value = f64;
 
         fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
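
This hunk and several later ones (`facet_collector.rs`, `multi_collector.rs`, `docset.rs`, `doc_opstamp_mapping.rs`) rewrite impl headers whose named lifetime is never used in the body; clippy's `needless_lifetimes` covers this shape (an assumption; the commit does not name the lint). A before/after sketch against a hypothetical `Visitor` trait (not serde's):

use std::fmt;

struct StringOrFloatVisitor;

trait Visitor<'de> {
    type Value;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result;
}

// Before: `impl<'de> Visitor<'de> for StringOrFloatVisitor { ... }`
// declared a lifetime parameter that no method body ever names.
// After: the anonymous lifetime `'_` expresses the same impl without it.
impl Visitor<'_> for StringOrFloatVisitor {
    type Value = f64;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a string or a float")
    }
}

fn main() {}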
src/collector/facet_collector.rs (2 additions & 2 deletions)
@@ -13,7 +13,7 @@ struct Hit<'a> {
     facet: &'a Facet,
 }
 
-impl<'a> Eq for Hit<'a> {}
+impl Eq for Hit<'_> {}
 
 impl<'a> PartialEq<Hit<'a>> for Hit<'a> {
     fn eq(&self, other: &Hit<'_>) -> bool {
@@ -27,7 +27,7 @@ impl<'a> PartialOrd<Hit<'a>> for Hit<'a> {
     }
 }
 
-impl<'a> Ord for Hit<'a> {
+impl Ord for Hit<'_> {
     fn cmp(&self, other: &Self) -> Ordering {
         other
             .count
src/collector/filter_collector_wrapper.rs (1 addition & 0 deletions)
@@ -182,6 +182,7 @@ where
 }
 
 /// A variant of the [`FilterCollector`] specialized for bytes fast fields, i.e.
+///
 /// it transparently wraps an inner [`Collector`] but filters documents
 /// based on the result of applying the predicate to the bytes fast field.
 ///
src/collector/mod.rs (1 addition & 1 deletion)
@@ -495,4 +495,4 @@ where
 impl_downcast!(Fruit);
 
 #[cfg(test)]
-pub mod tests;
+pub(crate) mod tests;
src/collector/multi_collector.rs (2 additions & 2 deletions)
@@ -161,7 +161,7 @@ impl<TFruit: Fruit> FruitHandle<TFruit> {
 /// # Ok(())
 /// # }
 /// ```
-#[allow(clippy::type_complexity)]
+#[expect(clippy::type_complexity)]
 #[derive(Default)]
 pub struct MultiCollector<'a> {
     collector_wrappers: Vec<
@@ -190,7 +190,7 @@ impl<'a> MultiCollector<'a> {
     }
 }
 
-impl<'a> Collector for MultiCollector<'a> {
+impl Collector for MultiCollector<'_> {
     type Fruit = MultiFruit;
     type Child = MultiCollectorChild;
 
src/core/json_utils.rs (2 additions & 2 deletions)
@@ -71,7 +71,7 @@ pub fn json_path_sep_to_dot(path: &mut str) {
     }
 }
 
-#[allow(clippy::too_many_arguments)]
+#[expect(clippy::too_many_arguments)]
 fn index_json_object<'a, V: Value<'a>>(
     doc: DocId,
     json_visitor: V::ObjectIter,
@@ -101,7 +101,7 @@ fn index_json_object<'a, V: Value<'a>>(
     }
 }
 
-#[allow(clippy::too_many_arguments)]
+#[expect(clippy::too_many_arguments)]
 pub(crate) fn index_json_value<'a, V: Value<'a>>(
     doc: DocId,
     json_value: V,
src/directory/directory.rs (1 addition & 1 deletion)
@@ -39,7 +39,7 @@ impl RetryPolicy {
 /// The `DirectoryLock` is an object that represents a file lock.
 ///
 /// It is associated with a lock file, that gets deleted on `Drop.`
-#[allow(dead_code)]
+#[expect(dead_code)]
 pub struct DirectoryLock(Box<dyn Send + Sync + 'static>);
 
 struct DirectoryLockGuard {
src/directory/directory_lock.rs (1 addition & 0 deletions)
@@ -48,6 +48,7 @@ pub static INDEX_WRITER_LOCK: Lazy<Lock> = Lazy::new(|| Lock {
 });
 /// The meta lock file is here to protect the segment files being opened by
 /// `IndexReader::reload()` from being garbage collected.
+///
 /// It makes it possible for another process to safely consume
 /// our index in-writing. Ideally, we may have preferred `RWLock` semantics
 /// here, but it is difficult to achieve on Windows.
src/directory/mmap_directory.rs (1 addition & 1 deletion)
@@ -244,7 +244,7 @@ impl MmapDirectory {
             directory_path,
         )));
     }
-    #[allow(clippy::bind_instead_of_map)]
+    #[expect(clippy::bind_instead_of_map)]
     let canonical_path: PathBuf = directory_path.canonicalize().or_else(|io_err| {
         let directory_path = directory_path.to_owned();
 
src/directory/watch_event_router.rs (1 addition & 1 deletion)
@@ -32,7 +32,7 @@ pub struct WatchCallbackList {
 /// file change is detected.
 #[must_use = "This `WatchHandle` controls the lifetime of the watch and should therefore be used."]
 #[derive(Clone)]
-#[allow(dead_code)]
+#[expect(dead_code)]
 pub struct WatchHandle(Arc<WatchCallback>);
 
 impl WatchHandle {
src/docset.rs (1 addition & 1 deletion)
@@ -117,7 +117,7 @@ pub trait DocSet: Send {
     }
 }
 
-impl<'a> DocSet for &'a mut dyn DocSet {
+impl DocSet for &mut dyn DocSet {
     fn advance(&mut self) -> u32 {
         (**self).advance()
     }
src/fieldnorm/reader.rs (1 addition & 1 deletion)
@@ -149,7 +149,7 @@ impl FieldNormReader {
     }
 
     #[cfg(test)]
-    pub fn for_test(field_norms: &[u32]) -> FieldNormReader {
+    pub(crate) fn for_test(field_norms: &[u32]) -> FieldNormReader {
         let field_norms_id = field_norms
             .iter()
             .cloned()
src/functional_test.rs (0 additions & 3 deletions)
@@ -1,12 +1,9 @@
-#![allow(deprecated)] // Remove with index sorting
-
 use std::collections::HashSet;
 
 use rand::{thread_rng, Rng};
 
 use crate::indexer::index_writer::MEMORY_BUDGET_NUM_BYTES_MIN;
 use crate::schema::*;
-#[allow(deprecated)]
 use crate::{doc, schema, Index, IndexWriter, Searcher};
 
 fn check_index_content(searcher: &Searcher, vals: &[u64]) -> crate::Result<()> {
src/index/inverted_index_reader.rs (0 additions & 1 deletion)
@@ -31,7 +31,6 @@ pub struct InvertedIndexReader {
 }
 
 impl InvertedIndexReader {
-    #[allow(clippy::needless_pass_by_value)] // for symmetry
    pub(crate) fn new(
        termdict: TermDictionary,
        postings_file_slice: FileSlice,
src/index/segment_component.rs (1 addition & 0 deletions)
@@ -1,6 +1,7 @@
 use std::slice;
 
 /// Enum describing each component of a tantivy segment.
+///
 /// Each component is stored in its own file,
 /// using the pattern `segment_uuid`.`component_extension`,
 /// except the delete component that takes an `segment_uuid`.`delete_opstamp`.`component_extension`
src/index/segment_reader.rs (1 addition & 1 deletion)
@@ -478,7 +478,7 @@ pub fn merge_field_meta_data(
         .into_iter()
         .kmerge_by(|left, right| left < right)
         // TODO: Remove allocation
-        .group_by(|el| (el.field_name.to_string(), el.typ))
+        .chunk_by(|el| (el.field_name.to_string(), el.typ))
     {
         let mut merged: FieldMetadata = group.next().unwrap();
         for el in group {
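
Three files (`segment_reader.rs`, `log_merge_policy.rs`, `segment_writer.rs`) swap `.group_by(...)` for `.chunk_by(...)`. This matches itertools renaming the adaptor to `chunk_by` and deprecating `group_by` (the itertools 0.13 rename is assumed here; the diff does not pin a version). Behavior is unchanged, as this minimal sketch shows:

use itertools::Itertools;

// `chunk_by` groups *consecutive* elements with equal keys, which is why
// callers sort first, as `segment_writer.rs` does with `sorted_by_key`.
fn main() {
    let docs = [("title", 1u32), ("title", 2), ("body", 3)];
    for (field, group) in &docs.iter().chunk_by(|(field, _)| *field) {
        let vals: Vec<u32> = group.map(|&(_, v)| v).collect();
        println!("{field}: {vals:?}");
    }
}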
src/indexer/delete_queue.rs (0 additions & 1 deletion)
@@ -187,7 +187,6 @@ impl DeleteCursor {
         }
     }
 
-    #[allow(clippy::wrong_self_convention)]
     fn is_behind_opstamp(&mut self, target_opstamp: Opstamp) -> bool {
         self.get()
             .map(|operation| operation.opstamp < target_opstamp)
src/indexer/doc_opstamp_mapping.rs (1 addition & 1 deletion)
@@ -21,7 +21,7 @@ pub enum DocToOpstampMapping<'a> {
     None,
 }
 
-impl<'a> DocToOpstampMapping<'a> {
+impl DocToOpstampMapping<'_> {
     /// Assess whether a document should be considered deleted given that it contains
     /// a deleted term that was deleted at the opstamp: `delete_opstamp`.
     ///
src/indexer/log_merge_policy.rs (1 addition & 1 deletion)
@@ -104,7 +104,7 @@ impl MergePolicy for LogMergePolicy {
 
         let mut current_max_log_size = f64::MAX;
         let mut levels = vec![];
-        for (_, merge_group) in &size_sorted_segments.into_iter().group_by(|segment| {
+        for (_, merge_group) in &size_sorted_segments.into_iter().chunk_by(|segment| {
             let segment_log_size = f64::from(self.clip_min_size(segment.num_docs())).log2();
             if segment_log_size < (current_max_log_size - self.level_log_size) {
                 // update current_max_log_size to create a new group
src/indexer/merge_policy.rs (1 addition & 1 deletion)
@@ -36,7 +36,7 @@ impl MergePolicy for NoMergePolicy {
 }
 
 #[cfg(test)]
-pub mod tests {
+pub(crate) mod tests {
 
     use super::*;
 
src/indexer/segment_writer.rs (1 addition & 1 deletion)
@@ -150,7 +150,7 @@ impl SegmentWriter {
         let vals_grouped_by_field = doc
             .iter_fields_and_values()
             .sorted_by_key(|(field, _)| *field)
-            .group_by(|(field, _)| *field);
+            .chunk_by(|(field, _)| *field);
 
         for (field, field_values) in &vals_grouped_by_field {
             let values = field_values.map(|el| el.1);
src/indexer/stamper.rs (2 additions & 2 deletions)
@@ -101,7 +101,7 @@ mod test {
 
     use super::Stamper;
 
-    #[allow(clippy::redundant_clone)]
+    #[expect(clippy::redundant_clone)]
     #[test]
     fn test_stamper() {
         let stamper = Stamper::new(7u64);
@@ -117,7 +117,7 @@ mod test {
         assert_eq!(stamper.stamp(), 15u64);
     }
 
-    #[allow(clippy::redundant_clone)]
+    #[expect(clippy::redundant_clone)]
     #[test]
     fn test_stamper_revert() {
         let stamper = Stamper::new(7u64);