Mirror of https://github.com/quickwit-oss/tantivy.git (synced 2025-12-23 02:29:57 +00:00)
clippy (#2741)
Co-authored-by: Pascal Seitz <pascal.seitz@datadoghq.com>
@@ -258,7 +258,7 @@ mod test {
             bitpacker.write(val, num_bits, &mut data).unwrap();
         }
         bitpacker.close(&mut data).unwrap();
-        assert_eq!(data.len(), ((num_bits as usize) * len + 7) / 8);
+        assert_eq!(data.len(), ((num_bits as usize) * len).div_ceil(8));
         let bitunpacker = BitUnpacker::new(num_bits);
         (bitunpacker, vals, data)
     }
@@ -304,7 +304,7 @@ mod test {
             bitpacker.write(val, num_bits, &mut buffer).unwrap();
         }
         bitpacker.flush(&mut buffer).unwrap();
-        assert_eq!(buffer.len(), (vals.len() * num_bits as usize + 7) / 8);
+        assert_eq!(buffer.len(), (vals.len() * num_bits as usize).div_ceil(8));
         let bitunpacker = BitUnpacker::new(num_bits);
         let max_val = if num_bits == 64 {
             u64::MAX
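Note on the two hunks above (a standalone sketch, not code from the diff): the manual round-up `(bits + 7) / 8` and `div_ceil(8)` compute the same byte count; the rewrite is what clippy suggests here, most likely via its `manual_div_ceil` lint.

// Both forms compute the number of whole bytes needed for `num_bits` bits;
// `div_ceil` has been stable on the integer types since Rust 1.73.
fn bytes_needed_manual(num_bits: usize) -> usize {
    (num_bits + 7) / 8
}

fn bytes_needed_div_ceil(num_bits: usize) -> usize {
    num_bits.div_ceil(8)
}

fn main() {
    for num_bits in 0..=1024 {
        assert_eq!(bytes_needed_manual(num_bits), bytes_needed_div_ceil(num_bits));
    }
}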
@@ -29,6 +29,7 @@ impl BinarySerializable for VIntU128 {
     }
 
     fn deserialize<R: Read>(reader: &mut R) -> io::Result<Self> {
+        #[allow(clippy::unbuffered_bytes)]
         let mut bytes = reader.bytes();
         let mut result = 0u128;
         let mut shift = 0u64;
@@ -196,6 +197,7 @@ impl BinarySerializable for VInt {
     }
 
     fn deserialize<R: Read>(reader: &mut R) -> io::Result<Self> {
+        #[allow(clippy::unbuffered_bytes)]
         let mut bytes = reader.bytes();
         let mut result = 0u64;
         let mut shift = 0u64;
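Note on the two `#[allow(clippy::unbuffered_bytes)]` hunks above: that lint fires when `Read::bytes()` is iterated on a reader that is not buffered, because each iteration may hit the underlying reader for a single byte. The sketch below is standalone (the `read_all_bytes` helper is hypothetical, not tantivy code) and shows the usual `BufReader` alternative when the allow is not wanted.

use std::io::{BufReader, Read};

fn read_all_bytes<R: Read>(reader: R) -> std::io::Result<Vec<u8>> {
    // Wrapping in BufReader keeps byte-at-a-time iteration off the raw reader,
    // which is the usual way to avoid clippy::unbuffered_bytes.
    let buffered = BufReader::new(reader);
    buffered.bytes().collect()
}

fn main() -> std::io::Result<()> {
    let data: &[u8] = &[0x80, 0x01, 0x7f];
    assert_eq!(read_all_bytes(data)?, vec![0x80, 0x01, 0x7f]);
    Ok(())
}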
@@ -608,7 +608,7 @@ mod test {
             term_dictionary_size: Some(ByteCount::from(100u64)),
             postings_size: Some(ByteCount::from(1_000u64)),
             positions_size: Some(ByteCount::from(2_000u64)),
-            fast_size: Some(ByteCount::from(1_000u64).into()),
+            fast_size: Some(ByteCount::from(1_000u64)),
         };
         let field_metadata2 = FieldMetadata {
             field_name: "a".to_string(),
@@ -617,7 +617,7 @@ mod test {
             term_dictionary_size: Some(ByteCount::from(80u64)),
             postings_size: Some(ByteCount::from(1_500u64)),
             positions_size: Some(ByteCount::from(2_500u64)),
-            fast_size: Some(ByteCount::from(3_000u64).into()),
+            fast_size: Some(ByteCount::from(3_000u64)),
         };
         let expected = FieldMetadata {
             field_name: "a".to_string(),
@@ -626,7 +626,7 @@ mod test {
             term_dictionary_size: Some(ByteCount::from(180u64)),
             postings_size: Some(ByteCount::from(2_500u64)),
             positions_size: Some(ByteCount::from(4_500u64)),
-            fast_size: Some(ByteCount::from(4_000u64).into()),
+            fast_size: Some(ByteCount::from(4_000u64)),
         };
         assert_merge(
             &[vec![field_metadata1.clone()], vec![field_metadata2]],
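Note on the three `fast_size` hunks above: calling `.into()` on a value that already has the target type is a no-op conversion, which clippy reports as `useless_conversion`. A standalone sketch of the pattern with an illustrative stand-in `ByteCount`, not tantivy's type:

#[derive(Debug, PartialEq)]
struct ByteCount(u64);

impl From<u64> for ByteCount {
    fn from(val: u64) -> Self {
        ByteCount(val)
    }
}

fn main() {
    // Before: `.into()` converts a ByteCount into a ByteCount (useless_conversion).
    let redundant: Option<ByteCount> = Some(ByteCount::from(1_000u64).into());
    // After: same value without the no-op conversion.
    let direct: Option<ByteCount> = Some(ByteCount::from(1_000u64));
    assert_eq!(redundant, direct);
}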
@@ -42,7 +42,6 @@ mod test {
 
     use super::Stamper;
 
-    #[expect(clippy::redundant_clone)]
     #[test]
     fn test_stamper() {
         let stamper = Stamper::new(7u64);
@@ -58,7 +57,6 @@ mod test {
         assert_eq!(stamper.stamp(), 15u64);
     }
 
-    #[expect(clippy::redundant_clone)]
     #[test]
     fn test_stamper_revert() {
         let stamper = Stamper::new(7u64);
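Note on the two removed `#[expect(clippy::redundant_clone)]` attributes above: presumably the lint no longer fires in these tests, and a stale `#[expect]`, unlike `#[allow]`, produces an `unfulfilled_lint_expectations` warning. A standalone sketch of that behaviour, demonstrated with a plain rustc lint rather than `clippy::redundant_clone`:

// `#[expect]` differs from `#[allow]`: if the expected lint does not fire,
// rustc reports `unfulfilled_lint_expectations`, which is the usual reason
// stale expectations like the ones above get removed.
#[expect(unused_variables)]
fn fulfilled_expectation() {
    let unused = 42; // `unused_variables` fires here, so the expectation is met.
}

fn main() {
    fulfilled_expectation();
}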
@@ -696,10 +696,9 @@ mod tests {
         fn read_bytes(&self, range: Range<usize>) -> std::io::Result<OwnedBytes> {
             let allowed_range = self.allowed_range.lock().unwrap();
             if !allowed_range.contains(&range.start) || !allowed_range.contains(&(range.end - 1)) {
-                return Err(std::io::Error::new(
-                    std::io::ErrorKind::Other,
-                    format!("invalid range, allowed {allowed_range:?}, requested {range:?}"),
-                ));
+                return Err(std::io::Error::other(format!(
+                    "invalid range, allowed {allowed_range:?}, requested {range:?}"
+                )));
             }
 
             Ok(self.bytes.slice(range))
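Note on the hunk above: `io::Error::other(...)` (stable since Rust 1.74) is shorthand for `io::Error::new(io::ErrorKind::Other, ...)`; the corresponding clippy lint is likely `io_other_error`. A standalone sketch showing the two forms are equivalent:

use std::io;

fn main() {
    // The long form the hunk removes and the shorthand it adds carry the
    // same kind and message.
    let old_style = io::Error::new(io::ErrorKind::Other, "invalid range");
    let new_style = io::Error::other("invalid range");
    assert_eq!(old_style.kind(), new_style.kind());
    assert_eq!(old_style.to_string(), new_style.to_string());
}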