diff --git a/benches/agg_bench.rs b/benches/agg_bench.rs
index d35124999d..3359c426bb 100644
--- a/benches/agg_bench.rs
+++ b/benches/agg_bench.rs
@@ -349,7 +349,7 @@ fn get_test_index_bench(cardinality: Cardinality) -> tantivy::Result<Index> {
     let lg_norm = rand_distr::LogNormal::new(2.996f64, 0.979f64).unwrap();
     let many_terms_data = (0..150_000)
-        .map(|num| format!("author{}", num))
+        .map(|num| format!("author{num}"))
         .collect::<Vec<String>>();
     {
         let mut rng = StdRng::from_seed([1u8; 32]);
diff --git a/benches/index-bench.rs b/benches/index-bench.rs
index eb48d4487e..3e4accf40a 100644
--- a/benches/index-bench.rs
+++ b/benches/index-bench.rs
@@ -141,12 +141,12 @@ pub fn hdfs_index_benchmark(c: &mut Criterion) {
             let parse_json = false;
             // for parse_json in [false, true] {
             let suffix = if parse_json {
-                format!("{}-with-json-parsing", suffix)
+                format!("{suffix}-with-json-parsing")
             } else {
                 suffix.to_string()
             };
-            let bench_name = format!("{}{}", prefix, suffix);
+            let bench_name = format!("{prefix}{suffix}");
             group.bench_function(bench_name, |b| {
                 benchmark(b, HDFS_LOGS, schema.clone(), commit, parse_json, is_dynamic)
             });
diff --git a/examples/index_from_multiple_threads.rs b/examples/index_from_multiple_threads.rs
index 9b8195ebf7..288d306db2 100644
--- a/examples/index_from_multiple_threads.rs
+++ b/examples/index_from_multiple_threads.rs
@@ -61,7 +61,7 @@ fn main() -> tantivy::Result<()> {
                      debris of the winter’s flooding; and sycamores with mottled, white, recumbent \
                      limbs and branches that arch over the pool"
             ))?;
-            println!("add doc {} from thread 1 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 1 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(20));
         }
         Result::<(), TantivyError>::Ok(())
@@ -82,7 +82,7 @@ fn main() -> tantivy::Result<()> {
                     body => "Some great book description..."
                 ))?
            };
-            println!("add doc {} from thread 2 - opstamp {}", i, opstamp);
+            println!("add doc {i} from thread 2 - opstamp {opstamp}");
             thread::sleep(Duration::from_millis(10));
         }
         Result::<(), TantivyError>::Ok(())
diff --git a/src/aggregation/agg_req_with_accessor.rs b/src/aggregation/agg_req_with_accessor.rs
index 64d9b89434..76b09dea04 100644
--- a/src/aggregation/agg_req_with_accessor.rs
+++ b/src/aggregation/agg_req_with_accessor.rs
@@ -335,8 +335,8 @@ fn get_missing_val(
         }
         _ => {
             return Err(crate::TantivyError::InvalidArgument(format!(
-                "Missing value {:?} for field {} is not supported for column type {:?}",
-                missing, field_name, column_type
+                "Missing value {missing:?} for field {field_name} is not supported for column \
+                 type {column_type:?}"
             )));
         }
     };
@@ -403,7 +403,7 @@ fn get_dynamic_columns(
         .iter()
         .map(|h| h.open())
         .collect::<io::Result<_>>()?;
-    assert!(!ff_fields.is_empty(), "field {} not found", field_name);
+    assert!(!ff_fields.is_empty(), "field {field_name} not found");
     Ok(cols)
 }
diff --git a/src/aggregation/bucket/term_agg.rs b/src/aggregation/bucket/term_agg.rs
index 39dce7ac64..2853a7a2cb 100644
--- a/src/aggregation/bucket/term_agg.rs
+++ b/src/aggregation/bucket/term_agg.rs
@@ -357,8 +357,7 @@ impl SegmentTermCollector {
     ) -> crate::Result<Self> {
         if field_type == ColumnType::Bytes {
             return Err(TantivyError::InvalidArgument(format!(
-                "terms aggregation is not supported for column type {:?}",
-                field_type
+                "terms aggregation is not supported for column type {field_type:?}"
             )));
         }
         let term_buckets = TermBuckets::default();
diff --git a/src/aggregation/metric/top_hits.rs b/src/aggregation/metric/top_hits.rs
index ee316cf6e6..44ddf737ee 100644
--- a/src/aggregation/metric/top_hits.rs
+++ b/src/aggregation/metric/top_hits.rs
@@ -131,8 +131,8 @@ impl<'de> Deserialize<'de> for KeyOrder {
         ))?;
         if key_order.next().is_some() {
             return Err(serde::de::Error::custom(format!(
-                "Expected exactly one key-value pair in sort parameter of top_hits, found {:?}",
-                key_order
+                "Expected exactly one key-value pair in sort parameter of top_hits, found \
+                 {key_order:?}"
             )));
         }
         Ok(Self { field, order })
@@ -144,27 +144,22 @@ fn globbed_string_to_regex(glob: &str) -> Result<Regex, crate::TantivyError> {
     // Replace `*` glob with `.*` regex
     let sanitized = format!("^{}$", regex::escape(glob).replace(r"\*", ".*"));
     Regex::new(&sanitized.replace('*', ".*")).map_err(|e| {
-        crate::TantivyError::SchemaError(format!(
-            "Invalid regex '{}' in docvalue_fields: {}",
-            glob, e
-        ))
+        crate::TantivyError::SchemaError(format!("Invalid regex '{glob}' in docvalue_fields: {e}"))
     })
 }
 
 fn use_doc_value_fields_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported, only `docvalue_fields` is supported in \
-             `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported, only `docvalue_fields` is supported in \
+             `top_hits` aggregation"
         )),
     ))
 }
 
 fn unsupported_err(parameter: &str) -> crate::Result<()> {
     Err(crate::TantivyError::AggregationError(
         AggregationError::InvalidRequest(format!(
-            "The `{}` parameter is not supported in the `top_hits` aggregation",
-            parameter
+            "The `{parameter}` parameter is not supported in the `top_hits` aggregation"
         )),
     ))
 }
@@ -217,8 +212,7 @@ impl TopHitsAggregation {
                     .collect::<Vec<_>>();
                 assert!(
                     !fields.is_empty(),
-                    "No fields matched the glob '{}' in docvalue_fields",
-                    field
+                    "No fields matched the glob '{field}' in docvalue_fields"
                 );
                 Ok(fields)
             })
@@ -254,7 +248,7 @@ impl TopHitsAggregation {
             .map(|field| {
                 let accessors = accessors
                     .get(field)
-                    .unwrap_or_else(|| panic!("field '{}' not found in accessors", field));
+                    .unwrap_or_else(|| panic!("field '{field}' not found in accessors"));
 
                 let values: Vec<OwnedValue> = accessors
                     .iter()
diff --git a/src/aggregation/mod.rs b/src/aggregation/mod.rs
index cb45885ac6..d83059a20c 100644
--- a/src/aggregation/mod.rs
+++ b/src/aggregation/mod.rs
@@ -158,15 +158,14 @@ use serde::de::{self, Visitor};
 use serde::{Deserialize, Deserializer, Serialize};
 
 fn parse_str_into_f64<E: de::Error>(value: &str) -> Result<f64, E> {
-    let parsed = value.parse::<f64>().map_err(|_err| {
-        de::Error::custom(format!("Failed to parse f64 from string: {:?}", value))
-    })?;
+    let parsed = value
+        .parse::<f64>()
+        .map_err(|_err| de::Error::custom(format!("Failed to parse f64 from string: {value:?}")))?;
     // Check if the parsed value is NaN or infinity
     if parsed.is_nan() || parsed.is_infinite() {
         Err(de::Error::custom(format!(
-            "Value is not a valid f64 (NaN or Infinity): {:?}",
-            value
+            "Value is not a valid f64 (NaN or Infinity): {value:?}"
         )))
     } else {
         Ok(parsed)
diff --git a/src/collector/facet_collector.rs b/src/collector/facet_collector.rs
index 16759f3b26..c603c12703 100644
--- a/src/collector/facet_collector.rs
+++ b/src/collector/facet_collector.rs
@@ -598,7 +598,7 @@ mod tests {
                 let mid = n % 4;
                 n /= 4;
                 let leaf = n % 5;
-                Facet::from(&format!("/top{}/mid{}/leaf{}", top, mid, leaf))
+                Facet::from(&format!("/top{top}/mid{mid}/leaf{leaf}"))
             })
             .collect();
         for i in 0..num_facets * 10 {
@@ -737,7 +737,7 @@ mod tests {
         vec![("a", 10), ("b", 100), ("c", 7), ("d", 12), ("e", 21)]
             .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })
@@ -785,7 +785,7 @@ mod tests {
         let docs: Vec<TantivyDocument> = vec![("b", 2), ("a", 2), ("c", 4)]
            .into_iter()
             .flat_map(|(c, count)| {
-                let facet = Facet::from(&format!("/facet/{}", c));
+                let facet = Facet::from(&format!("/facet/{c}"));
                 let doc = doc!(facet_field => facet);
                 iter::repeat(doc).take(count)
             })
diff --git a/src/core/json_utils.rs b/src/core/json_utils.rs
index 65ea075d1b..ee75bb354a 100644
--- a/src/core/json_utils.rs
+++ b/src/core/json_utils.rs
@@ -338,14 +338,14 @@ mod tests {
         let mut term = Term::from_field_json_path(field, "attributes.color", false);
         term.append_type_and_str("red");
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.color, type=Str, \"red\")"
         );
 
         let mut term = Term::from_field_json_path(field, "attributes.dimensions.width", false);
         term.append_type_and_fast_value(400i64);
         assert_eq!(
-            format!("{:?}", term),
+            format!("{term:?}"),
             "Term(field=1, type=Json, path=attributes.dimensions.width, type=I64, 400)"
         );
     }
diff --git a/src/directory/mmap_directory.rs b/src/directory/mmap_directory.rs
index f953f46896..80f1d6a2b5 100644
--- a/src/directory/mmap_directory.rs
+++ b/src/directory/mmap_directory.rs
@@ -566,7 +566,7 @@ mod tests {
         let mmap_directory = MmapDirectory::create_from_tempdir().unwrap();
         let num_paths = 10;
         let paths: Vec<PathBuf> = (0..num_paths)
-            .map(|i| PathBuf::from(&*format!("file_{}", i)))
+            .map(|i| PathBuf::from(&*format!("file_{i}")))
             .collect();
         {
             for path in &paths {
diff --git a/src/index/index.rs b/src/index/index.rs
index 89b5933fb7..aab055409e 100644
--- a/src/index/index.rs
+++ b/src/index/index.rs
@@ -252,9 +252,8 @@ impl IndexBuilder {
             let field_type = entry.field_type().value_type();
             if !supported_field_types.contains(&field_type) {
                 return Err(TantivyError::InvalidArgument(format!(
-                    "Unsupported field type in sort_by_field: {:?}. Supported field types: \
-                     {:?} ",
-                    field_type, supported_field_types,
+                    "Unsupported field type in sort_by_field: {field_type:?}. Supported field \
+                     types: {supported_field_types:?} ",
                 )));
             }
         }
diff --git a/src/index/segment_reader.rs b/src/index/segment_reader.rs
index c86ee5906a..15ce229e9f 100644
--- a/src/index/segment_reader.rs
+++ b/src/index/segment_reader.rs
@@ -318,14 +318,14 @@ impl SegmentReader {
             if create_canonical {
                 // Without expand dots enabled dots need to be escaped.
                 let escaped_json_path = json_path.replace('.', "\\.");
-                let full_path = format!("{}.{}", field_name, escaped_json_path);
+                let full_path = format!("{field_name}.{escaped_json_path}");
                 let full_path_unescaped = format!("{}.{}", field_name, &json_path);
                 map_to_canonical.insert(full_path_unescaped, full_path.to_string());
                 full_path
             } else {
                 // With expand dots enabled, we can use '.' instead of '\u{1}'.
                 json_path_sep_to_dot(&mut json_path);
-                format!("{}.{}", field_name, json_path)
+                format!("{field_name}.{json_path}")
             }
         };
         indexed_fields.extend(
diff --git a/src/indexer/mod.rs b/src/indexer/mod.rs
index a14f129711..9a062a7400 100644
--- a/src/indexer/mod.rs
+++ b/src/indexer/mod.rs
@@ -216,7 +216,7 @@ mod tests_mmap {
         let test_query = |query_str: &str| {
             let query = parse_query.parse_query(query_str).unwrap();
             let num_docs = searcher.search(&query, &Count).unwrap();
-            assert_eq!(num_docs, 1, "{}", query_str);
+            assert_eq!(num_docs, 1, "{query_str}");
         };
         test_query(format!("json.{field_name_out}:test1").as_str());
         test_query(format!("json.a{field_name_out}:test2").as_str());
@@ -590,10 +590,10 @@ mod tests_mmap {
         let query_parser = QueryParser::for_index(&index, vec![]);
         // Test if field name can be queried
         for (indexed_field, val) in fields_and_vals.iter() {
-            let query_str = &format!("{}:{}", indexed_field, val);
+            let query_str = &format!("{indexed_field}:{val}");
             let query = query_parser.parse_query(query_str).unwrap();
             let count_docs = searcher.search(&*query, &TopDocs::with_limit(2)).unwrap();
-            assert!(!count_docs.is_empty(), "{}:{}", indexed_field, val);
+            assert!(!count_docs.is_empty(), "{indexed_field}:{val}");
         }
         // Test if field name can be used for aggregation
         for (field_name, val) in fields_and_vals.iter() {
diff --git a/src/query/fuzzy_query.rs b/src/query/fuzzy_query.rs
index a2e3f2a6ba..143eed1c77 100644
--- a/src/query/fuzzy_query.rs
+++ b/src/query/fuzzy_query.rs
@@ -138,8 +138,7 @@ impl FuzzyTermQuery {
             if json_path_type != Type::Str {
                 return Err(InvalidArgument(format!(
                     "The fuzzy term query requires a string path type for a json term. Found \
-                     {:?}",
-                    json_path_type
+                     {json_path_type:?}"
                 )));
             }
         }
diff --git a/src/query/regex_query.rs b/src/query/regex_query.rs
index 815832d31e..cc5701744a 100644
--- a/src/query/regex_query.rs
+++ b/src/query/regex_query.rs
@@ -185,7 +185,7 @@ mod test {
             Err(crate::TantivyError::InvalidArgument(msg)) => {
                 assert!(msg.contains("error: unclosed group"))
             }
-            res => panic!("unexpected result: {:?}", res),
+            res => panic!("unexpected result: {res:?}"),
         }
     }
 }
diff --git a/src/schema/document/default_document.rs b/src/schema/document/default_document.rs
index 6af220db80..ee87785fb3 100644
--- a/src/schema/document/default_document.rs
+++ b/src/schema/document/default_document.rs
@@ -557,7 +557,7 @@ impl BinarySerializable for ValueType {
         } else {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
-                format!("Invalid value type id: {}", num),
+                format!("Invalid value type id: {num}"),
             ));
         };
         Ok(type_id)
diff --git a/src/schema/document/se.rs b/src/schema/document/se.rs
index 98e62da3b0..edc8399b6e 100644
--- a/src/schema/document/se.rs
+++ b/src/schema/document/se.rs
@@ -58,9 +58,8 @@ where W: Write
             return Err(io::Error::new(
                 io::ErrorKind::Other,
                 format!(
-                    "Unexpected number of entries written to serializer, expected {} entries, got \
-                     {} entries",
-                    num_field_values, actual_length,
+                    "Unexpected number of entries written to serializer, expected \
+                     {num_field_values} entries, got {actual_length} entries",
                 ),
             ));
         }
diff --git a/src/termdict/fst_termdict/termdict.rs b/src/termdict/fst_termdict/termdict.rs
index 8e5db25364..539ddf803c 100644
--- a/src/termdict/fst_termdict/termdict.rs
+++ b/src/termdict/fst_termdict/termdict.rs
@@ -93,7 +93,7 @@ fn open_fst_index(fst_file: FileSlice) -> io::Result<tantivy_fst::Map<OwnedBytes>> {
     let fst = Fst::new(bytes).map_err(|err| {
         io::Error::new(
             io::ErrorKind::InvalidData,
-            format!("Fst data is corrupted: {:?}", err),
+            format!("Fst data is corrupted: {err:?}"),
         )
     })?;
diff --git a/src/termdict/tests.rs b/src/termdict/tests.rs
--- a/src/termdict/tests.rs
+++ b/src/termdict/tests.rs
@@ ... @@
 #[test]
 fn test_term_dictionary_stream() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
@@ -156,7 +156,7 @@ fn test_stream_high_range_prefix_suffix() -> crate::Result<()> {
 #[test]
 fn test_stream_range() -> crate::Result<()> {
     let ids: Vec<_> = (0u32..10_000u32)
-        .map(|i| (format!("doc{:0>6}", i), i))
+        .map(|i| (format!("doc{i:0>6}"), i))
         .collect();
     let buffer: Vec<u8> = {
         let mut term_dictionary_builder = TermDictionaryBuilder::create(vec![]).unwrap();
diff --git a/src/tokenizer/facet_tokenizer.rs b/src/tokenizer/facet_tokenizer.rs
index 568d60ae31..db0df294df 100644
--- a/src/tokenizer/facet_tokenizer.rs
+++ b/src/tokenizer/facet_tokenizer.rs
@@ -96,7 +96,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap();
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
             };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str())
@@ -116,7 +116,7 @@ mod tests {
         {
             let mut add_token = |token: &Token| {
                 let facet = Facet::from_encoded(token.text.as_bytes().to_owned()).unwrap(); // ok test
-                tokens.push(format!("{}", facet));
+                tokens.push(format!("{facet}"));
            };
             FacetTokenizer::default()
                 .token_stream(facet.encoded_str()) // ok test
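
Reviewer note: every hunk in this patch applies the same mechanical rewrite, replacing positional format!/println!/panic!/assert! arguments with inlined format args (identifiers captured directly inside the braces, stable since Rust 1.58; clippy's `uninlined_format_args` lint suggests exactly this). Below is a minimal, self-contained sketch of the pattern; the identifiers are illustrative only and do not come from the patch:

    fn main() {
        let field_name = "attributes.color";
        let count = 2;

        // Before: positional `{}` placeholders matched to arguments by order.
        let old = format!("field {} matched {} docs", field_name, count);
        // After: the identifier is captured inside the braces; output is identical.
        let new = format!("field {field_name} matched {count} docs");
        assert_eq!(old, new);

        // Format specs still work with captured identifiers: `{i:0>6}`
        // zero-pads to width 6, exactly like `format!("{:0>6}", i)`.
        let i = 42;
        assert_eq!(format!("doc{i:0>6}"), "doc000042");

        // Only bare identifiers can be captured. Expressions such as `&json_path`
        // must remain positional arguments, which is why lines like
        // `format!("{}.{}", field_name, &json_path)` are left unchanged above.
        let json_path = String::from("color");
        let _ = format!("{field_name}.{}", &json_path);
    }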