diff --git a/src/query/query_parser/query_grammar.rs b/src/query/query_parser/query_grammar.rs
index 4f794a831..004bd2922 100644
--- a/src/query/query_parser/query_grammar.rs
+++ b/src/query/query_parser/query_grammar.rs
@@ -102,34 +102,30 @@ parser! {
             range_term_val()).
             map(|(comparison_sign, bound): (&str, String)|
                 match comparison_sign {
-                    ">=" => return (UserInputBound::Inclusive(bound), UserInputBound::Unbounded),
-                    "<=" => return (UserInputBound::Unbounded, UserInputBound::Inclusive(bound)),
-                    "<" => return (UserInputBound::Unbounded, UserInputBound::Exclusive(bound)),
-                    ">" => return (UserInputBound::Exclusive(bound), UserInputBound::Unbounded),
+                    ">=" => (UserInputBound::Inclusive(bound), UserInputBound::Unbounded),
+                    "<=" => (UserInputBound::Unbounded, UserInputBound::Inclusive(bound)),
+                    "<" => (UserInputBound::Unbounded, UserInputBound::Exclusive(bound)),
+                    ">" => (UserInputBound::Exclusive(bound), UserInputBound::Unbounded),
                     // default case
-                    _ => return (UserInputBound::Unbounded, UserInputBound::Unbounded)
+                    _ => (UserInputBound::Unbounded, UserInputBound::Unbounded)
                 });
         let lower_bound = (one_of("{[".chars()), range_term_val())
             .map(|(boundary_char, lower_bound): (char, String)|
                 if lower_bound == "*" {
                     UserInputBound::Unbounded
-                } else {
-                    if boundary_char == '{' {
+                } else if boundary_char == '{' {
                     UserInputBound::Exclusive(lower_bound)
-                    } else {
-                        UserInputBound::Inclusive(lower_bound)
-                    }
+                } else {
+                    UserInputBound::Inclusive(lower_bound)
                 });
         let upper_bound = (range_term_val(), one_of("}]".chars()))
             .map(|(higher_bound, boundary_char): (String, char)|
                 if higher_bound == "*" {
                     UserInputBound::Unbounded
+                } else if boundary_char == '}' {
+                    UserInputBound::Exclusive(higher_bound)
                 } else {
-                    if boundary_char == '}' {
-                        UserInputBound::Exclusive(higher_bound)
-                    } else {
-                        UserInputBound::Inclusive(higher_bound)
-                    }
+                    UserInputBound::Inclusive(higher_bound)
                 });
         // return only lower and upper
         let lower_to_upper = (lower_bound.
diff --git a/src/query/query_parser/query_parser.rs b/src/query/query_parser/query_parser.rs
index 0e697f02c..9356b8a7f 100644
--- a/src/query/query_parser/query_parser.rs
+++ b/src/query/query_parser/query_parser.rs
@@ -629,7 +629,7 @@ mod test {
     pub fn test_parse_query_untokenized() {
         test_parse_query_to_logical_ast_helper(
             "nottokenized:\"wordone wordtwo\"",
-            "Term([0, 0, 0, 7, 119, 111, 114, 100, 111, 110, \
+            "Term(field=7,bytes=[119, 111, 114, 100, 111, 110, \
             101, 32, 119, 111, 114, 100, 116, 119, 111])",
             false,
         );
@@ -673,7 +673,7 @@ mod test {
             .is_ok());
         test_parse_query_to_logical_ast_helper(
             "unsigned:2324",
-            "Term([0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 9, 20])",
+            "Term(field=3,bytes=[0, 0, 0, 0, 0, 0, 9, 20])",
             false,
         );
 
@@ -694,19 +694,19 @@ mod test {
     pub fn test_parse_query_to_ast_single_term() {
         test_parse_query_to_logical_ast_helper(
             "title:toto",
-            "Term([0, 0, 0, 0, 116, 111, 116, 111])",
+            "Term(field=0,bytes=[116, 111, 116, 111])",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "+title:toto",
-            "Term([0, 0, 0, 0, 116, 111, 116, 111])",
+            "Term(field=0,bytes=[116, 111, 116, 111])",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "+title:toto -titi",
-            "(+Term([0, 0, 0, 0, 116, 111, 116, 111]) \
-            -(Term([0, 0, 0, 0, 116, 105, 116, 105]) \
-            Term([0, 0, 0, 1, 116, 105, 116, 105])))",
+            "(+Term(field=0,bytes=[116, 111, 116, 111]) \
+            -(Term(field=0,bytes=[116, 105, 116, 105]) \
+            Term(field=1,bytes=[116, 105, 116, 105])))",
             false,
         );
         assert_eq!(
@@ -721,14 +721,13 @@ mod test {
     pub fn test_parse_query_to_ast_two_terms() {
         test_parse_query_to_logical_ast_helper(
             "title:a b",
-            "(Term([0, 0, 0, 0, 97]) (Term([0, 0, 0, 0, 98]) \
-            Term([0, 0, 0, 1, 98])))",
+            "(Term(field=0,bytes=[97]) (Term(field=0,bytes=[98]) Term(field=1,bytes=[98])))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "title:\"a b\"",
-            "\"[(0, Term([0, 0, 0, 0, 97])), \
-            (1, Term([0, 0, 0, 0, 98]))]\"",
+            "\"[(0, Term(field=0,bytes=[97])), \
+            (1, Term(field=0,bytes=[98]))]\"",
             false,
         );
     }
@@ -737,45 +736,43 @@ mod test {
     pub fn test_parse_query_to_ast_ranges() {
         test_parse_query_to_logical_ast_helper(
             "title:[a TO b]",
-            "(Included(Term([0, 0, 0, 0, 97])) TO \
-            Included(Term([0, 0, 0, 0, 98])))",
+            "(Included(Term(field=0,bytes=[97])) TO Included(Term(field=0,bytes=[98])))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "[a TO b]",
-            "((Included(Term([0, 0, 0, 0, 97])) TO \
-            Included(Term([0, 0, 0, 0, 98]))) \
-            (Included(Term([0, 0, 0, 1, 97])) TO \
-            Included(Term([0, 0, 0, 1, 98]))))",
+            "((Included(Term(field=0,bytes=[97])) TO \
+            Included(Term(field=0,bytes=[98]))) \
+            (Included(Term(field=1,bytes=[97])) TO \
+            Included(Term(field=1,bytes=[98]))))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "title:{titi TO toto}",
-            "(Excluded(Term([0, 0, 0, 0, 116, 105, 116, 105])) TO \
-            Excluded(Term([0, 0, 0, 0, 116, 111, 116, 111])))",
+            "(Excluded(Term(field=0,bytes=[116, 105, 116, 105])) TO \
+            Excluded(Term(field=0,bytes=[116, 111, 116, 111])))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "title:{* TO toto}",
-            "(Unbounded TO \
-            Excluded(Term([0, 0, 0, 0, 116, 111, 116, 111])))",
+            "(Unbounded TO Excluded(Term(field=0,bytes=[116, 111, 116, 111])))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "title:{titi TO *}",
-            "(Excluded(Term([0, 0, 0, 0, 116, 105, 116, 105])) TO Unbounded)",
+            "(Excluded(Term(field=0,bytes=[116, 105, 116, 105])) TO Unbounded)",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "signed:{-5 TO 3}",
-            "(Excluded(Term([0, 0, 0, 2, 127, 255, 255, 255, 255, 255, 255, 251])) TO \
-            Excluded(Term([0, 0, 0, 2, 128, 0, 0, 0, 0, 0, 0, 3])))",
+            "(Excluded(Term(field=2,bytes=[127, 255, 255, 255, 255, 255, 255, 251])) TO \
+            Excluded(Term(field=2,bytes=[128, 0, 0, 0, 0, 0, 0, 3])))",
             false,
         );
         test_parse_query_to_logical_ast_helper(
             "float:{-1.5 TO 1.5}",
-            "(Excluded(Term([0, 0, 0, 10, 64, 7, 255, 255, 255, 255, 255, 255])) TO \
-            Excluded(Term([0, 0, 0, 10, 191, 248, 0, 0, 0, 0, 0, 0])))",
+            "(Excluded(Term(field=10,bytes=[64, 7, 255, 255, 255, 255, 255, 255])) TO \
+            Excluded(Term(field=10,bytes=[191, 248, 0, 0, 0, 0, 0, 0])))",
             false,
         );
 
@@ -880,19 +877,19 @@ mod test {
     pub fn test_parse_query_to_ast_conjunction() {
         test_parse_query_to_logical_ast_helper(
             "title:toto",
-            "Term([0, 0, 0, 0, 116, 111, 116, 111])",
+            "Term(field=0,bytes=[116, 111, 116, 111])",
             true,
         );
         test_parse_query_to_logical_ast_helper(
             "+title:toto",
-            "Term([0, 0, 0, 0, 116, 111, 116, 111])",
+            "Term(field=0,bytes=[116, 111, 116, 111])",
             true,
         );
         test_parse_query_to_logical_ast_helper(
             "+title:toto -titi",
-            "(+Term([0, 0, 0, 0, 116, 111, 116, 111]) \
-            -(Term([0, 0, 0, 0, 116, 105, 116, 105]) \
-            Term([0, 0, 0, 1, 116, 105, 116, 105])))",
+            "(+Term(field=0,bytes=[116, 111, 116, 111]) \
+            -(Term(field=0,bytes=[116, 105, 116, 105]) \
+            Term(field=1,bytes=[116, 105, 116, 105])))",
             true,
         );
         assert_eq!(
@@ -903,15 +900,15 @@ mod test {
         );
         test_parse_query_to_logical_ast_helper(
             "title:a b",
-            "(+Term([0, 0, 0, 0, 97]) \
-            +(Term([0, 0, 0, 0, 98]) \
-            Term([0, 0, 0, 1, 98])))",
+            "(+Term(field=0,bytes=[97]) \
+            +(Term(field=0,bytes=[98]) \
+            Term(field=1,bytes=[98])))",
            true,
        );
        test_parse_query_to_logical_ast_helper(
            "title:\"a b\"",
-            "\"[(0, Term([0, 0, 0, 0, 97])), \
-            (1, Term([0, 0, 0, 0, 98]))]\"",
+            "\"[(0, Term(field=0,bytes=[97])), \
+            (1, Term(field=0,bytes=[98]))]\"",
            true,
        );
    }
@@ -920,10 +917,8 @@ mod test {
     pub fn test_query_parser_hyphen() {
         test_parse_query_to_logical_ast_helper(
             "title:www-form-encoded",
-            "\"[(0, Term([0, 0, 0, 0, 119, 119, 119])), \
-            (1, Term([0, 0, 0, 0, 102, 111, 114, 109])), \
-            (2, Term([0, 0, 0, 0, 101, 110, 99, 111, 100, 101, 100]))]\"",
-            false,
+            "\"[(0, Term(field=0,bytes=[119, 119, 119])), (1, Term(field=0,bytes=[102, 111, 114, 109])), (2, Term(field=0,bytes=[101, 110, 99, 111, 100, 101, 100]))]\"",
+            false
         );
     }
 }
diff --git a/src/query/term_query/mod.rs b/src/query/term_query/mod.rs
index 3eeb7feb4..c22c9141e 100644
--- a/src/query/term_query/mod.rs
+++ b/src/query/term_query/mod.rs
@@ -12,7 +12,7 @@ mod tests {
     use crate::collector::TopDocs;
     use crate::docset::DocSet;
     use crate::query::{Query, QueryParser, Scorer, TermQuery};
-    use crate::schema::{IndexRecordOption, Schema, STRING, TEXT};
+    use crate::schema::{Field, IndexRecordOption, Schema, STRING, TEXT};
     use crate::tests::assert_nearly_equals;
     use crate::Index;
     use crate::Term;
@@ -114,4 +114,16 @@ mod tests {
         let reader = index.reader().unwrap();
         assert_eq!(term_query.count(&*reader.searcher()).unwrap(), 1);
     }
+
+    #[test]
+    fn test_term_query_debug() {
+        let term_query = TermQuery::new(
+            Term::from_field_text(Field(1), "hello"),
+            IndexRecordOption::WithFreqs,
+        );
+        assert_eq!(
+            format!("{:?}", term_query),
+            "TermQuery(Term(field=1,bytes=[104, 101, 108, 108, 111]))"
+        );
+    }
 }
diff --git a/src/query/term_query/term_query.rs b/src/query/term_query/term_query.rs
index 424532cd3..486ab5994 100644
--- a/src/query/term_query/term_query.rs
+++ b/src/query/term_query/term_query.rs
@@ -7,6 +7,7 @@ use crate::Result;
 use crate::Searcher;
 use crate::Term;
 use std::collections::BTreeSet;
+use std::fmt;
 
 /// A Term query matches all of the documents
 /// containing a specific term.
@@ -61,12 +62,18 @@ use std::collections::BTreeSet;
 /// Ok(())
 /// }
 /// ```
-#[derive(Clone, Debug)]
+#[derive(Clone)]
 pub struct TermQuery {
     term: Term,
     index_record_option: IndexRecordOption,
 }
 
+impl fmt::Debug for TermQuery {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "TermQuery({:?})", self.term)
+    }
+}
+
 impl TermQuery {
     /// Creates a new term query.
     pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
diff --git a/src/schema/schema.rs b/src/schema/schema.rs
index 5b657af5c..b244585e3 100644
--- a/src/schema/schema.rs
+++ b/src/schema/schema.rs
@@ -247,7 +247,7 @@ impl Schema {
     }
 
     /// Create a named document off the doc.
-    pub fn from_named_doc(
+    pub fn convert_named_doc(
         &self,
         named_doc: NamedFieldDocument,
     ) -> Result<Document, DocParsingError> {
@@ -534,7 +534,7 @@ mod tests {
             vec![Value::from(14u64), Value::from(-1i64)],
         );
         let doc = schema
-            .from_named_doc(NamedFieldDocument(named_doc_map))
+            .convert_named_doc(NamedFieldDocument(named_doc_map))
             .unwrap();
         assert_eq!(
             doc.get_all(title),
@@ -551,15 +551,14 @@ mod tests {
 
     #[test]
     pub fn test_document_from_nameddoc_error() {
-        let mut schema_builder = Schema::builder();
-        let schema = schema_builder.build();
+        let schema = Schema::builder().build();
         let mut named_doc_map = BTreeMap::default();
         named_doc_map.insert(
             "title".to_string(),
             vec![Value::from("title1"), Value::from("title2")],
         );
         let err = schema
-            .from_named_doc(NamedFieldDocument(named_doc_map))
+            .convert_named_doc(NamedFieldDocument(named_doc_map))
             .unwrap_err();
         assert_eq!(
             err,
diff --git a/src/schema/term.rs b/src/schema/term.rs
index 4800d5742..92afed79a 100644
--- a/src/schema/term.rs
+++ b/src/schema/term.rs
@@ -224,7 +224,12 @@ where
 
 impl fmt::Debug for Term {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "Term({:?})", &self.0[..])
+        write!(
+            f,
+            "Term(field={},bytes={:?})",
+            self.field().0,
+            self.value_bytes()
+        )
     }
 }
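
Note (not part of the patch above): a minimal sketch of how the reworked Debug output reads once this change lands. The schema setup and the "title" field below are hypothetical; the output format itself is the one asserted in test_term_query_debug.

    use tantivy::schema::{Schema, TEXT};
    use tantivy::Term;

    fn main() {
        // Hypothetical one-field schema; the first added field gets id 0.
        let mut schema_builder = Schema::builder();
        let title = schema_builder.add_text_field("title", TEXT);
        let _schema = schema_builder.build();

        // With the new Debug impl this prints
        //   Term(field=0,bytes=[104, 101, 108, 108, 111])
        // instead of the old raw buffer dump Term([0, 0, 0, 0, 104, 101, 108, 108, 111]),
        // which prefixed the value bytes with the 4-byte field id.
        let term = Term::from_field_text(title, "hello");
        println!("{:?}", term);
    }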