mirror of https://github.com/quickwit-oss/tantivy.git
synced 2026-01-08 18:12:55 +00:00
issue/50 Removed StandardQuery
@@ -8,7 +8,6 @@ use tantivy::Index;
use tantivy::schema::*;
use tantivy::collector::TopCollector;
use tantivy::query::QueryParser;
use tantivy::query::Query;

fn main() {
    // Let's create a temporary directory for the
@@ -1,5 +1,6 @@
use Result;
use super::Weight;
+use std::any::Any;
use Error;
use schema::Term;
use query::Query;

@@ -107,7 +108,11 @@ impl From<Vec<Term>> for MultiTermQuery {
}

impl Query for MultiTermQuery {

+    fn as_any(&self) -> &Any {
+        self
+    }
+
    fn weight(&self, searcher: &Searcher) -> Result<Box<Weight>> {
        let similitude = self.similitude(searcher);
        Ok(
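The `as_any` method added to `MultiTermQuery` here is the standard `std::any::Any` downcasting hook: each concrete query returns `self` as `&Any`, so code holding only a boxed trait object can recover the concrete type with `downcast_ref`. A minimal, self-contained sketch of that pattern, written in the pre-`dyn` style of the surrounding code; the `Shape`/`Circle` names are illustrative and not part of tantivy:

use std::any::Any;
use std::f64::consts::PI;
use std::fmt;

// Toy trait mirroring the shape of `Query`: debuggable, object-safe,
// and able to hand back its concrete type as `&Any`.
trait Shape: fmt::Debug {
    fn as_any(&self) -> &Any;
    fn area(&self) -> f64;
}

#[derive(Debug, PartialEq)]
struct Circle {
    radius: f64,
}

impl Shape for Circle {
    // Returning `self` re-exposes the concrete type behind the trait object.
    fn as_any(&self) -> &Any {
        self
    }

    fn area(&self) -> f64 {
        PI * self.radius * self.radius
    }
}

fn main() {
    let boxed: Box<Shape> = Box::new(Circle { radius: 1.0 });
    // A caller holding only a `Box<Shape>` can still reach the concrete type.
    let circle = boxed.as_any().downcast_ref::<Circle>().unwrap();
    assert_eq!(circle, &Circle { radius: 1.0 });
}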
@@ -4,14 +4,17 @@ use core::searcher::Searcher;
use common::TimerTree;
use SegmentLocalId;
use super::Weight;
+use std::fmt;
+use std::any::Any;


/// Queries represent the query of the user, and are in charge
/// of the logic defining the set of documents that should be
/// sent to the collector, as well as the way to score the
/// documents.
-pub trait Query {
+pub trait Query: fmt::Debug {

+    fn as_any(&self) -> &Any;

    fn weight(&self, searcher: &Searcher) -> Result<Box<Weight>>;
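With `Query` now requiring `fmt::Debug`, `as_any`, and `weight`, a new concrete query is mostly boilerplate. A hedged sketch of a hypothetical `WrappedQuery` that forwards scoring to a `MultiTermQuery`, written as if it were another module under `query/` and reusing only import paths that appear elsewhere in this diff; it assumes these two methods are the only required ones, which is all the hunk above shows:

use Result;
use std::any::Any;
use core::searcher::Searcher;
use super::Weight;
use query::{Query, MultiTermQuery};

/// Hypothetical query type, not part of this commit: wraps a
/// `MultiTermQuery` and delegates scoring to it.
#[derive(Debug)] // needed now that `Query: fmt::Debug`
pub struct WrappedQuery {
    inner: MultiTermQuery,
}

impl Query for WrappedQuery {
    // Hand back the concrete type so a caller holding `Box<Query>`
    // can reach `WrappedQuery` again via `downcast_ref`.
    fn as_any(&self) -> &Any {
        self
    }

    // Delegate weight construction to the wrapped query.
    fn weight(&self, searcher: &Searcher) -> Result<Box<Weight>> {
        self.inner.weight(searcher)
    }
}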
@@ -1,9 +1,4 @@
-use Result as tantivy_Result;
use combine::*;
-use collector::Collector;
-use super::Weight;
-use core::searcher::Searcher;
-use common::TimerTree;
use query::{Query, MultiTermQuery};
use schema::{Schema, FieldType, Term, Field};
use analyzer::SimpleTokenizer;

@@ -60,23 +55,6 @@ pub struct QueryParser {
}

-/// The `QueryParser` returns a `StandardQuery`.
-#[derive(Eq, PartialEq, Debug)]
-pub enum StandardQuery {
-    MultiTerm(MultiTermQuery),
-}
-
-impl StandardQuery {
-    /// Number of terms involved in the query.
-    pub fn num_terms(&self,) -> usize {
-        match *self {
-            StandardQuery::MultiTerm(ref q) => {
-                q.num_terms()
-            }
-        }
-    }
-}

impl QueryParser {
    /// Creates a `QueryParser`
@@ -141,7 +119,7 @@ impl QueryParser {
    ///
    /// Implementing a lenient mode for this query parser is tracked
    /// in [Issue 5](https://github.com/fulmicoton/tantivy/issues/5)
-    pub fn parse_query(&self, query: &str) -> Result<StandardQuery, ParsingError> {
+    pub fn parse_query(&self, query: &str) -> Result<Box<Query>, ParsingError> {
        match parser(query_language).parse(query.trim()) {
            Ok(literals) => {
                let mut terms_result: Vec<(Occur, Term)> = Vec::new();

@@ -153,9 +131,7 @@ impl QueryParser {
                        .map(|term| (occur, term) ));
                }
                Ok(
-                    StandardQuery::MultiTerm(
-                        MultiTermQuery::from(terms_result)
-                    )
+                    box MultiTermQuery::from(terms_result)
                )
            }
            Err(_) => {
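For callers, the visible change is the return type: `parse_query` now yields a `Box<Query>` instead of the `StandardQuery` enum, so code that needs the concrete query goes through `as_any` rather than a `match`. A hedged sketch of the new calling pattern, assuming `MultiTermQuery` is re-exported under `tantivy::query` alongside `QueryParser` and `Query`; the helper name `count_terms` is illustrative:

use tantivy::query::{MultiTermQuery, Query, QueryParser};

// Illustrative helper: count the terms behind a parsed query string.
// Before this commit the caller matched on `StandardQuery::MultiTerm`;
// now it downcasts the boxed trait object when it needs the concrete type.
fn count_terms(query_parser: &QueryParser, input: &str) -> usize {
    let query: Box<Query> = query_parser.parse_query(input).unwrap();
    query
        .as_any()
        .downcast_ref::<MultiTermQuery>()
        .expect("the parser currently only produces MultiTermQuery")
        .num_terms()
}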
@@ -166,26 +142,6 @@ impl QueryParser {
}

-impl Query for StandardQuery {
-
-    fn weight(&self, searcher: &Searcher) -> tantivy_Result<Box<Weight>> {
-        match *self {
-            StandardQuery::MultiTerm(ref q) => {
-                q.weight(searcher)
-            }
-        }
-    }
-
-    fn search(&self, searcher: &Searcher, collector: &mut Collector) -> tantivy_Result<TimerTree> {
-        match *self {
-            StandardQuery::MultiTerm(ref q) => {
-                q.search(searcher, collector)
-            }
-        }
-    }
-}

fn compute_terms(field: Field, text: &str) -> Vec<Term> {

@@ -325,6 +281,10 @@ mod test {
        assert!(query_parser.parse("f:@e!e").is_err());
    }

+    // fn extract<T: Query>(query_parser: &QueryParser, q: &str) -> T {
+    //     query_parser.parse_query(q).unwrap().as_any().downcast_ref::<T>().unwrap(),
+    // }
+
    #[test]
    pub fn test_query_parser() {
        let mut schema_builder = SchemaBuilder::default();

@@ -335,9 +295,9 @@ mod test {
        assert!(query_parser.parse_query("a:b").is_err());
        {
            let terms = vec!(Term::from_field_text(title_field, "abctitle"));
-            let query = StandardQuery::MultiTerm(MultiTermQuery::from(terms));
+            let query = MultiTermQuery::from(terms);
            assert_eq!(
-                query_parser.parse_query("title:abctitle").unwrap(),
+                *query_parser.parse_query("title:abctitle").unwrap().as_any().downcast_ref::<MultiTermQuery>().unwrap(),
                query
            );
        }

@@ -346,21 +306,21 @@ mod test {
                Term::from_field_text(text_field, "abctitle"),
                Term::from_field_text(author_field, "abctitle"),
            );
-            let query = StandardQuery::MultiTerm(MultiTermQuery::from(terms));
+            let query = MultiTermQuery::from(terms);
            assert_eq!(
-                query_parser.parse_query("abctitle").unwrap(),
+                *query_parser.parse_query("abctitle").unwrap().as_any().downcast_ref::<MultiTermQuery>().unwrap(),
                query
            );
        }
        {
            let terms = vec!(Term::from_field_text(title_field, "abctitle"));
-            let query = StandardQuery::MultiTerm(MultiTermQuery::from(terms));
+            let query = MultiTermQuery::from(terms);
            assert_eq!(
-                query_parser.parse_query("title:abctitle ").unwrap(),
+                *query_parser.parse_query("title:abctitle ").unwrap().as_any().downcast_ref::<MultiTermQuery>().unwrap(),
                query
            );
            assert_eq!(
-                query_parser.parse_query(" title:abctitle").unwrap(),
+                *query_parser.parse_query(" title:abctitle").unwrap().as_any().downcast_ref::<MultiTermQuery>().unwrap(),
                query
            );
        }
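The commented-out `extract` helper above hints at how these repeated downcasts in the tests could be factored out. One hedged way to complete it, assuming the concrete query type also implements `Clone` (cloning sidesteps returning a reference into the temporary boxed query); not part of this commit:

    fn extract<T: Query + Clone + 'static>(query_parser: &QueryParser, q: &str) -> T {
        query_parser
            .parse_query(q)
            .unwrap() // panic on a ParsingError, as the assertions above do
            .as_any()
            .downcast_ref::<T>()
            .expect("parsed query had an unexpected concrete type")
            .clone()
    }

    // With it, each assertion above could shrink to something like:
    // assert_eq!(extract::<MultiTermQuery>(&query_parser, "title:abctitle"), query);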