mirror of https://github.com/quickwit-oss/tantivy.git

No more box syntax
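
Every hunk below applies the same mechanical rewrite: the nightly-only box placement syntax is replaced by the stable Box::new constructor, and the #![feature(box_syntax)] gate is dropped from lib.rs. A minimal sketch of the rewrite, illustrative only and not taken from the diff (the variable name is invented):

fn main() {
    // Before, nightly Rust only, behind #![feature(box_syntax)]:
    //     let value: Box<u32> = box 1u32;
    // After, stable Rust, the form this commit converges on:
    let value: Box<u32> = Box::new(1u32);
    assert_eq!(*value, 1);
}

At these call sites the two forms behave identically, so the conversion is purely mechanical.
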
@@ -86,7 +86,7 @@ impl ManagedDirectory {
                 let managed_files: HashSet<PathBuf> = serde_json::from_str(&managed_files_json)
                     .chain_err(|| ErrorKind::CorruptedFile(MANAGED_FILEPATH.clone()))?;
                 Ok(ManagedDirectory {
-                    directory: box directory,
+                    directory: Box::new(directory),
                     meta_informations: Arc::new(RwLock::new(MetaInformation {
                         managed_paths: managed_files,
                         protected_files: HashMap::default(),

@@ -94,7 +94,7 @@ impl ManagedDirectory {
                 })
             }
             Err(OpenReadError::FileDoesNotExist(_)) => Ok(ManagedDirectory {
-                directory: box directory,
+                directory: Box::new(directory),
                 meta_informations: Arc::default(),
             }),
             Err(OpenReadError::IOError(e)) => Err(From::from(e)),

@@ -265,7 +265,7 @@ impl Directory for ManagedDirectory {
     }

     fn box_clone(&self) -> Box<Directory> {
-        box self.clone()
+        Box::new(self.clone())
     }
 }

@@ -675,7 +675,7 @@ mod tests {
             "LogMergePolicy { min_merge_size: 8, min_layer_size: 10000, \
              level_log_size: 0.75 }"
         );
-        let merge_policy = box NoMergePolicy::default();
+        let merge_policy = Box::new(NoMergePolicy::default());
         index_writer.set_merge_policy(merge_policy);
         assert_eq!(
             format!("{:?}", index_writer.get_merge_policy()),

@@ -82,7 +82,7 @@ impl MergePolicy for LogMergePolicy {
     }

     fn box_clone(&self) -> Box<MergePolicy> {
-        box self.clone()
+        Box::new(self.clone())
     }
 }

@@ -37,7 +37,7 @@ impl MergePolicy for NoMergePolicy {
     }

     fn box_clone(&self) -> Box<MergePolicy> {
-        box NoMergePolicy
+        Box::new(NoMergePolicy)
     }
 }

@@ -69,7 +69,7 @@ pub mod tests {
         }

         fn box_clone(&self) -> Box<MergePolicy> {
-            box MergeWheneverPossible
+            Box::new(MergeWheneverPossible)
         }
     }
 }

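The box_clone hunks above are all instances of the standard clonable-trait-object pattern. A self-contained sketch, with invented names (Policy, NoMerge) and the modern dyn spelling that this code predates:

// `Clone::clone` returns `Self`, so a trait that requires `Clone` cannot
// be used as a trait object. The trait instead exposes `box_clone`, and
// each implementor hands back a freshly boxed copy of itself.
trait Policy {
    fn box_clone(&self) -> Box<dyn Policy>;
}

#[derive(Clone)]
struct NoMerge;

impl Policy for NoMerge {
    fn box_clone(&self) -> Box<dyn Policy> {
        Box::new(self.clone()) // previously `box self.clone()`
    }
}

fn main() {
    let policy: Box<dyn Policy> = Box::new(NoMerge);
    let _copy: Box<dyn Policy> = policy.box_clone();
}
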
@@ -171,7 +171,7 @@ impl SegmentUpdater {
             pool: CpuPool::new(1),
             index,
             segment_manager,
-            merge_policy: RwLock::new(box DefaultMergePolicy::default()),
+            merge_policy: RwLock::new(Box::new(DefaultMergePolicy::default())),
             merging_thread_id: AtomicUsize::default(),
             merging_threads: RwLock::new(HashMap::new()),
             generation: AtomicUsize::default(),

@@ -494,7 +494,7 @@ mod tests {

         // writing the segment
         let mut index_writer = index.writer_with_num_threads(1, 40_000_000).unwrap();
-        index_writer.set_merge_policy(box MergeWheneverPossible);
+        index_writer.set_merge_policy(Box::new(MergeWheneverPossible));

         {
             for _ in 0..100 {

@@ -1,9 +1,8 @@
 #![doc(html_logo_url = "http://fulmicoton.com/tantivy-logo/tantivy-logo.png")]
 #![cfg_attr(feature = "cargo-clippy", allow(module_inception))]
 #![cfg_attr(feature = "cargo-clippy", allow(inline_always))]
-#![feature(box_syntax)]
 #![feature(optin_builtin_traits)]
 #![feature(conservative_impl_trait)]
 #![feature(collections_range)]
 #![feature(integer_atomics)]
 #![feature(drain_filter)]

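Note that the remaining nightly gates (optin_builtin_traits, conservative_impl_trait, collections_range, integer_atomics, drain_filter) survive this hunk, so the crate still requires a nightly toolchain after this commit; only the dependency on box_syntax goes away.
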
@@ -16,7 +16,7 @@ pub struct AllQuery;

 impl Query for AllQuery {
     fn weight(&self, _: &Searcher, _: bool) -> Result<Box<Weight>> {
-        Ok(box AllWeight)
+        Ok(Box::new(AllWeight))
     }
 }

@@ -25,11 +25,11 @@ pub struct AllWeight;

 impl Weight for AllWeight {
     fn scorer(&self, reader: &SegmentReader) -> Result<Box<Scorer>> {
-        Ok(box AllScorer {
+        Ok(Box::new(AllScorer {
             started: false,
             doc: 0u32,
             max_doc: reader.max_doc(),
-        })
+        }))
     }
 }

@@ -37,7 +37,7 @@ impl Query for BooleanQuery {
                 Ok((*occur, subquery.weight(searcher, scoring_enabled)?))
             })
             .collect::<Result<_>>()?;
-        Ok(box BooleanWeight::new(sub_weights, scoring_enabled))
+        Ok(Box::new(BooleanWeight::new(sub_weights, scoring_enabled)))
     }
 }

@@ -48,7 +48,7 @@ impl BooleanQuery {
         let occur_term_queries: Vec<(Occur, Box<Query>)> = terms
             .into_iter()
             .map(|term| {
-                let term_query: Box<Query> = box TermQuery::new(term, IndexRecordOption::WithFreqs);
+                let term_query: Box<Query> = Box::new(TermQuery::new(term, IndexRecordOption::WithFreqs));
                 (Occur::Should, term_query)
             })
             .collect();

@@ -34,12 +34,12 @@ where
                 .into_iter()
                 .map(|scorer| *Downcast::<TermScorer>::downcast(scorer).unwrap())
                 .collect();
-            let scorer: Box<Scorer> = box Union::<TermScorer, TScoreCombiner>::from(scorers);
+            let scorer: Box<Scorer> = Box::new(Union::<TermScorer, TScoreCombiner>::from(scorers));
             return scorer;
         }
     }

-    let scorer: Box<Scorer> = box Union::<_, TScoreCombiner>::from(scorers);
+    let scorer: Box<Scorer> = Box::new(Union::<_, TScoreCombiner>::from(scorers));
     return scorer;

 }

@@ -85,10 +85,10 @@ impl BooleanWeight {
         let positive_scorer: Box<Scorer> = match (should_scorer_opt, must_scorer_opt) {
             (Some(should_scorer), Some(must_scorer)) => {
                 if self.scoring_enabled {
-                    box RequiredOptionalScorer::<_, _, TScoreCombiner>::new(
+                    Box::new(RequiredOptionalScorer::<_, _, TScoreCombiner>::new(
                         must_scorer,
                         should_scorer,
-                    )
+                    ))
                 } else {
                     must_scorer
                 }

@@ -96,12 +96,12 @@ impl BooleanWeight {
             (None, Some(must_scorer)) => must_scorer,
             (Some(should_scorer), None) => should_scorer,
             (None, None) => {
-                return Ok(box EmptyScorer);
+                return Ok(Box::new(EmptyScorer));
             }
         };

         if let Some(exclude_scorer) = exclude_scorer_opt {
-            Ok(box Exclude::new(positive_scorer, exclude_scorer))
+            Ok(Box::new(Exclude::new(positive_scorer, exclude_scorer)))
         } else {
             Ok(positive_scorer)
         }

@@ -111,11 +111,11 @@ impl BooleanWeight {
 impl Weight for BooleanWeight {
     fn scorer(&self, reader: &SegmentReader) -> Result<Box<Scorer>> {
         if self.weights.is_empty() {
-            Ok(box EmptyScorer)
+            Ok(Box::new(EmptyScorer))
         } else if self.weights.len() == 1 {
             let &(occur, ref weight) = &self.weights[0];
             if occur == Occur::MustNot {
-                Ok(box EmptyScorer)
+                Ok(Box::new(EmptyScorer))
             } else {
                 weight.scorer(reader)
             }

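A recurring shape in these query hunks: Weight::scorer returns Result<Box<Scorer>>, and different branches box different concrete scorer types. A stripped-down sketch of why the trait object is needed; all names here are stand-ins rather than tantivy's real definitions, written with the modern dyn spelling:

trait Scorer {}

struct EmptyScorer;
struct TermScorer;

impl Scorer for EmptyScorer {}
impl Scorer for TermScorer {}

// The two arms produce values of different concrete types; boxing them
// behind one trait object gives the function a single return type.
fn scorer(no_weights: bool) -> Box<dyn Scorer> {
    if no_weights {
        Box::new(EmptyScorer)
    } else {
        Box::new(TermScorer)
    }
}

fn main() {
    let _scorer: Box<dyn Scorer> = scorer(true);
}
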
@@ -124,7 +124,7 @@ mod tests {
             Term::from_field_text(text_field, text),
             IndexRecordOption::Basic,
         );
-        let query: Box<Query> = box term_query;
+        let query: Box<Query> = Box::new(term_query);
         query
     };

@@ -181,7 +181,7 @@ mod tests {
             Term::from_field_text(text_field, text),
             IndexRecordOption::Basic,
         );
-        let query: Box<Query> = box term_query;
+        let query: Box<Query> = Box::new(term_query);
         query
     };

@@ -151,10 +151,10 @@ mod tests {
     fn test_exclude_skip() {
         test_skip_against_unoptimized(
             || {
-                box Exclude::new(
+                Box::new(Exclude::new(
                     VecDocSet::from(vec![1, 2, 5, 8, 10, 15, 24]),
                     VecDocSet::from(vec![1, 2, 3, 10, 16, 24]),
-                )
+                ))
             },
             vec![1, 2, 5, 8, 10, 15, 24],
         );

@@ -167,10 +167,10 @@ mod tests {
         let sample_skip = sample_with_seed(10_000, 0.005, 3);
         test_skip_against_unoptimized(
             || {
-                box Exclude::new(
+                Box::new(Exclude::new(
                     VecDocSet::from(sample_include.clone()),
                     VecDocSet::from(sample_exclude.clone()),
-                )
+                ))
             },
             sample_skip,
         );

@@ -22,7 +22,7 @@ pub fn intersect_scorers(mut scorers: Vec<Box<Scorer>>) -> Box<Scorer> {
     let second_rarest_opt = scorers.pop();
     scorers.reverse();
     match (rarest_opt, second_rarest_opt) {
-        (None, None) => box EmptyScorer,
+        (None, None) => Box::new(EmptyScorer),
         (Some(single_docset), None) => single_docset,
         (Some(left), Some(right)) => {
             {

@@ -32,20 +32,20 @@ pub fn intersect_scorers(mut scorers: Vec<Box<Scorer>>) -> Box<Scorer> {
             }) {
                 let left = *Downcast::<TermScorer>::downcast(left).unwrap();
                 let right = *Downcast::<TermScorer>::downcast(right).unwrap();
-                return box Intersection {
+                return Box::new(Intersection {
                     left,
                     right,
                     others: scorers,
                     num_docsets
-                }
+                })
             }
         }
-        return box Intersection {
+        return Box::new(Intersection {
             left,
             right,
             others: scorers,
             num_docsets
-        }
+        })
     }
     _ => { unreachable!(); }
 }

@@ -271,7 +271,7 @@ mod tests {
             || {
                 let left = VecDocSet::from(vec![4]);
                 let right = VecDocSet::from(vec![2, 5]);
-                box Intersection::new(vec![left, right])
+                Box::new(Intersection::new(vec![left, right]))
             },
             vec![0, 2, 4, 5, 6],
         );

@@ -281,19 +281,19 @@ mod tests {
                 let mut right = VecDocSet::from(vec![2, 5, 10]);
                 left.advance();
                 right.advance();
-                box Intersection::new(vec![left, right])
+                Box::new(Intersection::new(vec![left, right]))
             },
             vec![0, 1, 2, 3, 4, 5, 6, 7, 10, 11],
         );
         test_skip_against_unoptimized(
             || {
-                box Intersection::new(vec![
+                Box::new(Intersection::new(vec![
                     VecDocSet::from(vec![1, 4, 5, 6]),
                     VecDocSet::from(vec![1, 2, 5, 6]),
                     VecDocSet::from(vec![1, 4, 5, 6]),
                     VecDocSet::from(vec![1, 5, 6]),
                     VecDocSet::from(vec![2, 4, 5, 7, 8]),
-                ])
+                ]))
             },
             vec![0, 1, 2, 3, 4, 5, 6, 7, 10, 11],
         );

@@ -48,13 +48,13 @@ impl Query for PhraseQuery {
         let terms = self.phrase_terms.clone();
         if scoring_enabled {
             let bm25_weight = BM25Weight::for_terms(searcher, &terms);
-            Ok(box PhraseWeight::new(
+            Ok(Box::new(PhraseWeight::new(
                 terms,
                 bm25_weight,
                 true
-            ))
+            )))
         } else {
-            Ok(box PhraseWeight::new(terms, BM25Weight::null(), false))
+            Ok(Box::new(PhraseWeight::new(terms, BM25Weight::null(), false)))
         }

     }

@@ -40,10 +40,10 @@ impl Weight for PhraseWeight {
                     .read_postings(term, IndexRecordOption::WithFreqsAndPositions) {
                     term_postings_list.push(postings);
                 } else {
-                    return Ok(box EmptyScorer);
+                    return Ok(Box::new(EmptyScorer));
                 }
             }
-            Ok(box PhraseScorer::new(term_postings_list, similarity_weight, fieldnorm_reader, self.score_needed))
+            Ok(Box::new(PhraseScorer::new(term_postings_list, similarity_weight, fieldnorm_reader, self.score_needed)))
         } else {
             let mut term_postings_list = Vec::new();
             for term in &self.phrase_terms {

@@ -52,10 +52,10 @@ impl Weight for PhraseWeight {
                     .read_postings_no_deletes(term, IndexRecordOption::WithFreqsAndPositions) {
                     term_postings_list.push(postings);
                 } else {
-                    return Ok(box EmptyScorer);
+                    return Ok(Box::new(EmptyScorer));
                 }
             }
-            Ok(box PhraseScorer::new(term_postings_list, similarity_weight, fieldnorm_reader, self.score_needed))
+            Ok(Box::new(PhraseScorer::new(term_postings_list, similarity_weight, fieldnorm_reader, self.score_needed)))
         }
     }
 }

@@ -45,7 +45,7 @@ impl fmt::Debug for LogicalAST {

 impl From<LogicalLiteral> for LogicalAST {
     fn from(literal: LogicalLiteral) -> LogicalAST {
-        LogicalAST::Leaf(box literal)
+        LogicalAST::Leaf(Box::new(literal))
     }
 }

@@ -42,8 +42,8 @@ where
     I: Stream<Item = char>,
 {
     (char('-'), parser(leaf))
-        .map(|(_, expr)| UserInputAST::Not(box expr))
-        .or((char('+'), parser(leaf)).map(|(_, expr)| UserInputAST::Must(box expr)))
+        .map(|(_, expr)| UserInputAST::Not(Box::new(expr)))
+        .or((char('+'), parser(leaf)).map(|(_, expr)| UserInputAST::Must(Box::new(expr))))
         .or((char('('), parser(parse_to_ast), char(')')).map(|(_, expr, _)| expr))
         .or(parser(literal))
         .parse_stream(input)

@@ -264,7 +264,7 @@ impl QueryParser {
         let mut asts: Vec<LogicalAST> = Vec::new();
         for (field, phrase) in term_phrases {
             if let Some(ast) = self.compute_logical_ast_for_leaf(field, &phrase)? {
-                asts.push(LogicalAST::Leaf(box ast));
+                asts.push(LogicalAST::Leaf(Box::new(ast)));
             }
         }
         let result_ast = if asts.is_empty() {

@@ -304,8 +304,8 @@ fn compose_occur(left: Occur, right: Occur) -> Occur {

 fn convert_literal_to_query(logical_literal: LogicalLiteral) -> Box<Query> {
     match logical_literal {
-        LogicalLiteral::Term(term) => box TermQuery::new(term, IndexRecordOption::WithFreqs),
-        LogicalLiteral::Phrase(terms) => box PhraseQuery::new(terms),
+        LogicalLiteral::Term(term) => Box::new(TermQuery::new(term, IndexRecordOption::WithFreqs)),
+        LogicalLiteral::Phrase(terms) => Box::new(PhraseQuery::new(terms)),
     }
 }

@@ -316,7 +316,7 @@ fn convert_to_query(logical_ast: LogicalAST) -> Box<Query> {
                 .into_iter()
                 .map(|(occur, subquery)| (occur, convert_to_query(subquery)))
                 .collect::<Vec<_>>();
-            box BooleanQuery::from(occur_subqueries)
+            Box::new(BooleanQuery::from(occur_subqueries))
         }
         LogicalAST::Leaf(logical_literal) => convert_literal_to_query(*logical_literal),
     }

@@ -23,7 +23,7 @@ pub enum UserInputAST {

 impl From<UserInputLiteral> for UserInputAST {
     fn from(literal: UserInputLiteral) -> UserInputAST {
-        UserInputAST::Leaf(box literal)
+        UserInputAST::Leaf(Box::new(literal))
     }
 }

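The AST hunks (LogicalAST::Leaf(Box::new(..)) and UserInputAST::Leaf(Box::new(..)) above, plus the Not/Must wrappers earlier) box their children because the enums are recursive. A minimal sketch with invented variants, not tantivy's actual definitions:

// A recursive enum would have infinite size if it stored itself inline;
// Box gives the recursive variant a fixed-size pointer instead.
enum Ast {
    Leaf(String),
    Not(Box<Ast>),
}

fn main() {
    let ast = Ast::Not(Box::new(Ast::Leaf(String::from("tantivy"))));
    if let Ast::Not(inner) = ast {
        if let Ast::Leaf(text) = *inner {
            assert_eq!(text, "tantivy");
        }
    }
}
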
@@ -129,11 +129,11 @@ impl RangeQuery {

 impl Query for RangeQuery {
     fn weight(&self, _searcher: &Searcher, _scoring_enabled: bool) -> Result<Box<Weight>> {
-        Ok(box RangeWeight {
+        Ok(Box::new(RangeWeight {
             field: self.field,
             left_bound: self.left_bound.clone(),
             right_bound: self.right_bound.clone(),
-        })
+        }))
     }
 }

@@ -183,7 +183,7 @@ impl Weight for RangeWeight {
             }
         }
         let doc_bitset = BitSetDocSet::from(doc_bitset);
-        Ok(box ConstScorer::new(doc_bitset))
+        Ok(Box::new(ConstScorer::new(doc_bitset)))
     }
 }

@@ -182,10 +182,10 @@ mod tests {
         let skip_docs = sample_with_seed(10_000, 0.001, 3);
         test_skip_against_unoptimized(
             || {
-                box RequiredOptionalScorer::<_, _, DoNothingCombiner>::new(
+                Box::new(RequiredOptionalScorer::<_, _, DoNothingCombiner>::new(
                     ConstScorer::new(VecDocSet::from(req_docs.clone())),
                     ConstScorer::new(VecDocSet::from(opt_docs.clone())),
-                )
+                ))
             },
             skip_docs,
         );

@@ -54,7 +54,7 @@ impl TermQuery {

 impl Query for TermQuery {
     fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<Weight>> {
-        Ok(box self.specialized_weight(searcher, scoring_enabled))
+        Ok(Box::new(self.specialized_weight(searcher, scoring_enabled)))
     }
 }

@@ -25,14 +25,14 @@ impl Weight for TermWeight {
         let postings_opt: Option<SegmentPostings> =
             inverted_index.read_postings(&self.term, self.index_record_option);
         if let Some(segment_postings) = postings_opt {
-            Ok(box TermScorer::new(segment_postings,
+            Ok(Box::new(TermScorer::new(segment_postings,
                                    fieldnorm_reader,
-                                   similarity_weight))
+                                   similarity_weight)))
         } else {
-            Ok(box TermScorer::new(
+            Ok(Box::new(TermScorer::new(
                 SegmentPostings::empty(),
                 fieldnorm_reader,
-                similarity_weight))
+                similarity_weight)))
         }
     }

@@ -313,14 +313,14 @@ mod tests {
             }
         }
         let docset_factory = || {
-            let res: Box<DocSet> = box Union::<_, DoNothingCombiner>::from(
+            let res: Box<DocSet> = Box::new(Union::<_, DoNothingCombiner>::from(
                 docs_list
                     .iter()
                     .map(|docs| docs.clone())
                     .map(VecDocSet::from)
                     .map(ConstScorer::new)
                     .collect::<Vec<_>>(),
-            );
+            ));
             res
         };
         let mut docset = docset_factory();

@@ -102,13 +102,13 @@ where
     A: 'static + Send + Sync + for<'a> Tokenizer<'a>,
 {
     fn token_stream<'a>(&self, text: &'a str) -> Box<TokenStream + 'a> {
-        box self.0.token_stream(text)
+        Box::new(self.0.token_stream(text))
     }

     fn token_stream_texts<'b>(&self, texts: &'b [&'b str]) -> Box<TokenStream + 'b> {
         assert!(!texts.is_empty());
         if texts.len() == 1 {
-            box self.0.token_stream(texts[0])
+            Box::new(self.0.token_stream(texts[0]))
         } else {
             let mut offsets = vec![];
             let mut total_offset = 0;

@@ -118,12 +118,12 @@ where
             }
             let token_streams: Vec<_> =
                 texts.iter().map(|text| self.0.token_stream(text)).collect();
-            box TokenStreamChain::new(offsets, token_streams)
+            Box::new(TokenStreamChain::new(offsets, token_streams))
         }
     }

     fn boxed_clone(&self) -> Box<BoxedTokenizer> {
-        box self.clone()
+        Box::new(self.clone())
     }
 }

@@ -131,7 +131,7 @@ pub(crate) fn box_tokenizer<A>(a: A) -> Box<BoxedTokenizer>
 where
     A: 'static + Send + Sync + for<'a> Tokenizer<'a>,
 {
-    box BoxableTokenizer(a)
+    Box::new(BoxableTokenizer(a))
 }

 impl<'b> TokenStream for Box<TokenStream + 'b> {