From 486b8fa9c54edb5b03d9d1942f424cc4cd32a5eb Mon Sep 17 00:00:00 2001
From: Paul Masurel
Date: Fri, 6 Mar 2020 23:33:58 +0900
Subject: [PATCH] Removing serde-derive dependency (#786)

---
 Cargo.toml                         | 1 -
 examples/pre_tokenized_text.rs     | 3 +--
 src/core/index_meta.rs             | 1 +
 src/core/segment_id.rs             | 1 +
 src/directory/mmap_directory.rs    | 1 +
 src/lib.rs                         | 4 +---
 src/query/explanation.rs           | 1 +
 src/schema/document.rs             | 3 ++-
 src/schema/field.rs                | 5 ++++-
 src/schema/field_value.rs          | 3 ++-
 src/schema/index_record_option.rs  | 2 ++
 src/schema/int_options.rs          | 1 +
 src/schema/named_field_document.rs | 1 +
 src/schema/text_options.rs         | 1 +
 src/space_usage/mod.rs             | 1 +
 src/tokenizer/stemmer.rs           | 1 +
 src/tokenizer/tokenized_string.rs  | 1 +
 src/tokenizer/tokenizer.rs         | 1 +
 18 files changed, 23 insertions(+), 9 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 8cb88bee8..6151473ab 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -26,7 +26,6 @@ atomicwrites = {version="0.2.2", optional=true}
 tempfile = "3.0"
 log = "0.4"
 serde = "1.0"
-serde_derive = "1.0"
 serde_json = "1.0"
 num_cpus = "1.2"
 fs2={version="0.4", optional=true}
diff --git a/examples/pre_tokenized_text.rs b/examples/pre_tokenized_text.rs
index 57d867b1b..15c26dfcb 100644
--- a/examples/pre_tokenized_text.rs
+++ b/examples/pre_tokenized_text.rs
@@ -9,11 +9,10 @@
 // - import tokenized text straight from json,
 // - perform a search on documents with pre-tokenized text
 
-use tantivy::tokenizer::{PreTokenizedString, SimpleTokenizer, Token, Tokenizer};
-
 use tantivy::collector::{Count, TopDocs};
 use tantivy::query::TermQuery;
 use tantivy::schema::*;
+use tantivy::tokenizer::{PreTokenizedString, SimpleTokenizer, Token, Tokenizer};
 use tantivy::{doc, Index, ReloadPolicy};
 use tempfile::TempDir;
 
diff --git a/src/core/index_meta.rs b/src/core/index_meta.rs
index 5cc2e5814..4b257667d 100644
--- a/src/core/index_meta.rs
+++ b/src/core/index_meta.rs
@@ -4,6 +4,7 @@ use crate::schema::Schema;
 use crate::Opstamp;
 use census::{Inventory, TrackedObject};
 use serde;
+use serde::{Deserialize, Serialize};
 use serde_json;
 use std::collections::HashSet;
 use std::fmt;
diff --git a/src/core/segment_id.rs b/src/core/segment_id.rs
index e7c8eea00..0fd16aa2a 100644
--- a/src/core/segment_id.rs
+++ b/src/core/segment_id.rs
@@ -4,6 +4,7 @@ use uuid::Uuid;
 
 #[cfg(test)]
 use once_cell::sync::Lazy;
+use serde::{Deserialize, Serialize};
 use std::error::Error;
 use std::str::FromStr;
 #[cfg(test)]
diff --git a/src/directory/mmap_directory.rs b/src/directory/mmap_directory.rs
index 0c48a0775..7ba491938 100644
--- a/src/directory/mmap_directory.rs
+++ b/src/directory/mmap_directory.rs
@@ -22,6 +22,7 @@ use crate::directory::WatchHandle;
 use crate::directory::{TerminatingWrite, WritePtr};
 use atomicwrites;
 use memmap::Mmap;
+use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::convert::From;
 use std::fmt;
diff --git a/src/lib.rs b/src/lib.rs
index 89ce3ba56..55d71bd5f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -98,9 +98,6 @@
 //! [literate programming](https://tantivy-search.github.io/examples/basic_search.html) /
 //! [source code](https://github.com/tantivy-search/tantivy/blob/master/examples/basic_search.rs))
 
-#[macro_use]
-extern crate serde_derive;
-
 #[cfg_attr(test, macro_use)]
 extern crate serde_json;
 
@@ -173,6 +170,7 @@ pub use crate::schema::{Document, Term};
 use std::fmt;
 
 use once_cell::sync::Lazy;
+use serde::{Deserialize, Serialize};
 
 /// Index format version.
 const INDEX_FORMAT_VERSION: u32 = 1;
diff --git a/src/query/explanation.rs b/src/query/explanation.rs
index cabb08138..cb4048611 100644
--- a/src/query/explanation.rs
+++ b/src/query/explanation.rs
@@ -1,4 +1,5 @@
 use crate::{DocId, TantivyError};
+use serde::Serialize;
 
 pub(crate) fn does_not_match(doc: DocId) -> TantivyError {
     TantivyError::InvalidArgument(format!("Document #({}) does not match", doc))
diff --git a/src/schema/document.rs b/src/schema/document.rs
index 4ec5e3549..95a8b3b72 100644
--- a/src/schema/document.rs
+++ b/src/schema/document.rs
@@ -4,6 +4,7 @@ use crate::common::VInt;
 use crate::tokenizer::PreTokenizedString;
 use crate::DateTime;
 use itertools::Itertools;
+use serde;
 use std::io::{self, Read, Write};
 
 /// Tantivy's Document is the object that can
@@ -16,7 +17,7 @@ use std::io::{self, Read, Write};
 
 /// Documents are really just a list of couple `(field, value)`.
 /// In this list, one field may appear more than once.
-#[derive(Clone, Debug, Serialize, Deserialize, Default)]
+#[derive(Clone, Debug, serde::Serialize, serde::Deserialize, Default)]
 pub struct Document {
     field_values: Vec<FieldValue>,
 }
diff --git a/src/schema/field.rs b/src/schema/field.rs
index c0d418561..efcb78f9b 100644
--- a/src/schema/field.rs
+++ b/src/schema/field.rs
@@ -1,11 +1,14 @@
 use crate::common::BinarySerializable;
+use serde;
 use std::io;
 use std::io::Read;
 use std::io::Write;
 
 /// `Field` is represented by an unsigned 32-bit integer type
 /// The schema holds the mapping between field names and `Field` objects.
-#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
+#[derive(
+    Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, serde::Serialize, serde::Deserialize,
+)]
 pub struct Field(u32);
 
 impl Field {
diff --git a/src/schema/field_value.rs b/src/schema/field_value.rs
index 78b8b9613..a6f9e0924 100644
--- a/src/schema/field_value.rs
+++ b/src/schema/field_value.rs
@@ -1,12 +1,13 @@
 use crate::common::BinarySerializable;
 use crate::schema::Field;
 use crate::schema::Value;
+use serde;
 use std::io;
 use std::io::Read;
 use std::io::Write;
 
 /// `FieldValue` holds together a `Field` and its `Value`.
-#[derive(Debug, Clone, Ord, PartialEq, Eq, PartialOrd, Serialize, Deserialize)]
+#[derive(Debug, Clone, Ord, PartialEq, Eq, PartialOrd, serde::Serialize, serde::Deserialize)]
 pub struct FieldValue {
     field: Field,
     value: Value,
diff --git a/src/schema/index_record_option.rs b/src/schema/index_record_option.rs
index 06dc02604..e3a8dfbe3 100644
--- a/src/schema/index_record_option.rs
+++ b/src/schema/index_record_option.rs
@@ -1,3 +1,5 @@
+use serde::{Deserialize, Serialize};
+
 /// `IndexRecordOption` describes an amount information associated
 /// to a given indexed field.
 ///
diff --git a/src/schema/int_options.rs b/src/schema/int_options.rs
index 19131d766..565672645 100644
--- a/src/schema/int_options.rs
+++ b/src/schema/int_options.rs
@@ -1,4 +1,5 @@
 use crate::schema::flags::{FastFlag, IndexedFlag, SchemaFlagList, StoredFlag};
+use serde::{Deserialize, Serialize};
 use std::ops::BitOr;
 
 /// Express whether a field is single-value or multi-valued.
diff --git a/src/schema/named_field_document.rs b/src/schema/named_field_document.rs
index 4db8ab0e3..c7288b193 100644
--- a/src/schema/named_field_document.rs
+++ b/src/schema/named_field_document.rs
@@ -1,4 +1,5 @@
 use crate::schema::Value;
+use serde::Serialize;
 use std::collections::BTreeMap;
 
 /// Internal representation of a document used for JSON
diff --git a/src/schema/text_options.rs b/src/schema/text_options.rs
index 11ab8accd..9f46921e3 100644
--- a/src/schema/text_options.rs
+++ b/src/schema/text_options.rs
@@ -1,6 +1,7 @@
 use crate::schema::flags::SchemaFlagList;
 use crate::schema::flags::StoredFlag;
 use crate::schema::IndexRecordOption;
+use serde::{Deserialize, Serialize};
 use std::borrow::Cow;
 use std::ops::BitOr;
 
diff --git a/src/space_usage/mod.rs b/src/space_usage/mod.rs
index 93c1c953e..e647bccd4 100644
--- a/src/space_usage/mod.rs
+++ b/src/space_usage/mod.rs
@@ -11,6 +11,7 @@ under-count actual resultant space usage by up to 4095 bytes per file.
 
 use crate::schema::Field;
 use crate::SegmentComponent;
+use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 
 /// Indicates space usage in bytes
diff --git a/src/tokenizer/stemmer.rs b/src/tokenizer/stemmer.rs
index afa9a5249..8facade1a 100644
--- a/src/tokenizer/stemmer.rs
+++ b/src/tokenizer/stemmer.rs
@@ -1,6 +1,7 @@
 use super::{Token, TokenFilter, TokenStream};
 use crate::tokenizer::BoxTokenStream;
 use rust_stemmers::{self, Algorithm};
+use serde::{Deserialize, Serialize};
 
 /// Available stemmer languages.
 #[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Copy, Clone)]
diff --git a/src/tokenizer/tokenized_string.rs b/src/tokenizer/tokenized_string.rs
index 609bf2f49..e091c6018 100644
--- a/src/tokenizer/tokenized_string.rs
+++ b/src/tokenizer/tokenized_string.rs
@@ -1,4 +1,5 @@
 use crate::tokenizer::{BoxTokenStream, Token, TokenStream, TokenStreamChain};
+use serde::{Deserialize, Serialize};
 use std::cmp::Ordering;
 
 /// Struct representing pre-tokenized text
diff --git a/src/tokenizer/tokenizer.rs b/src/tokenizer/tokenizer.rs
index 0a6f61758..cd4b0c222 100644
--- a/src/tokenizer/tokenizer.rs
+++ b/src/tokenizer/tokenizer.rs
@@ -1,4 +1,5 @@
 use crate::tokenizer::TokenStreamChain;
+use serde::{Deserialize, Serialize};
 /// The tokenizer module contains all of the tools used to process
 /// text in `tantivy`.
 use std::borrow::{Borrow, BorrowMut};
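
Illustrative note (not part of the patch): the pattern adopted above is to import the derive
macros from `serde` itself instead of pulling in the separate `serde_derive` crate. A minimal
sketch of that pattern, assuming `Cargo.toml` enables serde's `derive` feature
(e.g. `serde = { version = "1.0", features = ["derive"] }`) and also depends on `serde_json`;
the `DocMeta` struct and `main` function are hypothetical and only mirror the derive style
used for `Document` and `Field` in the patch:

    // Derives come from `serde` with the `derive` feature enabled; no `serde_derive` import.
    use serde::{Deserialize, Serialize};

    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct DocMeta {
        field: u32,
        stored: bool,
    }

    fn main() -> serde_json::Result<()> {
        let meta = DocMeta { field: 1, stored: true };
        // Round-trip through JSON to confirm the derived impls are available.
        let json = serde_json::to_string(&meta)?;
        let back: DocMeta = serde_json::from_str(&json)?;
        assert_eq!(meta, back);
        println!("{}", json);
        Ok(())
    }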