issue/136: Fix issues raised in code review (CR)

This commit is contained in:
Paul Masurel
2017-05-12 13:51:09 +09:00
parent 4c4c28e2c4
commit fb1b2be782
4 changed files with 14 additions and 11 deletions

View File

@@ -80,8 +80,14 @@ impl Heap {
/// Writes `val` into the heap at address `addr`.
///
/// Delegates to the inner heap's `set`.
/// NOTE(review): no bounds check is visible here — assumes `addr` was
/// previously handed out by this heap's allocator; confirm in `inner()`.
pub fn set<Item>(&self, addr: u32, val: &Item) {
self.inner().set(addr, val);
}
/// Returns a reference to an `Item` at a given `addr`.
///
/// Test-only accessor: it reuses the inner heap's mutable lookup
/// (`get_mut_ref`) and relies on the implicit `&mut Item -> &Item`
/// coercion of the returned reference.
#[cfg(test)]
pub fn get_ref<Item>(&self, addr: u32) -> &Item {
self.inner().get_mut_ref(addr)
}
/// Returns a mutable reference to an `Item` at a given `addr`.
///
/// NOTE(review): as with `set`, no validity check on `addr` is visible
/// in this block — presumably the caller guarantees `addr` points to a
/// live `Item` allocation; confirm in the inner heap implementation.
pub fn get_mut_ref<Item>(&self, addr: u32) -> &mut Item {
self.inner().get_mut_ref(addr)
}

View File

@@ -202,7 +202,7 @@ impl IndexMerger {
merged_doc_id_map.push(segment_local_map);
}
let mut field = Field(u32::max_value());
let mut last_field: Option<Field> = None;
while merged_terms.advance() {
// Create the total list of doc ids
@@ -239,10 +239,11 @@ impl IndexMerger {
if let Some(remapped_doc_id) = old_to_new_doc_id[segment_postings.doc() as usize] {
if !term_written {
let current_field = term.field();
if current_field != field {
if last_field != Some(current_field) {
postings_serializer.new_field(current_field);
field = current_field;
last_field = Some(current_field);
}
// we make sure to only write the term iff
// there is at least one document.
postings_serializer.new_term(term.as_slice())?;

View File

@@ -98,8 +98,7 @@ impl<'a> SegmentWriter<'a> {
/// Return true if the term dictionary hashmap is reaching capacity.
/// It is one of the condition that triggers a `SegmentWriter` to
/// be finalized.
#[doc(hidden)]
pub fn is_termdic_saturated(&self,) -> bool {
pub(crate) fn is_termdic_saturated(&self,) -> bool {
self.multifield_postings.is_termdic_saturated()
}
@@ -195,12 +194,10 @@ fn write<'a>(
mut serializer: SegmentSerializer) -> Result<()> {
try!(multifield_postings.serialize(serializer.get_postings_serializer()));
// for per_field_postings_writer in per_field_postings_writers {
// try!(per_field_postings_writer.serialize(serializer.get_postings_serializer(), heap));
// }
try!(fast_field_writers.serialize(serializer.get_fast_field_serializer()));
try!(fieldnorms_writer.serialize(serializer.get_fieldnorms_serializer()));
try!(serializer.close());
Ok(())
}

View File

@@ -16,7 +16,6 @@ const INT_TERM_LEN: usize = 4 + 8;
/// A `Term` wraps its serialized byte representation.
/// NOTE(review): judging by `extract_field_from_term_bytes` below, the
/// first 4 bytes encode the field id — confirm the full layout against
/// `INT_TERM_LEN` (4 + 8) defined above.
pub struct Term(Vec<u8>);
/// Extract `field` from Term.
///
/// Reads the first 4 bytes of the serialized term as a big-endian `u32`
/// and wraps it in `Field`. Panics if `term_bytes` is shorter than
/// 4 bytes (the `[..4]` slice).
#[doc(hidden)]
pub(crate) fn extract_field_from_term_bytes(term_bytes: &[u8]) -> Field {
Field(BigEndian::read_u32(&term_bytes[..4]))
}