Skip to content

Commit 46b96c2

Browse files
author
Naveen Aiathurai
committed
chore: fix review comments
1 parent c5be8e1 commit 46b96c2

File tree

1 file changed

+7
-20
lines changed

1 file changed

+7
-20
lines changed

src/indexer/segment_writer.rs

+7-20
Original file line number | Diff line number | Diff line change
@@ -458,10 +458,7 @@ mod tests {
458458
use crate::store::{Compressor, StoreReader, StoreWriter};
459459
use crate::time::format_description::well_known::Rfc3339;
460460
use crate::time::OffsetDateTime;
461-
use crate::tokenizer::{
462-
Language, PreTokenizedString, RemoveLongFilter, Stemmer, TextAnalyzer, Token,
463-
WhitespaceTokenizer,
464-
};
461+
use crate::tokenizer::{PreTokenizedString, Token};
465462
use crate::{
466463
DateTime, Directory, DocAddress, DocSet, Document, Index, Postings, Term, TERMINATED,
467464
};
@@ -912,7 +909,6 @@ mod tests {
912909
assert_eq!(positions, &[4]); //< as opposed to 3 if we had a position length of 1.
913910
}
914911

915-
// ISSUE-#2078 - writing and searching shall throw error when the field tokenizer is missing
916912
#[test]
917913
fn test_show_error_when_tokenizer_not_registered() {
918914
let text_field_indexing = TextFieldIndexing::default()
@@ -921,32 +917,23 @@ mod tests {
921917
let text_options = TextOptions::default()
922918
.set_indexing_options(text_field_indexing)
923919
.set_stored();
924-
let custom_en_tokenizer = TextAnalyzer::builder(WhitespaceTokenizer::default())
925-
.filter(RemoveLongFilter::limit(40))
926-
.filter(Stemmer::new(Language::English))
927-
.build();
928920
let mut schema_builder = Schema::builder();
929921
schema_builder.add_text_field("title", text_options);
930922
let schema = schema_builder.build();
931923
let tempdir = TempDir::new().unwrap();
932924
let tempdir_path = PathBuf::from(tempdir.path());
933-
let index = Index::create_in_dir(&tempdir_path, schema).unwrap();
934-
index
935-
.tokenizers()
936-
.register("custom_en", custom_en_tokenizer);
925+
Index::create_in_dir(&tempdir_path, schema).unwrap();
937926
let index = Index::open_in_dir(tempdir_path).unwrap();
938927
let schema = index.schema();
939928
let mut index_writer = index.writer(50_000_000).unwrap();
940929
let title = schema.get_field("title").unwrap();
941930
let mut document = Document::default();
942931
document.add_text(title, "The Old Man and the Sea");
943932
index_writer.add_document(document).unwrap();
944-
match index_writer.commit() {
945-
Ok(_) => panic!("Commit should have failed"),
946-
Err(e) => assert_eq!(
947-
e.to_string(),
948-
"Schema error: 'Error getting tokenizer for field: title'"
949-
),
950-
}
933+
let error = index_writer.commit().unwrap_err();
934+
assert_eq!(
935+
error.to_string(),
936+
"Schema error: 'Error getting tokenizer for field: title'"
937+
);
951938
}
952939
}

0 commit comments

Comments (0)