@@ -458,10 +458,7 @@ mod tests {
     use crate::store::{Compressor, StoreReader, StoreWriter};
     use crate::time::format_description::well_known::Rfc3339;
     use crate::time::OffsetDateTime;
-    use crate::tokenizer::{
-        Language, PreTokenizedString, RemoveLongFilter, Stemmer, TextAnalyzer, Token,
-        WhitespaceTokenizer,
-    };
+    use crate::tokenizer::{PreTokenizedString, Token};
     use crate::{
         DateTime, Directory, DocAddress, DocSet, Document, Index, Postings, Term, TERMINATED,
     };
@@ -912,7 +909,6 @@ mod tests {
         assert_eq!(positions, &[4]); //< as opposed to 3 if we had a position length of 1.
     }

-    // ISSUE-#2078 - writing and searching shall throw error when the field tokenizer is missing
     #[test]
     fn test_show_error_when_tokenizer_not_registered() {
         let text_field_indexing = TextFieldIndexing::default()
@@ -921,32 +917,23 @@ mod tests {
         let text_options = TextOptions::default()
             .set_indexing_options(text_field_indexing)
             .set_stored();
-        let custom_en_tokenizer = TextAnalyzer::builder(WhitespaceTokenizer::default())
-            .filter(RemoveLongFilter::limit(40))
-            .filter(Stemmer::new(Language::English))
-            .build();
         let mut schema_builder = Schema::builder();
         schema_builder.add_text_field("title", text_options);
         let schema = schema_builder.build();
         let tempdir = TempDir::new().unwrap();
         let tempdir_path = PathBuf::from(tempdir.path());
-        let index = Index::create_in_dir(&tempdir_path, schema).unwrap();
-        index
-            .tokenizers()
-            .register("custom_en", custom_en_tokenizer);
+        Index::create_in_dir(&tempdir_path, schema).unwrap();
         let index = Index::open_in_dir(tempdir_path).unwrap();
         let schema = index.schema();
         let mut index_writer = index.writer(50_000_000).unwrap();
         let title = schema.get_field("title").unwrap();
         let mut document = Document::default();
         document.add_text(title, "The Old Man and the Sea");
         index_writer.add_document(document).unwrap();
-        match index_writer.commit() {
-            Ok(_) => panic!("Commit should have failed"),
-            Err(e) => assert_eq!(
-                e.to_string(),
-                "Schema error: 'Error getting tokenizer for field: title'"
-            ),
-        }
+        let error = index_writer.commit().unwrap_err();
+        assert_eq!(
+            error.to_string(),
+            "Schema error: 'Error getting tokenizer for field: title'"
+        );
     }
 }