| | | |
|---|---|---|
| author | Michele Calgaro <michele.calgaro@yahoo.it> | 2024-01-23 10:13:00 +0900 |
| committer | Michele Calgaro <michele.calgaro@yahoo.it> | 2024-01-26 11:05:01 +0900 |
| commit | 3b1e4bbb3df6a0de8aa0693038449c6f0359ce91 (patch) | |
| tree | 068068e7b1b6202c635bd655e346f838d715373c /indexlib/tests/tokenizer-test.cpp | |
| parent | b0f8eef013163b2098c2bb07e93cb9b194338b80 (diff) | |
| download | tdepim-3b1e4bbb.tar.gz, tdepim-3b1e4bbb.zip | |
Replace std::auto_ptr with std::unique_ptr
Signed-off-by: Michele Calgaro <michele.calgaro@yahoo.it>
(cherry picked from commit d2f343cc239e1fa25c9581cf35bada96692c41db)
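
For context (not part of the commit): std::auto_ptr was deprecated in C++11 and removed in C++17 because its copy constructor silently transferred ownership, so an innocent-looking copy left the source pointer null. std::unique_ptr is the standard replacement that makes the transfer explicit. A minimal sketch of the difference, using a hypothetical tokenizer_stub rather than indexlib's real tokenizer type:

```cpp
#include <cassert>
#include <memory>
#include <utility>

// Illustrative only -- not from the tdepim tree.
struct tokenizer_stub {};

int main() {
	std::unique_ptr<tokenizer_stub> a(new tokenizer_stub);
	// std::auto_ptr<tokenizer_stub> b = a;  // old semantics: 'a' silently became null
	std::unique_ptr<tokenizer_stub> b = std::move(a);  // new semantics: transfer is explicit
	assert(!a && b);  // 'a' is now empty, 'b' owns the object
	return 0;
}
```

Because std::unique_ptr is move-only but returning it from a factory yields a prvalue, a function like get_tokenizer can hand back a std::unique_ptr with no std::move at the call sites touched here.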
Diffstat (limited to 'indexlib/tests/tokenizer-test.cpp')
| -rw-r--r-- | indexlib/tests/tokenizer-test.cpp | 12 |
1 file changed, 6 insertions(+), 6 deletions(-)
```diff
diff --git a/indexlib/tests/tokenizer-test.cpp b/indexlib/tests/tokenizer-test.cpp
index 372859d9..1354ddcd 100644
--- a/indexlib/tests/tokenizer-test.cpp
+++ b/indexlib/tests/tokenizer-test.cpp
@@ -9,8 +9,8 @@ using indexlib::detail::tokenizer;
 using indexlib::detail::get_tokenizer;
 
 void simple() {
-	std::auto_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
-	assert(tokenizer.get());
+	std::unique_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
+	assert(tokenizer);
 	std::vector<std::string> tokens = tokenizer->string_to_words( "one ,as, ''#`:ThReE, בבאחי" );
 	std::vector<std::string> expected;
 	expected.push_back( "ONE" );
@@ -26,8 +26,8 @@ void simple() {
 }
 
 void with_newlines() {
-	std::auto_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
-	assert(tokenizer.get());
+	std::unique_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
+	assert(tokenizer);
 	std::vector<std::string> tokens = tokenizer->string_to_words( "one\ntwo\nthree" );
 	std::vector<std::string> expected;
 	expected.push_back( "ONE" );
@@ -42,8 +42,8 @@ void with_newlines() {
 }
 
 void with_numbers() {
-	std::auto_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
-	assert(tokenizer.get());
+	std::unique_ptr<tokenizer> tokenizer = get_tokenizer( "latin-1:european" );
+	assert(tokenizer);
 	std::vector<std::string> tokens = tokenizer->string_to_words( "one 012 123 four" );
 	std::vector<std::string> expected;
 	expected.push_back( "ONE" );
```
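
Note the paired change in each test from assert(tokenizer.get()) to assert(tokenizer): std::unique_ptr defines an explicit operator bool, which the assert condition invokes through contextual conversion, so extracting the raw pointer first is no longer needed. A self-contained sketch (tokenizer_stub is a hypothetical stand-in, not an indexlib type):

```cpp
#include <cassert>
#include <memory>

struct tokenizer_stub {};  // hypothetical stand-in for indexlib's tokenizer

int main() {
	std::unique_ptr<tokenizer_stub> t(new tokenizer_stub);
	assert(t);        // contextual conversion via explicit operator bool
	assert(t.get());  // equivalent raw-pointer form, as the old code wrote it
	return 0;
}
```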
