Kouhei Sutou
null+****@clear*****
Tue Jun 20 11:50:41 JST 2017
Kouhei Sutou	2017-06-20 11:50:41 +0900 (Tue, 20 Jun 2017)

  New Revision: 30e2a15168309028e983fbee88929dcc88e7e7e8
  https://github.com/pgroonga/pgroonga/commit/30e2a15168309028e983fbee88929dcc88e7e7e8

  Message:
    Ignore tokenizer option in prefix search index creation

  Added files:
    expected/term-search/text/options/tokenizer/ignored.out
    sql/term-search/text/options/tokenizer/ignored.sql
  Modified files:
    src/pgrn-create.c

  Added: expected/term-search/text/options/tokenizer/ignored.out (+12 -0) 100644
===================================================================
--- /dev/null
+++ expected/term-search/text/options/tokenizer/ignored.out    2017-06-20 11:50:41 +0900 (7de6d65)
@@ -0,0 +1,12 @@
+CREATE TABLE tags (
+  name text
+);
+CREATE INDEX pgrn_index ON tags
+  USING pgroonga (name pgroonga.text_term_search_ops_v2)
+  WITH (tokenizer = "TokenDelimit");
+SELECT pgroonga.command('table_list')::json#>>'{1,2,6}';
+ ?column?
+----------
+
+(1 row)
+

  Added: sql/term-search/text/options/tokenizer/ignored.sql (+9 -0) 100644
===================================================================
--- /dev/null
+++ sql/term-search/text/options/tokenizer/ignored.sql    2017-06-20 11:50:41 +0900 (a68090f)
@@ -0,0 +1,9 @@
+CREATE TABLE tags (
+  name text
+);
+
+CREATE INDEX pgrn_index ON tags
+  USING pgroonga (name pgroonga.text_term_search_ops_v2)
+  WITH (tokenizer = "TokenDelimit");
+
+SELECT pgroonga.command('table_list')::json#>>'{1,2,6}';

  Modified: src/pgrn-create.c (+5 -0)
===================================================================
--- src/pgrn-create.c    2017-06-19 11:52:36 +0900 (f688450)
+++ src/pgrn-create.c    2017-06-20 11:50:41 +0900 (258d3ff)
@@ -152,6 +152,11 @@ PGrnCreateLexicon(PGrnCreateData *data)
 										&tokenizer, tokenizerName,
 										&normalizer, normalizerName,
 										tokenFilters);
+
+		if (data->forPrefixSearch)
+		{
+			tokenizer = NULL;
+		}
 	}
 
 	snprintf(lexiconName, sizeof(lexiconName),
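
  Note (not part of the commit): the added test checks that the lexicon created
  for pgroonga.text_term_search_ops_v2 ends up with no tokenizer even though
  "TokenDelimit" was requested, because the C change forces the tokenizer to
  NULL for prefix-search lexicons. The sketch below is only an illustration of
  that behavior; the sample rows and the pgrn_tags_index name are made up, and
  it assumes the &^ prefix-search operator provided by the term-search operator
  class.

    CREATE TABLE tags (
      name text
    );
    CREATE INDEX pgrn_tags_index ON tags
      USING pgroonga (name pgroonga.text_term_search_ops_v2)
      WITH (tokenizer = "TokenDelimit");

    INSERT INTO tags VALUES ('PostgreSQL'), ('PGroonga'), ('Groonga');

    -- The tokenizer option is ignored for this prefix-search index, so whole
    -- tag names are stored as lexicon keys and matched by their prefixes.
    SELECT name FROM tags WHERE name &^ 'PG';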