Kouhei Sutou
null+****@clear*****
Fri Dec 9 18:18:29 JST 2016
Kouhei Sutou	2016-12-09 18:18:29 +0900 (Fri, 09 Dec 2016)

  New Revision: 4040481be2a68c96be45a23ba2d8f1190b6673c8
  https://github.com/ranguba/groonga-client/commit/4040481be2a68c96be45a23ba2d8f1190b6673c8

  Message:
    schema response: add predicates for full text search

  Modified files:
    lib/groonga/client/response/schema.rb
    test/response/test-schema.rb

  Modified: lib/groonga/client/response/schema.rb (+16 -0)
===================================================================
--- lib/groonga/client/response/schema.rb    2016-12-09 17:29:02 +0900 (b08b28f)
+++ lib/groonga/client/response/schema.rb    2016-12-09 18:18:29 +0900 (21f8062)
@@ -155,6 +155,10 @@ module Groonga
             end
           end
 
+          def full_text_searchable?
+            table.tokenizer and column.position
+          end
+
           private
           def coerce_table(table_name)
             @schema.tables[table_name]
@@ -181,6 +185,12 @@ module Groonga
             end
           end
 
+          def have_full_text_search_index?
+            indexes.any? do |index|
+              index.full_text_searchable?
+            end
+          end
+
           private
           def coerce_indexes(raw_indexes)
             raw_indexes.collect do |raw_index|
@@ -214,6 +224,12 @@ module Groonga
             end
           end
 
+          def have_full_text_search_index?
+            indexes.any? do |index|
+              index.full_text_searchable?
+            end
+          end
+
           private
           def coerce_key_type(raw_key_type)
             if raw_key_type.nil?

  Modified: test/response/test-schema.rb (+342 -0)
===================================================================
--- test/response/test-schema.rb    2016-12-09 17:29:02 +0900 (7860869)
+++ test/response/test-schema.rb    2016-12-09 18:18:29 +0900 (6bf5b12)
@@ -157,6 +157,112 @@ class TestResponseSchema < Test::Unit::TestCase
         assert_equal("Names.users",
                      response.tables["Users"].indexes[0].full_name)
       end
+
+      sub_test_case "#have_full_text_search_index" do
+        test "no tokenizer" do
+          body = {
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_key" => {
+                    "name" => "users_key",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => nil,
+              },
+              "Users" => {
+                "indexes" => [
+                  {
+                    "table" => "Names",
+                    "name" => "users_key",
+                  },
+                ],
+              },
+            },
+          }
+          response = create_response(body)
+          table = response.tables["Users"]
+          assert do
+            not table.have_full_text_search_index?
+          end
+        end
+
+        test "no position" do
+          body = {
+            "tokenizers" => {
+              "TokenDelimit" => {
+                "name" => "TokenDelimit",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_key" => {
+                    "name" => "users_key",
+                    "position" => false,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenDelimit",
+                },
+              },
+              "Users" => {
+                "indexes" => [
+                  {
+                    "table" => "Names",
+                    "name" => "users_key",
+                  },
+                ],
+              },
+            },
+          }
+          response = create_response(body)
+          table = response.tables["Users"]
+          assert do
+            not table.have_full_text_search_index?
+          end
+        end
+
+        test "have tokenizer and position" do
+          body = {
+            "tokenizers" => {
+              "TokenBigram" => {
+                "name" => "TokenBigram",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_key" => {
+                    "name" => "users_key",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenBigram",
+                },
+              },
+              "Users" => {
+                "indexes" => [
+                  {
+                    "table" => "Names",
+                    "name" => "users_key",
+                  },
+                ],
+              },
+            },
+          }
+          response = create_response(body)
+          table = response.tables["Users"]
+          assert do
+            table.have_full_text_search_index?
+          end
+        end
+      end
     end
 
     class TestColumn < self
@@ -180,6 +286,130 @@ class TestResponseSchema < Test::Unit::TestCase
         assert_equal("Ages.users",
                      response.tables["Users"].columns["age"].indexes[0].full_name)
       end
+
+      sub_test_case "#have_full_text_search_index" do
+        test "no tokenizer" do
+          body = {
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_name" => {
+                    "name" => "users_names",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => nil,
+              },
+              "Users" => {
+                "columns" => {
+                  "names" => {
+                    "name" => "names",
+                    "type" => "vector",
+                    "indexes" => [
+                      {
+                        "table" => "Names",
+                        "name" => "users_names",
+                      },
+                    ],
+                  },
+                },
+              },
+            },
+          }
+          response = create_response(body)
+          column = response.tables["Users"].columns["names"]
+          assert do
+            not column.have_full_text_search_index?
+          end
+        end
+
+        test "no position" do
+          body = {
+            "tokenizers" => {
+              "TokenDelimit" => {
+                "name" => "TokenDelimit",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_names" => {
+                    "name" => "users_names",
+                    "position" => false,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenDelimit",
+                },
+              },
+              "Users" => {
+                "columns" => {
+                  "names" => {
+                    "name" => "names",
+                    "type" => "vector",
+                    "indexes" => [
+                      {
+                        "table" => "Names",
+                        "name" => "users_names",
+                      },
+                    ],
+                  },
+                },
+              },
+            },
+          }
+          response = create_response(body)
+          column = response.tables["Users"].columns["names"]
+          assert do
+            not column.have_full_text_search_index?
+          end
+        end
+
+        test "have tokenizer and position" do
+          body = {
+            "tokenizers" => {
+              "TokenBigram" => {
+                "name" => "TokenBigram",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_name" => {
+                    "name" => "users_name",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenBigram",
+                },
+              },
+              "Users" => {
+                "columns" => {
+                  "name" => {
+                    "name" => "name",
+                    "type" => "scalar",
+                    "indexes" => [
+                      {
+                        "table" => "Names",
+                        "name" => "users_name",
+                      },
+                    ],
+                  },
+                },
+              },
+            },
+          }
+          response = create_response(body)
+          column = response.tables["Users"].columns["name"]
+          assert do
+            column.have_full_text_search_index?
+          end
+        end
+      end
     end
 
     class TestIndex < self
@@ -228,6 +458,118 @@ class TestResponseSchema < Test::Unit::TestCase
         assert_equal(response.tables["Names"].columns["users_key"],
                      response.tables["Users"].indexes[0].column)
       end
+
+      sub_test_case("#full_text_searchable?") do
+        test "no tokenizer" do
+          body = {
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_names" => {
+                    "name" => "users_names",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => nil,
+              },
+              "Users" => {
+                "columns" => {
+                  "names" => {
+                    "name" => "names",
+                    "type" => "vector",
+                    "indexes" => [
+                      {
+                        "table" => "Names",
+                        "name" => "users_names",
+                      },
+                    ],
+                  },
+                },
+              },
+            },
+          }
+          response = create_response(body)
+          index = response.tables["Users"].columns["names"].indexes[0]
+          assert do
+            not index.full_text_searchable?
+          end
+        end
+
+        test "no position" do
+          body = {
+            "tokenizers" => {
+              "TokenBigram" => {
+                "name" => "TokenBigram",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_key" => {
+                    "name" => "users_key",
+                    "position" => false,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenBigram",
+                },
+              },
+              "Users" => {
+                "indexes" => [
+                  {
+                    "table" => "Names",
+                    "name" => "users_key",
+                  },
+                ]
+              }
+            },
+          }
+          response = create_response(body)
+          index = response.tables["Users"].indexes[0]
+          assert do
+            not index.full_text_searchable?
+          end
+        end
+
+        test "have tokenizer and position" do
+          body = {
+            "tokenizers" => {
+              "TokenBigram" => {
+                "name" => "TokenBigram",
+              },
+            },
+            "tables" => {
+              "Names" => {
+                "name" => "Names",
+                "columns" => {
+                  "users_key" => {
+                    "name" => "users_key",
+                    "position" => true,
+                  },
+                },
+                "tokenizer" => {
+                  "name" => "TokenBigram",
+                },
+              },
+              "Users" => {
+                "indexes" => [
+                  {
+                    "table" => "Names",
+                    "name" => "users_key",
+                  },
+                ],
+              },
+            },
+          }
+          response = create_response(body)
+          index = response.tables["Users"].indexes[0]
+          assert do
+            index.full_text_searchable?
+          end
+        end
+      end
     end
   end
 end
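A minimal usage sketch follows; it is not part of the commit. The server
address and the "Users" table are assumptions for illustration, and only the
standard Groonga::Client API plus the predicates added above are used.

  require "groonga/client"

  # Sketch under assumptions: a Groonga HTTP server on 127.0.0.1:10041
  # and a table named "Users" that may have full text search indexes.
  Groonga::Client.open(host: "127.0.0.1",
                       port: 10041,
                       protocol: :http) do |client|
    schema = client.schema
    table = schema.tables["Users"]

    # Table#have_full_text_search_index? is true when at least one index
    # on the table satisfies Index#full_text_searchable?, i.e. the index
    # lexicon has a tokenizer and the index column stores token positions.
    if table.have_full_text_search_index?
      puts("Users can be searched with full text search")
    end

    # Column#have_full_text_search_index? works the same way per column.
    table.columns.each do |name, column|
      puts("#{name}: #{column.have_full_text_search_index?}")
    end
  end

Both predicates reduce to the same conjunction added in
lib/groonga/client/response/schema.rb: an index is full text searchable only
when its lexicon table has a tokenizer and its index column stores positions.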