Kouhei Sutou
null+****@clear*****
Thu Nov 6 16:15:15 JST 2014
Kouhei Sutou 2014-11-06 16:15:15 +0900 (Thu, 06 Nov 2014) New Revision: f0f463e78dbd179861ecf15b7cebadcc24022756 https://github.com/ranguba/rroonga/commit/f0f463e78dbd179861ecf15b7cebadcc24022756 Message: drndump: support token filters Modified files: lib/groonga/dumper.rb test/test-schema-dumper.rb Modified: lib/groonga/dumper.rb (+15 -0) =================================================================== --- lib/groonga/dumper.rb 2014-11-06 15:59:53 +0900 (5fc2061) +++ lib/groonga/dumper.rb 2014-11-06 16:15:15 +0900 (ec692a3) @@ -420,6 +420,14 @@ module Groonga if default_tokenizer parameters << ":default_tokenizer => #{default_tokenizer.name.dump}" end + token_filters = table.token_filters + unless token_filters.empty? + dumped_token_filter_names = token_filters.collect do |token_filter| + token_filter.name.dump + end + dumped_token_filters = "[#{dumped_token_filter_names.join(', ')}]" + parameters << ":token_filters => #{dumped_token_filters}" + end if _normalizer_name parameters << ":normalizer => #{_normalizer_name.dump}" end @@ -576,6 +584,13 @@ module Groonga if default_tokenizer parameters << "--default_tokenizer #{default_tokenizer.name}" end + token_filters = table.token_filters + unless token_filters.empty? 
+ token_filter_names = token_filters.collect do |token_filter| + token_filter.name + end + parameters << "--token_filters #{token_filter_names.join(',')}" + end end if _normalizer_name parameters << "--normalizer #{_normalizer_name}" Modified: test/test-schema-dumper.rb (+4 -1) =================================================================== --- test/test-schema-dumper.rb 2014-11-06 15:59:53 +0900 (bf2601a) +++ test/test-schema-dumper.rb 2014-11-06 16:15:15 +0900 (994d2d7) @@ -87,6 +87,7 @@ class SchemaDumperTest < Test::Unit::TestCase end def define_index_schema + context.register_plugin("token_filters/stop_word") Groonga::Schema.define do |schema| schema.create_table("Items", :type => :hash, @@ -98,6 +99,7 @@ class SchemaDumperTest < Test::Unit::TestCase :type => :patricia_trie, :key_type => "ShortText", :default_tokenizer => "TokenBigram", + :token_filters => ["TokenFilterStopWord"], :normalizer => "NormalizerAuto") do |table| table.index("Items", "_key") table.index("Items", "title") @@ -222,6 +224,7 @@ create_table("Terms", :type => :patricia_trie, :key_type => "ShortText", :default_tokenizer => "TokenBigram", + :token_filters => ["TokenFilterStopWord"], :normalizer => "NormalizerAuto", :force => true) do |table| end @@ -307,7 +310,7 @@ column_create Comments item COLUMN_SCALAR Items table_create Items TABLE_HASH_KEY ShortText column_create Items title COLUMN_SCALAR ShortText -table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer NormalizerAuto +table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --token_filters TokenFilterStopWord --normalizer NormalizerAuto column_create Terms Items__key COLUMN_INDEX|WITH_POSITION Items _key column_create Terms Items_title COLUMN_INDEX|WITH_POSITION Items title -------------- next part -------------- An HTML attachment was scrubbed... (download)