diff --git a/.travis.yml b/.travis.yml
index 8d3c306..d207a6d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,11 +2,14 @@ sudo: false
 matrix:
   include:
-    - rvm: 2.3.1
+    - rvm: 2.5.0
      env:
-        - MONGOID_VERSION=6.0
+        - MONGOID_VERSION=7.0
      before_script:
        - bundle exec danger
+    - rvm: 2.3.1
+      env:
+        - MONGOID_VERSION=6.0
     - rvm: 2.3.1
      env:
        - MONGOID_VERSION=5.0
@@ -25,6 +28,6 @@ services: mongodb
 addons:
   apt:
     sources:
-      - mongodb-3.2-precise
+      - mongodb-3.4-precise
     packages:
      - mongodb-org-server
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 68f51cf..6fd4122 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
 ### 0.8.1 (Next)
 
+* [#35](https://github.com/mongoid/mongoid_fulltext/pull/35): Mongoid 7 compatibility - [@tomasc](https://github.com/tomasc).
 * Your contribution here.
 
 ### 0.8.0 (1/19/2017)
diff --git a/Gemfile b/Gemfile
index 01dcc47..1a8278b 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,6 +1,8 @@
 source 'http://rubygems.org'
 
-case version = ENV['MONGOID_VERSION'] || '6'
+case version = ENV['MONGOID_VERSION'] || '7'
+when /7/
+  gem 'mongoid', '~> 7.0'
 when /6/
   gem 'mongoid', '~> 6.0'
 when /5/
diff --git a/README.md b/README.md
index 04ceb96..b0709e4 100644
--- a/README.md
+++ b/README.md
@@ -249,6 +249,40 @@ the AND of all of the individual results for each of the fields. Finally, if a f
 but criteria for that filter aren't passed to `fulltext_search`, the result is as if the filter
 had never been defined - you see both models that both pass and fail the filter in the results.
 
+SCI Support
+-----------
+
+The search respects single collection inheritance (SCI). From the spec:
+
+```ruby
+class MyDoc
+  include Mongoid::Document
+  include Mongoid::FullTextSearch
+
+  field :title
+  fulltext_search_in :title
+end
+
+class MyInheritedDoc < MyDoc
+end
+```
+
+```ruby
+MyDoc.fulltext_search(…) # => returns both MyDoc and MyInheritedDoc documents
+MyInheritedDoc.fulltext_search(…) # => returns only MyInheritedDoc documents
+```
+
+Criteria Support
+----------------
+
+It is also possible to scope the search with Mongoid criteria:
+
+```ruby
+MyDoc.where(value: 10).fulltext_search(…)
+```
+
+Please note that this does not work when an index is shared by multiple classes that are not connected through inheritance, since criteria apply to a single class only.
+
 Indexing Options
 ----------------
 
@@ -397,4 +431,3 @@ Copyright and License
 MIT License, see [LICENSE](LICENSE) for details.
 
 (c) 2011-2017 [Artsy Inc.](http://artsy.github.io)
-
diff --git a/lib/mongoid/full_text_search.rb b/lib/mongoid/full_text_search.rb
index b9a3fb4..18947f6 100644
--- a/lib/mongoid/full_text_search.rb
+++ b/lib/mongoid/full_text_search.rb
@@ -1,374 +1,49 @@
-require 'mongoid'
-require 'mongoid/compatibility'
-
-if Mongoid::Compatibility::Version.mongoid3?
-  require 'mongoid/full_text_search/indexes'
-else
-  require 'mongoid/full_text_search/indexable'
-end
-
-require 'unicode_utils'
-require 'cgi'
-
-module Mongoid::FullTextSearch
-  extend ActiveSupport::Concern
-
-  included do
-    cattr_accessor :mongoid_fulltext_config
-  end
-
-  class UnspecifiedIndexError < StandardError; end
-  class UnknownFilterQueryOperator < StandardError; end
-
-  module ClassMethods
-    def fulltext_search_in(*args)
-      self.mongoid_fulltext_config = {} if mongoid_fulltext_config.nil?
-      options = args.last.is_a?(Hash) ? args.pop : {}
-      if options.key?(:index_name)
-        index_name = options[:index_name]
-      else
-        index_name = 'mongoid_fulltext.index_%s_%s' % [name.downcase, mongoid_fulltext_config.count]
-      end
-
-      config = {
-        alphabet: 'abcdefghijklmnopqrstuvwxyz0123456789 ',
-        word_separators: "-_ \n\t",
-        ngram_width: 3,
-        max_ngrams_to_search: 6,
-        apply_prefix_scoring_to_all_words: true,
-        index_full_words: true,
-        index_short_prefixes: false,
-        max_candidate_set_size: 1000,
-        remove_accents: true,
-        reindex_immediately: true,
-        stop_words: Hash[%w(i a s t me my we he it am is be do an if
-                            or as of at by to up in on no so our you him
-                            his she her its who are was has had did the and
-                            but for out off why how all any few nor not own
-                            too can don now ours your hers they them what whom
-                            this that were been have does with into from down over
-                            then once here when both each more most some such only
-                            same than very will just yours their which these those
-                            being doing until while about after above below under
-                            again there where other myself itself theirs having during
-                            before should himself herself because against between through
-                            further yourself ourselves yourselves themselves).map { |x| [x, true] }]
-      }
-
-      config.update(options)
-
-      args = [:to_s] if args.empty?
-      config[:ngram_fields] = args
-      config[:alphabet] = Hash[config[:alphabet].split('').map { |ch| [ch, ch] }]
-      config[:word_separators] = Hash[config[:word_separators].split('').map { |ch| [ch, ch] }]
-      mongoid_fulltext_config[index_name] = config
-
-      before_save(:update_ngram_index) if config[:reindex_immediately]
-      before_destroy :remove_from_ngram_index
-    end
-
-    def create_fulltext_indexes
-      return unless mongoid_fulltext_config
-      mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-        fulltext_search_ensure_indexes(index_name, fulltext_config)
-      end
-    end
-
-    def fulltext_search_ensure_indexes(index_name, config)
-      db = collection.database
-      coll = db[index_name]
-
-      # The order of filters matters when the same index is used from two or more collections.
-      filter_indexes = (config[:filters] || []).map do |key, _value|
-        ["filter_values.#{key}", 1]
-      end.sort_by { |filter_index| filter_index[0] }
-
-      index_definition = [['ngram', 1], ['score', -1]].concat(filter_indexes)
-
-      # Since the definition of the index could have changed, we'll clean up by
-      # removing any indexes that aren't on the exact.
-      correct_keys = index_definition.map { |field_def| field_def[0] }
-      all_filter_keys = filter_indexes.map { |field_def| field_def[0] }
-      coll.indexes.each do |idef|
-        keys = idef['key'].keys
-        next unless keys.member?('ngram')
-        all_filter_keys |= keys.find_all { |key| key.starts_with?('filter_values.') }
-        next unless keys & correct_keys != correct_keys
-        Mongoid.logger.info "Dropping #{idef['name']} [#{keys & correct_keys} <=> #{correct_keys}]" if Mongoid.logger
-        if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-          coll.indexes.drop_one(idef['key'])
-        else
-          coll.indexes.drop(idef['key'])
-        end
-      end
-
-      if all_filter_keys.length > filter_indexes.length
-        filter_indexes = all_filter_keys.map { |key| [key, 1] }.sort_by { |filter_index| filter_index[0] }
-        index_definition = [['ngram', 1], ['score', -1]].concat(filter_indexes)
-      end
-
-      Mongoid.logger.info "Ensuring fts_index on #{coll.name}: #{index_definition}" if Mongoid.logger
-      if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-        coll.indexes.create_one(Hash[index_definition], name: 'fts_index')
-      else
-        coll.indexes.create(Hash[index_definition], name: 'fts_index')
-      end
-
-      Mongoid.logger.info "Ensuring document_id index on #{coll.name}" if Mongoid.logger
-      if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-        coll.indexes.create_one('document_id' => 1) # to make removes fast
-      else
-        coll.indexes.create('document_id' => 1) # to make removes fast
-      end
-    end
-
-    def fulltext_search(query_string, options = {})
-      max_results = options.key?(:max_results) ? options.delete(:max_results) : 10
-      return_scores = options.key?(:return_scores) ? options.delete(:return_scores) : false
-      if mongoid_fulltext_config.count > 1 && !options.key?(:index)
-        error_message = '%s is indexed by multiple full-text indexes. You must specify one by passing an :index_name parameter'
-        fail UnspecifiedIndexError, error_message % name, caller
-      end
-      index_name = options.key?(:index) ? options.delete(:index) : mongoid_fulltext_config.keys.first
-
-      # Options hash should only contain filters after this point
-
-      ngrams = all_ngrams(query_string, mongoid_fulltext_config[index_name])
-      return [] if ngrams.empty?
-
-      # For each ngram, construct the query we'll use to pull index documents and
-      # get a count of the number of index documents containing that n-gram
-      ordering = { 'score' => -1 }
-      limit = mongoid_fulltext_config[index_name][:max_candidate_set_size]
-      coll = collection.database[index_name]
-      cursors = ngrams.map do |ngram|
-        query = { 'ngram' => ngram[0] }
-        query.update(map_query_filters options)
-        count = coll.find(query).count
-        { ngram: ngram, count: count, query: query }
-      end.sort! { |record1, record2| record1[:count] <=> record2[:count] }
-
-      # Using the queries we just constructed and the n-gram frequency counts we
-      # just computed, pull in about *:max_candidate_set_size* candidates by
-      # considering the n-grams in order of increasing frequency. When we've
-      # spent all *:max_candidate_set_size* candidates, pull the top-scoring
-      # *max_results* candidates for each remaining n-gram.
-      results_so_far = 0
-      candidates_list = cursors.map do |doc|
-        next if doc[:count] == 0
-        query_result = coll.find(doc[:query])
-        if results_so_far >= limit
-          query_result = query_result.sort(ordering).limit(max_results)
-        elsif doc[:count] > limit - results_so_far
-          query_result = query_result.sort(ordering).limit(limit - results_so_far)
-        end
-        results_so_far += doc[:count]
-        ngram_score = ngrams[doc[:ngram][0]]
-        Hash[query_result.map do |candidate|
-          [candidate['document_id'],
-           { clazz: candidate['class'], score: candidate['score'] * ngram_score }]
-        end]
-      end.compact
-
-      # Finally, score all candidates by matching them up with other candidates that are
-      # associated with the same document. This is similar to how you might process a
-      # boolean AND query, except that with an AND query, you'd stop after considering
-      # the first candidate list and matching its candidates up with candidates from other
-      # lists, whereas here we want the search to be a little fuzzier so we'll run through
-      # all candidate lists, removing candidates as we match them up.
-      all_scores = []
-      until candidates_list.empty?
-        candidates = candidates_list.pop
-        scores = candidates.map do |candidate_id, data|
-          { id: candidate_id,
-            clazz: data[:clazz],
-            score: data[:score] + candidates_list.map { |others| (others.delete(candidate_id) || { score: 0 })[:score] }.sum
-          }
-        end
-        all_scores.concat(scores)
-      end
-      all_scores.sort! { |document1, document2| -document1[:score] <=> -document2[:score] }
-      instantiate_mapreduce_results(all_scores[0..max_results - 1], return_scores: return_scores)
-    end
-
-    def instantiate_mapreduce_result(result)
-      result[:clazz].constantize.find(result[:id])
-    end
-
-    def instantiate_mapreduce_results(results, options)
-      if options[:return_scores]
-        results.map { |result| [instantiate_mapreduce_result(result), result[:score]] }.find_all { |result| !result[0].nil? }
-      else
-        results.map { |result| instantiate_mapreduce_result(result) }.compact
-      end
-    end
-
-    def all_ngrams(str, config, bound_number_returned = true)
-      return {} if str.nil?
-
-      if config[:remove_accents]
-        if defined?(UnicodeUtils)
-          str = UnicodeUtils.nfkd(str)
-        elsif defined?(DiacriticsFu)
-          str = DiacriticsFu.escape(str)
-        end
-      end
-
-      # Remove any characters that aren't in the alphabet and aren't word separators
-      filtered_str = str.mb_chars.downcase.to_s.split('').find_all { |ch| config[:alphabet][ch] || config[:word_separators][ch] }.join('')
-
-      # Figure out how many ngrams to extract from the string. If we can't afford to extract all ngrams,
-      # step over the string in evenly spaced strides to extract ngrams. For example, to extract 3 3-letter
-      # ngrams from 'abcdefghijk', we'd want to extract 'abc', 'efg', and 'ijk'.
-      if bound_number_returned
-        step_size = [((filtered_str.length - config[:ngram_width]).to_f / config[:max_ngrams_to_search]).ceil, 1].max
-      else
-        step_size = 1
-      end
-
-      # Create an array of records of the form {:ngram => x, :score => y} for all ngrams that occur in the
-      # input string using the step size that we just computed. Let score(x,y) be the score of string x
-      # compared with string y - assigning scores to ngrams with the square root-based scoring function
-      # below and multiplying scores of matching ngrams together yields a score function that has the
-      # property that score(x,y) > score(x,z) for any string z containing y and score(x,y) > score(x,z)
-      # for any string z contained in y.
-      ngram_array = (0..filtered_str.length - config[:ngram_width]).step(step_size).map do |i|
-        if i == 0 || (config[:apply_prefix_scoring_to_all_words] && \
-                      config[:word_separators].key?(filtered_str[i - 1].chr))
-          score = Math.sqrt(1 + 1.0 / filtered_str.length)
-        else
-          score = Math.sqrt(2.0 / filtered_str.length)
-        end
-        { ngram: filtered_str[i..i + config[:ngram_width] - 1], score: score }
-      end
-
-      # If an ngram appears multiple times in the query string, keep the max score
-      ngram_array = ngram_array.group_by { |h| h[:ngram] }.map { |key, values| { ngram: key, score: values.map { |v| v[:score] }.max } }
-
-      if config[:index_short_prefixes] || config[:index_full_words]
-        split_regex_def = config[:word_separators].keys.map { |k| Regexp.escape(k) }.join
-        split_regex = Regexp.compile("[#{split_regex_def}]")
-        all_words = filtered_str.split(split_regex)
-      end
-
-      # Add 'short prefix' records to the array: prefixes of the string that are length (ngram_width - 1)
-      if config[:index_short_prefixes]
-        prefixes_seen = {}
-        all_words.each do |word|
-          next if word.length < config[:ngram_width] - 1
-          prefix = word[0...config[:ngram_width] - 1]
-          if prefixes_seen[prefix].nil? && (config[:stop_words][word].nil? || word == filtered_str)
-            ngram_array << { ngram: prefix, score: 1 + 1.0 / filtered_str.length }
-            prefixes_seen[prefix] = true
-          end
-        end
-      end
-
-      # Add records to the array of ngrams for each full word in the string that isn't a stop word
-      if config[:index_full_words]
-        full_words_seen = {}
-        all_words.each do |word|
-          if word.length > 1 && full_words_seen[word].nil? && (config[:stop_words][word].nil? || word == filtered_str)
-            ngram_array << { ngram: word, score: 1 + 1.0 / filtered_str.length }
-            full_words_seen[word] = true
-          end
-        end
-      end
-
-      # If an ngram appears as any combination of full word, short prefix, and ngram, keep the sum of the two scores
-      Hash[ngram_array.group_by { |h| h[:ngram] }.map { |key, values| [key, values.map { |v| v[:score] }.sum] }]
-    end
-
-    def remove_from_ngram_index
-      mongoid_fulltext_config.each_pair do |index_name, _fulltext_config|
-        coll = collection.database[index_name]
-        if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-          coll.find('class' => name).delete_many
-        else
-          coll.find('class' => name).remove_all
-        end
-      end
-    end
-
-    def update_ngram_index
-      all.each(&:update_ngram_index)
-    end
-
-    private
-
-    # Take a list of filters to be mapped so they can update the query
-    # used upon the fulltext search of the ngrams
-    def map_query_filters(filters)
-      Hash[filters.map do|key, value|
-        case value
-        when Hash then
-          if value.key? :any then format_query_filter('$in', key, value[:any])
-          elsif value.key? :all then format_query_filter('$all', key, value[:all])
-          else fail UnknownFilterQueryOperator, value.keys.join(','), caller end
-        else format_query_filter('$all', key, value)
-        end
-      end]
-    end
-
-    def format_query_filter(operator, key, value)
-      ['filter_values.%s' % key, { operator => [value].flatten }]
-    end
-  end
-
-  def update_ngram_index
-    mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-      if condition = fulltext_config[:update_if]
-        case condition
-        when Symbol then next unless send condition
-        when String then next unless instance_eval condition
-        when Proc then next unless condition.call self
-        else; next
-        end
-      end
-
-      # remove existing ngrams from external index
-      coll = collection.database[index_name.to_sym]
-      if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-        coll.find('document_id' => _id).delete_many
-      else
-        coll.find('document_id' => _id).remove_all
-      end
-      # extract ngrams from fields
-      field_values = fulltext_config[:ngram_fields].map { |field| send(field) }
-      ngrams = field_values.inject({}) { |accum, item| accum.update(self.class.all_ngrams(item, fulltext_config, false)) }
-      return if ngrams.empty?
-      # apply filters, if necessary
-      filter_values = nil
-      if fulltext_config.key?(:filters)
-        filter_values = Hash[fulltext_config[:filters].map do |key, value|
-          begin
-            [key, value.call(self)]
-          rescue
-            # Suppress any exceptions caused by filters
-          end
-        end.compact]
-      end
-      # insert new ngrams in external index
-      ngrams.each_pair do |ngram, score|
-        index_document = { 'ngram' => ngram, 'document_id' => _id, 'score' => score, 'class' => self.class.name }
-        index_document['filter_values'] = filter_values if fulltext_config.key?(:filters)
-        if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-          coll.insert_one(index_document)
-        else
-          coll.insert(index_document)
-        end
-      end
-    end
-  end
-
-  def remove_from_ngram_index
-    mongoid_fulltext_config.each_pair do |index_name, _fulltext_config|
-      coll = collection.database[index_name]
-      if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
-        coll.find('document_id' => _id).delete_many
-      else
-        coll.find('document_id' => _id).remove_all
-      end
-    end
+require 'mongoid/full_text_search/config'
+require 'mongoid/full_text_search/indexes'
+require 'mongoid/full_text_search/mappings'
+require 'mongoid/full_text_search/searchable'
+
+module Mongoid
+  module FullTextSearch
+    class UnknownFilterQueryOperator < StandardError; end
+    class UnspecifiedIndexError < StandardError; end
+
+    CREATE_INDEX_METHOD_NAME = Compatibility::Version.mongoid5_or_newer? ? :create_one : :create
+    DELETE_FROM_INDEX_METHOD_NAME = Compatibility::Version.mongoid5_or_newer? ? :delete_many : :remove_all
+    DROP_INDEX_METHOD_NAME = Compatibility::Version.mongoid5_or_newer? ? :drop_one : :drop
+    INSERT_METHOD_NAME = Compatibility::Version.mongoid5_or_newer? ? :insert_one : :insert
+
+    DEFAULT_CONFIG = {
+      alphabet: 'abcdefghijklmnopqrstuvwxyz0123456789 ',
+      apply_prefix_scoring_to_all_words: true,
+      index_full_words: true,
+      index_short_prefixes: false,
+      max_candidate_set_size: 1000,
+      max_ngrams_to_search: 6,
+      ngram_width: 3,
+      reindex_immediately: true,
+      remove_accents: true,
+      word_separators: "-_ \n\t",
+      stop_words: Hash[
+        %w[i a s t me my we he it am is be do an if
+           or as of at by to up in on no so our you him
+           his she her its who are was has had did the and
+           but for out off why how all any few nor not own
+           too can don now ours your hers they them what whom
+           this that were been have does with into from down over
+           then once here when both each more most some such only
+           same than very will just yours their which these those
+           being doing until while about after above below under
+           again there where other myself itself theirs having during
+           before should himself herself because against between through
+           further yourself ourselves yourselves themselves].map { |x| [x, true] }]
+    }
+
+    extend ActiveSupport::Concern
+
+    include Config
+    include Indexes
+    include Mappings
+    include Searchable
   end
 end
diff --git a/lib/mongoid/full_text_search/config.rb b/lib/mongoid/full_text_search/config.rb
new file mode 100644
index 0000000..f12fed2
--- /dev/null
+++ b/lib/mongoid/full_text_search/config.rb
@@ -0,0 +1,13 @@
+module Mongoid
+  module FullTextSearch
+    module Config
+      extend ActiveSupport::Concern
+
+      included do
+        cattr_accessor :mongoid_fulltext_config do
+          {}
+        end
+      end
+    end
+  end
+end
diff --git a/lib/mongoid/full_text_search/indexable.rb b/lib/mongoid/full_text_search/indexable.rb
deleted file mode 100644
index 976578c..0000000
--- a/lib/mongoid/full_text_search/indexable.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# hook onto model index creation to create related FT indexes
-module Mongoid
-  module Indexable
-    module ClassMethods
-      alias_method :create_fulltext_indexes_hook, :create_indexes
-
-      def create_indexes
-        create_fulltext_indexes if respond_to?(:create_fulltext_indexes)
-        create_fulltext_indexes_hook
-      end
-    end
-  end
-end
diff --git a/lib/mongoid/full_text_search/indexes.rb b/lib/mongoid/full_text_search/indexes.rb
index 9c399ca..299a1eb 100644
--- a/lib/mongoid/full_text_search/indexes.rb
+++ b/lib/mongoid/full_text_search/indexes.rb
@@ -1,12 +1,41 @@
-# hook onto model index creation to create related FT indexes
+require 'mongoid/full_text_search/services/index_definition'
+
 module Mongoid
-  module Indexes
-    module ClassMethods
-      alias_method :create_fulltext_indexes_hook, :create_indexes
+  module FullTextSearch
+    module Indexes
+      extend ActiveSupport::Concern
+
+      module ClassMethods
+        def create_fulltext_indexes
+          return unless mongoid_fulltext_config
+
+          mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
+            ::I18n.available_locales.each do |locale|
+              fulltext_search_ensure_indexes(
+                localized_index_name(index_name, locale), fulltext_config
+              )
+            end
+          end
+        end
+
+        def localized_index_name(index_name, locale)
+          return index_name unless fields.values.any?(&:localized?)
+          return index_name unless ::I18n.available_locales.count > 1
+          "#{index_name}_#{locale}"
+        end
+
+        def fulltext_search_ensure_indexes(index_name, config)
+          db = collection.database
+          coll = db[index_name]
+          filters = config.fetch(:filters, [])
+          index_definition = Services::IndexDefinition.call(coll, filters)
+
+          Mongoid.logger.info("Ensuring fts_index on #{coll.name}: #{index_definition}") if Mongoid.logger
+          coll.indexes.send CREATE_INDEX_METHOD_NAME, Hash[index_definition], name: 'fts_index'
 
-      def create_indexes
-        create_fulltext_indexes if respond_to?(:create_fulltext_indexes)
-        create_fulltext_indexes_hook
+          Mongoid.logger.info("Ensuring document_id index on #{coll.name}") if Mongoid.logger
+          coll.indexes.send CREATE_INDEX_METHOD_NAME, { document_id: 1 }
+        end
       end
     end
   end
diff --git a/lib/mongoid/full_text_search/mappings.rb b/lib/mongoid/full_text_search/mappings.rb
new file mode 100644
index 0000000..a240c9e
--- /dev/null
+++ b/lib/mongoid/full_text_search/mappings.rb
@@ -0,0 +1,114 @@
+require 'mongoid/full_text_search/services/calculate_ngrams'
+
+module Mongoid
+  module FullTextSearch
+    module Mappings
+      extend ActiveSupport::Concern
+
+      module ClassMethods
+        def fulltext_search_in(*args)
+          options = args.last.is_a?(Hash) ? args.pop : {}
+
+          index_name = options.fetch(:index_name) do
+            "mongoid_fulltext.index_#{name.downcase}_#{mongoid_fulltext_config.count}"
+          end
+
+          config = DEFAULT_CONFIG.dup.update(options)
+
+          args = [:to_s] if args.empty?
+          config[:ngram_fields] = args
+          config[:alphabet] = Hash[config[:alphabet].split('').map { |ch| [ch, ch] }]
+          config[:word_separators] = Hash[config[:word_separators].split('').map { |ch| [ch, ch] }]
+
+          mongoid_fulltext_config[index_name] = config
+
+          before_save(:update_ngram_index) if config[:reindex_immediately]
+          before_destroy(:remove_from_ngram_index)
+        end
+
+        def update_ngram_index
+          all.each(&:update_ngram_index)
+        end
+
+        def remove_from_ngram_index
+          mongoid_fulltext_config.each_pair do |index_name, _|
+            ::I18n.available_locales.each do |locale|
+              coll = collection.database[localized_index_name(index_name, locale)]
+              coll.find(class: name).send(DELETE_FROM_INDEX_METHOD_NAME)
+            end
+          end
+        end
+      end
+
+      def update_ngram_index
+        mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
+          ::I18n.available_locales.each do |locale|
+            loc_index_name = self.class.localized_index_name(index_name, locale)
+
+            if condition = fulltext_config[:update_if]
+              case condition
+              when Symbol then next unless send condition
+              when String then next unless instance_eval condition
+              when Proc then next unless condition.call self
+              else; next
+              end
+            end
+
+            # remove existing ngrams from external index
+            coll = collection.database[loc_index_name.to_sym]
+            coll.find(document_id: _id).send(DELETE_FROM_INDEX_METHOD_NAME)
+
+            # extract ngrams from fields
+            field_values = fulltext_config[:ngram_fields].map do |field_name|
+              next send(field_name) if field_name == :to_s
+              next unless field = self.class.fields[field_name.to_s]
+              field.localized? ? send("#{field_name}_translations")[locale] : send(field_name)
+            end
+
+            ngrams = field_values.inject({}) do |accum, item|
+              accum.update(Services::CalculateNgrams.call(item, fulltext_config, false))
+            end
+
+            return if ngrams.empty?
+
+            # apply filters, if necessary
+            filter_values = nil
+            if fulltext_config.key?(:filters)
+              filter_values = Hash[
+                fulltext_config[:filters].map do |key, value|
+                  begin
+                    [key, value.call(self)]
+                  rescue StandardError # Suppress any exceptions caused by filters
+                  end
+                end.compact
+              ]
+            end
+
+            # insert new ngrams in external index
+            ngrams.each_pair do |ngram, score|
+              index_document = {
+                class: self.class.name,
+                document_id: _id,
+                ngram: ngram,
+                score: score
+              }
+
+              index_document[:filter_values] = filter_values if fulltext_config.key?(:filters)
+
+              coll.send INSERT_METHOD_NAME, index_document
+            end
+          end
+        end
+      end
+
+      def remove_from_ngram_index
+        mongoid_fulltext_config.each_pair do |index_name, _|
+          ::I18n.available_locales.each do |locale|
+            coll = collection.database[self.class.localized_index_name(index_name, locale)]
+            coll.find(document_id: _id).send(DELETE_FROM_INDEX_METHOD_NAME)
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/lib/mongoid/full_text_search/searchable.rb b/lib/mongoid/full_text_search/searchable.rb
new file mode 100644
index 0000000..3a1cb6f
--- /dev/null
+++ b/lib/mongoid/full_text_search/searchable.rb
@@ -0,0 +1,127 @@
+require 'mongoid/full_text_search/services/calculate_ngrams'
+
+module Mongoid
+  module FullTextSearch
+    module Searchable
+      extend ActiveSupport::Concern
+
+      module ClassMethods
+        def fulltext_search(query_string, options = {})
+          max_results = options.key?(:max_results) ? options.delete(:max_results) : 10
+          return_scores = options.key?(:return_scores) ? options.delete(:return_scores) : false
+
+          if mongoid_fulltext_config.count > 1 && !options.key?(:index)
+            error_message = '%s is indexed by multiple full-text indexes. You must specify one by passing an :index_name parameter'
+            raise UnspecifiedIndexError, error_message % name, caller
+          end
+
+          index_name = options.key?(:index) ? options.delete(:index) : mongoid_fulltext_config.keys.first
+
+          loc_index_name = localized_index_name(index_name, ::I18n.locale)
+
+          # Options hash should only contain filters after this point
+
+          ngrams = Services::CalculateNgrams.call(query_string, mongoid_fulltext_config[index_name])
+          return [] if ngrams.empty?
+
+          # For each ngram, construct the query we'll use to pull index documents and
+          # get a count of the number of index documents containing that n-gram
+          ordering = { 'score' => -1 }
+          limit = mongoid_fulltext_config[index_name][:max_candidate_set_size]
+          coll = collection.database[loc_index_name]
+          cursors = ngrams.map do |ngram|
+            query = { 'ngram' => ngram[0] }
+            query.update(document_type_filters)
+            query.update(map_query_filters(options))
+            count = coll.find(query).count
+            { ngram: ngram, count: count, query: query }
+          end.sort! { |record1, record2| record1[:count] <=> record2[:count] }
+
+          # Using the queries we just constructed and the n-gram frequency counts we
+          # just computed, pull in about *:max_candidate_set_size* candidates by
+          # considering the n-grams in order of increasing frequency. When we've
+          # spent all *:max_candidate_set_size* candidates, pull the top-scoring
+          # *max_results* candidates for each remaining n-gram.
+          results_so_far = 0
+          candidates_list = cursors.map do |doc|
+            next if doc[:count] == 0
+            query_result = coll.find(doc[:query])
+            if results_so_far >= limit
+              query_result = query_result.sort(ordering).limit(max_results)
+            elsif doc[:count] > limit - results_so_far
+              query_result = query_result.sort(ordering).limit(limit - results_so_far)
+            end
+            results_so_far += doc[:count]
+            ngram_score = ngrams[doc[:ngram][0]]
+            Hash[query_result.map do |candidate|
+              [candidate['document_id'],
+               { clazz: candidate['class'], score: candidate['score'] * ngram_score }]
+            end]
+          end.compact
+
+          # Finally, score all candidates by matching them up with other candidates that are
+          # associated with the same document. This is similar to how you might process a
+          # boolean AND query, except that with an AND query, you'd stop after considering
+          # the first candidate list and matching its candidates up with candidates from other
+          # lists, whereas here we want the search to be a little fuzzier so we'll run through
+          # all candidate lists, removing candidates as we match them up.
+          all_scores = []
+          until candidates_list.empty?
+            candidates = candidates_list.pop
+            scores = candidates.map do |candidate_id, data|
+              { id: candidate_id,
+                clazz: data[:clazz],
+                score: data[:score] + candidates_list.map { |others| (others.delete(candidate_id) || { score: 0 })[:score] }.sum }
+            end
+            all_scores.concat(scores)
+          end
+          all_scores.sort! { |document1, document2| -document1[:score] <=> -document2[:score] }
+          instantiate_mapreduce_results(all_scores[0..max_results - 1], return_scores: return_scores)
+        end
+
+        def instantiate_mapreduce_result(result)
+          return result[:clazz].constantize.find(result[:id]) if criteria.selector.empty?
+          criteria.where(_id: result[:id]).first
+        end
+
+        def instantiate_mapreduce_results(results, options)
+          if options[:return_scores]
+            results.map { |result| [instantiate_mapreduce_result(result), result[:score]] }.find_all { |result| !result[0].nil? }
+          else
+            results.map { |result| instantiate_mapreduce_result(result) }.compact
+          end
+        end
+
+        private
+
+        # add filter by type according to SCI classes
+        def document_type_filters
+          return {} unless fields['_type'].present?
+          kls = ([self] + descendants).map(&:to_s)
+          { class: { '$in' => kls } }
+        end
+
+        # Take a list of filters to be mapped so they can update the query
+        # used upon the fulltext search of the ngrams
+        def map_query_filters(filters)
+          Hash[
+            filters.map do |key, value|
+              case value
+              when Hash then
+                if value.key? :any then format_query_filter('$in', key, value[:any])
+                elsif value.key? :all then format_query_filter('$all', key, value[:all])
+                else raise UnknownFilterQueryOperator, value.keys.join(','), caller
+                end
+              else format_query_filter('$all', key, value)
+              end
+            end
+          ]
+        end
+
+        def format_query_filter(operator, key, value)
+          [format('filter_values.%s', key), { operator => [value].flatten }]
+        end
+      end
+    end
+  end
+end
diff --git a/lib/mongoid/full_text_search/services/calculate_ngrams.rb b/lib/mongoid/full_text_search/services/calculate_ngrams.rb
new file mode 100644
index 0000000..bb9b963
--- /dev/null
+++ b/lib/mongoid/full_text_search/services/calculate_ngrams.rb
@@ -0,0 +1,166 @@
+require 'unicode_utils'
+
+module Mongoid
+  module FullTextSearch
+    module Services
+      class CalculateNgrams < Struct.new(:str, :config, :bound_number_returned)
+        def self.call(*args)
+          new(*args).call
+        end
+
+        def initialize(str, config, bound_number_returned = true)
+          if str && config[:remove_accents]
+            if defined?(UnicodeUtils)
+              str = UnicodeUtils.nfkd(str)
+            elsif defined?(DiacriticsFu)
+              str = DiacriticsFu.escape(str)
+            end
+          end
+
+          super(str, config, bound_number_returned)
+        end
+
+        def call
+          return {} unless str
+
+          # Create an array of records of the form {:ngram => x, :score => y} for all ngrams that occur in the
+          # input string using the step size that we just computed. Let score(x,y) be the score of string x
+          # compared with string y - assigning scores to ngrams with the square root-based scoring function
+          # below and multiplying scores of matching ngrams together yields a score function that has the
+          # property that score(x,y) > score(x,z) for any string z containing y and score(x,y) > score(x,z)
+          # for any string z contained in y.
+          ngram_array = build_ngram_array
+
+          # If an ngram appears multiple times in the query string, keep the max score
+          ngram_array = ngram_array.group_by { |h| h[:ngram] }.map do |key, values|
+            { ngram: key, score: values.map { |v| v[:score] }.max }
+          end
+
+          # Add 'short prefix' records to the array: prefixes of the string that are length (ngram_width - 1)
+          ngram_array += short_prefixes if index_short_prefixes?
+
+          # Add records to the array of ngrams for each full word in the string that isn't a stop word
+          ngram_array += full_words if index_full_words?
+
+          # If an ngram appears as any combination of full word, short prefix, and ngram, keep the sum of the two scores
+          Hash[
+            ngram_array
+              .group_by { |h| h[:ngram] }
+              .map do |key, values|
+                [key, values.map { |v| v[:score] }.sum]
+              end
+          ]
+        end
+
+        private
+
+        def build_ngram_array
+          (0..filtered_str.length - ngram_width).step(step_size).map do |i|
+            score = if i == 0 || (apply_prefix_scoring_to_all_words? && word_separators.key?(filtered_str[i - 1].chr))
+                      Math.sqrt(1 + 1.0 / filtered_str.length)
+                    else
+                      Math.sqrt(2.0 / filtered_str.length)
+                    end
+
+            { ngram: filtered_str[i..i + ngram_width - 1], score: score }
+          end
+        end
+
+        def short_prefixes
+          prefixes_seen = {}
+          all_words.each_with_object([]) do |word, res|
+            next res if word.length < ngram_width - 1
+            prefix = word[0...ngram_width - 1]
+            if prefixes_seen[prefix].nil? && (stop_word?(word) || filtered_str?(word))
+              res << { ngram: prefix, score: 1 + 1.0 / filtered_str.length }
+              prefixes_seen[prefix] = true
+            end
+          end
+        end
+
+        def full_words
+          full_words_seen = {}
+          all_words.each_with_object([]) do |word, res|
+            if word.length > 1 && full_words_seen[word].nil? && (stop_word?(word) || filtered_str?(word))
+              res << { ngram: word, score: 1 + 1.0 / filtered_str.length }
+              full_words_seen[word] = true
+            end
+          end
+        end
+
+        def filtered_str?(word)
+          word == filtered_str
+        end
+
+        def stop_word?(word)
+          stop_words[word].nil?
+        end
+
+        def index_short_prefixes?
+          config[:index_short_prefixes]
+        end
+
+        def index_full_words?
+          config[:index_full_words]
+        end
+
+        def alphabet
+          config[:alphabet]
+        end
+
+        def word_separators
+          config[:word_separators]
+        end
+
+        def ngram_width
+          config[:ngram_width]
+        end
+
+        def max_ngrams_to_search
+          config[:max_ngrams_to_search]
+        end
+
+        def remove_accents?
+          config[:remove_accents]
+        end
+
+        def apply_prefix_scoring_to_all_words?
+          config[:apply_prefix_scoring_to_all_words]
+        end
+
+        def stop_words
+          config[:stop_words]
+        end
+
+        def all_words
+          filtered_str.split(split_regex)
+        end
+
+        # Remove any characters that aren't in the alphabet and aren't word separators
+        def filtered_str
+          str.mb_chars
+             .downcase
+             .to_s.split('')
+             .find_all { |ch| alphabet[ch] || word_separators[ch] }
+             .join('')
+        end
+
+        # Figure out how many ngrams to extract from the string. If we can't afford to extract all ngrams,
+        # step over the string in evenly spaced strides to extract ngrams. For example, to extract 3 3-letter
+        # ngrams from 'abcdefghijk', we'd want to extract 'abc', 'efg', and 'ijk'.
+        def step_size
+          return 1 unless bound_number_returned
+          [((filtered_str.length - ngram_width).to_f / max_ngrams_to_search).ceil, 1].max
+        end
+
+        def split_regex_def
+          word_separators.keys.map { |k| Regexp.escape(k) }.join
+        end
+
+        def split_regex
+          Regexp.compile("[#{split_regex_def}]")
+        end
+      end
+    end
+  end
+end
diff --git a/lib/mongoid/full_text_search/services/index_definition.rb b/lib/mongoid/full_text_search/services/index_definition.rb
new file mode 100644
index 0000000..25800ac
--- /dev/null
+++ b/lib/mongoid/full_text_search/services/index_definition.rb
@@ -0,0 +1,49 @@
+module Mongoid
+  module FullTextSearch
+    module Services
+      class IndexDefinition < Struct.new(:coll, :filters)
+        def self.call(*args)
+          new(*args).call
+        end
+
+        def call
+          res = index_definition
+          all_filter_keys = filter_indexes.map(&:first)
+
+          # Since the definition of the index could have changed, we'll clean up by
+          # removing any indexes that no longer match the exact definition.
+          coll.indexes.each do |idef|
+            keys = idef['key'].keys
+            next unless keys.member?('ngram')
+            all_filter_keys |= keys.find_all { |key| key.starts_with?('filter_values.') }
+            next unless keys & correct_keys != correct_keys
+            Mongoid.logger.info "Dropping #{idef['name']} [#{keys & correct_keys} <=> #{correct_keys}]" if Mongoid.logger
+            coll.indexes.send DROP_INDEX_METHOD_NAME, idef['key']
+          end
+
+          if all_filter_keys.length > filter_indexes.length
+            updated_filter_indexes = all_filter_keys.map { |key| [key, 1] }.sort_by(&:first)
+            res = [['ngram', 1], ['score', -1]].concat(updated_filter_indexes)
+          end
+
+          res
+        end
+
+        private
+
+        def index_definition
+          [['ngram', 1], ['score', -1]].concat(filter_indexes)
+        end
+
+        # The order of filters matters when the same index is used from two or more collections.
+        def filter_indexes
+          filters.map { |key, _| ["filter_values.#{key}", 1] }.sort_by(&:first)
+        end
+
+        def correct_keys
+          index_definition.map { |field_def| field_def[0] }
+        end
+      end
+    end
+  end
+end
diff --git a/lib/mongoid/full_text_search/version.rb b/lib/mongoid/full_text_search/version.rb
index 7f8fa0f..aaa05f1 100644
--- a/lib/mongoid/full_text_search/version.rb
+++ b/lib/mongoid/full_text_search/version.rb
@@ -1,5 +1,5 @@
 module Mongoid
   module FullTextSearch
-    VERSION = '0.8.1'
+    VERSION = '0.8.1'.freeze
   end
 end
diff --git a/lib/mongoid_fulltext.rb b/lib/mongoid_fulltext.rb
index 25a0538..f307f60 100644
--- a/lib/mongoid_fulltext.rb
+++ b/lib/mongoid_fulltext.rb
@@ -1 +1,21 @@
+require 'mongoid'
+require 'mongoid/compatibility'
+
 require 'mongoid/full_text_search'
+
+require 'cgi'
+
+module Mongoid
+  module CreateIndexesPatch
+    def create_indexes
+      create_fulltext_indexes if respond_to?(:create_fulltext_indexes)
+      super
+    end
+  end
+end
+
+if Mongoid::Compatibility::Version.mongoid3?
+  Mongoid::Indexes::ClassMethods.send(:prepend, Mongoid::CreateIndexesPatch)
+else
+  Mongoid::Indexable::ClassMethods.send(:prepend, Mongoid::CreateIndexesPatch)
+end
diff --git a/mongoid_fulltext.gemspec b/mongoid_fulltext.gemspec
index 361ced4..681aa60 100644
--- a/mongoid_fulltext.gemspec
+++ b/mongoid_fulltext.gemspec
@@ -13,7 +13,8 @@ Gem::Specification.new do |s|
   s.homepage = 'https://github.com/mongoid/mongoid_fulltext'
   s.licenses = ['MIT']
   s.summary = 'Full-text search for the Mongoid ORM, using n-grams extracted from text.'
-  s.add_dependency 'mongoid', '>= 3.0'
-  s.add_dependency 'mongoid-compatibility'
+  s.add_dependency 'mongoid', '>= 3.0', '< 8'
+  s.add_dependency 'mongoid-compatibility', '>= 0.5.1'
   s.add_dependency 'unicode_utils'
+  s.add_development_dependency 'database_cleaner'
 end
diff --git a/spec/models/my_doc.rb b/spec/models/my_doc.rb
new file mode 100644
index 0000000..0a87892
--- /dev/null
+++ b/spec/models/my_doc.rb
@@ -0,0 +1,9 @@
+class MyDoc
+  include Mongoid::Document
+  include Mongoid::FullTextSearch
+
+  field :title
+  field :value, type: Integer
+
+  fulltext_search_in :title
+end
diff --git a/spec/models/my_further_inherited_doc.rb b/spec/models/my_further_inherited_doc.rb
new file mode 100644
index 0000000..2184c03
--- /dev/null
+++ b/spec/models/my_further_inherited_doc.rb
@@ -0,0 +1,2 @@
+class MyFurtherInheritedDoc < MyInheritedDoc
+end
diff --git a/spec/models/my_inherited_doc.rb b/spec/models/my_inherited_doc.rb
new file mode 100644
index 0000000..3cd0109
--- /dev/null
+++ b/spec/models/my_inherited_doc.rb
@@ -0,0 +1,2 @@
+class MyInheritedDoc < MyDoc
+end
diff --git a/spec/models/my_localized_doc.rb b/spec/models/my_localized_doc.rb
new file mode 100644
index 0000000..5c6e116
--- /dev/null
+++ b/spec/models/my_localized_doc.rb
@@ -0,0 +1,8 @@
+class MyLocalizedDoc
+  include Mongoid::Document
+  include Mongoid::FullTextSearch
+
+  field :title, localize: true
+
+  fulltext_search_in :title
+end
diff --git a/spec/mongoid/criteria_search_spec.rb b/spec/mongoid/criteria_search_spec.rb
new file mode 100644
index 0000000..7c94570
--- /dev/null
+++ b/spec/mongoid/criteria_search_spec.rb
@@ -0,0 +1,14 @@
+# coding: utf-8
+require 'spec_helper'
+
+describe Mongoid::FullTextSearch do
+  context 'Criteria' do
+    let!(:my_doc_1) { MyDoc.create!(title: 'My Doc 1') }
+    let!(:my_doc_2) { MyDoc.create!(title: 'My Doc 2', value: 10) }
+
+    let(:result) { MyDoc.where(value: 10).fulltext_search("doc") }
+
+    it { expect(result).not_to include my_doc_1 }
+    it { expect(result).to include my_doc_2 }
+  end
+end
diff --git a/spec/mongoid/full_text_search_spec.rb b/spec/mongoid/full_text_search_spec.rb
index a385122..1d81994 100644
--- a/spec/mongoid/full_text_search_spec.rb
+++ b/spec/mongoid/full_text_search_spec.rb
@@ -355,6 +355,8 @@
   # fields as well as the union of all the filter fields to allow for efficient lookups.
 
   it 'creates a proper index for searching efficiently' do
+    # see https://github.com/mongoid/mongoid_fulltext/pull/39
+    pending unless Mongoid::Compatibility::Version.mongoid5_or_newer?
     [FilteredArtwork, FilteredArtist, FilteredOther].each(&:create_indexes)
     index_collection = FilteredArtwork.collection.database['mongoid_fulltext.artworks_and_artists']
     ngram_indexes = []
@@ -597,7 +599,7 @@
   context 'incremental' do
     it 'removes an existing record' do
       coll = Mongoid.default_session['mongoid_fulltext.index_basicartwork_0']
-      if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
+      if Mongoid::Compatibility::Version.mongoid5_or_newer?
        coll.find('document_id' => flowers1._id).delete_many
      else
        coll.find('document_id' => flowers1._id).remove_all
@@ -609,6 +611,8 @@
 
   context 'mongoid indexes' do
     it 'can re-create dropped indexes' do
+      # see https://github.com/mongoid/mongoid_fulltext/pull/39
+      pending unless Mongoid::Compatibility::Version.mongoid5_or_newer?
      # there're no indexes by default as Mongoid.autocreate_indexes is set to false
      # but mongo will automatically attempt to index _id in the background
      expect(Mongoid.default_session['mongoid_fulltext.index_basicartwork_0'].indexes.count).to be <= 1
diff --git a/spec/mongoid/localized_fields_spec.rb b/spec/mongoid/localized_fields_spec.rb
new file mode 100644
index 0000000..2cf46a8
--- /dev/null
+++ b/spec/mongoid/localized_fields_spec.rb
@@ -0,0 +1,29 @@
+# coding: utf-8
+require 'spec_helper'
+
+describe Mongoid::FullTextSearch do
+  context 'Localized fields' do
+    let!(:my_doc) { MyLocalizedDoc.create!(title_translations: { en: 'Title', cs: "Nazev" }) }
+
+    before(:each) do
+      @default_locale = ::I18n.locale
+      ::I18n.locale = locale
+    end
+
+    after(:each) do
+      ::I18n.locale = @default_locale
+    end
+
+    context 'en' do
+      let(:locale) { :en }
+      it { expect(MyLocalizedDoc.fulltext_search("title")).to include my_doc }
+      it { expect(MyLocalizedDoc.fulltext_search("nazev")).not_to include my_doc }
+    end
+
+    context 'cs' do
+      let(:locale) { :cs }
+      it { expect(MyLocalizedDoc.fulltext_search("title")).not_to include my_doc }
+      it { expect(MyLocalizedDoc.fulltext_search("nazev")).to include my_doc }
+    end
+  end
+end
diff --git a/spec/mongoid/sci_search_spec.rb b/spec/mongoid/sci_search_spec.rb
new file mode 100644
index 0000000..d7651eb
--- /dev/null
+++ b/spec/mongoid/sci_search_spec.rb
@@ -0,0 +1,31 @@
+# coding: utf-8
+require 'spec_helper'
+
+describe Mongoid::FullTextSearch do
+  context 'SCI' do
+    let!(:my_doc) { MyDoc.create!(title: 'My Doc') }
+    let!(:my_inherited_doc) { MyInheritedDoc.create!(title: 'My Inherited Doc') }
+    let!(:my_further_inherited_doc) { MyFurtherInheritedDoc.create!(title: 'My Inherited Doc') }
+
+    context 'root class returns results for subclasses' do
+      let(:result) { MyDoc.fulltext_search("doc") }
+      it { expect(result).to include my_doc }
+      it { expect(result).to include my_inherited_doc }
+      it { expect(result).to include my_further_inherited_doc }
+    end
+
+    context 'child class does not return superclass' do
+      let(:result) { MyInheritedDoc.fulltext_search("doc") }
+      it { expect(result).not_to include my_doc }
+      it { expect(result).to include my_inherited_doc }
+      it { expect(result).to include my_further_inherited_doc }
+    end
+
+    context 'grandchild class does not return its ancestors' do
+      let(:result) { MyFurtherInheritedDoc.fulltext_search("doc") }
+      it { expect(result).not_to include my_doc }
+      it { expect(result).not_to include my_inherited_doc }
+      it { expect(result).to include my_further_inherited_doc }
+    end
+  end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 4a41651..57dff46 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -3,6 +3,7 @@
 
 require 'rspec'
 require 'mongoid'
+require 'database_cleaner'
 
 ENV['MONGOID_ENV'] = 'test'
 
@@ -11,19 +12,29 @@
 Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
 Dir["#{File.dirname(__FILE__)}/models/**/*.rb"].each { |f| require f }
 
+DatabaseCleaner.orm = :mongoid
+DatabaseCleaner.strategy = :truncation
+
+Mongoid.logger.level = Logger::INFO
+Mongo::Logger.logger.level = Logger::INFO if Mongoid::Compatibility::Version.mongoid5_or_newer?
+
 Mongoid.configure do |config|
   config.connect_to('mongoid_fulltext_test')
 end
+Mongoid.logger.level = Logger::INFO
+Mongo::Logger.logger.level = Logger::INFO if Mongoid::Compatibility::Version.mongoid5_or_newer?
+
+DatabaseCleaner.orm = :mongoid
+DatabaseCleaner.strategy = :truncation
+
+::I18n.available_locales = %i(en cs)
+
 RSpec.configure do |c|
   c.before :each do
-    Mongoid.purge!
+    DatabaseCleaner.clean
   end
   c.after :all do
-    Mongoid.purge!
-  end
-  c.before :all do
-    Mongoid.logger.level = Logger::INFO
-    Mongo::Logger.logger.level = Logger::INFO if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
+    DatabaseCleaner.clean
   end
 end
diff --git a/spec/support/mongoid.rb b/spec/support/mongoid.rb
index 788b045..d73d317 100644
--- a/spec/support/mongoid.rb
+++ b/spec/support/mongoid.rb
@@ -2,4 +2,4 @@ module Mongoid
   def self.default_session
     default_client
   end
-end if Mongoid::Compatibility::Version.mongoid5? || Mongoid::Compatibility::Version.mongoid6?
+end if Mongoid::Compatibility::Version.mongoid5_or_newer?
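
A minimal usage sketch of the locale-aware indexing introduced in this change, based on `spec/models/my_localized_doc.rb` and `spec/mongoid/localized_fields_spec.rb` above; the index collection names in the comments are illustrative, derived from `localized_index_name`:

```ruby
# Assumes Mongoid is configured and I18n.available_locales = %i(en cs),
# as in spec/spec_helper.rb above.
class MyLocalizedDoc
  include Mongoid::Document
  include Mongoid::FullTextSearch

  field :title, localize: true

  fulltext_search_in :title
end

doc = MyLocalizedDoc.create!(title_translations: { en: 'Title', cs: 'Nazev' })

# One external index collection is maintained per locale, e.g.
# "mongoid_fulltext.index_mylocalizeddoc_0_en" and "mongoid_fulltext.index_mylocalizeddoc_0_cs",
# and fulltext_search reads the index matching the current I18n.locale.
I18n.with_locale(:en) { MyLocalizedDoc.fulltext_search('title') } # => [doc]
I18n.with_locale(:cs) { MyLocalizedDoc.fulltext_search('nazev') } # => [doc]
```

Note that the per-locale suffix is only applied when the model defines localized fields and more than one locale is available (see `localized_index_name` in `lib/mongoid/full_text_search/indexes.rb`).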