X-Git-Url: https://git.cworth.org/git?a=blobdiff_plain;f=lib%2Fsup%2Findex.rb;h=0d6318ecc585d07f822198b54094189ae19b5528;hb=4d8d8d84ed3f4abcce85ca313cfcb30f2bdd95b4;hp=59d16f9ae3550ddfcd66de2e6c739f91c8e62bfb;hpb=202c29073a9dfa21828f827ea2e059ff682efd0d;p=sup

diff --git a/lib/sup/index.rb b/lib/sup/index.rb
index 59d16f9..0d6318e 100644
--- a/lib/sup/index.rb
+++ b/lib/sup/index.rb
@@ -2,6 +2,7 @@
 
 require 'fileutils'
 require 'ferret'
+
 begin
   require 'chronic'
   $have_chronic = true
@@ -23,12 +24,9 @@ class Index
   include Singleton
 
-  attr_reader :index
-  alias ferret index
   def initialize dir=BASE_DIR
+    @index_mutex = Monitor.new
     @dir = dir
-    @sources = {}
-    @sources_dirty = false
 
     wsa = Ferret::Analysis::WhiteSpaceAnalyzer.new false
     sa = Ferret::Analysis::StandardAnalyzer.new [], true
@@ -66,14 +64,19 @@ class Index
     @lock_update_thread = nil
   end
 
+  def possibly_pluralize number_of, kind
+    "#{number_of} #{kind}" +
+      if number_of == 1 then "" else "s" end
+  end
+
   def fancy_lock_error_message_for e
-    secs = Time.now - e.mtime
-    mins = secs.to_i / 60
+    secs = (Time.now - e.mtime).to_i
+    mins = secs / 60
     time = if mins == 0
-        "#{secs.to_i} seconds"
+        possibly_pluralize secs , "second"
       else
-        "#{mins} minutes"
+        possibly_pluralize mins, "minute"
       end
[...]
-      @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
-      Redwood::log "loaded index of #{@index.size} messages"
+      @index_mutex.synchronize do
+        @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer, :id_field => 'message_id')
+        Redwood::log "loaded index of #{@index.size} messages"
+      end
     else
       Redwood::log "creating index..."
-      field_infos = Ferret::Index::FieldInfos.new :store => :yes
-      field_infos.add_field :message_id, :index => :untokenized
-      field_infos.add_field :source_id
-      field_infos.add_field :source_info
-      field_infos.add_field :date, :index => :untokenized
-      field_infos.add_field :body
-      field_infos.add_field :label
-      field_infos.add_field :subject
-      field_infos.add_field :from
-      field_infos.add_field :to
-      field_infos.add_field :refs
-      field_infos.add_field :snippet, :index => :no, :term_vector => :no
-      field_infos.create_index dir
-      @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+      @index_mutex.synchronize do
+        field_infos = Ferret::Index::FieldInfos.new :store => :yes
+        field_infos.add_field :message_id, :index => :untokenized
+        field_infos.add_field :source_id
+        field_infos.add_field :source_info
+        field_infos.add_field :date, :index => :untokenized
+        field_infos.add_field :body
+        field_infos.add_field :label
+        field_infos.add_field :attachments
+        field_infos.add_field :subject
+        field_infos.add_field :from
+        field_infos.add_field :to
+        field_infos.add_field :refs
+        field_infos.add_field :snippet, :index => :no, :term_vector => :no
+        field_infos.create_index dir
+        @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer, :id_field => 'message_id')
+      end
     end
   end
 
-  ## Syncs the message to the index: deleting if it's already there,
-  ## and adding either way. Index state will be determined by m.labels.
-  ##
-  ## docid and entry can be specified if they're already known.
-  def sync_message m, docid=nil, entry=nil
-    docid, entry = load_entry_for_id m.id unless docid && entry
+  ## Syncs the message to the index, replacing any previous version. adding
+  ## either way. Index state will be determined by the message's #labels
+  ## accessor.
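  ## (A minimal sketch from the editor, not part of this patch: because the
  ## index is now opened with :id_field => 'message_id', Ferret addresses
  ## documents by message id, so "replacing any previous version" reduces to
  ## a keyed delete followed by an add, roughly:
  ##
  ##   @index_mutex.synchronize do
  ##     @index.delete m.id       # drops any existing copy keyed by message id
  ##     @index.add_document d    # d is the field hash built below
  ##   end
  ##
  ## which is how the new sync_message body ends; the old docid round-trip
  ## through load_entry_for_id is no longer needed.)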
+ def sync_message m, opts={} + entry = @index[m.id] raise "no source info for message #{m.id}" unless m.source && m.source_info - raise "trying to delete non-corresponding entry #{docid} with index message-id #{@index[docid][:message_id].inspect} and parameter message id #{m.id.inspect}" if docid && @index[docid][:message_id] != m.id - source_id = - if m.source.is_a? Integer - m.source - else - m.source.id or raise "unregistered source #{m.source} (id #{m.source.id.inspect})" - end + source_id = if m.source.is_a? Integer + m.source + else + m.source.id or raise "unregistered source #{m.source} (id #{m.source.id.inspect})" + end - snippet = - if m.snippet_contains_encrypted_content? && $config[:discard_snippets_from_encrypted_messages] - "" - else - m.snippet - end + snippet = if m.snippet_contains_encrypted_content? && $config[:discard_snippets_from_encrypted_messages] + "" + else + m.snippet + end ## write the new document to the index. if the entry already exists in the ## index, reuse it (which avoids having to reload the entry from the source, ## which can be quite expensive for e.g. large threads of IMAP actions.) ## + ## exception: if the index entry belongs to an earlier version of the + ## message, use everything from the new message instead, but union the + ## flags. this allows messages sent to mailing lists to have their header + ## updated and to have flags set properly. + ## + ## minor hack: messages in sources with lower ids have priority over + ## messages in sources with higher ids. so messages in the inbox will + ## override everyone, and messages in the sent box will be overridden + ## by everyone else. + ## ## written in this manner to support previous versions of the index which ## did not keep around the entry body. upgrading is thus seamless. - entry ||= {} - d = - { - :message_id => (entry[:message_id] || m.id), - :source_id => (entry[:source_id] || source_id), - :source_info => (entry[:source_info] || m.source_info), + labels = m.labels.uniq # override because this is the new state, unless... + + ## if we are a later version of a message, ignore what's in the index, + ## but merge in the labels. + if entry[:source_id] && entry[:source_info] && entry[:label] && + ((entry[:source_id].to_i > source_id) || (entry[:source_info].to_i < m.source_info)) + labels = (entry[:label].symbolistize + m.labels).uniq + #Redwood::log "found updated version of message #{m.id}: #{m.subj}" + #Redwood::log "previous version was at #{entry[:source_id].inspect}:#{entry[:source_info].inspect}, this version at #{source_id.inspect}:#{m.source_info.inspect}" + #Redwood::log "merged labels are #{labels.inspect} (index #{entry[:label].inspect}, message #{m.labels.inspect})" + entry = {} + end + + ## if force_overwite is true, ignore what's in the index. this is used + ## primarily by sup-sync to force index updates. + entry = {} if opts[:force_overwrite] + + d = { + :message_id => m.id, + :source_id => source_id, + :source_info => m.source_info, :date => (entry[:date] || m.date.to_indexable_s), :body => (entry[:body] || m.indexable_content), :snippet => snippet, # always override - :label => m.labels.uniq.join(" "), # always override - :from => (entry[:from] || (m.from ? m.from.indexable_content : "")), - :to => (entry[:to] || (m.to + m.cc + m.bcc).map { |x| x.indexable_content }.join(" ")), - :subject => (entry[:subject] || wrap_subj(m.subj)), + :label => labels.uniq.join(" "), + :attachments => (entry[:attachments] || m.attachments.uniq.join(" ")), + + ## always override :from and :to. 
+ ## older versions of Sup would often store the wrong thing in the index + ## (because they were canonicalizing email addresses, resulting in the + ## wrong name associated with each.) the correct address is read from + ## the original header when these messages are opened in thread-view-mode, + ## so this allows people to forcibly update the address in the index by + ## marking those threads for saving. + :from => (m.from ? m.from.indexable_content : ""), + :to => (m.to + m.cc + m.bcc).map { |x| x.indexable_content }.join(" "), + + :subject => (entry[:subject] || wrap_subj(Message.normalize_subj(m.subj))), :refs => (entry[:refs] || (m.refs + m.replytos).uniq.join(" ")), } - @index.delete docid if docid - @index.add_document d - - docid, entry = load_entry_for_id m.id - ## this hasn't been triggered in a long time. TODO: decide whether it's still a problem. - raise "just added message #{m.id.inspect} but couldn't find it in a search" unless docid - true + @index_mutex.synchronize do + @index.delete m.id + @index.add_document d + end end def save_index fn=File.join(@dir, "ferret") @@ -218,32 +237,37 @@ EOS end def contains_id? id - @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0 + @index_mutex.synchronize { @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0 } end - def contains? m; contains_id? m.id; end - def size; @index.size; end + def contains? m; contains_id? m.id end + def size; @index_mutex.synchronize { @index.size } end + def empty?; size == 0 end ## you should probably not call this on a block that doesn't break ## rather quickly because the results can be very large. EACH_BY_DATE_NUM = 100 - def each_id_by_date opts={} - return if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted - query = build_query opts + def each_id_by_date query={} + return if empty? # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted + ferret_query = build_ferret_query query offset = 0 while true - results = @index.search(query, :sort => "date DESC", :limit => EACH_BY_DATE_NUM, :offset => offset) - Redwood::log "got #{results.total_hits} results for query (offset #{offset}) #{query.inspect}" - results.hits.each { |hit| yield @index[hit.doc][:message_id], lambda { build_message hit.doc } } - break if offset >= results.total_hits - EACH_BY_DATE_NUM - offset += EACH_BY_DATE_NUM + limit = (query[:limit])? [EACH_BY_DATE_NUM, query[:limit] - offset].min : EACH_BY_DATE_NUM + results = @index_mutex.synchronize { @index.search ferret_query, :sort => "date DESC", :limit => limit, :offset => offset } + Redwood::log "got #{results.total_hits} results for query (offset #{offset}) #{ferret_query.inspect}" + results.hits.each do |hit| + yield @index_mutex.synchronize { @index[hit.doc][:message_id] }, lambda { build_message hit.doc } + end + break if query[:limit] and offset >= query[:limit] - limit + break if offset >= results.total_hits - limit + offset += limit end end - def num_results_for opts={} - return 0 if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted + def num_results_for query={} + return 0 if empty? 
# otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted - q = build_query opts - index.search(q, :limit => 1).total_hits + ferret_query = build_ferret_query query + @index_mutex.synchronize { @index.search(ferret_query, :limit => 1).total_hits } end ## yield all messages in the thread containing 'm' by repeatedly @@ -262,24 +286,28 @@ EOS searched = {} num_queries = 0 + pending = [m.id] if $config[:thread_by_subject] # do subject queries date_min = m.date - (SAME_SUBJECT_DATE_LIMIT * 12 * 3600) date_max = m.date + (SAME_SUBJECT_DATE_LIMIT * 12 * 3600) q = Ferret::Search::BooleanQuery.new true sq = Ferret::Search::PhraseQuery.new(:subject) - wrap_subj(Message.normalize_subj(m.subj)).split(/\s+/).each do |t| + wrap_subj(Message.normalize_subj(m.subj)).split.each do |t| sq.add_term t end q.add_query sq, :must q.add_query Ferret::Search::RangeQuery.new(:date, :>= => date_min.to_indexable_s, :<= => date_max.to_indexable_s), :must - q = build_query :qobj => q + q = build_ferret_query :qobj => q - pending = @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] } - Redwood::log "found #{pending.size} results for subject query #{q}" - else - pending = [m.id] + p1 = @index_mutex.synchronize { @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] } } + Redwood::log "found #{p1.size} results for subject query #{q}" + + p2 = @index_mutex.synchronize { @index.search(q.to_s, :limit => :all).hits.map { |hit| @index[hit.doc][:message_id] } } + Redwood::log "found #{p2.size} results in string form" + + pending = (pending + p1 + p2).uniq end until pending.empty? || (opts[:limit] && messages.size >= opts[:limit]) @@ -295,22 +323,24 @@ EOS end pending = pending[lim .. -1] - q = build_query :qobj => q + q = build_ferret_query :qobj => q num_queries += 1 killed = false - @index.search_each(q, :limit => :all) do |docid, score| - break if opts[:limit] && messages.size >= opts[:limit] - if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed] - killed = true - break - end - mid = @index[docid][:message_id] - unless messages.member?(mid) - #Redwood::log "got #{mid} as a child of #{id}" - messages[mid] ||= lambda { build_message docid } - refs = @index[docid][:refs].split(" ") - pending += refs.select { |id| !searched[id] } + @index_mutex.synchronize do + @index.search_each(q, :limit => :all) do |docid, score| + break if opts[:limit] && messages.size >= opts[:limit] + if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed] + killed = true + break + end + mid = @index[docid][:message_id] + unless messages.member?(mid) + #Redwood::log "got #{mid} as a child of #{id}" + messages[mid] ||= lambda { build_message docid } + refs = @index[docid][:refs].split + pending += refs.select { |id| !searched[id] } + end end end end @@ -319,7 +349,7 @@ EOS Redwood::log "thread for #{m.id} is killed, ignoring" false else - Redwood::log "ran #{num_queries} queries to build thread of #{messages.size + 1} messages for #{m.id}: #{m.subj}" if num_queries > 0 + Redwood::log "ran #{num_queries} queries to build thread of #{messages.size} messages for #{m.id}: #{m.subj}" if num_queries > 0 messages.each { |mid, builder| yield mid, builder } true end @@ -327,37 +357,32 @@ EOS ## builds a message object from a ferret result def build_message docid - doc = @index[docid] - source = @sources[doc[:source_id].to_i] - #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})" - raise "invalid source #{doc[:source_id]}" unless source - - fake_header 
= { - "date" => Time.at(doc[:date].to_i), - "subject" => unwrap_subj(doc[:subject]), - "from" => doc[:from], - "to" => doc[:to].split(/\s+/).join(", "), # reformat - "message-id" => doc[:message_id], - "references" => doc[:refs].split(/\s+/).map { |x| "<#{x}>" }.join(" "), - } - - Message.new :source => source, :source_info => doc[:source_info].to_i, - :labels => doc[:label].split(" ").map { |s| s.intern }, - :snippet => doc[:snippet], :header => fake_header + @index_mutex.synchronize do + doc = @index[docid] or return + + source = SourceManager[doc[:source_id].to_i] + raise "invalid source #{doc[:source_id]}" unless source + + #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})" + + fake_header = { + "date" => Time.at(doc[:date].to_i), + "subject" => unwrap_subj(doc[:subject]), + "from" => doc[:from], + "to" => doc[:to].split.join(", "), # reformat + "message-id" => doc[:message_id], + "references" => doc[:refs].split.map { |x| "<#{x}>" }.join(" "), + } + + m = Message.new :source => source, :source_info => doc[:source_info].to_i, + :labels => doc[:label].symbolistize, + :snippet => doc[:snippet] + m.parse_header fake_header + m + end end - def fresh_thread_id; @next_thread_id += 1; end - def wrap_subj subj; "__START_SUBJECT__ #{subj} __END_SUBJECT__"; end - def unwrap_subj subj; subj =~ /__START_SUBJECT__ (.*?) __END_SUBJECT__/ && $1; end - - def drop_entry docno; @index.delete docno; end - - def load_entry_for_id mid - results = @index.search(Ferret::Search::TermQuery.new(:message_id, mid)) - return if results.total_hits == 0 - docid = results.hits[0].doc - [docid, @index[docid]] - end + def delete id; @index_mutex.synchronize { @index.delete id } end def load_contacts emails, h={} q = Ferret::Search::BooleanQuery.new true @@ -372,54 +397,57 @@ EOS Redwood::log "contact search: #{q}" contacts = {} num = h[:num] || 20 - @index.search_each(q, :sort => "date DESC", :limit => :all) do |docid, score| - break if contacts.size >= num - #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}" - f = @index[docid][:from] - t = @index[docid][:to] - - if AccountManager.is_account_email? f - t.split(" ").each { |e| contacts[PersonManager.person_for(e)] = true } - else - contacts[PersonManager.person_for(f)] = true + @index_mutex.synchronize do + @index.search_each q, :sort => "date DESC", :limit => :all do |docid, score| + break if contacts.size >= num + #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}" + f = @index[docid][:from] + t = @index[docid][:to] + + if AccountManager.is_account_email? f + t.split(" ").each { |e| contacts[Person.from_address(e)] = true } + else + contacts[Person.from_address(f)] = true + end end end contacts.keys.compact end - def load_sources fn=Redwood::SOURCE_FN - source_array = (Redwood::load_yaml_obj(fn) || []).map { |o| Recoverable.new o } - @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten] - @sources_dirty = false + def each_docid query={} + ferret_query = build_ferret_query query + results = @index_mutex.synchronize { @index.search ferret_query, :limit => (query[:limit] || :all) } + results.hits.map { |hit| yield hit.doc } end - def has_any_from_source_with_label? 
source, label - q = Ferret::Search::BooleanQuery.new - q.add_query Ferret::Search::TermQuery.new("source_id", source.id.to_s), :must - q.add_query Ferret::Search::TermQuery.new("label", label.to_s), :must - index.search(q, :limit => 1).total_hits > 0 + def each_message query={} + each_docid query do |docid| + yield build_message(docid) + end end -protected + def optimize + @index_mutex.synchronize { @index.optimize } + end - ## do any specialized parsing - ## returns nil and flashes error message if parsing failed - def parse_user_query_string s - extraopts = {} + def source_for_id id + entry = @index[id] + return unless entry + entry[:source_id].to_i + end - ## this is a little hacky, but it works, at least until ferret changes - ## its api. we parse the user query string with ferret twice: the first - ## time we just turn the resulting object back into a string, which has - ## the next effect of transforming the original string into a nice - ## normalized form with + and - instead of AND, OR, etc. then we do some - ## string substitutions which depend on this normalized form, re-parse - ## the string with Ferret, and return the resulting query object. + class ParseError < StandardError; end - norms = @qparser.parse(s).to_s - Redwood::log "normalized #{s.inspect} to #{norms.inspect}" unless s == norms + ## parse a query string from the user. returns a query object + ## that can be passed to any index method with a 'query' + ## argument, as well as build_ferret_query. + ## + ## raises a ParseError if something went wrong. + def parse_query s + query = {} - subs = norms.gsub(/\b(to|from):(\S+)\b/) do + subs = s.gsub(/\b(to|from):(\S+)\b/) do field, name = $1, $2 if(p = ContactManager.contact_for(name)) [field, p.email] @@ -443,32 +471,43 @@ protected ## final stage of query processing. if the user wants to search spam ## messages, not adding that is the right thing; if he doesn't want to ## search spam messages, then not adding it won't have any effect. 
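    ## (Editor's sketch, not patch text: assuming an Index instance named
    ## `index`, the substitutions below are expected to behave like
    ##
    ##   q = index.parse_query "is:spam from:bob limit:20"
    ##   q[:load_spam]   #=> true, so -label:spam is not added later
    ##   q[:limit]       #=> 20, honored by each_id_by_date
    ##   q[:qobj]        #=> the Ferret query built from the remaining terms
    ##
    ## while a plain "label:urgent" query leaves :load_spam and :load_deleted
    ## unset, so build_ferret_query keeps filtering spam and deleted out.)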
- extraopts[:load_spam] = true if subs =~ /\blabel:spam\b/ - extraopts[:load_deleted] = true if subs =~ /\blabel:deleted\b/ + query[:load_spam] = true if subs =~ /\blabel:spam\b/ + query[:load_deleted] = true if subs =~ /\blabel:deleted\b/ ## gmail style "is" operator - subs = subs.gsub(/\b(is):(\S+)\b/) do + subs = subs.gsub(/\b(is|has):(\S+)\b/) do field, label = $1, $2 case label when "read" "-label:unread" when "spam" - extraopts[:load_spam] = true + query[:load_spam] = true "label:spam" when "deleted" - extraopts[:load_deleted] = true + query[:load_deleted] = true "label:deleted" else "label:#{$2}" end end + ## gmail style attachments "filename" and "filetype" searches + subs = subs.gsub(/\b(filename|filetype):(\((.+?)\)\B|(\S+)\b)/) do + field, name = $1, ($3 || $4) + case field + when "filename" + Redwood::log "filename - translated #{field}:#{name} to attachments:(#{name.downcase})" + "attachments:(#{name.downcase})" + when "filetype" + Redwood::log "filetype - translated #{field}:#{name} to attachments:(*.#{name.downcase})" + "attachments:(*.#{name.downcase})" + end + end + if $have_chronic - chronic_failure = false subs = subs.gsub(/\b(before|on|in|during|after):(\((.+?)\)\B|(\S+)\b)/) do - break if chronic_failure field, datestr = $1, ($3 || $4) - realdate = Chronic.parse(datestr, :guess => false, :context => :none) + realdate = Chronic.parse datestr, :guess => false, :context => :past if realdate case field when "after" @@ -482,53 +521,57 @@ protected "date:(<= #{sprintf "%012d", realdate.end.to_i}) date:(>= #{sprintf "%012d", realdate.begin.to_i})" end else - BufferManager.flash "Can't understand date #{datestr.inspect}!" - chronic_failure = true + raise ParseError, "can't understand date #{datestr.inspect}" end end - subs = nil if chronic_failure + end + + ## limit:42 restrict the search to 42 results + subs = subs.gsub(/\blimit:(\S+)\b/) do + lim = $1 + if lim =~ /^\d+$/ + query[:limit] = lim.to_i + '' + else + raise ParseError, "non-numeric limit #{lim.inspect}" + end end - Redwood::log "translated #{norms.inspect} to #{subs.inspect}" unless subs == norms - if subs - [@qparser.parse(subs), extraopts] - else - nil + begin + query[:qobj] = @qparser.parse(subs) + query[:text] = s + query + rescue Ferret::QueryParser::QueryParseException => e + raise ParseError, e.message end end - def build_query opts - query = Ferret::Search::BooleanQuery.new - query.add_query opts[:qobj], :must if opts[:qobj] - labels = ([opts[:label]] + (opts[:labels] || [])).compact - labels.each { |t| query.add_query Ferret::Search::TermQuery.new("label", t.to_s), :must } - if opts[:participants] +private + + def build_ferret_query query + q = Ferret::Search::BooleanQuery.new + q.add_query query[:qobj], :must if query[:qobj] + labels = ([query[:label]] + (query[:labels] || [])).compact + labels.each { |t| q.add_query Ferret::Search::TermQuery.new("label", t.to_s), :must } + if query[:participants] q2 = Ferret::Search::BooleanQuery.new - opts[:participants].each do |p| + query[:participants].each do |p| q2.add_query Ferret::Search::TermQuery.new("from", p.email), :should q2.add_query Ferret::Search::TermQuery.new("to", p.email), :should end - query.add_query q2, :must + q.add_query q2, :must end - query.add_query Ferret::Search::TermQuery.new("label", "spam"), :must_not unless opts[:load_spam] || labels.include?(:spam) - query.add_query Ferret::Search::TermQuery.new("label", "deleted"), :must_not unless opts[:load_deleted] || labels.include?(:deleted) - query.add_query 
Ferret::Search::TermQuery.new("label", "killed"), :must_not if opts[:skip_killed] - query - end + q.add_query Ferret::Search::TermQuery.new("label", "spam"), :must_not unless query[:load_spam] || labels.include?(:spam) + q.add_query Ferret::Search::TermQuery.new("label", "deleted"), :must_not unless query[:load_deleted] || labels.include?(:deleted) + q.add_query Ferret::Search::TermQuery.new("label", "killed"), :must_not if query[:skip_killed] - def save_sources fn=Redwood::SOURCE_FN - if @sources_dirty || @sources.any? { |id, s| s.dirty? } - bakfn = fn + ".bak" - if File.exists? fn - File.chmod 0600, fn - FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0 - end - Redwood::save_yaml_obj sources.sort_by { |s| s.id.to_i }, fn, true - File.chmod 0600, fn - end - @sources_dirty = false + q.add_query Ferret::Search::TermQuery.new("source_id", query[:source_id]), :must if query[:source_id] + q end + + def wrap_subj subj; "__START_SUBJECT__ #{subj} __END_SUBJECT__"; end + def unwrap_subj subj; subj =~ /__START_SUBJECT__ (.*?) __END_SUBJECT__/ && $1; end end end
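
Editor's usage sketch, not part of the patch: the new public surface of
Redwood::Index separates parsing a user query from running it. parse_query
returns a plain hash that each_id_by_date, each_message and num_results_for
all accept. The `Redwood::Index.instance` accessor and the startup code around
it are assumptions here; they are not shown in this diff.

    index = Redwood::Index.instance      # Index includes Singleton

    begin
      ## parse_query raises Index::ParseError on bad input (an unparseable
      ## date:, a non-numeric limit:, or a Ferret syntax error) instead of
      ## flashing a message itself, as the old parse_user_query_string did.
      query = index.parse_query "is:unread filetype:pdf limit:10"
    rescue Redwood::Index::ParseError => e
      abort "bad query: #{e.message}"
    end

    ## each_id_by_date honors query[:limit] and yields lazy builders, so a
    ## Message object is only built when the caller actually wants it.
    index.each_id_by_date query do |mid, builder|
      m = builder.call
      puts "#{mid}: #{m.subj}"
    end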