mirror of
				https://github.com/KevinMidboe/linguist.git
				synced 2025-10-29 17:50:22 +00:00 
			
		
		
		
	use LINGUIST_DEBUG to debug the Bayesian filter
This commit is contained in:
		@@ -77,9 +77,16 @@ module Linguist
 | 
				
			|||||||
      tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
 | 
					      tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      scores = {}
 | 
					      scores = {}
 | 
				
			||||||
 | 
					      if verbosity >= 2
 | 
				
			||||||
 | 
					        dump_all_tokens(tokens, languages)
 | 
				
			||||||
 | 
					      end
 | 
				
			||||||
      languages.each do |language|
 | 
					      languages.each do |language|
 | 
				
			||||||
        scores[language] = tokens_probability(tokens, language) +
 | 
					        scores[language] = tokens_probability(tokens, language) +
 | 
				
			||||||
                                   language_probability(language)
 | 
					                                   language_probability(language)
 | 
				
			||||||
 | 
					        if verbosity >= 1
 | 
				
			||||||
 | 
					          printf "%10s = %10.3f + %7.3f = %10.3f\n",
 | 
				
			||||||
 | 
					            language, tokens_probability(tokens, language), language_probability(language), scores[language]
 | 
				
			||||||
 | 
					        end
 | 
				
			||||||
      end
 | 
					      end
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      scores.sort { |a, b| b[1] <=> a[1] }.map { |score| [score[0], score[1]] }
 | 
					      scores.sort { |a, b| b[1] <=> a[1] }.map { |score| [score[0], score[1]] }
 | 
				
			||||||
@@ -119,5 +126,29 @@ module Linguist
 | 
				
			|||||||
    # Prior log-probability of +language+ in the trained corpus:
    # log(samples seen for this language / total samples seen).
    #
    # language - Language name key into the @languages count hash.
    #
    # Returns a (negative) Float log-probability.
    def language_probability(language)
      ratio = @languages[language].to_f / @languages_total.to_f
      Math.log(ratio)
    end
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					private
 | 
				
			||||||
 | 
					    def verbosity
 | 
				
			||||||
 | 
					      @verbosity ||= (ENV['LINGUIST_DEBUG']||0).to_i
 | 
				
			||||||
 | 
					    end
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
    # Debug helper: prints a table showing, for every distinct token in the
    # sample, how strongly it pulls the score toward each candidate language.
    # Invoked when verbosity >= 2 (i.e. LINGUIST_DEBUG >= 2). Tokens that
    # score identically in every language are skipped — they cannot affect
    # the ranking.
    #
    # tokens    - Array of String tokens from the sample being classified.
    # languages - Array of candidate language names.
    #
    # Returns nothing; writes the table to stdout.
    def dump_all_tokens(tokens, languages)
      maxlen = tokens.map{|tok| tok.size}.max   # column width for the token name

      # Header row: blank token column, then one right-aligned column per language.
      printf "%#{maxlen}s", ""
      puts "    #" + languages.map{|lang| sprintf("%10s", lang)}.join

      # Tally occurrences of each token in the sample.
      tokmap = Hash.new(0)
      tokens.each{|tok| tokmap[tok] += 1}

      tokmap.sort.each{|tok, count|
        # Per-language probability of this token (token_probability is a
        # sibling method on this classifier).
        arr = languages.map{|lang| [lang, token_probability(tok, lang)] }
        min = arr.map{|a,b| b}.min
        minlog = Math.log(min)
        if !arr.inject(true) {|result, n| result && n[1] == arr[0][1]}  # if not all the same
          printf "%#{maxlen}s%5d", tok, count
          # "-" marks the language(s) at the minimum probability; the rest
          # show their log-probability advantage over that minimum, scaled
          # by how often the token occurs.
          puts arr.map{|ent|
            ent[1] == min ? "         -" : sprintf("%10.3f", count*(Math.log(ent[1])-minlog))
          }.join
        end
      }
    end
 | 
				
			||||||
  end
 | 
					  end
 | 
				
			||||||
end
 | 
					end
 | 
				
			||||||
 
 | 
				
			|||||||
		Reference in New Issue
	
	Block a user