Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00
			
		
		
		
	Skip number literals
This commit is contained in:
		| @@ -73,12 +73,15 @@ module Linguist | |||||||
|         elsif s.scan(/'/) |         elsif s.scan(/'/) | ||||||
|           s.skip_until(/[^\\]'/) |           s.skip_until(/[^\\]'/) | ||||||
|  |  | ||||||
|  |         # Skip number literals | ||||||
|  |         elsif s.scan(/\d+/) | ||||||
|  |  | ||||||
|         # SGML style brackets |         # SGML style brackets | ||||||
|         elsif token = s.scan(/<[^>]+>/) |         elsif token = s.scan(/<[^>]+>/) | ||||||
|           extract_sgml_tokens(token).each { |t| tokens << t } |           extract_sgml_tokens(token).each { |t| tokens << t } | ||||||
|  |  | ||||||
|         # Common programming punctuation |         # Common programming punctuation | ||||||
|         elsif token = s.scan(/;|\{|\}|\(|\)|<<?/) |         elsif token = s.scan(/;|\{|\}|\(|\)|<<?|\+/) | ||||||
|           tokens << token |           tokens << token | ||||||
|  |  | ||||||
|         # Regular token |         # Regular token | ||||||
|   | |||||||
| @@ -14,7 +14,7 @@ class TestTokenizer < Test::Unit::TestCase | |||||||
|     Tokenizer.new(data).tokens |     Tokenizer.new(data).tokens | ||||||
|   end |   end | ||||||
|  |  | ||||||
|   def test_skip_strings |   def test_skip_string_literals | ||||||
|     assert_equal %w(print), tokenize('print ""') |     assert_equal %w(print), tokenize('print ""') | ||||||
|     assert_equal %w(print), tokenize('print "Josh"') |     assert_equal %w(print), tokenize('print "Josh"') | ||||||
|     assert_equal %w(print), tokenize("print 'Josh'") |     assert_equal %w(print), tokenize("print 'Josh'") | ||||||
| @@ -22,6 +22,11 @@ class TestTokenizer < Test::Unit::TestCase | |||||||
|     assert_equal %w(print), tokenize("print 'Hello \\'Josh\\''") |     assert_equal %w(print), tokenize("print 'Hello \\'Josh\\''") | ||||||
|   end |   end | ||||||
|  |  | ||||||
|  |   def test_skip_number_literals | ||||||
|  |     assert_equal %w(+), tokenize('1 + 1') | ||||||
|  |     assert_equal %w(add \( \)), tokenize('add(123, 456)') | ||||||
|  |   end | ||||||
|  |  | ||||||
|   def test_skip_comments |   def test_skip_comments | ||||||
|     assert_equal %w(foo #), tokenize("foo # Comment") |     assert_equal %w(foo #), tokenize("foo # Comment") | ||||||
|     assert_equal %w(foo # bar), tokenize("foo # Comment\nbar") |     assert_equal %w(foo # bar), tokenize("foo # Comment\nbar") | ||||||
| @@ -43,7 +48,8 @@ class TestTokenizer < Test::Unit::TestCase | |||||||
|   end |   end | ||||||
|  |  | ||||||
|   def test_c_tokens |   def test_c_tokens | ||||||
|     assert_equal %w(#include <stdio.h> int main \( \) { printf \( \) ; return 0 ; }), tokenize(:"c/hello.c") |     assert_equal %w(#ifndef HELLO_H #define HELLO_H void hello \( \) ; #endif), tokenize(:"c/hello.h") | ||||||
|  |     assert_equal %w(#include <stdio.h> int main \( \) { printf \( \) ; return ; }), tokenize(:"c/hello.c") | ||||||
|   end |   end | ||||||
|  |  | ||||||
|   def test_cpp_tokens |   def test_cpp_tokens | ||||||
| @@ -54,7 +60,7 @@ class TestTokenizer < Test::Unit::TestCase | |||||||
|   def test_objective_c_tokens |   def test_objective_c_tokens | ||||||
|     assert_equal %w(#import <Foundation/Foundation.h> @interface Foo NSObject { } @end), tokenize(:"objective-c/Foo.h") |     assert_equal %w(#import <Foundation/Foundation.h> @interface Foo NSObject { } @end), tokenize(:"objective-c/Foo.h") | ||||||
|     assert_equal %w(#import @implementation Foo @end), tokenize(:"objective-c/Foo.m") |     assert_equal %w(#import @implementation Foo @end), tokenize(:"objective-c/Foo.m") | ||||||
|     assert_equal %w(#import <Cocoa/Cocoa.h> int main \( int argc char argv \) { NSLog \( @ \) ; return 0 ; }), tokenize(:"objective-c/hello.m") |     assert_equal %w(#import <Cocoa/Cocoa.h> int main \( int argc char argv \) { NSLog \( @ \) ; return ; }), tokenize(:"objective-c/hello.m") | ||||||
|   end |   end | ||||||
|  |  | ||||||
|   def test_javascript_tokens |   def test_javascript_tokens | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user