diff --git a/.gitmodules b/.gitmodules index eb5b0c01..9a37dcd4 100644 --- a/.gitmodules +++ b/.gitmodules @@ -82,9 +82,6 @@ [submodule "vendor/grammars/language-python"] path = vendor/grammars/language-python url = https://github.com/atom/language-python -[submodule "vendor/grammars/language-sass"] - path = vendor/grammars/language-sass - url = https://github.com/atom/language-sass [submodule "vendor/grammars/language-shellscript"] path = vendor/grammars/language-shellscript url = https://github.com/atom/language-shellscript @@ -506,7 +503,6 @@ [submodule "vendor/grammars/sublime-mask"] path = vendor/grammars/sublime-mask url = https://github.com/tenbits/sublime-mask - branch = release [submodule "vendor/grammars/sublime_cobol"] path = vendor/grammars/sublime_cobol url = https://bitbucket.org/bitlang/sublime_cobol @@ -535,3 +531,15 @@ [submodule "vendor/grammars/Sublime-HTTP"] path = vendor/grammars/Sublime-HTTP url = https://github.com/httpspec/sublime-highlighting +[submodule "vendor/grammars/sass-textmate-bundle"] + path = vendor/grammars/sass-textmate-bundle + url = https://github.com/nathos/sass-textmate-bundle +[submodule "vendor/grammars/carto-atom"] + path = vendor/grammars/carto-atom + url = https://github.com/yohanboniface/carto-atom +[submodule "vendor/grammars/Sublime-Nit"] + path = vendor/grammars/Sublime-Nit + url = https://github.com/R4PaSs/Sublime-Nit +[submodule "vendor/grammars/language-hy"] + path = vendor/grammars/language-hy + url = https://github.com/rwtolbert/language-hy diff --git a/.travis.yml b/.travis.yml index e49ae8a6..4d0c2351 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,5 @@ -before_install: - - git fetch origin master:master - - git fetch origin v2.0.0:v2.0.0 - - git fetch origin test/attributes:test/attributes - - git fetch origin test/master:test/master - - sudo apt-get install libicu-dev -y - - git submodule init - - git submodule sync --quiet - - script/fast-submodule-update +sudo: false +before_install: script/travis/before_install rvm: - 1.9.3 - 2.0.0 @@ -16,3 +9,4 @@ notifications: disabled: true git: submodules: false +cache: bundler diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 69081657..b6b682f6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,7 +23,7 @@ Great! You'll need to: 0. Add an entry for your language to [`languages.yml`][languages]. 0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution. 0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`. - 0. Add your grammar to [`grammars.yml`][grammars] by running `script/download-grammars --add vendor/grammars/MyGrammar`. + 0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`. 0. Add samples for your language to the [samples directory][samples]. 
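For illustration, the grammar-related steps in that list boil down to something like the following shell session. This is only a sketch: `JaneSmith/MyGrammar` is the placeholder repository already used in the text above, and the sample path/file names are hypothetical.

    # Vendor the grammar as a submodule (placeholder repo from the text above).
    git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar

    # Register its scopes in grammars.yml via the renamed script.
    script/convert-grammars --add vendor/grammars/MyGrammar

    # Add at least one sample file for the new language (hypothetical paths).
    mkdir -p samples/MyLanguage
    cp path/to/example.foo samples/MyLanguage/
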
In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken: diff --git a/Rakefile b/Rakefile index 068af0bb..b38486c0 100644 --- a/Rakefile +++ b/Rakefile @@ -48,7 +48,7 @@ end task :build_grammars_gem do rm_rf "grammars" - sh "script/download-grammars" + sh "script/convert-grammars" sh "gem", "build", "github-linguist-grammars.gemspec" end diff --git a/grammars.yml b/grammars.yml index a8305007..1421c2f6 100644 --- a/grammars.yml +++ b/grammars.yml @@ -47,9 +47,9 @@ vendor/grammars/LiveScript.tmbundle: vendor/grammars/NSIS: - source.nsis vendor/grammars/NimLime: -- source.nimrod -- source.nimrod_filter -- source.nimrodcfg +- source.nim +- source.nim_filter +- source.nimcfg vendor/grammars/PHP-Twig.tmbundle: - text.html.twig vendor/grammars/RDoc.tmbundle: @@ -76,6 +76,8 @@ vendor/grammars/Sublime-Logos: - source.logos vendor/grammars/Sublime-Loom: - source.loomscript +vendor/grammars/Sublime-Nit: +- source.nit vendor/grammars/Sublime-QML: - source.qml vendor/grammars/Sublime-REBOL: @@ -138,6 +140,8 @@ vendor/grammars/c.tmbundle: - source.c.platform vendor/grammars/capnproto.tmbundle: - source.capnp +vendor/grammars/carto-atom: +- source.css.mss vendor/grammars/ceylon-sublimetext: - module.ceylon - source.ceylon @@ -255,16 +259,16 @@ vendor/grammars/language-csharp: - source.nant-build vendor/grammars/language-gfm: - source.gfm +vendor/grammars/language-hy: +- source.hy vendor/grammars/language-javascript: - source.js - source.js.regexp vendor/grammars/language-python: - source.python - source.regexp.python +- text.python.console - text.python.traceback -vendor/grammars/language-sass: -- source.css.scss -- source.sass vendor/grammars/language-shellscript: - source.shell - text.shell-session @@ -376,6 +380,8 @@ vendor/grammars/ruby.tmbundle: vendor/grammars/sas.tmbundle: - source.SASLog - source.sas +vendor/grammars/sass-textmate-bundle: +- source.sass vendor/grammars/scala.tmbundle: - source.sbt - source.scala diff --git a/lib/linguist/heuristics.rb b/lib/linguist/heuristics.rb index 97c1015a..5e603f23 100644 --- a/lib/linguist/heuristics.rb +++ b/lib/linguist/heuristics.rb @@ -175,7 +175,7 @@ module Linguist disambiguate "Frege", "Forth", "Text" do |data| if /^(: |also |new-device|previous )/.match(data) Language["Forth"] - elsif /\s*(import|module|package|data|type) /.match(data) + elsif /^\s*(import|module|package|data|type) /.match(data) Language["Frege"] else Language["Text"] diff --git a/lib/linguist/languages.yml b/lib/linguist/languages.yml index e2c6df86..7172b86e 100644 --- a/lib/linguist/languages.yml +++ b/lib/linguist/languages.yml @@ -450,12 +450,13 @@ Cap'n Proto: ace_mode: text CartoCSS: + type: programming aliases: - Carto extensions: - .mss ace_mode: text - tm_scope: none + tm_scope: source.css.mss Ceylon: type: programming @@ -1267,13 +1268,13 @@ Haxe: Hy: type: programming - ace_mode: clojure + ace_mode: text color: "#7891b1" extensions: - .hy aliases: - hylang - tm_scope: none + tm_scope: source.hy IDL: type: programming @@ -1381,13 +1382,6 @@ JSON: extensions: - .json - .lock - - .sublime-keymap - - .sublime-mousemap - - .sublime-project - - .sublime-settings - - .sublime-workspace - - .sublime_metrics - - .sublime_session filenames: - .jshintrc - composer.lock @@ -1471,6 +1465,19 @@ JavaScript: - .pac - .sjs - .ssjs + - .sublime-build + - .sublime-commands + - .sublime-completions + - .sublime-keymap + - .sublime-macro + - .sublime-menu + - 
.sublime-mousemap + - .sublime-project + - .sublime-settings + - .sublime-theme + - .sublime-workspace + - .sublime_metrics + - .sublime_session - .xsjs - .xsjslib filenames: @@ -1891,6 +1898,7 @@ Nimrod: - .nim - .nimrod ace_mode: text + tm_scope: source.nim Ninja: type: data @@ -1904,7 +1912,7 @@ Nit: color: "#0d8921" extensions: - .nit - tm_scope: none + tm_scope: source.nit ace_mode: text Nix: @@ -2620,6 +2628,8 @@ SQL: ace_mode: sql extensions: - .sql + - .cql + - .ddl - .prc - .tab - .udf @@ -3129,6 +3139,8 @@ XML: - .rss - .scxml - .srdf + - .stTheme + - .sublime-snippet - .svg - .targets - .tmCommand diff --git a/lib/linguist/version.rb b/lib/linguist/version.rb index bce0aef5..3f330f79 100644 --- a/lib/linguist/version.rb +++ b/lib/linguist/version.rb @@ -1,3 +1,3 @@ module Linguist - VERSION = "4.2.5" + VERSION = "4.2.6" end diff --git a/samples/Nit/file.nit b/samples/Nit/file.nit new file mode 100644 index 00000000..889526fc --- /dev/null +++ b/samples/Nit/file.nit @@ -0,0 +1,798 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# Copyright 2008 Floréal Morandat +# Copyright 2008 Jean-Sébastien Gélinas +# +# This file is free software, which comes along with NIT. This software is +# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; +# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. You can modify it is you want, provided this header +# is kept unaltered, and a notification of the changes is added. +# You are allowed to redistribute it and sell it, alone or is a part of +# another product. + +# File manipulations (create, read, write, etc.) +module file + +intrude import stream +intrude import ropes +import string_search +import time + +in "C Header" `{ + #include + #include + #include + #include + #include + #include + #include + #include +`} + +# File Abstract Stream +abstract class FStream + super IOS + # The path of the file. + var path: nullable String = null + + # The FILE *. + private var file: nullable NativeFile = null + + fun file_stat: FileStat do return _file.file_stat + + # File descriptor of this file + fun fd: Int do return _file.fileno +end + +# File input stream +class IFStream + super FStream + super BufferedIStream + super PollableIStream + # Misc + + # Open the same file again. + # The original path is reused, therefore the reopened file can be a different file. + fun reopen + do + if not eof and not _file.address_is_null then close + _file = new NativeFile.io_open_read(path.to_cstring) + if _file.address_is_null then + last_error = new IOError("Error: Opening file at '{path.as(not null)}' failed with '{sys.errno.strerror}'") + end_reached = true + return + end + end_reached = false + _buffer_pos = 0 + _buffer.clear + end + + redef fun close + do + if _file.address_is_null then return + var i = _file.io_close + _buffer.clear + end_reached = true + end + + redef fun fill_buffer + do + var nb = _file.io_read(_buffer.items, _buffer.capacity) + if nb <= 0 then + end_reached = true + nb = 0 + end + _buffer.length = nb + _buffer_pos = 0 + end + # End of file? + redef var end_reached: Bool = false + + # Open the file at `path` for reading. 
+ init open(path: String) + do + self.path = path + prepare_buffer(10) + _file = new NativeFile.io_open_read(path.to_cstring) + if _file.address_is_null then + last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'") + end_reached = true + end + end + + init from_fd(fd: Int) do + self.path = "" + prepare_buffer(10) + _file = fd_to_stream(fd, read_only) + if _file.address_is_null then + last_error = new IOError("Error: Converting fd {fd} to stream failed with '{sys.errno.strerror}'") + end_reached = true + end + end +end + +# File output stream +class OFStream + super FStream + super OStream + + redef fun write(s) + do + if last_error != null then return + if not _is_writable then + last_error = new IOError("Cannot write to non-writable stream") + return + end + if s isa FlatText then + write_native(s.to_cstring, s.length) + else + for i in s.substrings do write_native(i.to_cstring, i.length) + end + end + + redef fun close + do + if _file.address_is_null then + last_error = new IOError("Cannot close non-existing write stream") + _is_writable = false + return + end + var i = _file.io_close + _is_writable = false + end + redef var is_writable = false + + # Write `len` bytes from `native`. + private fun write_native(native: NativeString, len: Int) + do + if last_error != null then return + if not _is_writable then + last_error = new IOError("Cannot write to non-writable stream") + return + end + if _file.address_is_null then + last_error = new IOError("Writing on a null stream") + _is_writable = false + return + end + var err = _file.io_write(native, len) + if err != len then + # Big problem + last_error = new IOError("Problem in writing : {err} {len} \n") + end + end + + # Open the file at `path` for writing. + init open(path: String) + do + _file = new NativeFile.io_open_write(path.to_cstring) + if _file.address_is_null then + last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'") + self.path = path + is_writable = false + end + self.path = path + _is_writable = true + end + + # Creates a new File stream from a file descriptor + init from_fd(fd: Int) do + self.path = "" + _file = fd_to_stream(fd, wipe_write) + _is_writable = true + if _file.address_is_null then + last_error = new IOError("Error: Opening stream from file descriptor {fd} failed with '{sys.errno.strerror}'") + _is_writable = false + end + end +end + +redef interface Object + + private fun read_only: NativeString do return "r".to_cstring + + private fun wipe_write: NativeString do return "w".to_cstring + + private fun fd_to_stream(fd: Int, mode: NativeString): NativeFile `{ + return fdopen(fd, mode); + `} + + # returns first available stream to read or write to + # return null on interruption (possibly a signal) + protected fun poll( streams : Sequence[FStream] ) : nullable FStream + do + var in_fds = new Array[Int] + var out_fds = new Array[Int] + var fd_to_stream = new HashMap[Int,FStream] + for s in streams do + var fd = s.fd + if s isa IFStream then in_fds.add( fd ) + if s isa OFStream then out_fds.add( fd ) + + fd_to_stream[fd] = s + end + + var polled_fd = intern_poll( in_fds, out_fds ) + + if polled_fd == null then + return null + else + return fd_to_stream[polled_fd] + end + end + + private fun intern_poll(in_fds: Array[Int], out_fds: Array[Int]) : nullable Int is extern import Array[Int].length, Array[Int].[], Int.as(nullable Int) `{ + int in_len, out_len, total_len; + struct pollfd *c_fds; + sigset_t sigmask; + int i; + int first_polled_fd = 
-1; + int result; + + in_len = Array_of_Int_length( in_fds ); + out_len = Array_of_Int_length( out_fds ); + total_len = in_len + out_len; + c_fds = malloc( sizeof(struct pollfd) * total_len ); + + /* input streams */ + for ( i=0; i 0 ) { + /* analyse results */ + for ( i=0; i 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/` + if l == 0 then return "/" + var pos = chars.last_index_of_from('/', l) + var n = self + if pos >= 0 then + n = substring(pos+1, l-pos) + end + return n.strip_extension(ext) + end + + # Extract the dirname of a path + # + # assert "/path/to/a_file.ext".dirname == "/path/to" + # assert "path/to/a_file.ext".dirname == "path/to" + # assert "path/to".dirname == "path" + # assert "path/to/".dirname == "path" + # assert "path".dirname == "." + # assert "/path".dirname == "/" + # assert "/".dirname == "/" + # assert "".dirname == "." + fun dirname: String + do + var l = length - 1 # Index of the last char + while l > 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/` + var pos = chars.last_index_of_from('/', l) + if pos > 0 then + return substring(0, pos) + else if pos == 0 then + return "/" + else + return "." + end + end + + # Return the canonicalized absolute pathname (see POSIX function `realpath`) + fun realpath: String do + var cs = to_cstring.file_realpath + var res = cs.to_s_with_copy + # cs.free_malloc # FIXME memory leak + return res + end + + # Simplify a file path by remove useless ".", removing "//", and resolving ".." + # ".." are not resolved if they start the path + # starting "/" is not removed + # trainling "/" is removed + # + # Note that the method only wonrk on the string: + # * no I/O access is performed + # * the validity of the path is not checked + # + # assert "some/./complex/../../path/from/../to/a////file//".simplify_path == "path/to/a/file" + # assert "../dir/file".simplify_path == "../dir/file" + # assert "dir/../../".simplify_path == ".." + # assert "dir/..".simplify_path == "." + # assert "//absolute//path/".simplify_path == "/absolute/path" + # assert "//absolute//../".simplify_path == "/" + fun simplify_path: String + do + var a = self.split_with("/") + var a2 = new Array[String] + for x in a do + if x == "." then continue + if x == "" and not a2.is_empty then continue + if x == ".." and not a2.is_empty and a2.last != ".." then + a2.pop + continue + end + a2.push(x) + end + if a2.is_empty then return "." + if a2.length == 1 and a2.first == "" then return "/" + return a2.join("/") + end + + # Correctly join two path using the directory separator. + # + # Using a standard "{self}/{path}" does not work in the following cases: + # + # * `self` is empty. + # * `path` ends with `'/'`. + # * `path` starts with `'/'`. + # + # This method ensures that the join is valid. + # + # assert "hello".join_path("world") == "hello/world" + # assert "hel/lo".join_path("wor/ld") == "hel/lo/wor/ld" + # assert "".join_path("world") == "world" + # assert "hello".join_path("/world") == "/world" + # assert "hello/".join_path("world") == "hello/world" + # assert "hello/".join_path("/world") == "/world" + # + # Note: You may want to use `simplify_path` on the result. + # + # Note: This method works only with POSIX paths. + fun join_path(path: String): String + do + if path.is_empty then return self + if self.is_empty then return path + if path.chars[0] == '/' then return path + if self.last == '/' then return "{self}{path}" + return "{self}/{path}" + end + + # Convert the path (`self`) to a program name. 
+ # + # Ensure the path (`self`) will be treated as-is by POSIX shells when it is + # used as a program name. In order to do that, prepend `./` if needed. + # + # assert "foo".to_program_name == "./foo" + # assert "/foo".to_program_name == "/foo" + # assert "".to_program_name == "./" # At least, your shell will detect the error. + fun to_program_name: String do + if self.has_prefix("/") then + return self + else + return "./{self}" + end + end + + # Alias for `join_path` + # + # assert "hello" / "world" == "hello/world" + # assert "hel/lo" / "wor/ld" == "hel/lo/wor/ld" + # assert "" / "world" == "world" + # assert "/hello" / "/world" == "/world" + # + # This operator is quite useful for chaining changes of path. + # The next one being relative to the previous one. + # + # var a = "foo" + # var b = "/bar" + # var c = "baz/foobar" + # assert a/b/c == "/bar/baz/foobar" + fun /(path: String): String do return join_path(path) + + # Returns the relative path needed to go from `self` to `dest`. + # + # assert "/foo/bar".relpath("/foo/baz") == "../baz" + # assert "/foo/bar".relpath("/baz/bar") == "../../baz/bar" + # + # If `self` or `dest` is relative, they are considered relatively to `getcwd`. + # + # In some cases, the result is still independent of the current directory: + # + # assert "foo/bar".relpath("..") == "../../.." + # + # In other cases, parts of the current directory may be exhibited: + # + # var p = "../foo/bar".relpath("baz") + # var c = getcwd.basename("") + # assert p == "../../{c}/baz" + # + # For path resolution independent of the current directory (eg. for paths in URL), + # or to use an other starting directory than the current directory, + # just force absolute paths: + # + # var start = "/a/b/c/d" + # var p2 = (start/"../foo/bar").relpath(start/"baz") + # assert p2 == "../../d/baz" + # + # + # Neither `self` or `dest` has to be real paths or to exist in directories since + # the resolution is only done with string manipulations and without any access to + # the underlying file system. + # + # If `self` and `dest` are the same directory, the empty string is returned: + # + # assert "foo".relpath("foo") == "" + # assert "foo/../bar".relpath("bar") == "" + # + # The empty string and "." designate both the current directory: + # + # assert "".relpath("foo/bar") == "foo/bar" + # assert ".".relpath("foo/bar") == "foo/bar" + # assert "foo/bar".relpath("") == "../.." + # assert "/" + "/".relpath(".") == getcwd + fun relpath(dest: String): String + do + var cwd = getcwd + var from = (cwd/self).simplify_path.split("/") + if from.last.is_empty then from.pop # case for the root directory + var to = (cwd/dest).simplify_path.split("/") + if to.last.is_empty then to.pop # case for the root directory + + # Remove common prefixes + while not from.is_empty and not to.is_empty and from.first == to.first do + from.shift + to.shift + end + + # Result is going up in `from` with ".." then going down following `to` + var from_len = from.length + if from_len == 0 then return to.join("/") + var up = "../"*(from_len-1) + ".." 
+ if to.is_empty then return up + var res = up + "/" + to.join("/") + return res + end + + # Create a directory (and all intermediate directories if needed) + fun mkdir + do + var dirs = self.split_with("/") + var path = new FlatBuffer + if dirs.is_empty then return + if dirs[0].is_empty then + # it was a starting / + path.add('/') + end + for d in dirs do + if d.is_empty then continue + path.append(d) + path.add('/') + path.to_s.to_cstring.file_mkdir + end + end + + # Delete a directory and all of its content, return `true` on success + # + # Does not go through symbolic links and may get stuck in a cycle if there + # is a cycle in the filesystem. + fun rmdir: Bool + do + var ok = true + for file in self.files do + var file_path = self.join_path(file) + var stat = file_path.file_lstat + if stat.is_dir then + ok = file_path.rmdir and ok + else + ok = file_path.file_delete and ok + end + stat.free + end + + # Delete the directory itself + if ok then to_cstring.rmdir + + return ok + end + + # Change the current working directory + # + # "/etc".chdir + # assert getcwd == "/etc" + # "..".chdir + # assert getcwd == "/" + # + # TODO: errno + fun chdir do to_cstring.file_chdir + + # Return right-most extension (without the dot) + # + # Only the last extension is returned. + # There is no special case for combined extensions. + # + # assert "file.txt".file_extension == "txt" + # assert "file.tar.gz".file_extension == "gz" + # + # For file without extension, `null` is returned. + # Hoever, for trailing dot, `""` is returned. + # + # assert "file".file_extension == null + # assert "file.".file_extension == "" + # + # The starting dot of hidden files is never considered. + # + # assert ".file.txt".file_extension == "txt" + # assert ".file".file_extension == null + fun file_extension: nullable String + do + var last_slash = chars.last_index_of('.') + if last_slash > 0 then + return substring( last_slash+1, length ) + else + return null + end + end + + # returns files contained within the directory represented by self + fun files : Set[ String ] is extern import HashSet[String], HashSet[String].add, NativeString.to_s, String.to_cstring, HashSet[String].as(Set[String]) `{ + char *dir_path; + DIR *dir; + + dir_path = String_to_cstring( recv ); + if ((dir = opendir(dir_path)) == NULL) + { + perror( dir_path ); + exit( 1 ); + } + else + { + HashSet_of_String results; + String file_name; + struct dirent *de; + + results = new_HashSet_of_String(); + + while ( ( de = readdir( dir ) ) != NULL ) + if ( strcmp( de->d_name, ".." ) != 0 && + strcmp( de->d_name, "." 
) != 0 ) + { + file_name = NativeString_to_s( strdup( de->d_name ) ); + HashSet_of_String_add( results, file_name ); + } + + closedir( dir ); + return HashSet_of_String_as_Set_of_String( results ); + } + `} +end + +redef class NativeString + private fun file_exists: Bool is extern "string_NativeString_NativeString_file_exists_0" + private fun file_stat: FileStat is extern "string_NativeString_NativeString_file_stat_0" + private fun file_lstat: FileStat `{ + struct stat* stat_element; + int res; + stat_element = malloc(sizeof(struct stat)); + res = lstat(recv, stat_element); + if (res == -1) return NULL; + return stat_element; + `} + private fun file_mkdir: Bool is extern "string_NativeString_NativeString_file_mkdir_0" + private fun rmdir: Bool `{ return rmdir(recv); `} + private fun file_delete: Bool is extern "string_NativeString_NativeString_file_delete_0" + private fun file_chdir is extern "string_NativeString_NativeString_file_chdir_0" + private fun file_realpath: NativeString is extern "file_NativeString_realpath" +end + +# This class is system dependent ... must reify the vfs +extern class FileStat `{ struct stat * `} + # Returns the permission bits of file + fun mode: Int is extern "file_FileStat_FileStat_mode_0" + # Returns the last access time + fun atime: Int is extern "file_FileStat_FileStat_atime_0" + # Returns the last status change time + fun ctime: Int is extern "file_FileStat_FileStat_ctime_0" + # Returns the last modification time + fun mtime: Int is extern "file_FileStat_FileStat_mtime_0" + # Returns the size + fun size: Int is extern "file_FileStat_FileStat_size_0" + + # Returns true if it is a regular file (not a device file, pipe, sockect, ...) + fun is_reg: Bool `{ return S_ISREG(recv->st_mode); `} + # Returns true if it is a directory + fun is_dir: Bool `{ return S_ISDIR(recv->st_mode); `} + # Returns true if it is a character device + fun is_chr: Bool `{ return S_ISCHR(recv->st_mode); `} + # Returns true if it is a block device + fun is_blk: Bool `{ return S_ISBLK(recv->st_mode); `} + # Returns true if the type is fifo + fun is_fifo: Bool `{ return S_ISFIFO(recv->st_mode); `} + # Returns true if the type is a link + fun is_lnk: Bool `{ return S_ISLNK(recv->st_mode); `} + # Returns true if the type is a socket + fun is_sock: Bool `{ return S_ISSOCK(recv->st_mode); `} +end + +# Instance of this class are standard FILE * pointers +private extern class NativeFile `{ FILE* `} + fun io_read(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_read_2" + fun io_write(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_write_2" + fun io_close: Int is extern "file_NativeFile_NativeFile_io_close_0" + fun file_stat: FileStat is extern "file_NativeFile_NativeFile_file_stat_0" + fun fileno: Int `{ return fileno(recv); `} + + new io_open_read(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_read_1" + new io_open_write(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_write_1" + new native_stdin is extern "file_NativeFileCapable_NativeFileCapable_native_stdin_0" + new native_stdout is extern "file_NativeFileCapable_NativeFileCapable_native_stdout_0" + new native_stderr is extern "file_NativeFileCapable_NativeFileCapable_native_stderr_0" +end + +redef class Sys + + # Standard input + var stdin: PollableIStream = new Stdin is protected writable + + # Standard output + var stdout: OStream = new Stdout is protected writable + + # Standard output for errors + var stderr: OStream = 
new Stderr is protected writable + +end + +# Print `objects` on the standard output (`stdout`). +protected fun printn(objects: Object...) +do + sys.stdout.write(objects.to_s) +end + +# Print an `object` on the standard output (`stdout`) and add a newline. +protected fun print(object: Object) +do + sys.stdout.write(object.to_s) + sys.stdout.write("\n") +end + +# Read a character from the standard input (`stdin`). +protected fun getc: Char +do + return sys.stdin.read_char.ascii +end + +# Read a line from the standard input (`stdin`). +protected fun gets: String +do + return sys.stdin.read_line +end + +# Return the working (current) directory +protected fun getcwd: String do return file_getcwd.to_s +private fun file_getcwd: NativeString is extern "string_NativeString_NativeString_file_getcwd_0" diff --git a/samples/Nit/meetup.nit b/samples/Nit/meetup.nit new file mode 100644 index 00000000..e5d1f561 --- /dev/null +++ b/samples/Nit/meetup.nit @@ -0,0 +1,376 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License + +# Shows a meetup and allows to modify its participants +module meetup + +import opportunity_model +import boilerplate +import welcome +import template + +# Shows a meetup and allows to modify its participants +class OpportunityMeetupPage + super OpportunityPage + + # Meetup the page is supposed to show + var meetup: nullable Meetup = null + # Answer mode for the meetup + var mode = 0 + + init from_id(id: String) do + var db = new OpportunityDB.open("opportunity") + meetup = db.find_meetup_by_id(id) + db.close + if meetup != null then mode = meetup.answer_mode + init + end + + init do + header.page_js = "mode = {mode};\n" + header.page_js += """ + function update_scores(){ + var anss = $('.answer'); + var count = {}; + var scores = {}; + var answers = []; + var maxscore = 0; + for(i=0; i < anss.length; i++){ + var incscore = 0; + var inccount = 0; + var idparts = anss[i].id.split("_"); + var ansid = idparts[1]; + var html = anss[i].innerHTML; + if(html === "
"){ + inccount = 1; + incscore = 2; + }else if(html === "
"){ + incscore = 1; + } + var intansid = parseInt(ansid) + if(answers.indexOf(intansid) == -1){ + answers.push(intansid); + } + if(ansid in count){ + count[ansid] += inccount; + }else{ + count[ansid] = inccount; + } + if(ansid in scores){ + scores[ansid] += incscore; + }else{ + scores[ansid] = incscore; + } + if(scores[ansid] > maxscore){ + maxscore = scores[ansid]; + } + } + for(i=0; i < answers.length; i++){ + var ansid = answers[i].toString(); + var el = $('#total'+ansid)[0]; + var ins = "
"+count[ansid]; + if(scores[ansid] >= maxscore){ + ins += "
"; + } + ins += "
"; + el.innerHTML = ins; + } + } + function change_answer(ele, id){ + // modify only the currently selected entry + if (in_modification_id != id) return; + + var e = document.getElementById(ele.id); + var i = e.innerHTML; + var ans = true;""" + if mode == 0 then + header.page_js += """ + if(i === "
"){ + ans = 0; + e.innerHTML = "
" + e.style.color = "red"; + }else{ + ans = 1; + e.innerHTML = "
"; + e.style.color = "green"; + }""" + + else + header.page_js += """ + if(i === "
"){ + ans = 1; + e.innerHTML = "
" + e.style.color = "#B8860B"; + }else if(i === "
"){ + ans = 0; + e.innerHTML = "
" + e.style.color = "red"; + }else{ + ans = 2; + e.innerHTML = "
"; + e.style.color = "green"; + }""" + end + header.page_js += """ + var a = ele.id.split('_') + var pid = a[1] + var aid = a[2] + update_scores(); + $.ajax({ + type: "POST", + url: "./rest/answer", + data: { + answer_id: aid, + pers_id: pid, + answer: ans + } + }); + } + function change_temp_answer(ele){ + var e = document.getElementById(ele.id); + var i = e.innerHTML;""" + if mode == 0 then + header.page_js += """ + if(i === "
"){ + e.innerHTML = "
" + e.style.color = "red"; + }else{ + e.innerHTML = "
"; + e.style.color = "green"; + } + """ + else + header.page_js += """ + if(i === "
"){ + e.innerHTML = "
"; + e.style.color = "#B8860B"; + }else if(i === "
"){ + e.innerHTML = "
" + e.style.color = "red"; + }else{ + e.innerHTML = "
"; + e.style.color = "green"; + } + """ + end + header.page_js += """ + update_scores(); + } + function add_part(ele){ + var e = document.getElementById(ele.id); + var pname = document.getElementById("new_name").value; + var arr = e.id.split("_"); + var mid = arr[1]; + var ans = $('#' + ele.id).parent().parent().parent().children(".answer"); + ansmap = {}; + for(i=0;i✔"){ + ansmap[curr.attr('id')] = 1 + }else{ + ansmap[curr.attr('id')] = 0 + }""" + else + header.page_js += """ + if(curr[0].innerHTML === "
"){ + ansmap[curr.attr('id')] = 2 + }else if(curr[0].innerHTML === "
"){ + ansmap[curr.attr('id')] = 1 + }else{ + ansmap[curr.attr('id')] = 0 + }""" + end + header.page_js += """ + } + $.ajax({ + type: "POST", + url: "./rest/meetup/new_pers", + data: { + meetup_id: mid, + persname: pname, + answers: $.param(ansmap) + } + }) + .done(function(data){ + location.reload(); + }) + .fail(function(data){ + //TODO: Notify of failure + }); + } + function remove_people(ele){ + var arr = ele.id.split("_") + var pid = arr[1] + $('#' + ele.id).parent().parent().parent().remove(); + update_scores(); + $.ajax({ + type: "POST", + url: "./rest/people", + data: { + method: "DELETE", + p_id: pid + } + }); + } + // ID of line currently open for modification + var in_modification_id = null; + function modify_people(ele, id){ + if (in_modification_id != null) { + // reset to normal values + $('#modify_'+in_modification_id).text("Modify or delete"); + $('#modify_'+in_modification_id).attr("class", "btn btn-xs btn-warning"); + $('#line_'+in_modification_id).css("background-color", ""); + $('#delete_'+in_modification_id).css("display", "none"); + } + if (in_modification_id != id) { + // activate modifiable mode + $('#modify_'+id).text("Done"); + $('#modify_'+id).attr("class", "btn btn-xs btn-success"); + $('#line_'+id).css("background-color", "LightYellow"); + $('#delete_'+id).show(); + + in_modification_id = id; + } else { + in_modification_id = null; + } + } + """ + end + + redef fun rendering do + if meetup == null then + add((new OpportunityHomePage).write_to_string) + return + end + add header + var db = new OpportunityDB.open("opportunity") + add meetup.to_html(db) + db.close + add footer + end +end + +redef class Meetup + # Build the HTML for `self` + fun to_html(db: OpportunityDB): Streamable do + var t = new Template + t.add """ +
+ + +""" + t.add "" + for i in answers(db) do + t.add "" + end + t.add "" + t.add "" + for i in participants(db) do + i.load_answers(db, self) + t.add "" + t.add "" + for j, k in i.answers do + var color + if answer_mode == 0 then + if k == 1 then + color = "green" + else + color = "red" + end + else + if k == 2 then + color = "green" + else if k == 1 then + color = "#B8860B" + else + color = "red" + end + end + t.add """" + end + t.add """""" + t.add "" + end + t.add """ + + + """ + for i in answers(db) do + t.add "" + end + t.add """ + """ + t.add "" + # Compute score for each answer + var scores = new HashMap[Int, Int] + var maxsc = 0 + for i in answers(db) do + scores[i.id] = i.score(db) + if scores[i.id] > maxsc then maxsc = scores[i.id] + end + t.add """ + + + """ + for i in answers(db) do + t.add """" + end + t.add "" + t.add """ + +""" + t.add "
Participant name" + t.add i.to_s + t.add "
" + t.add i.to_s + t.add """" + t.add "
" + if answer_mode == 0 then + if k == 1 then + t.add "✔" + else + t.add "✘" + end + else + if k == 2 then + t.add "✔" + else if k == 1 then + t.add "❓" + else + t.add "✘" + end + end + t.add "
 """ + t.add """
Total
{{{i.count(db)}}}""" + if scores.has_key(i.id) and scores[i.id] >= maxsc then + t.add """
""" + end + t.add "
" + t.add "
" + return t + end +end diff --git a/samples/SQL/videodb.cql b/samples/SQL/videodb.cql new file mode 100644 index 00000000..ffa3170e --- /dev/null +++ b/samples/SQL/videodb.cql @@ -0,0 +1,85 @@ +CREATE KEYSPACE videodb WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }; + +use videodb; + +// Basic entity table +// Object mapping ? +CREATE TABLE users ( + username varchar, + firstname varchar, + lastname varchar, + email varchar, + password varchar, + created_date timestamp, + total_credits int, + credit_change_date timeuuid, + PRIMARY KEY (username) +); + +// One-to-many entity table +CREATE TABLE videos ( + videoid uuid, + videoname varchar, + username varchar, + description varchar, + tags list, + upload_date timestamp, + PRIMARY KEY (videoid) +); + +// One-to-many from the user point of view +// Also know as a lookup table +CREATE TABLE username_video_index ( + username varchar, + videoid uuid, + upload_date timestamp, + videoname varchar, + PRIMARY KEY (username, videoid) +); + +// Counter table +CREATE TABLE video_rating ( + videoid uuid, + rating_counter counter, + rating_total counter, + PRIMARY KEY (videoid) +); + +// Creating index tables for tab keywords +CREATE TABLE tag_index ( + tag varchar, + videoid uuid, + timestamp timestamp, + PRIMARY KEY (tag, videoid) +); + +// Comments as a many-to-many +// Looking from the video side to many users +CREATE TABLE comments_by_video ( + videoid uuid, + username varchar, + comment_ts timestamp, + comment varchar, + PRIMARY KEY (videoid,comment_ts,username) +) WITH CLUSTERING ORDER BY (comment_ts DESC, username ASC); + +// looking from the user side to many videos +CREATE TABLE comments_by_user ( + username varchar, + videoid uuid, + comment_ts timestamp, + comment varchar, + PRIMARY KEY (username,comment_ts,videoid) +) WITH CLUSTERING ORDER BY (comment_ts DESC, videoid ASC); + + +// Time series wide row with reverse comparator +CREATE TABLE video_event ( + videoid uuid, + username varchar, + event varchar, + event_timestamp timeuuid, + video_timestamp bigint, + PRIMARY KEY ((videoid,username), event_timestamp,event) +) WITH CLUSTERING ORDER BY (event_timestamp DESC,event ASC); + diff --git a/samples/SQL/videodb.ddl b/samples/SQL/videodb.ddl new file mode 100644 index 00000000..ffa3170e --- /dev/null +++ b/samples/SQL/videodb.ddl @@ -0,0 +1,85 @@ +CREATE KEYSPACE videodb WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }; + +use videodb; + +// Basic entity table +// Object mapping ? 
+CREATE TABLE users ( + username varchar, + firstname varchar, + lastname varchar, + email varchar, + password varchar, + created_date timestamp, + total_credits int, + credit_change_date timeuuid, + PRIMARY KEY (username) +); + +// One-to-many entity table +CREATE TABLE videos ( + videoid uuid, + videoname varchar, + username varchar, + description varchar, + tags list, + upload_date timestamp, + PRIMARY KEY (videoid) +); + +// One-to-many from the user point of view +// Also know as a lookup table +CREATE TABLE username_video_index ( + username varchar, + videoid uuid, + upload_date timestamp, + videoname varchar, + PRIMARY KEY (username, videoid) +); + +// Counter table +CREATE TABLE video_rating ( + videoid uuid, + rating_counter counter, + rating_total counter, + PRIMARY KEY (videoid) +); + +// Creating index tables for tab keywords +CREATE TABLE tag_index ( + tag varchar, + videoid uuid, + timestamp timestamp, + PRIMARY KEY (tag, videoid) +); + +// Comments as a many-to-many +// Looking from the video side to many users +CREATE TABLE comments_by_video ( + videoid uuid, + username varchar, + comment_ts timestamp, + comment varchar, + PRIMARY KEY (videoid,comment_ts,username) +) WITH CLUSTERING ORDER BY (comment_ts DESC, username ASC); + +// looking from the user side to many videos +CREATE TABLE comments_by_user ( + username varchar, + videoid uuid, + comment_ts timestamp, + comment varchar, + PRIMARY KEY (username,comment_ts,videoid) +) WITH CLUSTERING ORDER BY (comment_ts DESC, videoid ASC); + + +// Time series wide row with reverse comparator +CREATE TABLE video_event ( + videoid uuid, + username varchar, + event varchar, + event_timestamp timeuuid, + video_timestamp bigint, + PRIMARY KEY ((videoid,username), event_timestamp,event) +) WITH CLUSTERING ORDER BY (event_timestamp DESC,event ASC); + diff --git a/samples/Text/messages.fr b/samples/Text/messages.fr index da0269bb..c8108873 100644 --- a/samples/Text/messages.fr +++ b/samples/Text/messages.fr @@ -1 +1,2 @@ the green potato=la pomme de terre verte +le nouveau type de musique=the new type of music diff --git a/script/download-grammars b/script/convert-grammars similarity index 76% rename from script/download-grammars rename to script/convert-grammars index 3792f451..7c45a7c8 100755 --- a/script/download-grammars +++ b/script/convert-grammars @@ -2,8 +2,10 @@ require 'json' require 'net/http' +require 'optparse' require 'plist' require 'set' +require 'thread' require 'tmpdir' require 'uri' require 'yaml' @@ -13,6 +15,13 @@ GRAMMARS_PATH = File.join(ROOT, "grammars") SOURCES_FILE = File.join(ROOT, "grammars.yml") CSONC = File.join(ROOT, "node_modules", ".bin", "csonc") +$options = { + :add => false, + :install => true, + :output => SOURCES_FILE, + :remote => true, +} + class SingleFile def initialize(path) @path = path @@ -35,7 +44,7 @@ class DirectoryPackage path.split('/')[-2] == 'Syntaxes' when '.tmlanguage' true - when '.cson' + when '.cson', '.json' path.split('/')[-2] == 'grammars' else false @@ -143,22 +152,24 @@ def load_grammar(path) cson = `"#{CSONC}" "#{path}"` raise "Failed to convert CSON grammar '#{path}': #{$?.to_s}" unless $?.success? 
JSON.parse(cson) + when '.json' + JSON.parse(File.read(path)) else raise "Invalid document type #{path}" end end -def install_grammar(tmp_dir, source, all_scopes) +def load_grammars(tmp_dir, source, all_scopes) is_url = source.start_with?("http:", "https:") - is_single_file = source.end_with?('.tmLanguage', '.plist') + return [] if is_url && !$options[:remote] p = if !is_url - if is_single_file - SingleFile.new(source) - else + if File.directory?(source) DirectoryPackage.new(source) + else + SingleFile.new(source) end - elsif is_single_file + elsif source.end_with?('.tmLanguage', '.plist') SingleGrammar.new(source) elsif source.start_with?('https://github.com') GitHubPackage.new(source) @@ -172,9 +183,7 @@ def install_grammar(tmp_dir, source, all_scopes) raise "Unsupported source: #{source}" unless p - installed = [] - - p.fetch(tmp_dir).each do |path| + p.fetch(tmp_dir).map do |path| grammar = load_grammar(path) scope = grammar['scopeName'] @@ -184,13 +193,21 @@ def install_grammar(tmp_dir, source, all_scopes) " Previous package: #{all_scopes[scope]}" next end - - File.write(File.join(GRAMMARS_PATH, "#{scope}.json"), JSON.pretty_generate(grammar)) all_scopes[scope] = p.url + grammar + end +end + +def install_grammars(grammars, path) + installed = [] + + grammars.each do |grammar| + scope = grammar['scopeName'] + File.write(File.join(GRAMMARS_PATH, "#{scope}.json"), JSON.pretty_generate(grammar)) installed << scope end - $stderr.puts("OK #{p.url} (#{installed.join(', ')})") + $stderr.puts("OK #{path} (#{installed.join(', ')})") end def run_thread(queue, all_scopes) @@ -206,7 +223,8 @@ def run_thread(queue, all_scopes) dir = "#{tmpdir}/#{index}" Dir.mkdir(dir) - install_grammar(dir, source, all_scopes) + grammars = load_grammars(dir, source, all_scopes) + install_grammars(grammars, source) if $options[:install] end end end @@ -217,7 +235,7 @@ def generate_yaml(all_scopes, base) out[value] << key end - yaml = yaml.sort.to_h + yaml = Hash[yaml.sort] yaml.each { |k, v| v.sort! } yaml end @@ -232,9 +250,10 @@ def main(sources) all_scopes = {} - if ARGV[0] == '--add' + if source = $options[:add] Dir.mktmpdir do |tmpdir| - install_grammar(tmpdir, ARGV[1], all_scopes) + grammars = load_grammars(tmpdir, source, all_scopes) + install_grammars(grammars, source) if $options[:install] end generate_yaml(all_scopes, sources) else @@ -252,12 +271,34 @@ def main(sources) end end +OptionParser.new do |opts| + opts.banner = "Usage: #{$0} [options]" + + opts.on("--add GRAMMAR", "Add a new grammar. GRAMMAR may be a file path or URL.") do |a| + $options[:add] = a + end + + opts.on("--[no-]install", "Install grammars into grammars/ directory.") do |i| + $options[:install] = i + end + + opts.on("--output FILE", "Write output to FILE. Use - for stdout.") do |o| + $options[:output] = o == "-" ? $stdout : o + end + + opts.on("--[no-]remote", "Download remote grammars.") do |r| + $options[:remote] = r + end +end.parse! + sources = File.open(SOURCES_FILE) do |file| YAML.load(file) end yaml = main(sources) -File.write(SOURCES_FILE, YAML.dump(yaml)) - -$stderr.puts("Done") +if $options[:output].is_a?(IO) + $options[:output].write(YAML.dump(yaml)) +else + File.write($options[:output], YAML.dump(yaml)) +end diff --git a/script/travis/before_install b/script/travis/before_install new file mode 100755 index 00000000..442f6718 --- /dev/null +++ b/script/travis/before_install @@ -0,0 +1,20 @@ +#!/bin/sh + +set -ex + +# Fetch all commits/refs needed to run our tests. 
+git fetch origin master:master v2.0.0:v2.0.0 test/attributes:test/attributes test/master:test/master + +script/vendor-deb libicu48 libicu-dev +if ruby -e 'exit RUBY_VERSION >= "2.0" && RUBY_VERSION < "2.1"'; then + # Workaround for https://bugs.ruby-lang.org/issues/8074. We can't use this + # solution on all versions of Ruby due to + # https://github.com/bundler/bundler/pull/3338. + bundle config build.charlock_holmes --with-icu-include=$(pwd)/vendor/debs/include --with-icu-lib=$(pwd)/vendor/debs/lib +else + bundle config build.charlock_holmes --with-icu-dir=$(pwd)/vendor/debs +fi + +git submodule init +git submodule sync --quiet +script/fast-submodule-update diff --git a/script/vendor-deb b/script/vendor-deb new file mode 100755 index 00000000..ebad3e76 --- /dev/null +++ b/script/vendor-deb @@ -0,0 +1,13 @@ +#!/bin/sh + +set -ex + +cd "$(dirname "$0")/.." + +mkdir -p vendor/apt vendor/debs + +(cd vendor/apt && apt-get --assume-yes download "$@") + +for deb in vendor/apt/*.deb; do + ar p $deb data.tar.gz | tar -vzxC vendor/debs --strip-components=2 +done diff --git a/test/test_grammars.rb b/test/test_grammars.rb index 57483348..f010b79b 100644 --- a/test/test_grammars.rb +++ b/test/test_grammars.rb @@ -3,6 +3,14 @@ require_relative "./helper" class TestGrammars < Minitest::Test ROOT = File.expand_path("../..", __FILE__) + # These grammars have no license but have been grandfathered in. New grammars + # must have a license that allows redistribution. + UNLICENSED_GRAMMARS_WHITELIST = %w[ + vendor/grammars/Sublime-Lasso + vendor/grammars/Sublime-REBOL + vendor/grammars/x86-assembly-textmate-bundle + ].freeze + def setup @grammars = YAML.load(File.read(File.join(ROOT, "grammars.yml"))) end @@ -14,12 +22,11 @@ class TestGrammars < Minitest::Test end def test_submodules_are_in_sync - submodules = `git config --list --file "#{File.join(ROOT, ".gitmodules")}"`.lines.grep(/\.path=/).map { |line| line.chomp.split("=", 2).last } # Strip off paths inside the submodule so that just the submodule path remains. listed_submodules = @grammars.keys.grep(/vendor\/grammars/).map { |source| source[%r{vendor/grammars/[^/]+}] } - nonexistent_submodules = listed_submodules - submodules - unlisted_submodules = submodules - listed_submodules + nonexistent_submodules = listed_submodules - submodule_paths + unlisted_submodules = submodule_paths - listed_submodules message = "" unless nonexistent_submodules.empty? @@ -36,4 +43,94 @@ class TestGrammars < Minitest::Test assert nonexistent_submodules.empty? && unlisted_submodules.empty?, message end + + def test_local_scopes_are_in_sync + actual = YAML.load(`"#{File.join(ROOT, "script", "convert-grammars")}" --output - --no-install --no-remote`) + assert $?.success?, "script/convert-grammars failed" + + # We're not checking remote grammars. That can take a long time and make CI + # flaky if network conditions are poor. + @grammars.delete_if { |k, v| k.start_with?("http:", "https:") } + + @grammars.each do |k, v| + assert_equal v, actual[k], "The scopes listed for #{k} in grammars.yml don't match the scopes found in that repository" + end + end + + def test_submodules_have_licenses + categories = submodule_paths.group_by do |submodule| + files = Dir[File.join(ROOT, submodule, "*")] + license = files.find { |path| File.basename(path) =~ /\blicen[cs]e\b/i } || files.find { |path| File.basename(path) =~ /\bcopying\b/i } + if license.nil? 
+ if readme = files.find { |path| File.basename(path) =~ /\Areadme\b/i } + license = readme if File.read(readme) =~ /\blicen[cs]e\b/i + end + end + if license.nil? + :unlicensed + elsif classify_license(license) + :licensed + else + :unrecognized + end + end + + unlicensed = categories[:unlicensed] || [] + unrecognized = categories[:unrecognized] || [] + disallowed_unlicensed = unlicensed - UNLICENSED_GRAMMARS_WHITELIST + disallowed_unrecognized = unrecognized - UNLICENSED_GRAMMARS_WHITELIST + extra_whitelist_entries = UNLICENSED_GRAMMARS_WHITELIST - (unlicensed | unrecognized) + + message = "" + if disallowed_unlicensed.any? + message << "The following grammar submodules don't seem to have a license. All grammars must have a license that permits redistribution.\n" + message << disallowed_unlicensed.sort.join("\n") + end + if disallowed_unrecognized.any? + message << "\n\n" unless message.empty? + message << "The following grammar submodules have an unrecognized license. Please update #{__FILE__} to recognize the license.\n" + message << disallowed_unrecognized.sort.join("\n") + end + if extra_whitelist_entries.any? + message << "\n\n" unless message.empty? + message << "The following grammar submodules are listed in UNLICENSED_GRAMMARS_WHITELIST but either have a license (yay!)\n" + message << "or have been removed from the repository. Please remove them from the whitelist.\n" + message << extra_whitelist_entries.sort.join("\n") + end + + assert disallowed_unlicensed.empty? && disallowed_unrecognized.empty? && extra_whitelist_entries.empty?, message + end + + private + + def submodule_paths + @submodule_paths ||= `git config --list --file "#{File.join(ROOT, ".gitmodules")}"`.lines.grep(/\.path=/).map { |line| line.chomp.split("=", 2).last } + end + + def classify_license(path) + content = File.read(path) + if content.include?("Apache License") && content.include?("2.0") + "Apache 2.0" + elsif content.include?("GNU") && content =~ /general/i && content =~ /public/i + if content =~ /version 2/i + "GPLv2" + elsif content =~ /version 3/i + "GPLv3" + end + elsif content.include?("GPL") && content.include?("http://www.gnu.org/licenses/gpl.html") + "GPLv3" + elsif content.include?("Creative Commons") + "CC" + elsif content.include?("tidy-license.txt") || content.include?("If not otherwise specified (see below)") + "textmate" + elsif content =~ /^\s*[*-]\s+Redistribution/ || content.include?("Redistributions of source code") + "BSD" + elsif content.include?("Permission is hereby granted") || content =~ /\bMIT\b/ + "MIT" + elsif content.include?("unlicense.org") + "unlicense" + elsif content.include?("http://www.wtfpl.net/txt/copying/") + "WTFPL" + end + end end diff --git a/vendor/grammars/Agda.tmbundle b/vendor/grammars/Agda.tmbundle index 784f435f..68a218c4 160000 --- a/vendor/grammars/Agda.tmbundle +++ b/vendor/grammars/Agda.tmbundle @@ -1 +1 @@ -Subproject commit 784f435f09c126f6a070f751c678dd9dfd88c969 +Subproject commit 68a218c489c809655f74164f83b78d359a82989a diff --git a/vendor/grammars/IDL-Syntax b/vendor/grammars/IDL-Syntax index 9473b7fa..3baeaeaf 160000 --- a/vendor/grammars/IDL-Syntax +++ b/vendor/grammars/IDL-Syntax @@ -1 +1 @@ -Subproject commit 9473b7faaf709740d08d84823f9e86b2ad585f62 +Subproject commit 3baeaeafac9e30e8a4b0789105641e0b59da32a9 diff --git a/vendor/grammars/NimLime b/vendor/grammars/NimLime index 7a2fb4e7..a7067c60 160000 --- a/vendor/grammars/NimLime +++ b/vendor/grammars/NimLime @@ -1 +1 @@ -Subproject commit 7a2fb4e73a293a009224c35fc421006869344439 +Subproject 
commit a7067c605b893585c056d32a20a1b953f100e138 diff --git a/vendor/grammars/SCSS.tmbundle b/vendor/grammars/SCSS.tmbundle index d6188e57..41475020 160000 --- a/vendor/grammars/SCSS.tmbundle +++ b/vendor/grammars/SCSS.tmbundle @@ -1 +1 @@ -Subproject commit d6188e579f0808d843e75728e6bd77a170a0d0ac +Subproject commit 41475020634fc07b5c03ff0dfaee64c2491fa32b diff --git a/vendor/grammars/Scalate.tmbundle b/vendor/grammars/Scalate.tmbundle index 4f85314f..0307535a 160000 --- a/vendor/grammars/Scalate.tmbundle +++ b/vendor/grammars/Scalate.tmbundle @@ -1 +1 @@ -Subproject commit 4f85314fca2ebe3641c678010489b80e81acb8ea +Subproject commit 0307535add076965c8cd438d0f4109bec7d68d2d diff --git a/vendor/grammars/Sublime-Nit b/vendor/grammars/Sublime-Nit new file mode 160000 index 00000000..7d8b3503 --- /dev/null +++ b/vendor/grammars/Sublime-Nit @@ -0,0 +1 @@ +Subproject commit 7d8b3503923edb3dd0fa0f8e0684dd33e7897446 diff --git a/vendor/grammars/Sublime-VimL b/vendor/grammars/Sublime-VimL index 6ab7e19a..366fdc64 160000 --- a/vendor/grammars/Sublime-VimL +++ b/vendor/grammars/Sublime-VimL @@ -1 +1 @@ -Subproject commit 6ab7e19a579c1842999a23db697851599a570915 +Subproject commit 366fdc64e3655207a0b2672f630d7167e6aac2c3 diff --git a/vendor/grammars/carto-atom b/vendor/grammars/carto-atom new file mode 160000 index 00000000..8086625a --- /dev/null +++ b/vendor/grammars/carto-atom @@ -0,0 +1 @@ +Subproject commit 8086625aa5deac4ccd7374644b89e715deec2f7f diff --git a/vendor/grammars/factor b/vendor/grammars/factor index 2dc55909..2453a785 160000 --- a/vendor/grammars/factor +++ b/vendor/grammars/factor @@ -1 +1 @@ -Subproject commit 2dc5590966a6212d1911eaba63d698e1c8b04e73 +Subproject commit 2453a785f73429786583684cf729625c7cf7b04b diff --git a/vendor/grammars/fsharpbinding b/vendor/grammars/fsharpbinding index af755c8b..99d2e9a5 160000 --- a/vendor/grammars/fsharpbinding +++ b/vendor/grammars/fsharpbinding @@ -1 +1 @@ -Subproject commit af755c8b01ddb9bd5abebb74f318f6a9183c55e4 +Subproject commit 99d2e9a53924ae5ba850985f3df1dc8c11cb6731 diff --git a/vendor/grammars/haxe-sublime-bundle b/vendor/grammars/haxe-sublime-bundle index 58cad478..e2613bb1 160000 --- a/vendor/grammars/haxe-sublime-bundle +++ b/vendor/grammars/haxe-sublime-bundle @@ -1 +1 @@ -Subproject commit 58cad4780c3ce19a5219dfa38d25556f800ac726 +Subproject commit e2613bb12598d4ae2de5ba57af890ce910195fce diff --git a/vendor/grammars/language-clojure b/vendor/grammars/language-clojure index d649d9f5..bae6eee8 160000 --- a/vendor/grammars/language-clojure +++ b/vendor/grammars/language-clojure @@ -1 +1 @@ -Subproject commit d649d9f5b227722f5a35fd0e9430cb14325b8e83 +Subproject commit bae6eee8557c2158592ac485a7168ccd10fc6dfb diff --git a/vendor/grammars/language-coffee-script b/vendor/grammars/language-coffee-script index c6e8d337..d86c8963 160000 --- a/vendor/grammars/language-coffee-script +++ b/vendor/grammars/language-coffee-script @@ -1 +1 @@ -Subproject commit c6e8d33715fe883da8411b7149cfb3eb76d23698 +Subproject commit d86c8963dcee0ab811da05a175b2218045d0c124 diff --git a/vendor/grammars/language-gfm b/vendor/grammars/language-gfm index c6df027b..6af44a08 160000 --- a/vendor/grammars/language-gfm +++ b/vendor/grammars/language-gfm @@ -1 +1 @@ -Subproject commit c6df027b075654b5faadd8965b9bdc39276e3a45 +Subproject commit 6af44a08718668035f45270898389ae4fc8eeb8b diff --git a/vendor/grammars/language-hy b/vendor/grammars/language-hy new file mode 160000 index 00000000..f9750744 --- /dev/null +++ b/vendor/grammars/language-hy @@ -0,0 +1 @@ 
+Subproject commit f9750744ae4b8519314dff7d57abc527b91a0ef2 diff --git a/vendor/grammars/language-javascript b/vendor/grammars/language-javascript index 15dc5d1d..51575193 160000 --- a/vendor/grammars/language-javascript +++ b/vendor/grammars/language-javascript @@ -1 +1 @@ -Subproject commit 15dc5d1d8675ba7b2f6af77adf3e373e54a994d0 +Subproject commit 515751937df1d397b495e4a92fec5a0933994cdb diff --git a/vendor/grammars/language-python b/vendor/grammars/language-python index 476a3535..46072e32 160000 --- a/vendor/grammars/language-python +++ b/vendor/grammars/language-python @@ -1 +1 @@ -Subproject commit 476a353595caca2105c5984466390333036d0f11 +Subproject commit 46072e32e3060eb8e2fea98a106a86db89acc842 diff --git a/vendor/grammars/language-sass b/vendor/grammars/language-sass deleted file mode 160000 index 064a8b5a..00000000 --- a/vendor/grammars/language-sass +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 064a8b5a870afad877e06ce9774c1ef974471f6a diff --git a/vendor/grammars/language-shellscript b/vendor/grammars/language-shellscript index e2d62af1..98397197 160000 --- a/vendor/grammars/language-shellscript +++ b/vendor/grammars/language-shellscript @@ -1 +1 @@ -Subproject commit e2d62af11a07f6d65b1664456d85e747a31ac4b4 +Subproject commit 9839719721e3fb67c2df8461b2b296e6ff027e7f diff --git a/vendor/grammars/language-yaml b/vendor/grammars/language-yaml index eddd0793..ce8b4414 160000 --- a/vendor/grammars/language-yaml +++ b/vendor/grammars/language-yaml @@ -1 +1 @@ -Subproject commit eddd079347ee6854d37887168daff4a32688d8c2 +Subproject commit ce8b441467852f766a34d22ba6661099496e22e4 diff --git a/vendor/grammars/latex.tmbundle b/vendor/grammars/latex.tmbundle index 682c4b72..52b2251a 160000 --- a/vendor/grammars/latex.tmbundle +++ b/vendor/grammars/latex.tmbundle @@ -1 +1 @@ -Subproject commit 682c4b725c9504bf3c8c1d4efdd954a8d23800bc +Subproject commit 52b2251aab30577f4b3b3cc8997fb3c7d0e798ce diff --git a/vendor/grammars/mercury-tmlanguage b/vendor/grammars/mercury-tmlanguage index b5a4fd6e..eaef0b06 160000 --- a/vendor/grammars/mercury-tmlanguage +++ b/vendor/grammars/mercury-tmlanguage @@ -1 +1 @@ -Subproject commit b5a4fd6e400cef468dad18d92df65268a6d4a3e9 +Subproject commit eaef0b0643b2cea0d7d26056f2dd264c5a652be9 diff --git a/vendor/grammars/sass-textmate-bundle b/vendor/grammars/sass-textmate-bundle new file mode 160000 index 00000000..8444f979 --- /dev/null +++ b/vendor/grammars/sass-textmate-bundle @@ -0,0 +1 @@ +Subproject commit 8444f9796e7610f7f930e9ed7cae3093d5ce7805 diff --git a/vendor/grammars/sublime-mask b/vendor/grammars/sublime-mask index 2f59519f..6f12d284 160000 --- a/vendor/grammars/sublime-mask +++ b/vendor/grammars/sublime-mask @@ -1 +1 @@ -Subproject commit 2f59519ffdccfb15c63668951847f1c96b6e07c0 +Subproject commit 6f12d2841d008fb02eee912485cebcad7151d4f0 diff --git a/vendor/grammars/swift.tmbundle b/vendor/grammars/swift.tmbundle index 81a01641..3c7eac54 160000 --- a/vendor/grammars/swift.tmbundle +++ b/vendor/grammars/swift.tmbundle @@ -1 +1 @@ -Subproject commit 81a01641457d544a3a16bb5410e5cafef2148ceb +Subproject commit 3c7eac54457aa8f953fa5263cb34ec4dc9555217
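
For reference, a sketch of how the reworked script/convert-grammars introduced in this diff can be invoked. The flags come from its OptionParser block above; the grammar path reuses the placeholder from CONTRIBUTING.md, and the exact combinations shown are only examples, not the script's sole supported usage.

    # Regenerate grammars.yml and the grammars/ directory from all vendored sources
    # (this is what the Rakefile's build_grammars_gem task runs).
    script/convert-grammars

    # Register a newly vendored grammar and update grammars.yml in place.
    script/convert-grammars --add vendor/grammars/MyGrammar

    # What test_local_scopes_are_in_sync runs: local submodules only,
    # no files written to grammars/, YAML emitted on stdout.
    script/convert-grammars --output - --no-install --no-remote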