mirror of https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00

Comparing v5.3.2...kivikakk/g (24 commits)
Commits in this comparison:

- adfb438a42
- 0ed1b821d7
- 1a769c4665
- e7e64bf39a
- e4b9430024
- a76805e40d
- 8d27845f8c
- 9a8ab45b6f
- e335d48625
- 4f46155c05
- 38901d51d2
- ded0dc74e0
- c5d1bb5370
- c8ca48856b
- 7be6fb0138
- 8c516655bc
- 9dceffce2f
- 33be70eb28
- 9c4dc3047c
- d8e5f3c965
- 71bf640a47
- c9b3d19c6f
- 0f4955e5d5
- d968b0e9ee
**.gitmodules** (vendored, 24 changes)

```diff
@@ -169,9 +169,6 @@
 [submodule "vendor/grammars/Agda.tmbundle"]
 	path = vendor/grammars/Agda.tmbundle
 	url = https://github.com/mokus0/Agda.tmbundle
-[submodule "vendor/grammars/Julia.tmbundle"]
-	path = vendor/grammars/Julia.tmbundle
-	url = https://github.com/JuliaEditorSupport/Julia.tmbundle
 [submodule "vendor/grammars/ooc.tmbundle"]
 	path = vendor/grammars/ooc.tmbundle
 	url = https://github.com/nilium/ooc.tmbundle
@@ -442,9 +439,6 @@
 [submodule "vendor/grammars/sublime-golo"]
 	path = vendor/grammars/sublime-golo
 	url = https://github.com/TypeUnsafe/sublime-golo
-[submodule "vendor/grammars/JSyntax"]
-	path = vendor/grammars/JSyntax
-	url = https://github.com/bcj/JSyntax
 [submodule "vendor/grammars/TXL"]
 	path = vendor/grammars/TXL
 	url = https://github.com/MikeHoffert/Sublime-Text-TXL-syntax
@@ -871,6 +865,9 @@
 [submodule "vendor/grammars/language-ballerina"]
 	path = vendor/grammars/language-ballerina
 	url = https://github.com/ballerinalang/plugin-vscode
+[submodule "vendor/grammars/language-yara"]
+	path = vendor/grammars/language-yara
+	url = https://github.com/blacktop/language-yara
 [submodule "vendor/grammars/language-ruby"]
 	path = vendor/grammars/language-ruby
 	url = https://github.com/atom/language-ruby
@@ -883,3 +880,18 @@
 [submodule "vendor/grammars/wdl-sublime-syntax-highlighter"]
 	path = vendor/grammars/wdl-sublime-syntax-highlighter
 	url = https://github.com/broadinstitute/wdl-sublime-syntax-highlighter
+[submodule "vendor/grammars/atom-language-julia"]
+	path = vendor/grammars/atom-language-julia
+	url = https://github.com/JuliaEditorSupport/atom-language-julia
+[submodule "vendor/grammars/language-cwl"]
+	path = vendor/grammars/language-cwl
+	url = https://github.com/manabuishii/language-cwl
+[submodule "vendor/grammars/Syntax-highlighting-for-PostCSS"]
+	path = vendor/grammars/Syntax-highlighting-for-PostCSS
+	url = https://github.com/hudochenkov/Syntax-highlighting-for-PostCSS
+[submodule "vendor/grammars/javadoc.tmbundle"]
+	path = vendor/grammars/javadoc.tmbundle
+	url = https://github.com/textmate/javadoc.tmbundle
+[submodule "vendor/grammars/JSyntax"]
+	path = vendor/grammars/JSyntax
+	url = https://github.com/tikkanz/JSyntax
```
```diff
@@ -93,6 +93,7 @@ Linguist is maintained with :heart: by:
 - **@BenEddy** (GitHub staff)
 - **@Caged** (GitHub staff)
 - **@grantr** (GitHub staff)
+- **@kivikakk** (GitHub staff)
 - **@larsbrinkhoff**
 - **@lildude** (GitHub staff)
 - **@pchaigno**
```
**Rakefile** (9 changes)

```diff
@@ -47,21 +47,16 @@ task :samples => :compile do
   File.write 'lib/linguist/samples.json', json
 end
 
-FLEX_MIN_VER = [2, 5, 39]
 task :flex do
-  if `flex -V` !~ /^flex (\d+)\.(\d+)\.(\d+)/
+  if `flex -V` !~ /^flex \d+\.\d+\.\d+/
     fail "flex not detected"
   end
-  maj, min, rev = $1.to_i, $2.to_i, $3.to_i
-  if maj < FLEX_MIN_VER[0] || (maj == FLEX_MIN_VER[0] && (min < FLEX_MIN_VER[1] || (min == FLEX_MIN_VER[1] && rev < FLEX_MIN_VER[2])))
-    fail "building linguist's lexer requires at least flex #{FLEX_MIN_VER.join(".")}"
-  end
   system "cd ext/linguist && flex tokenizer.l"
 end
 
 task :build_gem => :samples do
   rm_rf "grammars"
-  sh "script/convert-grammars"
+  sh "script/grammar-compiler compile -o grammars || true"
   languages = YAML.load_file("lib/linguist/languages.yml")
   File.write("lib/linguist/languages.json", Yajl.dump(languages))
   `gem build github-linguist.gemspec`
```
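The deleted guard compared a (major, minor, revision) triple lexicographically by hand. For reference, Ruby's `Array#<=>` expresses the same check in one line; a minimal sketch (the method name is illustrative, not from the repo):

```ruby
# Sketch of the removed version guard, using Array comparison.
# Array#<=> compares element-wise, which is exactly the nested
# maj/min/rev comparison the old Rakefile spelled out by hand.
FLEX_MIN_VER = [2, 5, 39]

def flex_new_enough?(version_line)
  return false unless version_line =~ /^flex (\d+)\.(\d+)\.(\d+)/
  ([$1.to_i, $2.to_i, $3.to_i] <=> FLEX_MIN_VER) >= 0
end

flex_new_enough?("flex 2.6.4")  # => true
flex_new_enough?("flex 2.5.35") # => false
```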
```diff
@@ -117,9 +117,8 @@ def git_linguist(args)
   end
 
   parser.parse!(args)
 
   git_dir = `git rev-parse --git-dir`.strip
-  raise "git-linguist must be run in a Git repository (#{Dir.pwd})" unless $?.success?
+  raise "git-linguist must be run in a Git repository" unless $?.success?
   wrapper = GitLinguist.new(git_dir, commit, incremental)
-
   case args.pop
@@ -141,6 +140,10 @@ def git_linguist(args)
     $stderr.print(parser.help)
     exit 1
   end
+rescue Exception => e
+  $stderr.puts e.message
+  $stderr.puts e.backtrace
+  exit 1
 end
 
 git_linguist(ARGV)
```
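The second hunk hangs a `rescue` clause directly off the method body: in Ruby, `def ... rescue ... end` catches anything raised between them without an explicit `begin`. A self-contained demonstration of the pattern (names are illustrative):

```ruby
# def-level rescue: no explicit begin/end needed.
def main(args)
  raise ArgumentError, "boom" if args.empty?
  puts "ok"
rescue Exception => e
  $stderr.puts e.message
  exit 1
end

main([])   # prints "boom" to stderr, then exits with status 1
```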
File diff suppressed because it is too large
```diff
@@ -11,7 +11,7 @@
 #define FLEX_SCANNER
 #define YY_FLEX_MAJOR_VERSION 2
 #define YY_FLEX_MINOR_VERSION 5
-#define YY_FLEX_SUBMINOR_VERSION 39
+#define YY_FLEX_SUBMINOR_VERSION 35
 #if YY_FLEX_SUBMINOR_VERSION > 0
 #define FLEX_BETA
 #endif
@@ -49,6 +49,7 @@ typedef int16_t flex_int16_t;
 typedef uint16_t flex_uint16_t;
 typedef int32_t flex_int32_t;
 typedef uint32_t flex_uint32_t;
+typedef uint64_t flex_uint64_t;
 #else
 typedef signed char flex_int8_t;
 typedef short int flex_int16_t;
@@ -56,6 +57,7 @@ typedef int flex_int32_t;
 typedef unsigned char flex_uint8_t;
 typedef unsigned short int flex_uint16_t;
 typedef unsigned int flex_uint32_t;
+#endif /* ! C99 */
 
 /* Limits of integral types. */
 #ifndef INT8_MIN
@@ -86,8 +88,6 @@ typedef unsigned int flex_uint32_t;
 #define UINT32_MAX (4294967295U)
 #endif
 
-#endif /* ! C99 */
-
 #endif /* ! FLEXINT_H */
 
 #ifdef __cplusplus
@@ -130,15 +130,7 @@ typedef void* yyscan_t;
 
 /* Size of default input buffer. */
 #ifndef YY_BUF_SIZE
-#ifdef __ia64__
-/* On IA-64, the buffer size is 16k, not 8k.
- * Moreover, YY_BUF_SIZE is 2*YY_READ_BUF_SIZE in the general case.
- * Ditto for the __ia64__ case accordingly.
- */
-#define YY_BUF_SIZE 32768
-#else
 #define YY_BUF_SIZE 16384
-#endif /* __ia64__ */
 #endif
 
 #ifndef YY_TYPEDEF_YY_BUFFER_STATE
@@ -277,10 +269,6 @@ int linguist_yyget_lineno (yyscan_t yyscanner );
 
 void linguist_yyset_lineno (int line_number ,yyscan_t yyscanner );
 
-int linguist_yyget_column (yyscan_t yyscanner );
-
-void linguist_yyset_column (int column_no ,yyscan_t yyscanner );
-
 /* Macros after this point can all be overridden by user definitions in
  * section 1.
  */
@@ -307,12 +295,7 @@ static int yy_flex_strlen (yyconst char * ,yyscan_t yyscanner);
 
 /* Amount of stuff to slurp up with each read. */
 #ifndef YY_READ_BUF_SIZE
-#ifdef __ia64__
-/* On IA-64, the buffer size is 16k, not 8k */
-#define YY_READ_BUF_SIZE 16384
-#else
 #define YY_READ_BUF_SIZE 8192
-#endif /* __ia64__ */
 #endif
 
 /* Number of entries by which start-condition stack grows. */
@@ -345,9 +328,9 @@ extern int linguist_yylex (yyscan_t yyscanner);
 #undef YY_DECL
 #endif
 
-#line 117 "tokenizer.l"
+#line 118 "tokenizer.l"
 
 
-#line 352 "lex.linguist_yy.h"
+#line 335 "lex.linguist_yy.h"
 #undef linguist_yyIN_HEADER
 #endif /* linguist_yyHEADER_H */
```
```diff
@@ -2,6 +2,9 @@
 #include "linguist.h"
 #include "lex.linguist_yy.h"
 
+// Anything longer is unlikely to be useful.
+#define MAX_TOKEN_LEN 32
+
 int linguist_yywrap(yyscan_t yyscanner) {
 	return 1;
 }
@@ -32,19 +35,27 @@ static VALUE rb_tokenizer_extract_tokens(VALUE self, VALUE rb_data) {
 		case NO_ACTION:
 			break;
 		case REGULAR_TOKEN:
-			rb_ary_push(ary, rb_str_new2(extra.token));
+			len = strlen(extra.token);
+			if (len <= MAX_TOKEN_LEN)
+				rb_ary_push(ary, rb_str_new(extra.token, len));
 			free(extra.token);
 			break;
 		case SHEBANG_TOKEN:
-			s = rb_str_new2("SHEBANG#!");
-			rb_str_cat2(s, extra.token);
-			rb_ary_push(ary, s);
+			len = strlen(extra.token);
+			if (len <= MAX_TOKEN_LEN) {
+				s = rb_str_new2("SHEBANG#!");
+				rb_str_cat(s, extra.token, len);
+				rb_ary_push(ary, s);
+			}
 			free(extra.token);
 			break;
 		case SGML_TOKEN:
-			s = rb_str_new2(extra.token);
-			rb_str_cat2(s, ">");
-			rb_ary_push(ary, s);
+			len = strlen(extra.token);
+			if (len <= MAX_TOKEN_LEN) {
+				s = rb_str_new(extra.token, len);
+				rb_str_cat2(s, ">");
+				rb_ary_push(ary, s);
+			}
 			free(extra.token);
 			break;
 		}
```
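The effect of the new `MAX_TOKEN_LEN` cap is observable from the Ruby side through `Linguist::Tokenizer`: tokens longer than 32 bytes should be dropped outright rather than truncated. A sketch, assuming the native extension is built from this revision (the exact result depends on what else the tokenizer emits for the input):

```ruby
require "linguist"

long_token = "x" * 40  # longer than the new MAX_TOKEN_LEN of 32

Linguist::Tokenizer.tokenize("foo #{long_token} bar")
# => ["foo", "bar"]  (expected: the oversized token is skipped entirely)
```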
```diff
@@ -9,25 +9,25 @@
 
 #define eat_until_eol() do { \
 	int c; \
-	while ((c = input(yyscanner)) != '\n' && c != EOF); \
-	if (c == EOF) \
-		yyterminate(); \
+	while ((c = input(yyscanner)) != '\n' && c != EOF && c); \
+	if (c == EOF || !c) \
+		return 0; \
 } while (0)
 
 #define eat_until_unescaped(q) do { \
 	int c; \
-	while ((c = input(yyscanner)) != EOF) { \
+	while ((c = input(yyscanner)) != EOF && c) { \
 		if (c == '\n') \
 			break; \
 		if (c == '\\') { \
 			c = input(yyscanner); \
-			if (c == EOF) \
-				yyterminate(); \
+			if (c == EOF || !c) \
+				return 0; \
 		} else if (c == q) \
 			break; \
 	} \
-	if (c == EOF) \
-		yyterminate(); \
+	if (c == EOF || !c) \
+		return 0; \
 } while (0)
 
 %}
@@ -84,7 +84,7 @@
 \" { eat_until_unescaped('"'); }
 ' { eat_until_unescaped('\''); }
 (0x[0-9a-fA-F]([0-9a-fA-F]|\.)*|[0-9]([0-9]|\.)*)([uU][lL]{0,2}|([eE][-+][0-9]*)?[fFlL]*) { /* nothing */ }
-\<[^ \t\n\r<>]+/>|" "[^<>\n]{0,2048}> {
+\<[[:alnum:]_!./?-]+ {
 	if (strcmp(yytext, "<!--") == 0) {
 		BEGIN(xml_comment);
 	} else {
@@ -93,8 +93,8 @@
 		return 1;
 	}
 }
-<sgml>[[:alnum:]_]+=/\" { feed_token(strdup(yytext), REGULAR_TOKEN); input(yyscanner); eat_until_unescaped('"'); return 1; }
-<sgml>[[:alnum:]_]+=/' { feed_token(strdup(yytext), REGULAR_TOKEN); input(yyscanner); eat_until_unescaped('\''); return 1; }
+<sgml>[[:alnum:]_]+=\" { feed_token(strndup(yytext, strlen(yytext) - 1), REGULAR_TOKEN); eat_until_unescaped('"'); return 1; }
+<sgml>[[:alnum:]_]+=' { feed_token(strndup(yytext, strlen(yytext) - 1), REGULAR_TOKEN); eat_until_unescaped('\''); return 1; }
 <sgml>[[:alnum:]_]+=[[:alnum:]_]* { feed_token(strdup(yytext), REGULAR_TOKEN); *(strchr(yyextra->token, '=') + 1) = 0; return 1; }
 <sgml>[[:alnum:]_]+ { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
 <sgml>\> { BEGIN(INITIAL); }
```
**grammars.yml** (17 changes)

```diff
@@ -1,4 +1,3 @@
----
 https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
 - source.systemverilog
 - source.ucfconstraints
@@ -45,8 +44,6 @@ vendor/grammars/Isabelle.tmbundle:
 - source.isabelle.theory
 vendor/grammars/JSyntax:
 - source.j
-vendor/grammars/Julia.tmbundle:
-- source.julia
 vendor/grammars/Lean.tmbundle:
 - source.lean
 vendor/grammars/LiveScript.tmbundle:
@@ -130,6 +127,9 @@ vendor/grammars/SublimePuppet:
 - source.puppet
 vendor/grammars/SublimeXtend:
 - source.xtend
+vendor/grammars/Syntax-highlighting-for-PostCSS:
+- source.css.postcss.sugarss
+- source.postcss
 vendor/grammars/TLA:
 - source.tla
 vendor/grammars/TXL:
@@ -192,6 +192,9 @@ vendor/grammars/atom-language-1c-bsl:
 vendor/grammars/atom-language-clean:
 - source.clean
 - text.restructuredtext.clean
+vendor/grammars/atom-language-julia:
+- source.julia
+- source.julia.console
 vendor/grammars/atom-language-p4:
 - source.p4
 vendor/grammars/atom-language-perl6:
@@ -340,6 +343,8 @@ vendor/grammars/java.tmbundle:
 - source.java-properties
 - text.html.jsp
 - text.junit-test-report
+vendor/grammars/javadoc.tmbundle:
+- text.html.javadoc
 vendor/grammars/javascript-objective-j.tmbundle:
 - source.js.objj
 vendor/grammars/jflex.tmbundle:
@@ -386,6 +391,8 @@ vendor/grammars/language-csound:
 - source.csound-score
 vendor/grammars/language-css:
 - source.css
+vendor/grammars/language-cwl:
+- source.cwl
 vendor/grammars/language-emacs-lisp:
 - source.emacs.lisp
 vendor/grammars/language-fontforge:
@@ -500,6 +507,8 @@ vendor/grammars/language-yaml:
 - source.yaml
 vendor/grammars/language-yang:
 - source.yang
+vendor/grammars/language-yara:
+- source.yara
 vendor/grammars/latex.tmbundle:
 - text.bibtex
 - text.log.latex
@@ -566,7 +575,7 @@ vendor/grammars/opa.tmbundle:
 - source.opa
 vendor/grammars/openscad.tmbundle:
 - source.scad
-vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
+vendor/grammars/oz-tmbundle:
 - source.oz
 vendor/grammars/parrot:
 - source.parrot.pir
```
```diff
@@ -52,6 +52,8 @@ module Linguist
     # Return true or false
     def generated?
       xcode_file? ||
+        cocoapods? ||
+        carthage_build? ||
        generated_net_designer_file? ||
        generated_net_specflow_feature_file? ||
        composer_lock? ||
@@ -95,6 +97,20 @@ module Linguist
       ['.nib', '.xcworkspacedata', '.xcuserstate'].include?(extname)
     end
 
+    # Internal: Is the blob part of Pods/, which contains dependencies not meant for humans in pull requests.
+    #
+    # Returns true or false.
+    def cocoapods?
+      !!name.match(/(^Pods|\/Pods)\//)
+    end
+
+    # Internal: Is the blob part of Carthage/Build/, which contains dependencies not meant for humans in pull requests.
+    #
+    # Returns true or false.
+    def carthage_build?
+      !!name.match(/(^|\/)Carthage\/Build\//)
+    end
+
     # Internal: Is the blob minified files?
     #
     # Consider a file minified if the average line length is
```
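Both new predicates are one-line regexp tests over the blob's path, and the anchors are what keep look-alike directories from matching: `Pods/` must appear at the start of the path or right after a slash, and only `Carthage/Build/` counts, not `Carthage/Checkouts/`. A quick check with the same patterns (the paths are made-up examples):

```ruby
# cocoapods?: Pods/ at the start of the path or after a slash
/(^Pods|\/Pods)\//.match?("Pods/AFNetworking/AFNetworking.m") # => true
/(^Pods|\/Pods)\//.match?("app/Pods/Foo.m")                   # => true
/(^Pods|\/Pods)\//.match?("My-Pods/Foo.m")                    # => false

# carthage_build?: only Carthage/Build/ is considered generated
/(^|\/)Carthage\/Build\//.match?("src/Carthage/Build/iOS/X.framework") # => true
/(^|\/)Carthage\/Build\//.match?("Carthage/Checkouts/X/file.swift")    # => false
```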
```diff
@@ -73,7 +73,6 @@ module Linguist
     end
 
     # Common heuristics
-    ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
     CPlusPlusRegex = Regexp.union(
       /^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/,
       /^\s*template\s*</,
@@ -82,6 +81,9 @@ module Linguist
       /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/,
       /^[ \t]*(private|public|protected):$/,
       /std::\w+/)
+    ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
+    Perl5Regex = /\buse\s+(?:strict\b|v?5\.)/
+    Perl6Regex = /^\s*(?:use\s+v6\b|\bmodule\b|\b(?:my\s+)?class\b)/
 
     disambiguate ".as" do |data|
       if /^\s*(package\s+[a-z0-9_\.]+|import\s+[a-zA-Z0-9_\.]+;|class\s+[A-Za-z0-9_]+\s+extends\s+[A-Za-z0-9_]+)/.match(data)
@@ -359,17 +361,17 @@ module Linguist
     disambiguate ".pl" do |data|
       if /^[^#]*:-/.match(data)
         Language["Prolog"]
-      elsif /use strict|use\s+v?5\./.match(data)
+      elsif Perl5Regex.match(data)
         Language["Perl"]
-      elsif /^(use v6|(my )?class|module)/.match(data)
+      elsif Perl6Regex.match(data)
         Language["Perl 6"]
       end
     end
 
     disambiguate ".pm" do |data|
-      if /\buse\s+(?:strict\b|v?5\.)/.match(data)
+      if Perl5Regex.match(data)
         Language["Perl"]
-      elsif /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
+      elsif Perl6Regex.match(data)
         Language["Perl 6"]
       elsif /^\s*\/\* XPM \*\//.match(data)
         Language["XPM"]
@@ -377,7 +379,7 @@ module Linguist
     end
 
     disambiguate ".pro" do |data|
-      if /^[^#]+:-/.match(data)
+      if /^[^\[#]+:-/.match(data)
         Language["Prolog"]
       elsif data.include?("last_client=")
         Language["INI"]
@@ -459,12 +461,12 @@ module Linguist
     end
 
     disambiguate ".t" do |data|
-      if /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
-        Language["Turing"]
-      elsif /^\s*(?:use\s+v6\s*;|\bmodule\b|\b(?:my\s+)?class\b)/.match(data)
-        Language["Perl 6"]
-      elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
+      if Perl5Regex.match(data)
         Language["Perl"]
+      elsif Perl6Regex.match(data)
+        Language["Perl 6"]
+      elsif /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
+        Language["Turing"]
       end
     end
 
@@ -477,7 +479,7 @@ module Linguist
     end
 
     disambiguate ".ts" do |data|
-      if data.include?("<TS")
+      if /<TS\b/.match(data)
         Language["XML"]
       else
         Language["TypeScript"]
```
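The Perl tests that were previously repeated inline are now shared `Perl5Regex`/`Perl6Regex` constants, and the `.pro` pattern additionally excludes `[` so a file opening with an INI-style section header no longer looks like Prolog. A quick check of both, with the regexes copied from the diff and made-up sample lines:

```ruby
Perl5Regex = /\buse\s+(?:strict\b|v?5\.)/
Perl6Regex = /^\s*(?:use\s+v6\b|\bmodule\b|\b(?:my\s+)?class\b)/

Perl5Regex.match?("use strict;")          # => true
Perl5Regex.match?("use v5.10;")           # => true
Perl6Regex.match?("use v6;")              # => true

/^[^#]+:-/.match?("[general] x :- y")     # => true  (old .pro pattern)
/^[^\[#]+:-/.match?("[general] x :- y")   # => false (new .pro pattern)
```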
```diff
@@ -110,7 +110,7 @@ module Linguist
     # Returns the Language or nil if none was found.
     def self.find_by_name(name)
       return nil if !name.is_a?(String) || name.to_s.empty?
-      name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
+      name && (@name_index[name.downcase] || @name_index[name.split(',', 2).first.downcase])
     end
 
     # Public: Look up Language by one of its aliases.
@@ -125,7 +125,7 @@ module Linguist
     # Returns the Language or nil if none was found.
     def self.find_by_alias(name)
       return nil if !name.is_a?(String) || name.to_s.empty?
-      name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
+      name && (@alias_index[name.downcase] || @alias_index[name.split(',', 2).first.downcase])
     end
 
     # Public: Look up Languages by filename.
@@ -219,10 +219,7 @@ module Linguist
       lang = @index[name.downcase]
       return lang if lang
 
-      name = name.split(',').first
-      return nil if name.to_s.empty?
-
-      @index[name.downcase]
+      @index[name.split(',', 2).first.downcase]
     end
 
     # Public: A List of popular languages
```
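`split(',')` fragments the whole string even though only the first segment is ever consulted; passing a limit of 2 stops at the first comma. For example:

```ruby
name = "C, Assembly, Python"

name.split(',')     # => ["C", " Assembly", " Python"]
name.split(',', 2)  # => ["C", " Assembly, Python"]

# Only element 0 is used, so the limit avoids needless splitting:
name.split(',', 2).first.downcase  # => "c"
```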
```diff
@@ -807,6 +807,19 @@ Common Lisp:
   codemirror_mode: commonlisp
   codemirror_mime_type: text/x-common-lisp
   language_id: 66
+Common Workflow Language:
+  alias: cwl
+  type: programming
+  ace_mode: yaml
+  codemirror_mode: yaml
+  codemirror_mime_type: text/x-yaml
+  extensions:
+  - ".cwl"
+  interpreters:
+  - cwl-runner
+  color: "#B5314C"
+  tm_scope: source.cwl
+  language_id: 988547172
 Component Pascal:
   type: programming
   color: "#B0CE4E"
@@ -1147,7 +1160,7 @@ Ecere Projects:
 Edje Data Collection:
   type: data
   extensions:
   - ".edc"
   tm_scope: source.json
   ace_mode: json
   codemirror_mode: javascript
@@ -2245,9 +2258,9 @@ Kotlin:
   language_id: 189
 LFE:
   type: programming
+  color: "#4C3023"
   extensions:
   - ".lfe"
-  group: Erlang
   tm_scope: source.lisp
   ace_mode: lisp
   codemirror_mode: commonlisp
@@ -3427,6 +3440,14 @@ Pony:
   tm_scope: source.pony
   ace_mode: text
   language_id: 290
+PostCSS:
+  type: markup
+  tm_scope: source.postcss
+  group: CSS
+  extensions:
+  - ".pcss"
+  ace_mode: text
+  language_id: 262764437
 PostScript:
   type: markup
   color: "#da291c"
@@ -3592,6 +3613,7 @@ Python:
   - ".gclient"
   - BUCK
   - BUILD
+  - BUILD.bazel
   - SConscript
   - SConstruct
   - Snakefile
@@ -4413,6 +4435,14 @@ Sublime Text Config:
   - ".sublime_metrics"
   - ".sublime_session"
   language_id: 423
+SugarSS:
+  type: markup
+  tm_scope: source.css.postcss.sugarss
+  group: CSS
+  extensions:
+  - ".sss"
+  ace_mode: text
+  language_id: 826404698
 SuperCollider:
   type: programming
   color: "#46390b"
@@ -5119,6 +5149,14 @@ YANG:
   tm_scope: source.yang
   ace_mode: text
   language_id: 408
+YARA:
+  type: data
+  ace_mode: text
+  extensions:
+  - ".yar"
+  - ".yara"
+  tm_scope: source.yara
+  language_id: 805122868
 Yacc:
   type: programming
   extensions:
```
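Once these entries are registered, the new languages resolve through Linguist's normal lookup APIs. A hypothetical usage sketch, assuming a gem built from this revision:

```ruby
require "linguist"

Linguist::Language["YARA"].language_id        # => 805122868
Linguist::Language.find_by_alias("cwl").name  # => "Common Workflow Language"
Linguist::Language.find_by_extension("sample.pcss").map(&:name)
# => ["PostCSS"]
```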
```diff
@@ -19,9 +19,7 @@
 - (^|/)dist/
 
 # C deps
-# https://github.com/joyent/node
 - ^deps/
-- ^tools/
 - (^|/)configure$
 - (^|/)config.guess$
 - (^|/)config.sub$
@@ -65,6 +63,7 @@
 
 # Font Awesome
 - (^|/)font-awesome\.(css|less|scss|styl)$
+- (^|/)font-awesome/.*\.(css|less|scss|styl)$
 
 # Foundation css
 - (^|/)foundation\.(css|less|scss|styl)$
@@ -242,10 +241,7 @@
 - \.imageset/
 
 # Carthage
-- ^Carthage/
-
-# Cocoapods
-- ^Pods/
+- (^|/)Carthage/
 
 # Sparkle
 - (^|/)Sparkle/
```
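The Carthage swap matters because these entries are regular expressions matched against the full blob path: `^Carthage/` only hits a directory at the repository root, while `(^|/)Carthage/` also catches nested ones. For example (illustrative paths):

```ruby
/^Carthage\//.match?("iOS/Carthage/Foo.framework")       # => false
/(^|\/)Carthage\//.match?("iOS/Carthage/Foo.framework")  # => true
/(^|\/)Carthage\//.match?("My-Carthage/Foo.framework")   # => false
```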
```diff
@@ -1,3 +1,3 @@
 module Linguist
-  VERSION = "5.3.2"
+  VERSION = "5.3.3"
 end
```
```diff
@@ -1,7 +0,0 @@
-{
-  "repository": "https://github.com/github/linguist",
-  "dependencies": {
-    "season": "~>5.4"
-  },
-  "license": "MIT"
-}
```
**samples/Common Workflow Language/trunk-peak-score.cwl** (new file, 36 lines)

```diff
@@ -0,0 +1,36 @@
+#!/usr/bin/env cwl-runner
+# Originally from
+# https://github.com/Duke-GCB/GGR-cwl/blob/54e897263a702ff1074c8ac814b4bf7205d140dd/utils/trunk-peak-score.cwl
+# Released under the MIT License:
+# https://github.com/Duke-GCB/GGR-cwl/blob/54e897263a702ff1074c8ac814b4bf7205d140dd/LICENSE
+# Converted to CWL v1.0 syntax using
+# https://github.com/common-workflow-language/cwl-upgrader
+# and polished by Michael R. Crusoe <mrc@commonwl.org>
+# All modifications also released under the MIT License
+cwlVersion: v1.0
+class: CommandLineTool
+doc: Trunk scores in ENCODE bed6+4 files
+
+hints:
+  DockerRequirement:
+    dockerPull: dukegcb/workflow-utils
+
+inputs:
+  peaks:
+    type: File
+  sep:
+    type: string
+    default: \t
+
+outputs:
+  trunked_scores_peaks:
+    type: stdout
+
+baseCommand: awk
+
+arguments:
+  - -F $(inputs.sep)
+  - BEGIN{OFS=FS}$5>1000{$5=1000}{print}
+  - $(inputs.peaks.path)
+
+stdout: $(inputs.peaks.nameroot).trunked_scores$(inputs.peaks.nameext)
```
**samples/PostCSS/sample.pcss** (new file, 13 lines)

```diff
@@ -0,0 +1,13 @@
+@define-mixin size $size {
+  width: $size;
+}
+
+$big: 100px;
+
+/* Main block */
+.block {
+  &_logo {
+    background: inline("./logo.png");
+    @mixin size $big;
+  }
+}
```
**samples/SugarSS/sample.sss** (new file, 10 lines)

```diff
@@ -0,0 +1,10 @@
+@define-mixin size $size
+  width: $size
+
+$big: 100px
+
+// Main block
+.block
+  &_logo
+    background: inline("./logo.png")
+    @mixin size $big
```
**samples/TypeScript/cache.ts** (new file, 102 lines)

```diff
@@ -0,0 +1,102 @@
+import { DocumentNode } from 'graphql';
+import { getFragmentQueryDocument } from 'apollo-utilities';
+
+import { DataProxy, Cache } from './types';
+
+export type Transaction<T> = (c: ApolloCache<T>) => void;
+
+export abstract class ApolloCache<TSerialized> implements DataProxy {
+  // required to implement
+  // core API
+  public abstract read<T>(query: Cache.ReadOptions): T;
+  public abstract write(write: Cache.WriteOptions): void;
+  public abstract diff<T>(query: Cache.DiffOptions): Cache.DiffResult<T>;
+  public abstract watch(watch: Cache.WatchOptions): () => void;
+  public abstract evict(query: Cache.EvictOptions): Cache.EvictionResult;
+  public abstract reset(): Promise<void>;
+
+  // intializer / offline / ssr API
+  /**
+   * Replaces existing state in the cache (if any) with the values expressed by
+   * `serializedState`.
+   *
+   * Called when hydrating a cache (server side rendering, or offline storage),
+   * and also (potentially) during hot reloads.
+   */
+  public abstract restore(
+    serializedState: TSerialized,
+  ): ApolloCache<TSerialized>;
+
+  /**
+   * Exposes the cache's complete state, in a serializable format for later restoration.
+   */
+  public abstract extract(optimistic: boolean): TSerialized;
+
+  // optimistic API
+  public abstract removeOptimistic(id: string): void;
+
+  // transactional API
+  public abstract performTransaction(
+    transaction: Transaction<TSerialized>,
+  ): void;
+  public abstract recordOptimisticTransaction(
+    transaction: Transaction<TSerialized>,
+    id: string,
+  ): void;
+
+  // optional API
+  public transformDocument(document: DocumentNode): DocumentNode {
+    return document;
+  }
+  // experimental
+  public transformForLink(document: DocumentNode): DocumentNode {
+    return document;
+  }
+
+  // DataProxy API
+  /**
+   *
+   * @param options
+   * @param optimistic
+   */
+  public readQuery<QueryType>(
+    options: DataProxy.Query,
+    optimistic: boolean = false,
+  ): QueryType {
+    return this.read({
+      query: options.query,
+      variables: options.variables,
+      optimistic,
+    });
+  }
+
+  public readFragment<FragmentType>(
+    options: DataProxy.Fragment,
+    optimistic: boolean = false,
+  ): FragmentType | null {
+    return this.read({
+      query: getFragmentQueryDocument(options.fragment, options.fragmentName),
+      variables: options.variables,
+      rootId: options.id,
+      optimistic,
+    });
+  }
+
+  public writeQuery(options: Cache.WriteQueryOptions): void {
+    this.write({
+      dataId: 'ROOT_QUERY',
+      result: options.data,
+      query: options.query,
+      variables: options.variables,
+    });
+  }
+
+  public writeFragment(options: Cache.WriteFragmentOptions): void {
+    this.write({
+      dataId: options.id,
+      result: options.data,
+      variables: options.variables,
+      query: getFragmentQueryDocument(options.fragment, options.fragmentName),
+    });
+  }
+}
```
**samples/YARA/OfExample.yar** (new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+rule OfExample2
+{
+    strings:
+        $foo1 = "foo1"
+        $foo2 = "foo2"
+        $foo3 = "foo3"
+
+    condition:
+        2 of ($foo*) // equivalent to 2 of ($foo1,$foo2,$foo3)
+}
+
+rule OfExample3
+{
+    strings:
+        $foo1 = "foo1"
+        $foo2 = "foo2"
+
+        $bar1 = "bar1"
+        $bar2 = "bar2"
+
+    condition:
+        3 of ($foo*,$bar1,$bar2)
+}
```
**samples/YARA/example.yara** (new file, 13 lines)

```diff
@@ -0,0 +1,13 @@
+rule silent_banker : banker
+{
+    meta:
+        description = "This is just an example"
+        thread_level = 3
+        in_the_wild = true
+    strings:
+        $a = {6A 40 68 00 30 00 00 6A 14 8D 91}
+        $b = {8D 4D B0 2B C1 83 C0 27 99 6A 4E 59 F7 F9}
+        $c = "UVODFRYSIHLNWPEJXQZAKCBGMT"
+    condition:
+        $a or $b or $c
+}
```
**samples/YARA/true.yar** (new file, 1 line)

```diff
@@ -0,0 +1 @@
+rule test { condition: true }
```
```diff
@@ -1,6 +1,7 @@
 #!/usr/bin/env ruby
 
 require "optparse"
+require "open3"
 
 ROOT = File.expand_path("../../", __FILE__)
 
@@ -42,6 +43,17 @@ def log(msg)
   puts msg if $verbose
 end
 
+def command(*args)
+  log "$ #{args.join(' ')}"
+  output, status = Open3.capture2e(*args)
+  if !status.success?
+    output.each_line do |line|
+      log "  > #{line}"
+    end
+    warn "Command failed. Aborting."
+    exit 1
+  end
+end
 
 usage = """Usage:
   #{$0} [-v|--verbose] [--replace grammar] url
@@ -51,12 +63,12 @@ Examples:
 """
 
 $replace = nil
-$verbose = false
+$verbose = true
 
 OptionParser.new do |opts|
   opts.banner = usage
-  opts.on("-v", "--verbose", "Print verbose feedback to STDOUT") do
-    $verbose = true
+  opts.on("-q", "--quiet", "Do not print output unless there's a failure") do
+    $verbose = false
   end
   opts.on("-rSUBMODULE", "--replace=SUBMODDULE", "Replace an existing grammar submodule.") do |name|
     $replace = name
@@ -82,23 +94,22 @@ Dir.chdir(ROOT)
 
 if repo_old
   log "Deregistering: #{repo_old}"
-  `git submodule deinit #{repo_old}`
-  `git rm -rf #{repo_old}`
-  `script/convert-grammars`
+  command('git', 'submodule', 'deinit', repo_old)
+  command('git', 'rm', '-rf', repo_old)
+  command('script/grammar-compiler', 'update', '-f')
 end
 
 log "Registering new submodule: #{repo_new}"
-`git submodule add -f #{https} #{repo_new}`
-exit 1 if $?.exitstatus > 0
-`script/convert-grammars --add #{repo_new}`
+command('git', 'submodule', 'add', '-f', https, repo_new)
+command('script/grammar-compiler', 'add', repo_new)
 
 log "Confirming license"
 if repo_old
-  `script/licensed`
+  command('script/licensed')
 else
-  `script/licensed --module "#{repo_new}"`
+  command('script/licensed', '--module', repo_new)
 end
 
 log "Updating grammar documentation in vendor/README.md"
-`bundle exec rake samples`
-`script/list-grammars`
+command('bundle', 'exec', 'rake', 'samples')
+command('script/list-grammars')
```
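`Open3.capture2e` runs the command without a shell (so arguments need no quoting) and returns the interleaved stdout/stderr plus a `Process::Status`, which is what lets the new `command` helper stay quiet and only surface output on failure. A minimal usage sketch:

```ruby
require "open3"

# Returns [combined_stdout_and_stderr, Process::Status]
output, status = Open3.capture2e("git", "rev-parse", "--git-dir")
if status.success?
  puts "git dir: #{output.strip}"
else
  warn "command failed:\n#{output}"
  exit 1
end
```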
```diff
@@ -1,319 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'bundler/setup'
-require 'json'
-require 'net/http'
-require 'optparse'
-require 'plist'
-require 'set'
-require 'thread'
-require 'tmpdir'
-require 'uri'
-require 'yaml'
-
-ROOT = File.expand_path("../..", __FILE__)
-GRAMMARS_PATH = File.join(ROOT, "grammars")
-SOURCES_FILE = File.join(ROOT, "grammars.yml")
-CSONC = File.join(ROOT, "node_modules", ".bin", "csonc")
-
-$options = {
-  :add => false,
-  :install => true,
-  :output => SOURCES_FILE,
-  :remote => true,
-}
-
-class SingleFile
-  def initialize(path)
-    @path = path
-  end
-
-  def url
-    @path
-  end
-
-  def fetch(tmp_dir)
-    [@path]
-  end
-end
-
-class DirectoryPackage
-  def self.fetch(dir)
-    Dir["#{dir}/**/*"].select do |path|
-      case File.extname(path.downcase)
-      when '.plist'
-        path.split('/')[-2] == 'Syntaxes'
-      when '.tmlanguage', '.yaml-tmlanguage'
-        true
-      when '.cson', '.json'
-        path.split('/')[-2] == 'grammars'
-      else
-        false
-      end
-    end
-  end
-
-  def initialize(directory)
-    @directory = directory
-  end
-
-  def url
-    @directory
-  end
-
-  def fetch(tmp_dir)
-    self.class.fetch(File.join(ROOT, @directory))
-  end
-end
-
-class TarballPackage
-  def self.fetch(tmp_dir, url)
-    `curl --silent --location --max-time 30 --output "#{tmp_dir}/archive" "#{url}"`
-    raise "Failed to fetch GH package: #{url} #{$?.to_s}" unless $?.success?
-
-    output = File.join(tmp_dir, 'extracted')
-    Dir.mkdir(output)
-    `tar -C "#{output}" -xf "#{tmp_dir}/archive"`
-    raise "Failed to uncompress tarball: #{tmp_dir}/archive (from #{url}) #{$?.to_s}" unless $?.success?
-
-    DirectoryPackage.fetch(output)
-  end
-
-  attr_reader :url
-
-  def initialize(url)
-    @url = url
-  end
-
-  def fetch(tmp_dir)
-    self.class.fetch(tmp_dir, url)
-  end
-end
-
-class SingleGrammar
-  attr_reader :url
-
-  def initialize(url)
-    @url = url
-  end
-
-  def fetch(tmp_dir)
-    filename = File.join(tmp_dir, File.basename(url))
-    `curl --silent --location --max-time 10 --output "#{filename}" "#{url}"`
-    raise "Failed to fetch grammar: #{url}: #{$?.to_s}" unless $?.success?
-    [filename]
-  end
-end
-
-class SVNPackage
-  attr_reader :url
-
-  def initialize(url)
-    @url = url
-  end
-
-  def fetch(tmp_dir)
-    `svn export -q "#{url}/Syntaxes" "#{tmp_dir}/Syntaxes"`
-    raise "Failed to export SVN repository: #{url}: #{$?.to_s}" unless $?.success?
-    Dir["#{tmp_dir}/Syntaxes/*.{plist,tmLanguage,tmlanguage,YAML-tmLanguage}"]
-  end
-end
-
-class GitHubPackage
-  def self.parse_url(url)
-    url, ref = url.split("@", 2)
-    path = URI.parse(url).path.split('/')
-    [path[1], path[2].chomp('.git'), ref || "master"]
-  end
-
-  attr_reader :user
-  attr_reader :repo
-  attr_reader :ref
-
-  def initialize(url)
-    @user, @repo, @ref = self.class.parse_url(url)
-  end
-
-  def url
-    suffix = "@#{ref}" unless ref == "master"
-    "https://github.com/#{user}/#{repo}#{suffix}"
-  end
-
-  def fetch(tmp_dir)
-    url = "https://github.com/#{user}/#{repo}/archive/#{ref}.tar.gz"
-    TarballPackage.fetch(tmp_dir, url)
-  end
-end
-
-def load_grammar(path)
-  case File.extname(path.downcase)
-  when '.plist', '.tmlanguage'
-    Plist::parse_xml(path)
-  when '.yaml-tmlanguage'
-    content = File.read(path)
-    # Attempt to parse YAML file even if it has a YAML 1.2 header
-    if content.lines[0] =~ /^%YAML[ :]1\.2/
-      content = content.lines[1..-1].join
-    end
-    begin
-      YAML.load(content)
-    rescue Psych::SyntaxError => e
-      $stderr.puts "Failed to parse YAML grammar '#{path}'"
-    end
-  when '.cson'
-    cson = `"#{CSONC}" "#{path}"`
-    raise "Failed to convert CSON grammar '#{path}': #{$?.to_s}" unless $?.success?
-    JSON.parse(cson)
-  when '.json'
-    JSON.parse(File.read(path))
-  else
-    raise "Invalid document type #{path}"
-  end
-end
-
-def load_grammars(tmp_dir, source, all_scopes)
-  is_url = source.start_with?("http:", "https:")
-  return [] if is_url && !$options[:remote]
-  return [] if !is_url && !File.exist?(source)
-
-  p = if !is_url
-    if File.directory?(source)
-      DirectoryPackage.new(source)
-    else
-      SingleFile.new(source)
-    end
-  elsif source.end_with?('.tmLanguage', '.plist', '.YAML-tmLanguage')
-    SingleGrammar.new(source)
-  elsif source.start_with?('https://github.com')
-    GitHubPackage.new(source)
-  elsif source.start_with?('http://svn.textmate.org')
-    SVNPackage.new(source)
-  elsif source.end_with?('.tar.gz')
-    TarballPackage.new(source)
-  else
-    nil
-  end
-
-  raise "Unsupported source: #{source}" unless p
-
-  p.fetch(tmp_dir).map do |path|
-    grammar = load_grammar(path)
-    scope = grammar['scopeName'] || grammar['scope']
-
-    if all_scopes.key?(scope)
-      unless all_scopes[scope] == p.url
-        $stderr.puts "WARN: Duplicated scope #{scope}\n" +
-                     "  Current package: #{p.url}\n" +
-                     "  Previous package: #{all_scopes[scope]}"
-      end
-      next
-    end
-    all_scopes[scope] = p.url
-    grammar
-  end.compact
-end
-
-def install_grammars(grammars, path)
-  installed = []
-
-  grammars.each do |grammar|
-    scope = grammar['scopeName'] || grammar['scope']
-    File.write(File.join(GRAMMARS_PATH, "#{scope}.json"), JSON.pretty_generate(grammar))
-    installed << scope
-  end
-
-  $stderr.puts("OK #{path} (#{installed.join(', ')})")
-end
-
-def run_thread(queue, all_scopes)
-  Dir.mktmpdir do |tmpdir|
-    loop do
-      source, index = begin
-        queue.pop(true)
-      rescue ThreadError
-        # The queue is empty.
-        break
-      end
-
-      dir = "#{tmpdir}/#{index}"
-      Dir.mkdir(dir)
-
-      grammars = load_grammars(dir, source, all_scopes)
-      install_grammars(grammars, source) if $options[:install]
-    end
-  end
-end
-
-def generate_yaml(all_scopes, base)
-  yaml = all_scopes.each_with_object(base) do |(key,value),out|
-    out[value] ||= []
-    out[value] << key
-  end
-
-  yaml = Hash[yaml.sort]
-  yaml.each { |k, v| v.sort! }
-  yaml
-end
-
-def main(sources)
-  begin
-    Dir.mkdir(GRAMMARS_PATH)
-  rescue Errno::EEXIST
-  end
-
-  `npm install`
-
-  all_scopes = {}
-
-  if source = $options[:add]
-    Dir.mktmpdir do |tmpdir|
-      grammars = load_grammars(tmpdir, source, all_scopes)
-      install_grammars(grammars, source) if $options[:install]
-    end
-    generate_yaml(all_scopes, sources)
-  else
-    queue = Queue.new
-
-    sources.each do |url, scopes|
-      queue.push([url, queue.length])
-    end
-
-    threads = 8.times.map do
-      Thread.new { run_thread(queue, all_scopes) }
-    end
-    threads.each(&:join)
-    generate_yaml(all_scopes, {})
-  end
-end
-
-OptionParser.new do |opts|
-  opts.banner = "Usage: #{$0} [options]"
-
-  opts.on("--add GRAMMAR", "Add a new grammar. GRAMMAR may be a file path or URL.") do |a|
-    $options[:add] = a
-  end
-
-  opts.on("--[no-]install", "Install grammars into grammars/ directory.") do |i|
-    $options[:install] = i
-  end
-
-  opts.on("--output FILE", "Write output to FILE. Use - for stdout.") do |o|
-    $options[:output] = o == "-" ? $stdout : o
-  end
-
-  opts.on("--[no-]remote", "Download remote grammars.") do |r|
-    $options[:remote] = r
-  end
-end.parse!
-
-sources = File.open(SOURCES_FILE) do |file|
-  YAML.load(file)
-end
-
-yaml = main(sources)
-
-if $options[:output].is_a?(IO)
-  $options[:output].write(YAML.dump(yaml))
-else
-  File.write($options[:output], YAML.dump(yaml))
-end
```
**script/grammar-compiler** (new executable file, 12 lines)

```diff
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -e
+cd "$(dirname "$0")/.."
+
+image="linguist/grammar-compiler:latest"
+mkdir -p grammars
+
+exec docker run --rm \
+	-u $(id -u $USER):$(id -g $USER) \
+	-v $PWD:/src/linguist \
+	-w /src/linguist $image "$@"
```
```diff
@@ -1,60 +0,0 @@
-#!/usr/bin/env ruby
-
-require "bundler/setup"
-require "json"
-require "linguist"
-require "set"
-require "yaml"
-
-ROOT = File.expand_path("../../", __FILE__)
-
-def find_includes(json)
-  case json
-  when Hash
-    result = []
-    if inc = json["include"]
-      result << inc.split("#", 2).first unless inc.start_with?("#", "$")
-    end
-    result + json.values.flat_map { |v| find_includes(v) }
-  when Array
-    json.flat_map { |v| find_includes(v) }
-  else
-    []
-  end
-end
-
-def transitive_includes(scope, includes)
-  scopes = Set.new
-  queue = includes[scope] || []
-  while s = queue.shift
-    next if scopes.include?(s)
-    scopes << s
-    queue += includes[s] || []
-  end
-  scopes
-end
-
-includes = {}
-Dir[File.join(ROOT, "grammars/*.json")].each do |path|
-  scope = File.basename(path).sub(/\.json/, '')
-  json = JSON.load(File.read(path))
-  incs = find_includes(json)
-  next if incs.empty?
-  includes[scope] ||= []
-  includes[scope] += incs
-end
-
-yaml = YAML.load(File.read(File.join(ROOT, "grammars.yml")))
-language_scopes = Linguist::Language.all.map(&:tm_scope).to_set
-
-# The set of used scopes is the scopes for each language, plus all the scopes
-# they include, transitively.
-used_scopes = language_scopes + language_scopes.flat_map { |s| transitive_includes(s, includes).to_a }.to_set
-
-unused = yaml.reject { |repo, scopes| scopes.any? { |scope| used_scopes.include?(scope) } }
-
-puts "Unused grammar repos"
-puts unused.map { |repo, scopes| sprintf("%-100s %s", repo, scopes.join(", ")) }.sort.join("\n")
-
-yaml.delete_if { |k| unused.key?(k) }
-File.write(File.join(ROOT, "grammars.yml"), YAML.dump(yaml))
```
```diff
@@ -188,6 +188,17 @@ class TestFileBlob < Minitest::Test
     assert fixture_blob("Binary/MainMenu.nib").generated?
     assert !sample_blob("XML/project.pbxproj").generated?
 
+    # Cocoapods
+    assert sample_blob('Pods/blah').generated?
+    assert !sample_blob('My-Pods/blah').generated?
+
+    # Carthage
+    assert sample_blob('Carthage/Build/blah').generated?
+    assert !sample_blob('Carthage/blah').generated?
+    assert !sample_blob('Carthage/Checkout/blah').generated?
+    assert !sample_blob('My-Carthage/Build/blah').generated?
+    assert !sample_blob('My-Carthage/Build/blah').generated?
+
     # Gemfile.lock is NOT generated
     assert !sample_blob("Gemfile.lock").generated?
 
@@ -313,8 +324,6 @@ class TestFileBlob < Minitest::Test
     assert sample_blob("deps/http_parser/http_parser.c").vendored?
     assert sample_blob("deps/v8/src/v8.h").vendored?
 
-    assert sample_blob("tools/something/else.c").vendored?
-
     # Chart.js
     assert sample_blob("some/vendored/path/Chart.js").vendored?
     assert !sample_blob("some/vendored/path/chart.js").vendored?
@@ -490,9 +499,9 @@ class TestFileBlob < Minitest::Test
 
     # Carthage
     assert sample_blob('Carthage/blah').vendored?
-
-    # Cocoapods
-    assert sample_blob('Pods/blah').vendored?
+    assert sample_blob('iOS/Carthage/blah').vendored?
+    assert !sample_blob('My-Carthage/blah').vendored?
+    assert !sample_blob('iOS/My-Carthage/blah').vendored?
 
     # Html5shiv
     assert sample_blob("Scripts/html5shiv.js").vendored?
```
@@ -42,6 +42,24 @@ class TestGenerated < Minitest::Test
     generated_sample_without_loading_data("Dummy/foo.xcworkspacedata")
     generated_sample_without_loading_data("Dummy/foo.xcuserstate")
 
+    # Cocoapods
+    generated_sample_without_loading_data("Pods/Pods.xcodeproj")
+    generated_sample_without_loading_data("Pods/SwiftDependency/foo.swift")
+    generated_sample_without_loading_data("Pods/ObjCDependency/foo.h")
+    generated_sample_without_loading_data("Pods/ObjCDependency/foo.m")
+    generated_sample_without_loading_data("Dummy/Pods/Pods.xcodeproj")
+    generated_sample_without_loading_data("Dummy/Pods/SwiftDependency/foo.swift")
+    generated_sample_without_loading_data("Dummy/Pods/ObjCDependency/foo.h")
+    generated_sample_without_loading_data("Dummy/Pods/ObjCDependency/foo.m")
+
+    # Carthage
+    generated_sample_without_loading_data("Carthage/Build/.Dependency.version")
+    generated_sample_without_loading_data("Carthage/Build/iOS/Dependency.framework")
+    generated_sample_without_loading_data("Carthage/Build/Mac/Dependency.framework")
+    generated_sample_without_loading_data("src/Carthage/Build/.Dependency.version")
+    generated_sample_without_loading_data("src/Carthage/Build/iOS/Dependency.framework")
+    generated_sample_without_loading_data("src/Carthage/Build/Mac/Dependency.framework")
+
     # Go-specific vendored paths
     generated_sample_without_loading_data("go/vendor/github.com/foo.go")
     generated_sample_without_loading_data("go/vendor/golang.org/src/foo.c")
@@ -23,7 +23,6 @@ class TestGrammars < Minitest::Test
     "8653305b358375d0fced85dc24793b99919b11ef", # language-shellscript
     "9f0c0b0926a18f5038e455e8df60221125fc3111", # elixir-tmbundle
     "a4dadb2374282098c5b8b14df308906f5347d79a", # mako-tmbundle
-    "b9b24778619dce325b651f0d77cbc72e7ae0b0a3", # Julia.tmbundle
     "e06722add999e7428048abcc067cd85f1f7ca71c", # r.tmbundle
     "50b14a0e3f03d7ca754dac42ffb33302b5882b78", # smalltalk-tmbundle
     "eafbc4a2f283752858e6908907f3c0c90188785b", # gap-tmbundle
@@ -44,6 +43,7 @@ class TestGrammars < Minitest::Test
     "9dafd4e2a79cb13a6793b93877a254bc4d351e74", # sublime-text-ox
     "8e111741d97ba2e27b3d18a309d426b4a37e604f", # sublime-varnish
     "23d2538e33ce62d58abda2c039364b92f64ea6bc", # sublime-angelscript
+    "53714285caad3c480ebd248c490509695d10404b", # atom-language-julia
   ].freeze
 
   # List of allowed SPDX license names
@@ -94,19 +94,6 @@ class TestGrammars < Minitest::Test
     assert nonexistent_submodules.empty? && unlisted_submodules.empty?, message.sub(/\.\Z/, "")
   end
 
-  def test_local_scopes_are_in_sync
-    actual = YAML.load(`"#{File.join(ROOT, "script", "convert-grammars")}" --output - --no-install --no-remote`)
-    assert $?.success?, "script/convert-grammars failed"
-
-    # We're not checking remote grammars. That can take a long time and make CI
-    # flaky if network conditions are poor.
-    @grammars.delete_if { |k, v| k.start_with?("http:", "https:") }
-
-    @grammars.each do |k, v|
-      assert_equal v, actual[k], "The scopes listed for #{k} in grammars.yml don't match the scopes found in that repository"
-    end
-  end
-
   def test_readme_file_is_in_sync
     current_data = File.read("#{ROOT}/vendor/README.md").to_s.sub(/\A.+?<!--.+?-->\n/ms, "")
     updated_data = `script/list-grammars --print`
@@ -470,5 +470,7 @@ class TestLanguage < Minitest::Test
 
   def test_non_crash_on_comma
     assert_nil Language[',']
+    assert_nil Language.find_by_name(',')
+    assert_nil Language.find_by_alias(',')
   end
 end
1 tools/grammars/.gitignore vendored Normal file
@@ -0,0 +1 @@
/vendor
35 tools/grammars/Dockerfile Normal file
@@ -0,0 +1,35 @@
FROM golang:1.9.2

RUN apt-get update
RUN apt-get upgrade -y
RUN apt-get install -y curl gnupg

RUN curl -sL https://deb.nodesource.com/setup_6.x | bash -
RUN apt-get install -y nodejs
RUN npm install -g season

RUN apt-get install -y cmake
RUN cd /tmp && git clone https://github.com/vmg/pcre
RUN mkdir -p /tmp/pcre/build && cd /tmp/pcre/build && \
    cmake .. \
    -DPCRE_SUPPORT_JIT=ON \
    -DPCRE_SUPPORT_UTF=ON \
    -DPCRE_SUPPORT_UNICODE_PROPERTIES=ON \
    -DBUILD_SHARED_LIBS=OFF \
    -DCMAKE_C_FLAGS="-fPIC $(EXTRA_PCRE_CFLAGS)" \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
    -DPCRE_BUILD_PCRECPP=OFF \
    -DPCRE_BUILD_PCREGREP=OFF \
    -DPCRE_BUILD_TESTS=OFF \
    -G "Unix Makefiles" && \
    make && make install
RUN rm -rf /tmp/pcre

RUN go get -u github.com/golang/dep/cmd/dep
WORKDIR /go/src/github.com/github/linguist/tools/grammars
COPY . .

RUN dep ensure
RUN go install ./cmd/grammar-compiler

ENTRYPOINT ["grammar-compiler"]
51 tools/grammars/Gopkg.lock generated Normal file
@@ -0,0 +1,51 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.


[[projects]]
  branch = "master"
  name = "github.com/golang/protobuf"
  packages = ["proto"]
  revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845"

[[projects]]
  branch = "master"
  name = "github.com/groob/plist"
  packages = ["."]
  revision = "7b367e0aa692e62a223e823f3288c0c00f519a36"

[[projects]]
  name = "github.com/mattn/go-runewidth"
  packages = ["."]
  revision = "9e777a8366cce605130a531d2cd6363d07ad7317"
  version = "v0.0.2"

[[projects]]
  branch = "master"
  name = "github.com/mitchellh/mapstructure"
  packages = ["."]
  revision = "06020f85339e21b2478f756a78e295255ffa4d6a"

[[projects]]
  name = "github.com/urfave/cli"
  packages = ["."]
  revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1"
  version = "v1.20.0"

[[projects]]
  name = "gopkg.in/cheggaaa/pb.v1"
  packages = ["."]
  revision = "657164d0228d6bebe316fdf725c69f131a50fb10"
  version = "v1.0.18"

[[projects]]
  branch = "v2"
  name = "gopkg.in/yaml.v2"
  packages = ["."]
  revision = "287cf08546ab5e7e37d55a84f7ed3fd1db036de5"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  inputs-digest = "ba2e3150d728692b49e3e2d652b6ea23db82777c340e0c432cd4af6f0eef9f55"
  solver-name = "gps-cdcl"
  solver-version = 1
23 tools/grammars/Gopkg.toml Normal file
@@ -0,0 +1,23 @@
[[constraint]]
  branch = "v2"
  name = "gopkg.in/yaml.v2"

[[constraint]]
  branch = "master"
  name = "github.com/groob/plist"

[[constraint]]
  branch = "master"
  name = "github.com/golang/protobuf"

[[constraint]]
  branch = "master"
  name = "github.com/mitchellh/mapstructure"

[[constraint]]
  name = "gopkg.in/cheggaaa/pb.v1"
  version = "1.0.18"

[[constraint]]
  name = "github.com/urfave/cli"
  version = "1.20.0"
120 tools/grammars/cmd/grammar-compiler/main.go Normal file
@@ -0,0 +1,120 @@
package main

import (
    "os"

    "github.com/github/linguist/tools/grammars/compiler"
    "github.com/urfave/cli"
)

func cwd() string {
    cwd, _ := os.Getwd()
    return cwd
}

func wrap(err error) error {
    return cli.NewExitError(err, 255)
}

func main() {
    app := cli.NewApp()
    app.Name = "Linguist Grammars Compiler"
    app.Usage = "Compile user-submitted grammars and check them for errors"

    app.Flags = []cli.Flag{
        cli.StringFlag{
            Name:  "linguist-path",
            Value: cwd(),
            Usage: "path to Linguist root",
        },
    }

    app.Commands = []cli.Command{
        {
            Name:  "add",
            Usage: "add a new grammar source",
            Flags: []cli.Flag{
                cli.BoolFlag{
                    Name:  "force, f",
                    Usage: "ignore compilation errors",
                },
            },
            Action: func(c *cli.Context) error {
                conv, err := compiler.NewConverter(c.String("linguist-path"))
                if err != nil {
                    return wrap(err)
                }
                if err := conv.AddGrammar(c.Args().First()); err != nil {
                    if !c.Bool("force") {
                        return wrap(err)
                    }
                }
                if err := conv.WriteGrammarList(); err != nil {
                    return wrap(err)
                }
                return nil
            },
        },
        {
            Name:  "update",
            Usage: "update grammars.yml with the contents of the grammars library",
            Flags: []cli.Flag{
                cli.BoolFlag{
                    Name:  "force, f",
                    Usage: "write grammars.yml even if grammars fail to compile",
                },
            },
            Action: func(c *cli.Context) error {
                conv, err := compiler.NewConverter(c.String("linguist-path"))
                if err != nil {
                    return wrap(err)
                }
                if err := conv.ConvertGrammars(true); err != nil {
                    return wrap(err)
                }
                if err := conv.Report(); err != nil {
                    if !c.Bool("force") {
                        return wrap(err)
                    }
                }
                if err := conv.WriteGrammarList(); err != nil {
                    return wrap(err)
                }
                return nil
            },
        },
        {
            Name:  "compile",
            Usage: "convert the grammars from the library",
            Flags: []cli.Flag{
                cli.StringFlag{Name: "proto-out, P"},
                cli.StringFlag{Name: "out, o"},
            },
            Action: func(c *cli.Context) error {
                conv, err := compiler.NewConverter(c.String("linguist-path"))
                if err != nil {
                    return cli.NewExitError(err, 1)
                }
                if err := conv.ConvertGrammars(false); err != nil {
                    return cli.NewExitError(err, 1)
                }
                if out := c.String("proto-out"); out != "" {
                    if err := conv.WriteProto(out); err != nil {
                        return cli.NewExitError(err, 1)
                    }
                }
                if out := c.String("out"); out != "" {
                    if err := conv.WriteJSON(out); err != nil {
                        return cli.NewExitError(err, 1)
                    }
                }
                if err := conv.Report(); err != nil {
                    return wrap(err)
                }
                return nil
            },
        },
    }

    app.Run(os.Args)
}
261 tools/grammars/compiler/converter.go Normal file
@@ -0,0 +1,261 @@
package compiler

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "os"
    "path"
    "runtime"
    "sort"
    "strings"
    "sync"

    grammar "github.com/github/linguist/tools/grammars/proto"
    "github.com/golang/protobuf/proto"
    pb "gopkg.in/cheggaaa/pb.v1"
    yaml "gopkg.in/yaml.v2"
)

type Converter struct {
    root string

    modified bool
    grammars map[string][]string
    Loaded   map[string]*Repository

    progress *pb.ProgressBar
    wg       sync.WaitGroup
    queue    chan string
    mu       sync.Mutex
}

func (conv *Converter) Load(src string) *Repository {
    if strings.HasPrefix(src, "http://") || strings.HasPrefix(src, "https://") {
        return LoadFromURL(src)
    }
    return LoadFromFilesystem(conv.root, src)
}

func (conv *Converter) work() {
    for source := range conv.queue {
        repo := conv.Load(source)

        conv.mu.Lock()
        conv.Loaded[source] = repo
        conv.mu.Unlock()

        conv.progress.Increment()
    }

    conv.wg.Done()
}

func (conv *Converter) tmpScopes() map[string]bool {
    scopes := make(map[string]bool)
    for _, ary := range conv.grammars {
        for _, s := range ary {
            scopes[s] = true
        }
    }
    return scopes
}

func (conv *Converter) AddGrammar(source string) error {
    repo := conv.Load(source)
    if len(repo.Files) == 0 {
        return fmt.Errorf("source '%s' contains no grammar files", source)
    }

    conv.grammars[source] = repo.Scopes()
    conv.modified = true

    knownScopes := conv.tmpScopes()
    repo.FixRules(knownScopes)

    if len(repo.Errors) > 0 {
        fmt.Fprintf(os.Stderr, "The new grammar %s contains %d errors:\n",
            repo, len(repo.Errors))
        for _, err := range repo.Errors {
            fmt.Fprintf(os.Stderr, " - %s\n", err)
        }
        fmt.Fprintf(os.Stderr, "\n")
        return fmt.Errorf("failed to compile the given grammar")
    }

    fmt.Printf("OK! added grammar source '%s'\n", source)
    for scope := range repo.Files {
        fmt.Printf("\tnew scope: %s\n", scope)
    }
    return nil
}

func (conv *Converter) AllScopes() map[string]bool {
    // Map from scope -> Repository first to error check
    // possible duplicates
    allScopes := make(map[string]*Repository)
    for _, repo := range conv.Loaded {
        for scope := range repo.Files {
            if original := allScopes[scope]; original != nil {
                repo.Fail(&DuplicateScopeError{original, scope})
            } else {
                allScopes[scope] = repo
            }
        }
    }

    // Convert to scope -> bool
    scopes := make(map[string]bool)
    for s := range allScopes {
        scopes[s] = true
    }
    return scopes
}

func (conv *Converter) ConvertGrammars(update bool) error {
    conv.Loaded = make(map[string]*Repository)
    conv.queue = make(chan string, 128)

    conv.progress = pb.New(len(conv.grammars))
    conv.progress.Start()

    for i := 0; i < runtime.NumCPU(); i++ {
        conv.wg.Add(1)
        go conv.work()
    }

    for src := range conv.grammars {
        conv.queue <- src
    }

    close(conv.queue)
    conv.wg.Wait()

    done := fmt.Sprintf("done! processed %d grammars\n", len(conv.Loaded))
    conv.progress.FinishPrint(done)

    if update {
        conv.grammars = make(map[string][]string)
        conv.modified = true
    }

    knownScopes := conv.AllScopes()

    for source, repo := range conv.Loaded {
        repo.FixRules(knownScopes)

        if update {
            conv.grammars[source] = repo.Scopes()
        } else {
            expected := conv.grammars[source]
            repo.CompareScopes(expected)
        }
    }

    return nil
}

func (conv *Converter) WriteProto(path string) error {
    library := grammar.Library{
        Grammars: make(map[string]*grammar.Rule),
    }

    for _, repo := range conv.Loaded {
        for scope, file := range repo.Files {
            library.Grammars[scope] = file.Rule
        }
    }

    pb, err := proto.Marshal(&library)
    if err != nil {
        return err
    }

    return ioutil.WriteFile(path, pb, 0666)
}

func (conv *Converter) writeJSONFile(path string, rule *grammar.Rule) error {
    j, err := os.Create(path)
    if err != nil {
        return err
    }
    defer j.Close()

    enc := json.NewEncoder(j)
    enc.SetIndent("", "  ")
    return enc.Encode(rule)
}

func (conv *Converter) WriteJSON(rulePath string) error {
    if err := os.MkdirAll(rulePath, os.ModePerm); err != nil {
        return err
    }

    for _, repo := range conv.Loaded {
        for scope, file := range repo.Files {
            p := path.Join(rulePath, scope+".json")
            if err := conv.writeJSONFile(p, file.Rule); err != nil {
                return err
            }
        }
    }

    return nil
}

func (conv *Converter) WriteGrammarList() error {
    if !conv.modified {
        return nil
    }

    outyml, err := yaml.Marshal(conv.grammars)
    if err != nil {
        return err
    }

    ymlpath := path.Join(conv.root, "grammars.yml")
    return ioutil.WriteFile(ymlpath, outyml, 0666)
}

func (conv *Converter) Report() error {
    var failed []*Repository
    for _, repo := range conv.Loaded {
        if len(repo.Errors) > 0 {
            failed = append(failed, repo)
        }
    }

    sort.Slice(failed, func(i, j int) bool {
        return failed[i].Source < failed[j].Source
    })

    total := 0
    for _, repo := range failed {
        fmt.Fprintf(os.Stderr, "- [ ] %s (%d errors)\n", repo, len(repo.Errors))
        for _, err := range repo.Errors {
            fmt.Fprintf(os.Stderr, "  - [ ] %s\n", err)
        }
        fmt.Fprintf(os.Stderr, "\n")
        total += len(repo.Errors)
    }

    if total > 0 {
        return fmt.Errorf("the grammar library contains %d errors", total)
    }
    return nil
}

func NewConverter(root string) (*Converter, error) {
    yml, err := ioutil.ReadFile(path.Join(root, "grammars.yml"))
    if err != nil {
        return nil, err
    }

    conv := &Converter{root: root}

    if err := yaml.Unmarshal(yml, &conv.grammars); err != nil {
        return nil, err
    }

    return conv, nil
}
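The exported Converter API above is exactly what cmd/grammar-compiler drives. As a rough sketch of the same flow outside the CLI — assuming it runs from the root of a Linguist checkout, and with a hypothetical output directory name — the compile path looks like this:

package main

import (
    "log"

    "github.com/github/linguist/tools/grammars/compiler"
)

func main() {
    // Load grammars.yml from the Linguist root (assumed to be the CWD here).
    conv, err := compiler.NewConverter(".")
    if err != nil {
        log.Fatal(err)
    }
    // false = verify scopes against grammars.yml instead of rewriting it.
    if err := conv.ConvertGrammars(false); err != nil {
        log.Fatal(err)
    }
    // Dump one <scope>.json per grammar, as `grammar-compiler compile -o` does.
    if err := conv.WriteJSON("grammars"); err != nil {
        log.Fatal(err)
    }
    // Report aggregates per-repository errors and fails if any were found.
    if err := conv.Report(); err != nil {
        log.Fatal(err)
    }
}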
21 tools/grammars/compiler/cson.go Normal file
@@ -0,0 +1,21 @@
package compiler

import (
    "bytes"
    "os/exec"
)

func ConvertCSON(data []byte) ([]byte, error) {
    stdin := bytes.NewBuffer(data)
    stdout := &bytes.Buffer{}

    cmd := exec.Command("csonc")
    cmd.Stdin = stdin
    cmd.Stdout = stdout

    if err := cmd.Run(); err != nil {
        return nil, err
    }

    return stdout.Bytes(), nil
}
29 tools/grammars/compiler/data.go Normal file
@@ -0,0 +1,29 @@
package compiler

var GrammarAliases = map[string]string{
    "source.erb":         "text.html.erb",
    "source.cpp":         "source.c++",
    "source.less":        "source.css.less",
    "text.html.markdown": "source.gfm",
    "text.md":            "source.gfm",
    "source.php":         "text.html.php",
    "text.plain":         "",
    "source.asciidoc":    "text.html.asciidoc",
    "source.perl6":       "source.perl6fe",
    "source.css.scss":    "source.scss",
}

var KnownFields = map[string]bool{
    "comment":            true,
    "uuid":               true,
    "author":             true,
    "comments":           true,
    "macros":             true,
    "fileTypes":          true,
    "firstLineMatch":     true,
    "keyEquivalent":      true,
    "foldingStopMarker":  true,
    "foldingStartMarker": true,
    "foldingEndMarker":   true,
    "limitLineLength":    true,
}
85 tools/grammars/compiler/errors.go Normal file
@@ -0,0 +1,85 @@
package compiler

import "fmt"
import "strings"

type ConversionError struct {
    Path string
    Err  error
}

func (err *ConversionError) Error() string {
    return fmt.Sprintf(
        "Grammar conversion failed. File `%s` failed to parse: %s",
        err.Path, err.Err)
}

type DuplicateScopeError struct {
    Original  *Repository
    Duplicate string
}

func (err *DuplicateScopeError) Error() string {
    return fmt.Sprintf(
        "Duplicate scope in repository: scope `%s` was already defined in %s",
        err.Duplicate, err.Original)
}

type MissingScopeError struct {
    Scope string
}

func (err *MissingScopeError) Error() string {
    return fmt.Sprintf(
        "Missing scope in repository: `%s` is listed in grammars.yml but cannot be found",
        err.Scope)
}

type UnexpectedScopeError struct {
    File  *LoadedFile
    Scope string
}

func (err *UnexpectedScopeError) Error() string {
    return fmt.Sprintf(
        "Unexpected scope in repository: `%s` declared in %s was not listed in grammars.yml",
        err.Scope, err.File)
}

type MissingIncludeError struct {
    File    *LoadedFile
    Include string
}

func (err *MissingIncludeError) Error() string {
    return fmt.Sprintf(
        "Missing include in grammar: %s attempts to include `%s` but the scope cannot be found",
        err.File, err.Include)
}

type UnknownKeysError struct {
    File *LoadedFile
    Keys []string
}

func (err *UnknownKeysError) Error() string {
    var keys []string
    for _, k := range err.Keys {
        keys = append(keys, fmt.Sprintf("`%s`", k))
    }

    return fmt.Sprintf(
        "Unknown keys in grammar: %s contains invalid keys (%s)",
        err.File, strings.Join(keys, ", "))
}

type InvalidRegexError struct {
    File *LoadedFile
    Err  error
}

func (err *InvalidRegexError) Error() string {
    return fmt.Sprintf(
        "Invalid regex in grammar: %s contains a malformed regex (%s)",
        err.File, err.Err)
}
124 tools/grammars/compiler/loader.go Normal file
@@ -0,0 +1,124 @@
package compiler

import (
    "fmt"
    "os"
    "path/filepath"
    "sort"
    "strings"

    grammar "github.com/github/linguist/tools/grammars/proto"
)

type LoadedFile struct {
    Path string
    Rule *grammar.Rule
}

func (f *LoadedFile) String() string {
    return fmt.Sprintf("`%s` (in `%s`)", f.Rule.ScopeName, f.Path)
}

type Repository struct {
    Source   string
    Upstream string
    Files    map[string]*LoadedFile
    Errors   []error
}

func newRepository(src string) *Repository {
    return &Repository{
        Source: src,
        Files:  make(map[string]*LoadedFile),
    }
}

func (repo *Repository) String() string {
    str := fmt.Sprintf("repository `%s`", repo.Source)
    if repo.Upstream != "" {
        str = str + fmt.Sprintf(" (from %s)", repo.Upstream)
    }
    return str
}

func (repo *Repository) Fail(err error) {
    repo.Errors = append(repo.Errors, err)
}

func (repo *Repository) AddFile(path string, rule *grammar.Rule, uk []string) {
    file := &LoadedFile{
        Path: path,
        Rule: rule,
    }

    repo.Files[rule.ScopeName] = file
    if len(uk) > 0 {
        repo.Fail(&UnknownKeysError{file, uk})
    }
}

func toMap(slice []string) map[string]bool {
    m := make(map[string]bool)
    for _, s := range slice {
        m[s] = true
    }
    return m
}

func (repo *Repository) CompareScopes(scopes []string) {
    expected := toMap(scopes)

    for scope, file := range repo.Files {
        if !expected[scope] {
            repo.Fail(&UnexpectedScopeError{file, scope})
        }
    }

    for scope := range expected {
        if _, ok := repo.Files[scope]; !ok {
            repo.Fail(&MissingScopeError{scope})
        }
    }
}

func (repo *Repository) FixRules(knownScopes map[string]bool) {
    for _, file := range repo.Files {
        w := walker{
            File:    file,
            Known:   knownScopes,
            Missing: make(map[string]bool),
        }

        w.walk(file.Rule)
        repo.Errors = append(repo.Errors, w.Errors...)
    }
}

func (repo *Repository) Scopes() (scopes []string) {
    for s := range repo.Files {
        scopes = append(scopes, s)
    }
    sort.Strings(scopes)
    return
}

func isValidGrammar(path string, info os.FileInfo) bool {
    if info.IsDir() {
        return false
    }

    dir := filepath.Dir(path)
    ext := filepath.Ext(path)

    switch strings.ToLower(ext) {
    case ".plist":
        return strings.HasSuffix(dir, "/Syntaxes")
    case ".tmlanguage", ".yaml-tmlanguage":
        return true
    case ".cson", ".json":
        return strings.HasSuffix(dir, "/grammars")
    default:
        return false
    }
}
80 tools/grammars/compiler/loader_fs.go Normal file
@@ -0,0 +1,80 @@
package compiler

import (
    "io/ioutil"
    "os"
    "os/exec"
    "path"
    "path/filepath"
    "strings"
)

type fsLoader struct {
    *Repository
    abspath string
}

func (l *fsLoader) findGrammars() (files []string, err error) {
    err = filepath.Walk(l.abspath,
        func(path string, info os.FileInfo, err error) error {
            if err == nil && isValidGrammar(path, info) {
                files = append(files, path)
            }
            return nil
        })
    return
}

func (l *fsLoader) load() {
    grammars, err := l.findGrammars()
    if err != nil {
        l.Fail(err)
        return
    }

    for _, path := range grammars {
        data, err := ioutil.ReadFile(path)
        if err != nil {
            l.Fail(err)
            continue
        }

        if rel, err := filepath.Rel(l.abspath, path); err == nil {
            path = rel
        }

        rule, unknown, err := ConvertProto(filepath.Ext(path), data)
        if err != nil {
            l.Fail(&ConversionError{path, err})
            continue
        }

        if _, ok := l.Files[rule.ScopeName]; ok {
            continue
        }

        l.AddFile(path, rule, unknown)
    }
}

func gitRemoteName(path string) (string, error) {
    remote, err := exec.Command("git", "-C", path, "remote", "get-url", "origin").Output()
    if err != nil {
        return "", err
    }
    return strings.TrimSpace(string(remote)), nil
}

func LoadFromFilesystem(root, src string) *Repository {
    loader := fsLoader{
        Repository: newRepository(src),
        abspath:    path.Join(root, src),
    }
    loader.load()

    if ups, err := gitRemoteName(loader.abspath); err == nil {
        loader.Repository.Upstream = ups
    }

    return loader.Repository
}
93 tools/grammars/compiler/loader_url.go Normal file
@@ -0,0 +1,93 @@
package compiler

import (
    "archive/tar"
    "compress/gzip"
    "io"
    "io/ioutil"
    "net/http"
    "path/filepath"
    "strings"
)

type urlLoader struct {
    *Repository
}

func (l *urlLoader) loadTarball(r io.Reader) {
    gzf, err := gzip.NewReader(r)
    if err != nil {
        l.Fail(err)
        return
    }
    defer gzf.Close()

    tarReader := tar.NewReader(gzf)
    for true {
        header, err := tarReader.Next()

        if err != nil {
            if err != io.EOF {
                l.Fail(err)
            }
            return
        }

        if isValidGrammar(header.Name, header.FileInfo()) {
            data, err := ioutil.ReadAll(tarReader)
            if err != nil {
                l.Fail(err)
                return
            }

            ext := filepath.Ext(header.Name)
            rule, unknown, err := ConvertProto(ext, data)
            if err != nil {
                l.Fail(&ConversionError{header.Name, err})
                continue
            }

            if _, ok := l.Files[rule.ScopeName]; ok {
                continue
            }

            l.AddFile(header.Name, rule, unknown)
        }
    }
}

func (l *urlLoader) load() {
    res, err := http.Get(l.Source)
    if err != nil {
        l.Fail(err)
        return
    }
    defer res.Body.Close()

    if strings.HasSuffix(l.Source, ".tar.gz") {
        l.loadTarball(res.Body)
        return
    }

    data, err := ioutil.ReadAll(res.Body)
    if err != nil {
        l.Fail(err)
        return
    }

    ext := filepath.Ext(l.Source)
    filename := filepath.Base(l.Source)
    rule, unknown, err := ConvertProto(ext, data)
    if err != nil {
        l.Fail(&ConversionError{filename, err})
        return
    }

    l.AddFile(filename, rule, unknown)
}

func LoadFromURL(src string) *Repository {
    loader := urlLoader{newRepository(src)}
    loader.load()
    return loader.Repository
}
68 tools/grammars/compiler/pcre.go Normal file
@@ -0,0 +1,68 @@
package compiler

import (
    "fmt"

    "github.com/github/linguist/tools/grammars/pcre"
)

type replacement struct {
    pos int
    len int
    val string
}

func fixRegex(re string) (string, bool) {
    var (
        replace     []replacement
        escape      = false
        hasBackRefs = false
    )

    for i, ch := range re {
        if escape {
            if ch == 'h' {
                replace = append(replace, replacement{i - 1, 2, "[[:xdigit:]]"})
            }
            if '0' <= ch && ch <= '9' {
                hasBackRefs = true
            }
        }
        escape = !escape && ch == '\\'
    }

    if len(replace) > 0 {
        reb := []byte(re)
        offset := 0
        for _, repl := range replace {
            reb = append(
                reb[:offset+repl.pos],
                append([]byte(repl.val), reb[offset+repl.pos+repl.len:]...)...)
            offset += len(repl.val) - repl.len
        }
        return string(reb), hasBackRefs
    }

    return re, hasBackRefs
}

func CheckPCRE(re string) (string, error) {
    hasBackRefs := false

    if re == "" {
        return "", nil
    }
    if len(re) > 32*1024 {
        return "", fmt.Errorf(
            "regex %s: definition too long (%d bytes)",
            pcre.RegexPP(re), len(re))
    }

    re, hasBackRefs = fixRegex(re)
    if !hasBackRefs {
        if err := pcre.CheckRegexp(re, pcre.DefaultFlags); err != nil {
            return "", err
        }
    }
    return re, nil
}
27 tools/grammars/compiler/pcre_test.go Normal file
@@ -0,0 +1,27 @@
package compiler

import (
    "testing"
)

func Test_fixRegex(t *testing.T) {
    tests := []struct {
        re   string
        want string
    }{
        {"foobar", "foobar"},
        {`testing\h`, "testing[[:xdigit:]]"},
        {`\htest`, `[[:xdigit:]]test`},
        {`abc\hdef`, `abc[[:xdigit:]]def`},
        {`\\\htest`, `\\[[:xdigit:]]test`},
        {`\\htest`, `\\htest`},
        {`\h\h\h\h`, `[[:xdigit:]][[:xdigit:]][[:xdigit:]][[:xdigit:]]`},
        {`abc\hdef\hghi\h`, `abc[[:xdigit:]]def[[:xdigit:]]ghi[[:xdigit:]]`},
    }
    for _, tt := range tests {
        got, _ := fixRegex(tt.re)
        if got != tt.want {
            t.Errorf("fixRegex() got = %v, want %v", got, tt.want)
        }
    }
}
96 tools/grammars/compiler/proto.go Normal file
@@ -0,0 +1,96 @@
package compiler

import (
    "encoding/json"
    "fmt"
    "reflect"
    "strings"

    grammar "github.com/github/linguist/tools/grammars/proto"
    "github.com/groob/plist"
    "github.com/mitchellh/mapstructure"
    yaml "gopkg.in/yaml.v2"
)

func looseDecoder(f reflect.Kind, t reflect.Kind, data interface{}) (interface{}, error) {
    dataVal := reflect.ValueOf(data)
    switch t {
    case reflect.Bool:
        switch f {
        case reflect.Bool:
            return dataVal.Bool(), nil
        case reflect.Float32, reflect.Float64:
            return (int(dataVal.Float()) != 0), nil
        case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
            return (dataVal.Int() != 0), nil
        case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
            return (dataVal.Uint() != 0), nil
        case reflect.String:
            switch dataVal.String() {
            case "1":
                return true, nil
            case "0":
                return false, nil
            }
        }
    }

    return data, nil
}

func filterUnusedKeys(keys []string) (out []string) {
    for _, k := range keys {
        parts := strings.Split(k, ".")
        field := parts[len(parts)-1]
        if !KnownFields[field] {
            out = append(out, k)
        }
    }
    return
}

func ConvertProto(ext string, data []byte) (*grammar.Rule, []string, error) {
    var (
        raw map[string]interface{}
        out grammar.Rule
        err error
        md  mapstructure.Metadata
    )

    switch strings.ToLower(ext) {
    case ".plist", ".tmlanguage":
        err = plist.Unmarshal(data, &raw)
    case ".yaml-tmlanguage":
        err = yaml.Unmarshal(data, &raw)
    case ".cson":
        data, err = ConvertCSON(data)
        if err == nil {
            err = json.Unmarshal(data, &raw)
        }
    case ".json":
        err = json.Unmarshal(data, &raw)
    default:
        err = fmt.Errorf("grammars: unsupported extension '%s'", ext)
    }

    if err != nil {
        return nil, nil, err
    }

    config := mapstructure.DecoderConfig{
        Result:     &out,
        Metadata:   &md,
        DecodeHook: looseDecoder,
    }

    decoder, err := mapstructure.NewDecoder(&config)
    if err != nil {
        return nil, nil, err
    }

    if err := decoder.Decode(raw); err != nil {
        return nil, nil, err
    }

    return &out, filterUnusedKeys(md.Unused), nil
}
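ConvertProto is the single entry point both loaders use, whatever the grammar's on-disk format. A minimal sketch of calling it directly with a small hypothetical JSON grammar (the `frobnicate` key is made up, purely to show the unknown-key reporting):

package main

import (
    "fmt"
    "log"

    "github.com/github/linguist/tools/grammars/compiler"
)

func main() {
    // A tiny, hypothetical TextMate grammar in JSON form.
    data := []byte(`{
        "scopeName": "source.example",
        "patterns": [{"match": "\\bexample\\b", "name": "keyword.example"}],
        "frobnicate": true
    }`)

    rule, unknown, err := compiler.ConvertProto(".json", data)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("scope:", rule.ScopeName) // "source.example"
    fmt.Println("unknown keys:", unknown) // e.g. ["frobnicate"]
}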
79 tools/grammars/compiler/walker.go Normal file
@@ -0,0 +1,79 @@
package compiler

import (
    "strings"

    grammar "github.com/github/linguist/tools/grammars/proto"
)

func (w *walker) checkInclude(rule *grammar.Rule) {
    include := rule.Include

    if include == "" || include[0] == '#' || include[0] == '$' {
        return
    }

    if alias, ok := GrammarAliases[include]; ok {
        rule.Include = alias
        return
    }

    include = strings.Split(include, "#")[0]
    ok := w.Known[include]
    if !ok {
        if !w.Missing[include] {
            w.Missing[include] = true
            w.Errors = append(w.Errors, &MissingIncludeError{w.File, include})
        }
        rule.Include = ""
    }
}

func (w *walker) checkRegexps(rule *grammar.Rule) {
    check := func(re string) string {
        re2, err := CheckPCRE(re)
        if err != nil {
            w.Errors = append(w.Errors, &InvalidRegexError{w.File, err})
        }
        return re2
    }

    rule.Match = check(rule.Match)
    rule.Begin = check(rule.Begin)
    rule.While = check(rule.While)
    rule.End = check(rule.End)
}

func (w *walker) walk(rule *grammar.Rule) {
    w.checkInclude(rule)
    w.checkRegexps(rule)

    for _, rule := range rule.Patterns {
        w.walk(rule)
    }
    for _, rule := range rule.Captures {
        w.walk(rule)
    }
    for _, rule := range rule.BeginCaptures {
        w.walk(rule)
    }
    for _, rule := range rule.WhileCaptures {
        w.walk(rule)
    }
    for _, rule := range rule.EndCaptures {
        w.walk(rule)
    }
    for _, rule := range rule.Repository {
        w.walk(rule)
    }
    for _, rule := range rule.Injections {
        w.walk(rule)
    }
}

type walker struct {
    File    *LoadedFile
    Known   map[string]bool
    Missing map[string]bool
    Errors  []error
}
11 tools/grammars/docker/build Executable file
@@ -0,0 +1,11 @@
#!/bin/sh

set -ex
cd "$(dirname "$0")/.."

image=linguist/grammar-compiler
docker build -t $image .

if [ "$1" = "--push" ]; then
    docker push $image
fi
53 tools/grammars/pcre/pcre.go Normal file
@@ -0,0 +1,53 @@
package pcre

/*
#cgo LDFLAGS: -lpcre
#include <pcre.h>
*/
import "C"

import (
    "fmt"
    "strings"
    "unsafe"
)

func RegexPP(re string) string {
    if len(re) > 32 {
        re = fmt.Sprintf("\"`%s`...\"", re[:32])
    } else {
        re = fmt.Sprintf("\"`%s`\"", re)
    }
    return strings.Replace(re, "\n", "", -1)
}

type CompileError struct {
    Pattern string
    Message string
    Offset  int
}

func (e *CompileError) Error() string {
    return fmt.Sprintf("regex %s: %s (at offset %d)",
        RegexPP(e.Pattern), e.Message, e.Offset)
}

const DefaultFlags = int(C.PCRE_DUPNAMES | C.PCRE_UTF8 | C.PCRE_NEWLINE_ANYCRLF)

func CheckRegexp(pattern string, flags int) error {
    pattern1 := C.CString(pattern)
    defer C.free(unsafe.Pointer(pattern1))

    var errptr *C.char
    var erroffset C.int
    ptr := C.pcre_compile(pattern1, C.int(flags), &errptr, &erroffset, nil)
    if ptr == nil {
        return &CompileError{
            Pattern: pattern,
            Message: C.GoString(errptr),
            Offset:  int(erroffset),
        }
    }
    C.free(unsafe.Pointer(ptr))
    return nil
}
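A small sketch of exercising this package on its own (it needs cgo and the libpcre headers to build; the exact error text comes from libpcre, so the comment below is only indicative):

package main

import (
    "fmt"

    "github.com/github/linguist/tools/grammars/pcre"
)

func main() {
    // A well-formed pattern compiles cleanly and returns nil.
    if err := pcre.CheckRegexp(`\w+(?=:)`, pcre.DefaultFlags); err != nil {
        fmt.Println("unexpected:", err)
    }

    // A malformed pattern yields a *CompileError carrying the offset.
    if err := pcre.CheckRegexp(`(unclosed`, pcre.DefaultFlags); err != nil {
        fmt.Println(err) // e.g. regex "`(unclosed`": missing ) (at offset 9)
    }
}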
239 tools/grammars/proto/grammar.pb.go Normal file
@@ -0,0 +1,239 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: proto/grammar.proto

/*
Package grammar is a generated protocol buffer package.

It is generated from these files:
    proto/grammar.proto

It has these top-level messages:
    Rule
    Library
*/
package grammar

import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package

type Rule struct {
    Name                string           `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
    ScopeName           string           `protobuf:"bytes,2,opt,name=scopeName" json:"scopeName,omitempty"`
    ContentName         string           `protobuf:"bytes,3,opt,name=contentName" json:"contentName,omitempty"`
    Match               string           `protobuf:"bytes,4,opt,name=match" json:"match,omitempty"`
    Begin               string           `protobuf:"bytes,5,opt,name=begin" json:"begin,omitempty"`
    While               string           `protobuf:"bytes,6,opt,name=while" json:"while,omitempty"`
    End                 string           `protobuf:"bytes,7,opt,name=end" json:"end,omitempty"`
    Include             string           `protobuf:"bytes,8,opt,name=include" json:"include,omitempty"`
    Patterns            []*Rule          `protobuf:"bytes,9,rep,name=patterns" json:"patterns,omitempty"`
    Captures            map[string]*Rule `protobuf:"bytes,10,rep,name=captures" json:"captures,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    BeginCaptures       map[string]*Rule `protobuf:"bytes,11,rep,name=beginCaptures" json:"beginCaptures,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    WhileCaptures       map[string]*Rule `protobuf:"bytes,12,rep,name=whileCaptures" json:"whileCaptures,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    EndCaptures         map[string]*Rule `protobuf:"bytes,13,rep,name=endCaptures" json:"endCaptures,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    Repository          map[string]*Rule `protobuf:"bytes,14,rep,name=repository" json:"repository,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    Injections          map[string]*Rule `protobuf:"bytes,15,rep,name=injections" json:"injections,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
    Disabled            bool             `protobuf:"varint,16,opt,name=disabled" json:"disabled,omitempty"`
    ApplyEndPatternLast bool             `protobuf:"varint,17,opt,name=applyEndPatternLast" json:"applyEndPatternLast,omitempty"`
    IncludeResetBase    bool             `protobuf:"varint,18,opt,name=includeResetBase" json:"includeResetBase,omitempty"`
}

func (m *Rule) Reset()                    { *m = Rule{} }
func (m *Rule) String() string            { return proto.CompactTextString(m) }
func (*Rule) ProtoMessage()               {}
func (*Rule) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }

func (m *Rule) GetName() string {
    if m != nil {
        return m.Name
    }
    return ""
}

func (m *Rule) GetScopeName() string {
    if m != nil {
        return m.ScopeName
    }
    return ""
}

func (m *Rule) GetContentName() string {
    if m != nil {
        return m.ContentName
    }
    return ""
}

func (m *Rule) GetMatch() string {
    if m != nil {
        return m.Match
    }
    return ""
}

func (m *Rule) GetBegin() string {
    if m != nil {
        return m.Begin
    }
    return ""
}

func (m *Rule) GetWhile() string {
    if m != nil {
        return m.While
    }
    return ""
}

func (m *Rule) GetEnd() string {
    if m != nil {
        return m.End
    }
    return ""
}

func (m *Rule) GetInclude() string {
    if m != nil {
        return m.Include
    }
    return ""
}

func (m *Rule) GetPatterns() []*Rule {
    if m != nil {
        return m.Patterns
    }
    return nil
}

func (m *Rule) GetCaptures() map[string]*Rule {
    if m != nil {
        return m.Captures
    }
    return nil
}

func (m *Rule) GetBeginCaptures() map[string]*Rule {
    if m != nil {
        return m.BeginCaptures
    }
    return nil
}

func (m *Rule) GetWhileCaptures() map[string]*Rule {
    if m != nil {
        return m.WhileCaptures
    }
    return nil
}

func (m *Rule) GetEndCaptures() map[string]*Rule {
    if m != nil {
        return m.EndCaptures
    }
    return nil
}

func (m *Rule) GetRepository() map[string]*Rule {
    if m != nil {
        return m.Repository
    }
    return nil
}

func (m *Rule) GetInjections() map[string]*Rule {
    if m != nil {
        return m.Injections
    }
    return nil
}

func (m *Rule) GetDisabled() bool {
    if m != nil {
        return m.Disabled
    }
    return false
}

func (m *Rule) GetApplyEndPatternLast() bool {
    if m != nil {
        return m.ApplyEndPatternLast
    }
    return false
}

func (m *Rule) GetIncludeResetBase() bool {
    if m != nil {
        return m.IncludeResetBase
    }
    return false
}

type Library struct {
    Grammars map[string]*Rule `protobuf:"bytes,1,rep,name=grammars" json:"grammars,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
}

func (m *Library) Reset()                    { *m = Library{} }
func (m *Library) String() string            { return proto.CompactTextString(m) }
func (*Library) ProtoMessage()               {}
func (*Library) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }

func (m *Library) GetGrammars() map[string]*Rule {
    if m != nil {
        return m.Grammars
    }
    return nil
}

func init() {
    proto.RegisterType((*Rule)(nil), "grammar.Rule")
    proto.RegisterType((*Library)(nil), "grammar.Library")
}

func init() { proto.RegisterFile("proto/grammar.proto", fileDescriptor0) }

var fileDescriptor0 = []byte{
    // 486 bytes of a gzipped FileDescriptorProto
    0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xcb, 0x8e, 0xd3, 0x30,
    0x14, 0x55, 0x66, 0xda, 0x69, 0x7a, 0x4b, 0x99, 0x72, 0x87, 0x85, 0x55, 0x1e, 0x8a, 0x86, 0x4d,
    0x61, 0x51, 0x10, 0x2c, 0x40, 0x23, 0x21, 0xa1, 0x41, 0x05, 0x81, 0xca, 0x43, 0xd9, 0xb0, 0x76,
    0x13, 0x6b, 0x26, 0x90, 0x3a, 0x91, 0xed, 0x82, 0xf2, 0x19, 0x7c, 0x19, 0xbf, 0x84, 0x7c, 0xed,
    0xa6, 0x49, 0xdb, 0x5d, 0x76, 0xbe, 0xe7, 0x25, 0x3b, 0x3e, 0x0e, 0x5c, 0x94, 0xaa, 0x30, 0xc5,
    0xf3, 0x1b, 0xc5, 0xd7, 0x6b, 0xae, 0xe6, 0x34, 0xe1, 0xc0, 0x8f, 0x97, 0xff, 0x86, 0xd0, 0x8b,
    0x37, 0xb9, 0x40, 0x84, 0x9e, 0xe4, 0x6b, 0xc1, 0x82, 0x28, 0x98, 0x0d, 0x63, 0x5a, 0xe3, 0x43,
    0x18, 0xea, 0xa4, 0x28, 0xc5, 0x57, 0x4b, 0x9c, 0x10, 0xb1, 0x03, 0x30, 0x82, 0x51, 0x52, 0x48,
    0x23, 0xa4, 0x21, 0xfe, 0x94, 0xf8, 0x26, 0x84, 0xf7, 0xa1, 0xbf, 0xe6, 0x26, 0xb9, 0x65, 0x3d,
    0xe2, 0xdc, 0x60, 0xd1, 0x95, 0xb8, 0xc9, 0x24, 0xeb, 0x3b, 0x94, 0x06, 0x8b, 0xfe, 0xb9, 0xcd,
    0x72, 0xc1, 0xce, 0x1c, 0x4a, 0x03, 0x4e, 0xe0, 0x54, 0xc8, 0x94, 0x0d, 0x08, 0xb3, 0x4b, 0x64,
    0x30, 0xc8, 0x64, 0x92, 0x6f, 0x52, 0xc1, 0x42, 0x42, 0xb7, 0x23, 0x3e, 0x85, 0xb0, 0xe4, 0xc6,
    0x08, 0x25, 0x35, 0x1b, 0x46, 0xa7, 0xb3, 0xd1, 0xcb, 0xf1, 0x7c, 0x7b, 0x6a, 0x7b, 0xc4, 0xb8,
    0xa6, 0xf1, 0x35, 0x84, 0x09, 0x2f, 0xcd, 0x46, 0x09, 0xcd, 0x80, 0xa4, 0x0f, 0x5a, 0xd2, 0xf9,
    0x7b, 0xcf, 0x2e, 0xa4, 0x51, 0x55, 0x5c, 0x8b, 0xf1, 0x03, 0x8c, 0x69, 0xbb, 0x5b, 0x9e, 0x8d,
    0xc8, 0x1d, 0xb5, 0xdd, 0xd7, 0x4d, 0x89, 0x8b, 0x68, 0xdb, 0x6c, 0x0e, 0x1d, 0xb0, 0xce, 0xb9,
    0x73, 0x2c, 0xe7, 0x47, 0x53, 0xe2, 0x73, 0x5a, 0x36, 0x7c, 0x07, 0x23, 0x21, 0xd3, 0x3a, 0x65,
    0x4c, 0x29, 0x8f, 0xdb, 0x29, 0x8b, 0x9d, 0xc0, 0x65, 0x34, 0x2d, 0xf8, 0x16, 0x40, 0x89, 0xb2,
    0xd0, 0x99, 0x29, 0x54, 0xc5, 0xee, 0x52, 0xc0, 0xa3, 0x76, 0x40, 0x5c, 0xf3, 0xce, 0xdf, 0x30,
    0x58, 0x7b, 0x26, 0x7f, 0x8a, 0xc4, 0x64, 0x85, 0xd4, 0xec, 0xfc, 0x98, 0xfd, 0x53, 0xcd, 0x7b,
    0xfb, 0xce, 0x80, 0x53, 0x08, 0xd3, 0x4c, 0xf3, 0x55, 0x2e, 0x52, 0x36, 0x89, 0x82, 0x59, 0x18,
    0xd7, 0x33, 0xbe, 0x80, 0x0b, 0x5e, 0x96, 0x79, 0xb5, 0x90, 0xe9, 0x77, 0x77, 0x71, 0x4b, 0xae,
    0x0d, 0xbb, 0x47, 0xb2, 0x63, 0x14, 0x3e, 0x83, 0x89, 0x2f, 0x43, 0x2c, 0xb4, 0x30, 0xd7, 0x5c,
    0x0b, 0x86, 0x24, 0x3f, 0xc0, 0xa7, 0x9f, 0x61, 0xdc, 0xfa, 0x2a, 0xb6, 0x6a, 0xbf, 0x44, 0xe5,
    0xfb, 0x6f, 0x97, 0xf8, 0x04, 0xfa, 0xbf, 0x79, 0xbe, 0x71, 0xd5, 0x3f, 0x68, 0x93, 0xe3, 0xae,
    0x4e, 0xde, 0x04, 0xd3, 0x6f, 0x80, 0x87, 0x57, 0xde, 0x31, 0xf0, 0xf0, 0xee, 0xbb, 0x04, 0x7e,
    0x81, 0xc9, 0x7e, 0x0d, 0xba, 0xc4, 0x2d, 0xe1, 0x7c, 0xaf, 0x14, 0x1d, 0xd3, 0xf6, 0x3a, 0xd2,
    0x21, 0xed, 0xf2, 0x6f, 0x00, 0x83, 0x65, 0xb6, 0x52, 0x5c, 0x55, 0x78, 0x05, 0xa1, 0x97, 0x69,
    0x16, 0xec, 0xbd, 0x0d, 0xaf, 0x99, 0x7f, 0xf4, 0x02, 0xff, 0xd4, 0xb7, 0x7a, 0x5b, 0x90, 0x16,
    0xd5, 0x61, 0x4f, 0xab, 0x33, 0xfa, 0xeb, 0xbe, 0xfa, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x2e,
    0xec, 0x55, 0x8c, 0x05, 0x00, 0x00,
}
8
vendor/README.md
vendored
8
vendor/README.md
vendored
@@ -70,6 +70,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **ColdFusion CFC:** [SublimeText/ColdFusion](https://github.com/SublimeText/ColdFusion)
 - **COLLADA:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
 - **Common Lisp:** [textmate/lisp.tmbundle](https://github.com/textmate/lisp.tmbundle)
+- **Common Workflow Language:** [manabuishii/language-cwl](https://github.com/manabuishii/language-cwl)
 - **Component Pascal:** [textmate/pascal.tmbundle](https://github.com/textmate/pascal.tmbundle)
 - **Cool:** [anunayk/cool-tmbundle](https://github.com/anunayk/cool-tmbundle)
 - **Coq:** [mkolosick/Sublime-Coq](https://github.com/mkolosick/Sublime-Coq)
@@ -168,7 +169,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **Ioke:** [vic/ioke-outdated](https://github.com/vic/ioke-outdated)
 - **Isabelle:** [lsf37/Isabelle.tmbundle](https://github.com/lsf37/Isabelle.tmbundle)
 - **Isabelle ROOT:** [lsf37/Isabelle.tmbundle](https://github.com/lsf37/Isabelle.tmbundle)
-- **J:** [bcj/JSyntax](https://github.com/bcj/JSyntax)
+- **J:** [tikkanz/JSyntax](https://github.com/tikkanz/JSyntax)
 - **Jasmin:** [atmarksharp/jasmin-sublime](https://github.com/atmarksharp/jasmin-sublime)
 - **Java:** [textmate/java.tmbundle](https://github.com/textmate/java.tmbundle)
 - **Java Server Pages:** [textmate/java.tmbundle](https://github.com/textmate/java.tmbundle)
@@ -182,7 +183,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **JSONiq:** [wcandillon/language-jsoniq](https://github.com/wcandillon/language-jsoniq)
 - **JSONLD:** [atom/language-javascript](https://github.com/atom/language-javascript)
 - **JSX:** [github-linguist/language-babel](https://github.com/github-linguist/language-babel)
-- **Julia:** [JuliaEditorSupport/Julia.tmbundle](https://github.com/JuliaEditorSupport/Julia.tmbundle)
+- **Julia:** [JuliaEditorSupport/atom-language-julia](https://github.com/JuliaEditorSupport/atom-language-julia)
 - **Jupyter Notebook:** [textmate/json.tmbundle](https://github.com/textmate/json.tmbundle)
 - **KiCad Layout:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
 - **KiCad Legacy Layout:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
@@ -277,6 +278,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **PLpgSQL:** [textmate/sql.tmbundle](https://github.com/textmate/sql.tmbundle)
 - **PogoScript:** [featurist/PogoScript.tmbundle](https://github.com/featurist/PogoScript.tmbundle)
 - **Pony:** [CausalityLtd/sublime-pony](https://github.com/CausalityLtd/sublime-pony)
+- **PostCSS:** [hudochenkov/Syntax-highlighting-for-PostCSS](https://github.com/hudochenkov/Syntax-highlighting-for-PostCSS)
 - **PostScript:** [textmate/postscript.tmbundle](https://github.com/textmate/postscript.tmbundle)
 - **POV-Ray SDL:** [c-lipka/language-povray](https://github.com/c-lipka/language-povray)
 - **PowerShell:** [SublimeText/PowerShell](https://github.com/SublimeText/PowerShell)
@@ -349,6 +351,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **Stylus:** [billymoon/Stylus](https://github.com/billymoon/Stylus)
 - **Sublime Text Config:** [atom/language-javascript](https://github.com/atom/language-javascript)
 - **SubRip Text:** [314eter/atom-language-srt](https://github.com/314eter/atom-language-srt)
+- **SugarSS:** [hudochenkov/Syntax-highlighting-for-PostCSS](https://github.com/hudochenkov/Syntax-highlighting-for-PostCSS)
 - **SuperCollider:** [supercollider/language-supercollider](https://github.com/supercollider/language-supercollider)
 - **SVG:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
 - **Swift:** [textmate/swift.tmbundle](https://github.com/textmate/swift.tmbundle)
@@ -405,4 +408,5 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
 - **Yacc:** [textmate/bison.tmbundle](https://github.com/textmate/bison.tmbundle)
 - **YAML:** [atom/language-yaml](https://github.com/atom/language-yaml)
 - **YANG:** [DzonyKalafut/language-yang](https://github.com/DzonyKalafut/language-yang)
+- **YARA:** [blacktop/language-yara](https://github.com/blacktop/language-yara)
 - **Zephir:** [phalcon/zephir-sublime](https://github.com/phalcon/zephir-sublime)
2 vendor/grammars/JSyntax (vendored)
Submodule vendor/grammars/JSyntax updated: 74971149b5...4647952123

1 vendor/grammars/Julia.tmbundle (vendored)
Submodule vendor/grammars/Julia.tmbundle deleted from 443f9e8689

2 vendor/grammars/Stylus (vendored)
Submodule vendor/grammars/Stylus updated: 61bab33f37...4b382d28fb

1 vendor/grammars/Syntax-highlighting-for-PostCSS (vendored)
Submodule vendor/grammars/Syntax-highlighting-for-PostCSS added at 575b918985

1 vendor/grammars/atom-language-julia (vendored)
Submodule vendor/grammars/atom-language-julia added at 4e8896ed0b

2 vendor/grammars/atom-language-perl6 (vendored)
Submodule vendor/grammars/atom-language-perl6 updated: 611c924d0f...382720261a

1 vendor/grammars/javadoc.tmbundle (vendored)
Submodule vendor/grammars/javadoc.tmbundle added at 5276d7a93f

2 vendor/grammars/language-batchfile (vendored)
Submodule vendor/grammars/language-batchfile updated: 6d2b18ef05...163e5ffc8d

1 vendor/grammars/language-cwl (vendored)
Submodule vendor/grammars/language-cwl added at 204ab237d3

2 vendor/grammars/language-roff (vendored)
Submodule vendor/grammars/language-roff updated: d456f404b5...0b77518f17

1 vendor/grammars/language-yara (vendored)
Submodule vendor/grammars/language-yara added at f08eec461a

2 vendor/grammars/sublime-aspectj (vendored)
Submodule vendor/grammars/sublime-aspectj updated: 043444fc3f...c1928d2335
25 vendor/licenses/grammar/Syntax-highlighting-for-PostCSS.txt (vendored, new file)
@@ -0,0 +1,25 @@
+---
+type: grammar
+name: Syntax-highlighting-for-PostCSS
+license: mit
+---
+The MIT License (MIT)
+
+Copyright 2017 Aleks Hudochenkov <aleks@hudochenkov.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 vendor/licenses/grammar/atom-language-julia.txt (vendored, new file)
@@ -0,0 +1,27 @@
+---
+type: grammar
+name: atom-language-julia
+license: mit
+---
+The atom-language-julia package is licensed under the MIT "Expat" License:
+
+> Copyright (c) 2015
+>
+> Permission is hereby granted, free of charge, to any person obtaining
+> a copy of this software and associated documentation files (the
+> "Software"), to deal in the Software without restriction, including
+> without limitation the rights to use, copy, modify, merge, publish,
+> distribute, sublicense, and/or sell copies of the Software, and to
+> permit persons to whom the Software is furnished to do so, subject to
+> the following conditions:
+>
+> The above copyright notice and this permission notice shall be
+> included in all copies or substantial portions of the Software.
+>
+> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,10 +1,9 @@
 ---
 type: grammar
-name: ruby.tmbundle
+name: javadoc.tmbundle
 license: permissive
 curated: true
 ---
 
 If not otherwise specified (see below), files in this repository fall under the following license:
 
 Permission to copy, use, modify, sell and distribute this
@@ -1,19 +1,18 @@
 ---
 type: grammar
-name: Julia.tmbundle
+name: language-cwl
 license: mit
 ---
-Copyright (c) 2012-2014 Stefan Karpinski, Elliot Saba, Dirk Gadsden,
-Adam Strzelecki, Jonathan Malmaud and other contributors:
-
-https://github.com/JuliaEditorSupport/Julia.tmbundle/contributors
+MIT License
+
+Copyright (c) 2017 Manabu Ishii RIKEN Bioinformatics Research Unit
 
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
 
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
26 vendor/licenses/grammar/language-yara.txt (vendored, new file)
@@ -0,0 +1,26 @@
+---
+type: grammar
+name: language-yara
+license: mit
+---
+The MIT License (MIT)
+
+Copyright (c) 2014-2016 Blacktop
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.