Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.

Compare commits (39 commits; author and date columns were not captured):
1f5ed3b3fe, 297be948d1, b4492e7205, c05bc99004, 99eaf5faf9, 21babbceb1, 15885701cd, 9b942086f7,
93cd47822f, ea3e79a631, 0af9a35ff1, 44048c9ba8, e51b5ec9b7, a47008ea00, a0b38e8207, 10dfe9f296,
0b9c05f989, 95dca67e2b, e98728595b, 4cd558c374, adf6206ef5, c2d558b71d, 78c58f956e, fc1404985a,
5d48ccd757, 3530a18e46, ae8f4f9228, 7c34d38786, 38bc5fd336, 6b06e47c67, 061712ff78, 7707585d5e,
fa7d433886, 998e24cf36, 63ff51e2ed, b541b53b78, a878620a8e, 5633fd3668, 9d0af0da40
.gitignore (vendored): 3 changed lines
@@ -8,3 +8,6 @@ lib/linguist/samples.json
/node_modules
test/fixtures/ace_modes.json
/vendor/gems/
/tmp
*.bundle
*.so
.gitmodules (vendored): 31 changed lines
@@ -400,10 +400,6 @@
[submodule "vendor/grammars/sublime_cobol"]
	path = vendor/grammars/sublime_cobol
	url = https://bitbucket.org/bitlang/sublime_cobol
[submodule "vendor/grammars/ruby.tmbundle"]
	path = vendor/grammars/ruby.tmbundle
	url = https://github.com/aroben/ruby.tmbundle
	branch = pl
[submodule "vendor/grammars/IDL-Syntax"]
	path = vendor/grammars/IDL-Syntax
	url = https://github.com/andik/IDL-Syntax
@@ -569,9 +565,6 @@
[submodule "vendor/grammars/sublime-aspectj"]
	path = vendor/grammars/sublime-aspectj
	url = https://github.com/pchaigno/sublime-aspectj
[submodule "vendor/grammars/sublime-typescript"]
	path = vendor/grammars/sublime-typescript
	url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
	path = vendor/grammars/sublime-pony
	url = https://github.com/CausalityLtd/sublime-pony
@@ -866,3 +859,27 @@
[submodule "vendor/grammars/language-reason"]
	path = vendor/grammars/language-reason
	url = https://github.com/reasonml-editor/language-reason
[submodule "vendor/grammars/sublime-nearley"]
	path = vendor/grammars/sublime-nearley
	url = https://github.com/Hardmath123/sublime-nearley
[submodule "vendor/grammars/data-weave-tmLanguage"]
	path = vendor/grammars/data-weave-tmLanguage
	url = https://github.com/mulesoft-labs/data-weave-tmLanguage
[submodule "vendor/grammars/squirrel-language"]
	path = vendor/grammars/squirrel-language
	url = https://github.com/mathewmariani/squirrel-language
[submodule "vendor/grammars/language-ballerina"]
	path = vendor/grammars/language-ballerina
	url = https://github.com/ballerinalang/plugin-vscode
[submodule "vendor/grammars/language-ruby"]
	path = vendor/grammars/language-ruby
	url = https://github.com/atom/language-ruby
[submodule "vendor/grammars/sublime-angelscript"]
	path = vendor/grammars/sublime-angelscript
	url = https://github.com/wronex/sublime-angelscript
[submodule "vendor/grammars/TypeScript-TmLanguage"]
	path = vendor/grammars/TypeScript-TmLanguage
	url = https://github.com/Microsoft/TypeScript-TmLanguage
[submodule "vendor/grammars/wdl-sublime-syntax-highlighter"]
	path = vendor/grammars/wdl-sublime-syntax-highlighter
	url = https://github.com/broadinstitute/wdl-sublime-syntax-highlighter
.travis.yml
@@ -19,10 +19,6 @@ rvm:
  - 2.3.3
  - 2.4.0

matrix:
  allow_failures:
    - rvm: 2.4.0

notifications:
  disabled: true

@@ -32,3 +28,5 @@ git:

cache: bundler
dist: precise

bundler_args: --without debug
CONTRIBUTING.md
@@ -17,7 +17,7 @@ To add support for a new extension:
In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:

1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

@@ -36,7 +36,7 @@ To add support for a new language:
In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try and avoid false positives!
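The heuristics mentioned in these steps are small disambiguation rules in `lib/linguist/heuristics.rb`, written with the `disambiguate` block helper that also appears further down in this diff. A minimal sketch of the shape of such a rule; the `.foo` extension, the regexes and the language names are made-up placeholders:

```ruby
# Hypothetical example of a heuristic rule (placeholder extension, patterns
# and language names); real rules live in lib/linguist/heuristics.rb.
disambiguate ".foo" do |data|
  if /^\s*module\s+\w+/.match(data)
    Language["FooML"]      # placeholder language
  elsif /^\s*#include\s+</.match(data)
    Language["FooC"]       # placeholder language
  end
end
```

A rule receives the file contents as `data` and returns a Language when its patterns are conclusive, or nil to stay undecided.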
Gemfile: 5 changed lines
@@ -1,3 +1,6 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gem 'byebug' if RUBY_VERSION >= '2.0'

group :debug do
  gem 'byebug' if RUBY_VERSION >= '2.2'
end
Rakefile: 23 changed lines
@@ -1,6 +1,7 @@
require 'bundler/setup'
require 'rake/clean'
require 'rake/testtask'
require 'rake/extensiontask'
require 'yaml'
require 'yajl'
require 'open-uri'
@@ -10,8 +11,14 @@ task :default => :test

Rake::TestTask.new

gem_spec = Gem::Specification.load('github-linguist.gemspec')

Rake::ExtensionTask.new('linguist', gem_spec) do |ext|
  ext.lib_dir = File.join('lib', 'linguist')
end

# Extend test task to check for samples and fetch latest Ace modes
task :test => [:check_samples, :fetch_ace_modes]
task :test => [:compile, :check_samples, :fetch_ace_modes]

desc "Check that we have samples.json generated"
task :check_samples do
@@ -34,12 +41,24 @@ task :fetch_ace_modes do
  end
end

task :samples do
task :samples => :compile do
  require 'linguist/samples'
  json = Yajl.dump(Linguist::Samples.data, :pretty => true)
  File.write 'lib/linguist/samples.json', json
end

FLEX_MIN_VER = [2, 5, 39]
task :flex do
  if `flex -V` !~ /^flex (\d+)\.(\d+)\.(\d+)/
    fail "flex not detected"
  end
  maj, min, rev = $1.to_i, $2.to_i, $3.to_i
  if maj < FLEX_MIN_VER[0] || (maj == FLEX_MIN_VER[0] && (min < FLEX_MIN_VER[1] || (min == FLEX_MIN_VER[1] && rev < FLEX_MIN_VER[2])))
    fail "building linguist's lexer requires at least flex #{FLEX_MIN_VER.join(".")}"
  end
  system "cd ext/linguist && flex tokenizer.l"
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
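The `:flex` task above gates the build on a minimum flex version with a nested major/minor/revision comparison. As a side note, the same check can be written more compactly with Ruby's lexicographic array comparison; this is only a sketch, not part of the diff:

```ruby
# Equivalent version gate using Array#<=>, which compares element by element.
FLEX_MIN_VER = [2, 5, 39]

if `flex -V` =~ /^flex (\d+)\.(\d+)\.(\d+)/
  found = [$1.to_i, $2.to_i, $3.to_i]
  if (found <=> FLEX_MIN_VER) < 0
    fail "building linguist's lexer requires at least flex #{FLEX_MIN_VER.join('.')}"
  end
else
  fail "flex not detected"
end
```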
ext/linguist/extconf.rb (new file): 3 lines
@@ -0,0 +1,3 @@
require 'mkmf'
dir_config('linguist')
create_makefile('linguist/linguist')
ext/linguist/lex.linguist_yy.c (new file): 8269 lines
File diff suppressed because it is too large.
ext/linguist/lex.linguist_yy.h (new file): 353 lines
@@ -0,0 +1,353 @@
|
||||
#ifndef linguist_yyHEADER_H
|
||||
#define linguist_yyHEADER_H 1
|
||||
#define linguist_yyIN_HEADER 1
|
||||
|
||||
#line 6 "lex.linguist_yy.h"
|
||||
|
||||
#define YY_INT_ALIGNED short int
|
||||
|
||||
/* A lexical scanner generated by flex */
|
||||
|
||||
#define FLEX_SCANNER
|
||||
#define YY_FLEX_MAJOR_VERSION 2
|
||||
#define YY_FLEX_MINOR_VERSION 5
|
||||
#define YY_FLEX_SUBMINOR_VERSION 39
|
||||
#if YY_FLEX_SUBMINOR_VERSION > 0
|
||||
#define FLEX_BETA
|
||||
#endif
|
||||
|
||||
/* First, we deal with platform-specific or compiler-specific issues. */
|
||||
|
||||
/* begin standard C headers. */
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <errno.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
/* end standard C headers. */
|
||||
|
||||
/* flex integer type definitions */
|
||||
|
||||
#ifndef FLEXINT_H
|
||||
#define FLEXINT_H
|
||||
|
||||
/* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */
|
||||
|
||||
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
|
||||
|
||||
/* C99 says to define __STDC_LIMIT_MACROS before including stdint.h,
|
||||
* if you want the limit (max/min) macros for int types.
|
||||
*/
|
||||
#ifndef __STDC_LIMIT_MACROS
|
||||
#define __STDC_LIMIT_MACROS 1
|
||||
#endif
|
||||
|
||||
#include <inttypes.h>
|
||||
typedef int8_t flex_int8_t;
|
||||
typedef uint8_t flex_uint8_t;
|
||||
typedef int16_t flex_int16_t;
|
||||
typedef uint16_t flex_uint16_t;
|
||||
typedef int32_t flex_int32_t;
|
||||
typedef uint32_t flex_uint32_t;
|
||||
#else
|
||||
typedef signed char flex_int8_t;
|
||||
typedef short int flex_int16_t;
|
||||
typedef int flex_int32_t;
|
||||
typedef unsigned char flex_uint8_t;
|
||||
typedef unsigned short int flex_uint16_t;
|
||||
typedef unsigned int flex_uint32_t;
|
||||
|
||||
/* Limits of integral types. */
|
||||
#ifndef INT8_MIN
|
||||
#define INT8_MIN (-128)
|
||||
#endif
|
||||
#ifndef INT16_MIN
|
||||
#define INT16_MIN (-32767-1)
|
||||
#endif
|
||||
#ifndef INT32_MIN
|
||||
#define INT32_MIN (-2147483647-1)
|
||||
#endif
|
||||
#ifndef INT8_MAX
|
||||
#define INT8_MAX (127)
|
||||
#endif
|
||||
#ifndef INT16_MAX
|
||||
#define INT16_MAX (32767)
|
||||
#endif
|
||||
#ifndef INT32_MAX
|
||||
#define INT32_MAX (2147483647)
|
||||
#endif
|
||||
#ifndef UINT8_MAX
|
||||
#define UINT8_MAX (255U)
|
||||
#endif
|
||||
#ifndef UINT16_MAX
|
||||
#define UINT16_MAX (65535U)
|
||||
#endif
|
||||
#ifndef UINT32_MAX
|
||||
#define UINT32_MAX (4294967295U)
|
||||
#endif
|
||||
|
||||
#endif /* ! C99 */
|
||||
|
||||
#endif /* ! FLEXINT_H */
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
||||
/* The "const" storage-class-modifier is valid. */
|
||||
#define YY_USE_CONST
|
||||
|
||||
#else /* ! __cplusplus */
|
||||
|
||||
/* C99 requires __STDC__ to be defined as 1. */
|
||||
#if defined (__STDC__)
|
||||
|
||||
#define YY_USE_CONST
|
||||
|
||||
#endif /* defined (__STDC__) */
|
||||
#endif /* ! __cplusplus */
|
||||
|
||||
#ifdef YY_USE_CONST
|
||||
#define yyconst const
|
||||
#else
|
||||
#define yyconst
|
||||
#endif
|
||||
|
||||
/* An opaque pointer. */
|
||||
#ifndef YY_TYPEDEF_YY_SCANNER_T
|
||||
#define YY_TYPEDEF_YY_SCANNER_T
|
||||
typedef void* yyscan_t;
|
||||
#endif
|
||||
|
||||
/* For convenience, these vars (plus the bison vars far below)
|
||||
are macros in the reentrant scanner. */
|
||||
#define yyin yyg->yyin_r
|
||||
#define yyout yyg->yyout_r
|
||||
#define yyextra yyg->yyextra_r
|
||||
#define yyleng yyg->yyleng_r
|
||||
#define yytext yyg->yytext_r
|
||||
#define yylineno (YY_CURRENT_BUFFER_LVALUE->yy_bs_lineno)
|
||||
#define yycolumn (YY_CURRENT_BUFFER_LVALUE->yy_bs_column)
|
||||
#define yy_flex_debug yyg->yy_flex_debug_r
|
||||
|
||||
/* Size of default input buffer. */
|
||||
#ifndef YY_BUF_SIZE
|
||||
#ifdef __ia64__
|
||||
/* On IA-64, the buffer size is 16k, not 8k.
|
||||
* Moreover, YY_BUF_SIZE is 2*YY_READ_BUF_SIZE in the general case.
|
||||
* Ditto for the __ia64__ case accordingly.
|
||||
*/
|
||||
#define YY_BUF_SIZE 32768
|
||||
#else
|
||||
#define YY_BUF_SIZE 16384
|
||||
#endif /* __ia64__ */
|
||||
#endif
|
||||
|
||||
#ifndef YY_TYPEDEF_YY_BUFFER_STATE
|
||||
#define YY_TYPEDEF_YY_BUFFER_STATE
|
||||
typedef struct yy_buffer_state *YY_BUFFER_STATE;
|
||||
#endif
|
||||
|
||||
#ifndef YY_TYPEDEF_YY_SIZE_T
|
||||
#define YY_TYPEDEF_YY_SIZE_T
|
||||
typedef size_t yy_size_t;
|
||||
#endif
|
||||
|
||||
#ifndef YY_STRUCT_YY_BUFFER_STATE
|
||||
#define YY_STRUCT_YY_BUFFER_STATE
|
||||
struct yy_buffer_state
|
||||
{
|
||||
FILE *yy_input_file;
|
||||
|
||||
char *yy_ch_buf; /* input buffer */
|
||||
char *yy_buf_pos; /* current position in input buffer */
|
||||
|
||||
/* Size of input buffer in bytes, not including room for EOB
|
||||
* characters.
|
||||
*/
|
||||
yy_size_t yy_buf_size;
|
||||
|
||||
/* Number of characters read into yy_ch_buf, not including EOB
|
||||
* characters.
|
||||
*/
|
||||
yy_size_t yy_n_chars;
|
||||
|
||||
/* Whether we "own" the buffer - i.e., we know we created it,
|
||||
* and can realloc() it to grow it, and should free() it to
|
||||
* delete it.
|
||||
*/
|
||||
int yy_is_our_buffer;
|
||||
|
||||
/* Whether this is an "interactive" input source; if so, and
|
||||
* if we're using stdio for input, then we want to use getc()
|
||||
* instead of fread(), to make sure we stop fetching input after
|
||||
* each newline.
|
||||
*/
|
||||
int yy_is_interactive;
|
||||
|
||||
/* Whether we're considered to be at the beginning of a line.
|
||||
* If so, '^' rules will be active on the next match, otherwise
|
||||
* not.
|
||||
*/
|
||||
int yy_at_bol;
|
||||
|
||||
int yy_bs_lineno; /**< The line count. */
|
||||
int yy_bs_column; /**< The column count. */
|
||||
|
||||
/* Whether to try to fill the input buffer when we reach the
|
||||
* end of it.
|
||||
*/
|
||||
int yy_fill_buffer;
|
||||
|
||||
int yy_buffer_status;
|
||||
|
||||
};
|
||||
#endif /* !YY_STRUCT_YY_BUFFER_STATE */
|
||||
|
||||
void linguist_yyrestart (FILE *input_file ,yyscan_t yyscanner );
|
||||
void linguist_yy_switch_to_buffer (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner );
|
||||
YY_BUFFER_STATE linguist_yy_create_buffer (FILE *file,int size ,yyscan_t yyscanner );
|
||||
void linguist_yy_delete_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner );
|
||||
void linguist_yy_flush_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner );
|
||||
void linguist_yypush_buffer_state (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner );
|
||||
void linguist_yypop_buffer_state (yyscan_t yyscanner );
|
||||
|
||||
YY_BUFFER_STATE linguist_yy_scan_buffer (char *base,yy_size_t size ,yyscan_t yyscanner );
|
||||
YY_BUFFER_STATE linguist_yy_scan_string (yyconst char *yy_str ,yyscan_t yyscanner );
|
||||
YY_BUFFER_STATE linguist_yy_scan_bytes (yyconst char *bytes,yy_size_t len ,yyscan_t yyscanner );
|
||||
|
||||
void *linguist_yyalloc (yy_size_t ,yyscan_t yyscanner );
|
||||
void *linguist_yyrealloc (void *,yy_size_t ,yyscan_t yyscanner );
|
||||
void linguist_yyfree (void * ,yyscan_t yyscanner );
|
||||
|
||||
/* Begin user sect3 */
|
||||
|
||||
#define yytext_ptr yytext_r
|
||||
|
||||
#ifdef YY_HEADER_EXPORT_START_CONDITIONS
|
||||
#define INITIAL 0
|
||||
#define sgml 1
|
||||
#define c_comment 2
|
||||
#define xml_comment 3
|
||||
#define haskell_comment 4
|
||||
#define ocaml_comment 5
|
||||
#define python_dcomment 6
|
||||
#define python_scomment 7
|
||||
|
||||
#endif
|
||||
|
||||
#ifndef YY_NO_UNISTD_H
|
||||
/* Special case for "unistd.h", since it is non-ANSI. We include it way
|
||||
* down here because we want the user's section 1 to have been scanned first.
|
||||
* The user has a chance to override it with an option.
|
||||
*/
|
||||
#include <unistd.h>
|
||||
#endif
|
||||
|
||||
#define YY_EXTRA_TYPE struct tokenizer_extra *
|
||||
|
||||
int linguist_yylex_init (yyscan_t* scanner);
|
||||
|
||||
int linguist_yylex_init_extra (YY_EXTRA_TYPE user_defined,yyscan_t* scanner);
|
||||
|
||||
/* Accessor methods to globals.
|
||||
These are made visible to non-reentrant scanners for convenience. */
|
||||
|
||||
int linguist_yylex_destroy (yyscan_t yyscanner );
|
||||
|
||||
int linguist_yyget_debug (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_debug (int debug_flag ,yyscan_t yyscanner );
|
||||
|
||||
YY_EXTRA_TYPE linguist_yyget_extra (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_extra (YY_EXTRA_TYPE user_defined ,yyscan_t yyscanner );
|
||||
|
||||
FILE *linguist_yyget_in (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_in (FILE * in_str ,yyscan_t yyscanner );
|
||||
|
||||
FILE *linguist_yyget_out (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_out (FILE * out_str ,yyscan_t yyscanner );
|
||||
|
||||
yy_size_t linguist_yyget_leng (yyscan_t yyscanner );
|
||||
|
||||
char *linguist_yyget_text (yyscan_t yyscanner );
|
||||
|
||||
int linguist_yyget_lineno (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_lineno (int line_number ,yyscan_t yyscanner );
|
||||
|
||||
int linguist_yyget_column (yyscan_t yyscanner );
|
||||
|
||||
void linguist_yyset_column (int column_no ,yyscan_t yyscanner );
|
||||
|
||||
/* Macros after this point can all be overridden by user definitions in
|
||||
* section 1.
|
||||
*/
|
||||
|
||||
#ifndef YY_SKIP_YYWRAP
|
||||
#ifdef __cplusplus
|
||||
extern "C" int linguist_yywrap (yyscan_t yyscanner );
|
||||
#else
|
||||
extern int linguist_yywrap (yyscan_t yyscanner );
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifndef yytext_ptr
|
||||
static void yy_flex_strncpy (char *,yyconst char *,int ,yyscan_t yyscanner);
|
||||
#endif
|
||||
|
||||
#ifdef YY_NEED_STRLEN
|
||||
static int yy_flex_strlen (yyconst char * ,yyscan_t yyscanner);
|
||||
#endif
|
||||
|
||||
#ifndef YY_NO_INPUT
|
||||
|
||||
#endif
|
||||
|
||||
/* Amount of stuff to slurp up with each read. */
|
||||
#ifndef YY_READ_BUF_SIZE
|
||||
#ifdef __ia64__
|
||||
/* On IA-64, the buffer size is 16k, not 8k */
|
||||
#define YY_READ_BUF_SIZE 16384
|
||||
#else
|
||||
#define YY_READ_BUF_SIZE 8192
|
||||
#endif /* __ia64__ */
|
||||
#endif
|
||||
|
||||
/* Number of entries by which start-condition stack grows. */
|
||||
#ifndef YY_START_STACK_INCR
|
||||
#define YY_START_STACK_INCR 25
|
||||
#endif
|
||||
|
||||
/* Default declaration of generated scanner - a define so the user can
|
||||
* easily add parameters.
|
||||
*/
|
||||
#ifndef YY_DECL
|
||||
#define YY_DECL_IS_OURS 1
|
||||
|
||||
extern int linguist_yylex (yyscan_t yyscanner);
|
||||
|
||||
#define YY_DECL int linguist_yylex (yyscan_t yyscanner)
|
||||
#endif /* !YY_DECL */
|
||||
|
||||
/* yy_get_previous_state - get the state just before the EOB char was reached */
|
||||
|
||||
#undef YY_NEW_FILE
|
||||
#undef YY_FLUSH_BUFFER
|
||||
#undef yy_set_bol
|
||||
#undef yy_new_buffer
|
||||
#undef yy_set_interactive
|
||||
#undef YY_DO_BEFORE_ACTION
|
||||
|
||||
#ifdef YY_DECL_IS_OURS
|
||||
#undef YY_DECL_IS_OURS
|
||||
#undef YY_DECL
|
||||
#endif
|
||||
|
||||
#line 117 "tokenizer.l"
|
||||
|
||||
|
||||
#line 352 "lex.linguist_yy.h"
|
||||
#undef linguist_yyIN_HEADER
|
||||
#endif /* linguist_yyHEADER_H */
|
||||
ext/linguist/linguist.c (new file): 64 lines
@@ -0,0 +1,64 @@
#include "ruby.h"
#include "linguist.h"
#include "lex.linguist_yy.h"

int linguist_yywrap(yyscan_t yyscanner) {
  return 1;
}

static VALUE rb_tokenizer_extract_tokens(VALUE self, VALUE rb_data) {
  YY_BUFFER_STATE buf;
  yyscan_t scanner;
  struct tokenizer_extra extra;
  VALUE ary, s;
  long len;
  int r;

  Check_Type(rb_data, T_STRING);

  len = RSTRING_LEN(rb_data);
  if (len > 100000)
    len = 100000;

  linguist_yylex_init_extra(&extra, &scanner);
  buf = linguist_yy_scan_bytes(RSTRING_PTR(rb_data), (int) len, scanner);

  ary = rb_ary_new();
  do {
    extra.type = NO_ACTION;
    extra.token = NULL;
    r = linguist_yylex(scanner);
    switch (extra.type) {
    case NO_ACTION:
      break;
    case REGULAR_TOKEN:
      rb_ary_push(ary, rb_str_new2(extra.token));
      free(extra.token);
      break;
    case SHEBANG_TOKEN:
      s = rb_str_new2("SHEBANG#!");
      rb_str_cat2(s, extra.token);
      rb_ary_push(ary, s);
      free(extra.token);
      break;
    case SGML_TOKEN:
      s = rb_str_new2(extra.token);
      rb_str_cat2(s, ">");
      rb_ary_push(ary, s);
      free(extra.token);
      break;
    }
  } while (r);

  linguist_yy_delete_buffer(buf, scanner);
  linguist_yylex_destroy(scanner);

  return ary;
}

__attribute__((visibility("default"))) void Init_linguist() {
  VALUE rb_mLinguist = rb_define_module("Linguist");
  VALUE rb_cTokenizer = rb_define_class_under(rb_mLinguist, "Tokenizer", rb_cObject);

  rb_define_method(rb_cTokenizer, "extract_tokens", rb_tokenizer_extract_tokens, 1);
}
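Init_linguist registers the scanner above as `Linguist::Tokenizer#extract_tokens`. A small usage sketch from the Ruby side; the input strings are made up, the first expected result follows the documented example in `lib/linguist/tokenizer.rb` later in this diff, and the second is inferred from the lexer rules in tokenizer.l:

```ruby
require 'linguist/linguist'   # the compiled C extension built by extconf.rb

tokenizer = Linguist::Tokenizer.new

p tokenizer.extract_tokens("printf('Hello')")
# => ["printf", "(", ")"]   (string literals and comments are skipped)

p tokenizer.extract_tokens("#!/usr/bin/env ruby")
# => ["SHEBANG#!ruby"]      (shebang interpreters get the SHEBANG#! prefix)
```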
ext/linguist/linguist.h (new file): 11 lines
@@ -0,0 +1,11 @@
enum tokenizer_type {
  NO_ACTION,
  REGULAR_TOKEN,
  SHEBANG_TOKEN,
  SGML_TOKEN,
};

struct tokenizer_extra {
  char *token;
  enum tokenizer_type type;
};
ext/linguist/tokenizer.l (new file): 119 lines
@@ -0,0 +1,119 @@
%{

#include "linguist.h"

#define feed_token(tok, typ) do { \
    yyextra->token = (tok); \
    yyextra->type = (typ); \
  } while (0)

#define eat_until_eol() do { \
    int c; \
    while ((c = input(yyscanner)) != '\n' && c != EOF); \
    if (c == EOF) \
      yyterminate(); \
  } while (0)

#define eat_until_unescaped(q) do { \
    int c; \
    while ((c = input(yyscanner)) != EOF) { \
      if (c == '\n') \
        break; \
      if (c == '\\') { \
        c = input(yyscanner); \
        if (c == EOF) \
          yyterminate(); \
      } else if (c == q) \
        break; \
    } \
    if (c == EOF) \
      yyterminate(); \
  } while (0)

%}

%option never-interactive yywrap reentrant nounput warn nodefault header-file="lex.linguist_yy.h" extra-type="struct tokenizer_extra *" prefix="linguist_yy"
%x sgml c_comment xml_comment haskell_comment ocaml_comment python_dcomment python_scomment

%%

^#![ \t]*([[:alnum:]_\/]*\/)?env([ \t]+([^ \t=]*=[^ \t]*))*[ \t]+[[:alpha:]_]+ {
    const char *off = strrchr(yytext, ' ');
    if (!off)
      off = yytext;
    else
      ++off;
    feed_token(strdup(off), SHEBANG_TOKEN);
    eat_until_eol();
    return 1;
  }

^#![ \t]*[[:alpha:]_\/]+ {
    const char *off = strrchr(yytext, '/');
    if (!off)
      off = yytext;
    else
      ++off;
    if (strcmp(off, "env") == 0) {
      eat_until_eol();
    } else {
      feed_token(strdup(off), SHEBANG_TOKEN);
      eat_until_eol();
      return 1;
    }
  }

^[ \t]*(\/\/|--|\#|%|\")" ".* { /* nothing */ }

"/*" { BEGIN(c_comment); }
/* See below for xml_comment start. */
"{-" { BEGIN(haskell_comment); }
"(*" { BEGIN(ocaml_comment); }
"\"\"\"" { BEGIN(python_dcomment); }
"'''" { BEGIN(python_scomment); }

<c_comment,xml_comment,haskell_comment,ocaml_comment,python_dcomment,python_scomment>.|\n { /* nothing */ }
<c_comment>"*/" { BEGIN(INITIAL); }
<xml_comment>"-->" { BEGIN(INITIAL); }
<haskell_comment>"-}" { BEGIN(INITIAL); }
<ocaml_comment>"*)" { BEGIN(INITIAL); }
<python_dcomment>"\"\"\"" { BEGIN(INITIAL); }
<python_scomment>"'''" { BEGIN(INITIAL); }

\"\"|'' { /* nothing */ }
\" { eat_until_unescaped('"'); }
' { eat_until_unescaped('\''); }
(0x[0-9a-fA-F]([0-9a-fA-F]|\.)*|[0-9]([0-9]|\.)*)([uU][lL]{0,2}|([eE][-+][0-9]*)?[fFlL]*) { /* nothing */ }
\<[^ \t\n\r<>]+/>|" "[^<>\n]{0,2048}> {
    if (strcmp(yytext, "<!--") == 0) {
      BEGIN(xml_comment);
    } else {
      feed_token(strdup(yytext), SGML_TOKEN);
      BEGIN(sgml);
      return 1;
    }
  }
<sgml>[[:alnum:]_]+=/\" { feed_token(strdup(yytext), REGULAR_TOKEN); input(yyscanner); eat_until_unescaped('"'); return 1; }
<sgml>[[:alnum:]_]+=/' { feed_token(strdup(yytext), REGULAR_TOKEN); input(yyscanner); eat_until_unescaped('\''); return 1; }
<sgml>[[:alnum:]_]+=[[:alnum:]_]* { feed_token(strdup(yytext), REGULAR_TOKEN); *(strchr(yyextra->token, '=') + 1) = 0; return 1; }
<sgml>[[:alnum:]_]+ { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
<sgml>\> { BEGIN(INITIAL); }
<sgml>.|\n { /* nothing */ }
;|\{|\}|\(|\)|\[|\] { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
[[:alnum:]_.@#/*]+ {
    if (strncmp(yytext, "/*", 2) == 0) {
      if (strlen(yytext) >= 4 && strcmp(yytext + strlen(yytext) - 2, "*/") == 0) {
        /* nothing */
      } else {
        BEGIN(c_comment);
      }
    } else {
      feed_token(strdup(yytext), REGULAR_TOKEN);
      return 1;
    }
  }
\<\<?|\+|\-|\*|\/|%|&&?|\|\|? { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
.|\n { /* nothing */ }

%%
github-linguist.gemspec
@@ -10,15 +10,17 @@ Gem::Specification.new do |s|
  s.homepage = "https://github.com/github/linguist"
  s.license  = "MIT"

  s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
  s.files = Dir['lib/**/*'] + Dir['ext/**/*'] + Dir['grammars/*'] + ['LICENSE']
  s.executables = ['linguist', 'git-linguist']
  s.extensions = ['ext/linguist/extconf.rb']

  s.add_dependency 'charlock_holmes', '~> 0.7.3'
  s.add_dependency 'charlock_holmes', '~> 0.7.5'
  s.add_dependency 'escape_utils',    '~> 1.1.0'
  s.add_dependency 'mime-types',      '>= 1.19'
  s.add_dependency 'rugged',          '>= 0.25.1'

  s.add_development_dependency 'minitest', '>= 5.0'
  s.add_development_dependency 'rake-compiler', '~> 0.9'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'plist', '~>3.1'
  s.add_development_dependency 'pry'
grammars.yml: 31 changed lines
@@ -138,6 +138,11 @@ vendor/grammars/Terraform.tmLanguage:
- source.terraform
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/TypeScript-TmLanguage:
- source.ts
- source.tsx
- text.error-list
- text.find-refs
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
@@ -252,6 +257,8 @@ vendor/grammars/d.tmbundle:
vendor/grammars/dartlang:
- source.dart
- source.yaml-ext
vendor/grammars/data-weave-tmLanguage:
- source.data-weave
vendor/grammars/desktop.tmbundle:
- source.desktop
vendor/grammars/diff.tmbundle:
@@ -350,6 +357,8 @@ vendor/grammars/language-asn1:
vendor/grammars/language-babel:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-ballerina:
- source.ballerina
vendor/grammars/language-batchfile:
- source.batchfile
vendor/grammars/language-blade:
@@ -394,6 +403,7 @@ vendor/grammars/language-haml:
- text.haml
- text.hamlc
vendor/grammars/language-haskell:
- annotation.liquidhaskell.haskell
- hint.haskell
- hint.message.haskell
- hint.type.haskell
@@ -401,6 +411,7 @@ vendor/grammars/language-haskell:
- source.cabal
- source.haskell
- source.hsc2hs
- source.hsig
- text.tex.latex.haskell
vendor/grammars/language-inform7:
- source.inform7
@@ -459,6 +470,10 @@ vendor/grammars/language-roff:
vendor/grammars/language-rpm-spec:
- source.changelogs.rpm-spec
- source.rpm-spec
vendor/grammars/language-ruby:
- source.ruby
- source.ruby.gemfile
- text.html.erb
vendor/grammars/language-shellscript:
- source.shell
- text.shell-session
@@ -588,9 +603,6 @@ vendor/grammars/rascal-syntax-highlighting:
- source.rascal
vendor/grammars/ruby-slim.tmbundle:
- text.slim
vendor/grammars/ruby.tmbundle:
- source.ruby
- text.html.erb
vendor/grammars/sas.tmbundle:
- source.SASLog
- source.sas
@@ -616,6 +628,8 @@ vendor/grammars/sourcepawn:
- source.sp
vendor/grammars/sql.tmbundle:
- source.sql
vendor/grammars/squirrel-language:
- source.nut
vendor/grammars/st2-zonefile:
- text.zone_file
vendor/grammars/standard-ml.tmbundle:
@@ -623,6 +637,8 @@ vendor/grammars/standard-ml.tmbundle:
- source.ml
vendor/grammars/sublime-MuPAD:
- source.mupad
vendor/grammars/sublime-angelscript:
- source.angelscript
vendor/grammars/sublime-aspectj:
- source.aspectj
vendor/grammars/sublime-autoit:
@@ -644,6 +660,8 @@ vendor/grammars/sublime-golo:
- source.golo
vendor/grammars/sublime-mask:
- source.mask
vendor/grammars/sublime-nearley:
- source.ne
vendor/grammars/sublime-netlinx:
- source.netlinx
- source.netlinx.erb
@@ -669,11 +687,6 @@ vendor/grammars/sublime-terra:
- source.terra
vendor/grammars/sublime-text-ox:
- source.ox
vendor/grammars/sublime-typescript:
- source.ts
- source.tsx
- text.error-list
- text.find-refs
vendor/grammars/sublime-varnish:
- source.varnish.vcl
vendor/grammars/sublime_cobol:
@@ -706,6 +719,8 @@ vendor/grammars/vhdl:
- source.vhdl
vendor/grammars/vue-syntax-highlight:
- text.html.vue
vendor/grammars/wdl-sublime-syntax-highlighter:
- source.wdl
vendor/grammars/xc.tmbundle:
- source.xc
vendor/grammars/xml.tmbundle:
lib/linguist/blob_helper.rb
@@ -275,10 +275,8 @@ module Linguist
      # also--importantly--without having to duplicate many (potentially
      # large) strings.
      begin
        encoded_newlines = ["\r\n", "\r", "\n"].
          map { |nl| nl.encode(ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding) }

        data.split(Regexp.union(encoded_newlines), -1)

        data.split(encoded_newlines_re, -1)
      rescue Encoding::ConverterNotFoundError
        # The data is not splittable in the detected encoding.  Assume it's
        # one big line.
@@ -289,6 +287,51 @@ module Linguist
      end
    end

    def encoded_newlines_re
      @encoded_newlines_re ||= Regexp.union(["\r\n", "\r", "\n"].
        map { |nl| nl.encode(ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding) })
    end

    def first_lines(n)
      return lines[0...n] if defined? @lines
      return [] unless viewable? && data

      i, c = 0, 0
      while c < n && j = data.index(encoded_newlines_re, i)
        i = j + $&.length
        c += 1
      end
      data[0...i].split(encoded_newlines_re, -1)
    end

    def last_lines(n)
      if defined? @lines
        if n >= @lines.length
          @lines
        else
          lines[-n..-1]
        end
      end
      return [] unless viewable? && data

      no_eol = true
      i, c = data.length, 0
      k = i
      while c < n && j = data.rindex(encoded_newlines_re, i - 1)
        if c == 0 && j + $&.length == i
          no_eol = false
          n += 1
        end
        i = j
        k = j + $&.length
        c += 1
      end
      r = data[k..-1].split(encoded_newlines_re, -1)
      r.pop if !no_eol
      r
    end

    # Public: Get number of lines of code
    #
    # Requires Blob#data
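The new `first_lines`/`last_lines` helpers let callers read just the head or tail of a blob without splitting the whole file into lines; the modeline strategy further down in this diff switches to them. A rough usage sketch, with a placeholder file path:

```ruby
# Hypothetical usage of the new helpers (placeholder path).
blob = Linguist::FileBlob.new("app/models/user.rb")
head = blob.first_lines(5)   # Array of up to 5 Strings from the start of the file
tail = blob.last_lines(5)    # Array of up to 5 Strings from the end of the file
```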
lib/linguist/classifier.rb
@@ -3,6 +3,8 @@ require 'linguist/tokenizer'
module Linguist
  # Language bayesian classifier.
  class Classifier
    CLASSIFIER_CONSIDER_BYTES = 50 * 1024

    # Public: Use the classifier to detect language of the blob.
    #
    # blob - An object that quacks like a blob.
@@ -17,7 +19,7 @@ module Linguist
    # Returns an Array of Language objects, most probable first.
    def self.call(blob, possible_languages)
      language_names = possible_languages.map(&:name)
      classify(Samples.cache, blob.data, language_names).map do |name, _|
      classify(Samples.cache, blob.data[0...CLASSIFIER_CONSIDER_BYTES], language_names).map do |name, _|
        Language[name] # Return the actual Language objects
      end
    end
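CLASSIFIER_CONSIDER_BYTES caps how much of a blob the Bayesian classifier looks at: only a 50 * 1024 = 51,200-character prefix of `blob.data` is tokenized (the heuristics below get the same cap). A toy illustration of the slice, with made-up data:

```ruby
# Toy illustration of the 50 KiB cap (not part of the diff).
CLASSIFIER_CONSIDER_BYTES = 50 * 1024

data = "a" * 200_000                         # pretend blob contents
sample = data[0...CLASSIFIER_CONSIDER_BYTES]
puts sample.length                           # => 51200
```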
lib/linguist/file_blob.rb
@@ -23,21 +23,21 @@ module Linguist
    #
    # Returns a String like '100644'
    def mode
      File.stat(@fullpath).mode.to_s(8)
      @mode ||= File.stat(@fullpath).mode.to_s(8)
    end

    # Public: Read file contents.
    #
    # Returns a String.
    def data
      File.read(@fullpath)
      @data ||= File.read(@fullpath)
    end

    # Public: Get byte size
    #
    # Returns an Integer.
    def size
      File.size(@fullpath)
      @size ||= File.size(@fullpath)
    end
  end
end
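The `||=` changes memoize `mode`, `data`, and `size`, so a FileBlob touches the filesystem at most once per attribute instead of on every call. A quick sketch of the observable effect; the path is a placeholder:

```ruby
# Hypothetical check that the blob's data is read once and then cached.
blob = Linguist::FileBlob.new("Gemfile")
first  = blob.data
second = blob.data
puts first.equal?(second)   # => true: both calls return the same cached String
```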
lib/linguist/heuristics.rb
@@ -1,6 +1,8 @@
module Linguist
  # A collection of simple heuristics that can be used to better analyze languages.
  class Heuristics
    HEURISTICS_CONSIDER_BYTES = 50 * 1024

    # Public: Use heuristics to detect language of the blob.
    #
    # blob - An object that quacks like a blob.
@@ -14,7 +16,7 @@ module Linguist
    #
    # Returns an Array of languages, or empty if none matched or were inconclusive.
    def self.call(blob, candidates)
      data = blob.data
      data = blob.data[0...HEURISTICS_CONSIDER_BYTES]

      @heuristics.each do |heuristic|
        if heuristic.matches?(blob.name, candidates)
@@ -72,6 +74,22 @@ module Linguist

    # Common heuristics
    ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
    CPlusPlusRegex = Regexp.union(
        /^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/,
        /^\s*template\s*</,
        /^[ \t]*try/,
        /^[ \t]*catch\s*\(/,
        /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/,
        /^[ \t]*(private|public|protected):$/,
        /std::\w+/)

    disambiguate ".as" do |data|
      if /^\s*(package\s+[a-z0-9_\.]+|import\s+[a-zA-Z0-9_\.]+;|class\s+[A-Za-z0-9_]+\s+extends\s+[A-Za-z0-9_]+)/.match(data)
        Language["ActionScript"]
      else
        Language["AngelScript"]
      end
    end

    disambiguate ".asc" do |data|
      if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
@@ -211,8 +229,7 @@ module Linguist
    disambiguate ".h" do |data|
      if ObjectiveCRegex.match(data)
        Language["Objective-C"]
      elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
          /^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
      elsif CPlusPlusRegex.match(data)
        Language["C++"]
      end
    end
@@ -350,23 +367,15 @@ module Linguist
    end

    disambiguate ".pm" do |data|
      if /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
        Language["Perl 6"]
      elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
      if /\buse\s+(?:strict\b|v?5\.)/.match(data)
        Language["Perl"]
      elsif /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
        Language["Perl 6"]
      elsif /^\s*\/\* XPM \*\//.match(data)
        Language["XPM"]
      end
    end

    disambiguate ".pod", "Pod", "Perl" do |data|
      if /^=\w+\b/.match(data)
        Language["Pod"]
      else
        Language["Perl"]
      end
    end

    disambiguate ".pro" do |data|
      if /^[^#]+:-/.match(data)
        Language["Prolog"]
@@ -491,5 +500,14 @@ module Linguist
        Language["XML"]
      end
    end

    disambiguate ".w" do |data|
      if (data.include?("&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _DEFINITIONS"))
        Language["OpenEdge ABL"]
      elsif /^@(<|\w+\.)/.match(data)
        Language["CWeb"]
      end
    end

  end
end
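`CPlusPlusRegex` folds the previously inlined chain of `.match` calls in the `.h` rule into one pattern via `Regexp.union`, which builds a single alternation from several regexps. A tiny sketch of that behaviour:

```ruby
# Regexp.union combines patterns into one alternation, so a single match call
# replaces a chain of ||'d match calls.
re = Regexp.union(/^\s*template\s*</, /std::\w+/)
p !!re.match("std::string name;")   # => true
p !!re.match("int main() {}")       # => false
```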
@@ -210,6 +210,17 @@ Alpine Abuild:
|
||||
codemirror_mode: shell
|
||||
codemirror_mime_type: text/x-sh
|
||||
language_id: 14
|
||||
AngelScript:
|
||||
type: programming
|
||||
color: "#C7D7DC"
|
||||
extensions:
|
||||
- ".as"
|
||||
- ".angelscript"
|
||||
tm_scope: source.angelscript
|
||||
ace_mode: text
|
||||
codemirror_mode: clike
|
||||
codemirror_mime_type: text/x-c++src
|
||||
language_id: 389477596
|
||||
Ant Build System:
|
||||
type: data
|
||||
tm_scope: text.xml.ant
|
||||
@@ -221,7 +232,7 @@ Ant Build System:
|
||||
codemirror_mime_type: application/xml
|
||||
language_id: 15
|
||||
ApacheConf:
|
||||
type: markup
|
||||
type: data
|
||||
aliases:
|
||||
- aconf
|
||||
- apache
|
||||
@@ -354,6 +365,14 @@ Awk:
|
||||
- nawk
|
||||
ace_mode: text
|
||||
language_id: 28
|
||||
Ballerina:
|
||||
type: programming
|
||||
extensions:
|
||||
- ".bal"
|
||||
tm_scope: source.ballerina
|
||||
ace_mode: text
|
||||
color: "#FF5000"
|
||||
language_id: 720859680
|
||||
Batchfile:
|
||||
type: programming
|
||||
aliases:
|
||||
@@ -625,8 +644,10 @@ CartoCSS:
|
||||
language_id: 53
|
||||
Ceylon:
|
||||
type: programming
|
||||
color: "#dfa535"
|
||||
extensions:
|
||||
- ".ceylon"
|
||||
tm_scope: source.ceylon
|
||||
ace_mode: text
|
||||
language_id: 54
|
||||
Chapel:
|
||||
@@ -855,7 +876,7 @@ Csound:
|
||||
- ".orc"
|
||||
- ".udo"
|
||||
tm_scope: source.csound
|
||||
ace_mode: text
|
||||
ace_mode: csound_orchestra
|
||||
language_id: 73
|
||||
Csound Document:
|
||||
type: programming
|
||||
@@ -864,7 +885,7 @@ Csound Document:
|
||||
extensions:
|
||||
- ".csd"
|
||||
tm_scope: source.csound-document
|
||||
ace_mode: text
|
||||
ace_mode: csound_document
|
||||
language_id: 74
|
||||
Csound Score:
|
||||
type: programming
|
||||
@@ -873,7 +894,7 @@ Csound Score:
|
||||
extensions:
|
||||
- ".sco"
|
||||
tm_scope: source.csound-score
|
||||
ace_mode: text
|
||||
ace_mode: csound_score
|
||||
language_id: 75
|
||||
Cuda:
|
||||
type: programming
|
||||
@@ -986,6 +1007,14 @@ Dart:
|
||||
codemirror_mode: dart
|
||||
codemirror_mime_type: application/dart
|
||||
language_id: 87
|
||||
DataWeave:
|
||||
type: programming
|
||||
color: "#003a52"
|
||||
extensions:
|
||||
- ".dwl"
|
||||
ace_mode: text
|
||||
tm_scope: source.data-weave
|
||||
language_id: 974514097
|
||||
Diff:
|
||||
type: data
|
||||
extensions:
|
||||
@@ -1086,8 +1115,7 @@ EQ:
|
||||
codemirror_mime_type: text/x-csharp
|
||||
language_id: 96
|
||||
Eagle:
|
||||
type: markup
|
||||
color: "#814C05"
|
||||
type: data
|
||||
extensions:
|
||||
- ".sch"
|
||||
- ".brd"
|
||||
@@ -1116,6 +1144,15 @@ Ecere Projects:
|
||||
codemirror_mode: javascript
|
||||
codemirror_mime_type: application/json
|
||||
language_id: 98
|
||||
Edje Data Collection:
|
||||
type: data
|
||||
extensions:
|
||||
- ".edc"
|
||||
tm_scope: source.json
|
||||
ace_mode: json
|
||||
codemirror_mode: javascript
|
||||
codemirror_mime_type: application/json
|
||||
language_id: 342840478
|
||||
Eiffel:
|
||||
type: programming
|
||||
color: "#946d57"
|
||||
@@ -1487,8 +1524,8 @@ Gerber Image:
|
||||
- ".gtp"
|
||||
- ".gts"
|
||||
interpreters:
|
||||
- "gerbv"
|
||||
- "gerbview"
|
||||
- gerbv
|
||||
- gerbview
|
||||
tm_scope: source.gerber
|
||||
ace_mode: text
|
||||
language_id: 404627610
|
||||
@@ -1605,6 +1642,7 @@ GraphQL:
|
||||
type: data
|
||||
extensions:
|
||||
- ".graphql"
|
||||
- ".gql"
|
||||
tm_scope: source.graphql
|
||||
ace_mode: text
|
||||
language_id: 139
|
||||
@@ -1868,6 +1906,8 @@ INI:
|
||||
- ".prefs"
|
||||
- ".pro"
|
||||
- ".properties"
|
||||
filenames:
|
||||
- buildozer.spec
|
||||
tm_scope: source.ini
|
||||
aliases:
|
||||
- dosini
|
||||
@@ -1890,6 +1930,7 @@ IRC log:
|
||||
language_id: 164
|
||||
Idris:
|
||||
type: programming
|
||||
color: "#b30000"
|
||||
extensions:
|
||||
- ".idr"
|
||||
- ".lidr"
|
||||
@@ -2078,6 +2119,7 @@ JavaScript:
|
||||
- ".jsfl"
|
||||
- ".jsm"
|
||||
- ".jss"
|
||||
- ".mjs"
|
||||
- ".njs"
|
||||
- ".pac"
|
||||
- ".sjs"
|
||||
@@ -2149,13 +2191,6 @@ KRL:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
language_id: 186
|
||||
KiCad Board:
|
||||
type: data
|
||||
extensions:
|
||||
- ".brd"
|
||||
tm_scope: source.pcb.board
|
||||
ace_mode: text
|
||||
language_id: 140848857
|
||||
KiCad Layout:
|
||||
type: data
|
||||
aliases:
|
||||
@@ -2171,6 +2206,13 @@ KiCad Layout:
|
||||
codemirror_mode: commonlisp
|
||||
codemirror_mime_type: text/x-common-lisp
|
||||
language_id: 187
|
||||
KiCad Legacy Layout:
|
||||
type: data
|
||||
extensions:
|
||||
- ".brd"
|
||||
tm_scope: source.pcb.board
|
||||
ace_mode: text
|
||||
language_id: 140848857
|
||||
KiCad Schematic:
|
||||
type: data
|
||||
aliases:
|
||||
@@ -2614,7 +2656,7 @@ Mathematica:
|
||||
language_id: 224
|
||||
Matlab:
|
||||
type: programming
|
||||
color: "#bb92ac"
|
||||
color: "#e16737"
|
||||
aliases:
|
||||
- octave
|
||||
extensions:
|
||||
@@ -2741,6 +2783,7 @@ Monkey:
|
||||
type: programming
|
||||
extensions:
|
||||
- ".monkey"
|
||||
- ".monkey2"
|
||||
ace_mode: text
|
||||
tm_scope: source.monkey
|
||||
language_id: 236
|
||||
@@ -2790,6 +2833,15 @@ NSIS:
|
||||
codemirror_mode: nsis
|
||||
codemirror_mime_type: text/x-nsis
|
||||
language_id: 242
|
||||
Nearley:
|
||||
type: programming
|
||||
ace_mode: text
|
||||
color: "#990000"
|
||||
extensions:
|
||||
- ".ne"
|
||||
- ".nearley"
|
||||
tm_scope: source.ne
|
||||
language_id: 521429430
|
||||
Nemerle:
|
||||
type: programming
|
||||
color: "#3d3c6e"
|
||||
@@ -2841,7 +2893,7 @@ NewLisp:
|
||||
codemirror_mime_type: text/x-common-lisp
|
||||
language_id: 247
|
||||
Nginx:
|
||||
type: markup
|
||||
type: data
|
||||
extensions:
|
||||
- ".nginxconf"
|
||||
- ".vhost"
|
||||
@@ -2853,7 +2905,6 @@ Nginx:
|
||||
ace_mode: text
|
||||
codemirror_mode: nginx
|
||||
codemirror_mime_type: text/x-nginx-conf
|
||||
color: "#9469E9"
|
||||
language_id: 248
|
||||
Nim:
|
||||
type: programming
|
||||
@@ -3028,6 +3079,7 @@ OpenEdge ABL:
|
||||
extensions:
|
||||
- ".p"
|
||||
- ".cls"
|
||||
- ".w"
|
||||
tm_scope: source.abl
|
||||
ace_mode: text
|
||||
language_id: 264
|
||||
@@ -3271,7 +3323,6 @@ Perl:
|
||||
- ".ph"
|
||||
- ".plx"
|
||||
- ".pm"
|
||||
- ".pod"
|
||||
- ".psgi"
|
||||
- ".t"
|
||||
filenames:
|
||||
@@ -3442,7 +3493,7 @@ Propeller Spin:
|
||||
ace_mode: text
|
||||
language_id: 296
|
||||
Protocol Buffer:
|
||||
type: markup
|
||||
type: data
|
||||
aliases:
|
||||
- protobuf
|
||||
- Protocol Buffers
|
||||
@@ -3487,8 +3538,7 @@ Puppet:
|
||||
tm_scope: source.puppet
|
||||
language_id: 299
|
||||
Pure Data:
|
||||
type: programming
|
||||
color: "#91de79"
|
||||
type: data
|
||||
extensions:
|
||||
- ".pd"
|
||||
tm_scope: none
|
||||
@@ -4660,8 +4710,8 @@ UrWeb:
|
||||
ace_mode: text
|
||||
language_id: 383
|
||||
VCL:
|
||||
group: Perl
|
||||
type: programming
|
||||
color: "#0298c3"
|
||||
extensions:
|
||||
- ".vcl"
|
||||
tm_scope: source.varnish.vcl
|
||||
@@ -4773,8 +4823,7 @@ Wavefront Object:
|
||||
ace_mode: text
|
||||
language_id: 393
|
||||
Web Ontology Language:
|
||||
type: markup
|
||||
color: "#9cc9dd"
|
||||
type: data
|
||||
extensions:
|
||||
- ".owl"
|
||||
tm_scope: text.xml
|
||||
@@ -4855,12 +4904,16 @@ XML:
|
||||
- ".ant"
|
||||
- ".axml"
|
||||
- ".builds"
|
||||
- ".ccproj"
|
||||
- ".ccxml"
|
||||
- ".clixml"
|
||||
- ".cproject"
|
||||
- ".cscfg"
|
||||
- ".csdef"
|
||||
- ".csl"
|
||||
- ".csproj"
|
||||
- ".ct"
|
||||
- ".depproj"
|
||||
- ".dita"
|
||||
- ".ditamap"
|
||||
- ".ditaval"
|
||||
@@ -4883,6 +4936,8 @@ XML:
|
||||
- ".mm"
|
||||
- ".mod"
|
||||
- ".mxml"
|
||||
- ".natvis"
|
||||
- ".ndproj"
|
||||
- ".nproj"
|
||||
- ".nuspec"
|
||||
- ".odd"
|
||||
@@ -4890,6 +4945,7 @@ XML:
|
||||
- ".pkgproj"
|
||||
- ".plist"
|
||||
- ".pluginspec"
|
||||
- ".proj"
|
||||
- ".props"
|
||||
- ".ps1xml"
|
||||
- ".psc1"
|
||||
@@ -4900,6 +4956,7 @@ XML:
|
||||
- ".sch"
|
||||
- ".scxml"
|
||||
- ".sfproj"
|
||||
- ".shproj"
|
||||
- ".srdf"
|
||||
- ".storyboard"
|
||||
- ".stTheme"
|
||||
@@ -4961,11 +5018,11 @@ XPM:
|
||||
tm_scope: source.c
|
||||
language_id: 781846279
|
||||
XPages:
|
||||
type: programming
|
||||
type: data
|
||||
extensions:
|
||||
- ".xsp-config"
|
||||
- ".xsp.metadata"
|
||||
tm_scope: none
|
||||
tm_scope: text.xml
|
||||
ace_mode: xml
|
||||
codemirror_mode: xml
|
||||
codemirror_mime_type: text/xml
|
||||
@@ -5050,6 +5107,7 @@ YAML:
|
||||
- ".yml.mysql"
|
||||
filenames:
|
||||
- ".clang-format"
|
||||
- ".clang-tidy"
|
||||
ace_mode: yaml
|
||||
codemirror_mode: yaml
|
||||
codemirror_mime_type: text/x-yaml
|
||||
@@ -5159,6 +5217,14 @@ reStructuredText:
|
||||
codemirror_mode: rst
|
||||
codemirror_mime_type: text/x-rst
|
||||
language_id: 419
|
||||
wdl:
|
||||
type: programming
|
||||
color: "#42f1f4"
|
||||
extensions:
|
||||
- ".wdl"
|
||||
tm_scope: source.wdl
|
||||
ace_mode: text
|
||||
language_id: 374521672
|
||||
wisp:
|
||||
type: programming
|
||||
ace_mode: clojure
|
||||
|
||||
lib/linguist/strategy/modeline.rb
@@ -109,8 +109,8 @@ module Linguist
    # Returns an Array with one Language if the blob has a Vim or Emacs modeline
    # that matches a Language name or alias. Returns an empty array if no match.
    def self.call(blob, _ = nil)
      header = blob.lines.first(SEARCH_SCOPE).join("\n")
      footer = blob.lines.last(SEARCH_SCOPE).join("\n")
      header = blob.first_lines(SEARCH_SCOPE).join("\n")
      footer = blob.last_lines(SEARCH_SCOPE).join("\n")
      Array(Language.find_by_alias(modeline(header + footer)))
    end
lib/linguist/tokenizer.rb
@@ -1,4 +1,5 @@
require 'strscan'
require 'linguist/linguist'

module Linguist
  # Generic programming language tokenizer.
@@ -15,191 +16,5 @@ module Linguist
|
||||
def self.tokenize(data)
|
||||
new.extract_tokens(data)
|
||||
end
|
||||
|
||||
# Read up to 100KB
|
||||
BYTE_LIMIT = 100_000
|
||||
|
||||
# Start state on token, ignore anything till the next newline
|
||||
SINGLE_LINE_COMMENTS = [
|
||||
'//', # C
|
||||
'--', # Ada, Haskell, AppleScript
|
||||
'#', # Ruby
|
||||
'%', # Tex
|
||||
'"', # Vim
|
||||
]
|
||||
|
||||
# Start state on opening token, ignore anything until the closing
|
||||
# token is reached.
|
||||
MULTI_LINE_COMMENTS = [
|
||||
['/*', '*/'], # C
|
||||
['<!--', '-->'], # XML
|
||||
['{-', '-}'], # Haskell
|
||||
['(*', '*)'], # Coq
|
||||
['"""', '"""'], # Python
|
||||
["'''", "'''"] # Python
|
||||
]
|
||||
|
||||
START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
|
||||
"\s*#{Regexp.escape(c)} "
|
||||
}.join("|"))
|
||||
|
||||
START_MULTI_LINE_COMMENT = Regexp.compile(MULTI_LINE_COMMENTS.map { |c|
|
||||
Regexp.escape(c[0])
|
||||
}.join("|"))
|
||||
|
||||
# Internal: Extract generic tokens from data.
|
||||
#
|
||||
# data - String to scan.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# extract_tokens("printf('Hello')")
|
||||
# # => ['printf', '(', ')']
|
||||
#
|
||||
# Returns Array of token Strings.
|
||||
def extract_tokens(data)
|
||||
s = StringScanner.new(data)
|
||||
|
||||
tokens = []
|
||||
until s.eos?
|
||||
break if s.pos >= BYTE_LIMIT
|
||||
|
||||
if token = s.scan(/^#!.+$/)
|
||||
if name = extract_shebang(token)
|
||||
tokens << "SHEBANG#!#{name}"
|
||||
end
|
||||
|
||||
# Single line comment
|
||||
elsif s.beginning_of_line? && token = s.scan(START_SINGLE_LINE_COMMENT)
|
||||
# tokens << token.strip
|
||||
s.skip_until(/\n|\Z/)
|
||||
|
||||
# Multiline comments
|
||||
elsif token = s.scan(START_MULTI_LINE_COMMENT)
|
||||
# tokens << token
|
||||
close_token = MULTI_LINE_COMMENTS.assoc(token)[1]
|
||||
s.skip_until(Regexp.compile(Regexp.escape(close_token)))
|
||||
# tokens << close_token
|
||||
|
||||
# Skip single or double quoted strings
|
||||
elsif s.scan(/"/)
|
||||
if s.peek(1) == "\""
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/(?<!\\)"/)
|
||||
end
|
||||
elsif s.scan(/'/)
|
||||
if s.peek(1) == "'"
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/(?<!\\)'/)
|
||||
end
|
||||
|
||||
# Skip number literals
|
||||
elsif s.scan(/(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/)
|
||||
|
||||
# SGML style brackets
|
||||
elsif token = s.scan(/<[^\s<>][^<>]*>/)
|
||||
extract_sgml_tokens(token).each { |t| tokens << t }
|
||||
|
||||
# Common programming punctuation
|
||||
elsif token = s.scan(/;|\{|\}|\(|\)|\[|\]/)
|
||||
tokens << token
|
||||
|
||||
# Regular token
|
||||
elsif token = s.scan(/[\w\.@#\/\*]+/)
|
||||
tokens << token
|
||||
|
||||
# Common operators
|
||||
elsif token = s.scan(/<<?|\+|\-|\*|\/|%|&&?|\|\|?/)
|
||||
tokens << token
|
||||
|
||||
else
|
||||
s.getch
|
||||
end
|
||||
end
|
||||
|
||||
tokens
|
||||
end
|
||||
|
||||
# Internal: Extract normalized shebang command token.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/ruby")
|
||||
# # => "ruby"
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/env node")
|
||||
# # => "node"
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/env A=B foo=bar awk -f")
|
||||
# # => "awk"
|
||||
#
|
||||
# Returns String token or nil it couldn't be parsed.
|
||||
def extract_shebang(data)
|
||||
s = StringScanner.new(data)
|
||||
|
||||
if path = s.scan(/^#!\s*\S+/)
|
||||
script = path.split('/').last
|
||||
if script == 'env'
|
||||
s.scan(/\s+/)
|
||||
s.scan(/.*=[^\s]+\s+/)
|
||||
script = s.scan(/\S+/)
|
||||
end
|
||||
script = script[/[^\d]+/, 0] if script
|
||||
return script
|
||||
end
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
# Internal: Extract tokens from inside SGML tag.
|
||||
#
|
||||
# data - SGML tag String.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# extract_sgml_tokens("<a href='' class=foo>")
|
||||
# # => ["<a>", "href="]
|
||||
#
|
||||
# Returns Array of token Strings.
|
||||
def extract_sgml_tokens(data)
|
||||
s = StringScanner.new(data)
|
||||
|
||||
tokens = []
|
||||
|
||||
until s.eos?
|
||||
# Emit start token
|
||||
if token = s.scan(/<\/?[^\s>]+/)
|
||||
tokens << "#{token}>"
|
||||
|
||||
# Emit attributes with trailing =
|
||||
elsif token = s.scan(/\w+=/)
|
||||
tokens << token
|
||||
|
||||
# Then skip over attribute value
|
||||
if s.scan(/"/)
|
||||
s.skip_until(/[^\\]"/)
|
||||
elsif s.scan(/'/)
|
||||
s.skip_until(/[^\\]'/)
|
||||
else
|
||||
s.skip_until(/\w+/)
|
||||
end
|
||||
|
||||
# Emit lone attributes
|
||||
elsif token = s.scan(/\w+/)
|
||||
tokens << token
|
||||
|
||||
# Stop at the end of the tag
|
||||
elsif s.scan(/>/)
|
||||
s.terminate
|
||||
|
||||
else
|
||||
s.getch
|
||||
end
|
||||
end
|
||||
|
||||
tokens
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
lib/linguist/vendor.yml
@@ -81,6 +81,9 @@
# Animate.css
- (^|/)animate\.(css|less|scss|styl)$

# Select2
- (^|/)select2/.*\.(css|scss|js)$

# Vendored dependencies
- third[-_]?party/
- 3rd[-_]?party/
@@ -119,6 +122,15 @@
# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# jQuery dataTables
- jquery.dataTables.js

# bootboxjs
- bootbox.js

# pdf-worker
- pdf.worker.js

# Slick
- (^|/)slick\.\w+.js$

@@ -135,6 +147,9 @@
- .sublime-project
- .sublime-workspace

# VS Code workspace files
- .vscode

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
lib/linguist/version.rb
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "5.2.0"
  VERSION = "5.3.2"
end
samples/ActionScript/FooBar.as (new file): 35 lines
@@ -0,0 +1,35 @@
// A sample for Actionscript.

package foobar
{
    import flash.display.MovieClip;

    class Bar
    {
        public function getNumber():Number
        {
            return 10;
        }
    }

    class Foo extends Bar
    {
        private var ourNumber:Number = 25;

        override public function getNumber():Number
        {
            return ourNumber;
        }
    }

    class Main extends MovieClip
    {
        public function Main()
        {
            var x:Bar = new Bar();
            var y:Foo = new Foo();
            trace(x.getNumber());
            trace(y.getNumber());
        }
    }
}
samples/ActionScript/HelloWorld.as (new file): 13 lines
@@ -0,0 +1,13 @@
package mypackage
{
    public class Hello
    {
        /* Let's say hello!
         * This is just a test script for Linguist's Actionscript detection.
         */
        public function sayHello():void
        {
            trace("Hello, world");
        }
    }
}
samples/AngelScript/botmanager.as (new file): 77 lines
@@ -0,0 +1,77 @@
|
||||
/*
|
||||
* This is a sample script.
|
||||
*/
|
||||
|
||||
#include "BotManagerInterface.acs"
|
||||
|
||||
BotManager::BotManager g_BotManager( @CreateDumbBot );
|
||||
|
||||
CConCommand@ m_pAddBot;
|
||||
|
||||
void PluginInit()
|
||||
{
|
||||
g_BotManager.PluginInit();
|
||||
|
||||
@m_pAddBot = @CConCommand( "addbot", "Adds a new bot with the given name", @AddBotCallback );
|
||||
}
|
||||
|
||||
void AddBotCallback( const CCommand@ args )
|
||||
{
|
||||
if( args.ArgC() < 2 )
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Usage: addbot <name>" );
|
||||
return;
|
||||
}
|
||||
|
||||
BotManager::BaseBot@ pBot = g_BotManager.CreateBot( args[ 1 ] );
|
||||
|
||||
if( pBot !is null )
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Created bot " + args[ 1 ] + "\n" );
|
||||
}
|
||||
else
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Could not create bot\n" );
|
||||
}
|
||||
}
|
||||
|
||||
final class DumbBot : BotManager::BaseBot
|
||||
{
|
||||
DumbBot( CBasePlayer@ pPlayer )
|
||||
{
|
||||
super( pPlayer );
|
||||
}
|
||||
|
||||
void Think()
|
||||
{
|
||||
BotManager::BaseBot::Think();
|
||||
|
||||
// If the bot is dead and can be respawned, send a button press
|
||||
if( Player.pev.deadflag >= DEAD_RESPAWNABLE )
|
||||
{
|
||||
Player.pev.button |= IN_ATTACK;
|
||||
}
|
||||
else
|
||||
Player.pev.button &= ~IN_ATTACK;
|
||||
|
||||
KeyValueBuffer@ pInfoBuffer = g_EngineFuncs.GetInfoKeyBuffer( Player.edict() );
|
||||
|
||||
pInfoBuffer.SetValue( "topcolor", Math.RandomLong( 0, 255 ) );
|
||||
pInfoBuffer.SetValue( "bottomcolor", Math.RandomLong( 0, 255 ) );
|
||||
|
||||
if( Math.RandomLong( 0, 100 ) > 10 )
|
||||
Player.pev.button |= IN_ATTACK;
|
||||
else
|
||||
Player.pev.button &= ~IN_ATTACK;
|
||||
|
||||
for( uint uiIndex = 0; uiIndex < 3; ++uiIndex )
|
||||
{
|
||||
m_vecVelocity[ uiIndex ] = Math.RandomLong( -50, 50 );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BotManager::BaseBot@ CreateDumbBot( CBasePlayer@ pPlayer )
|
||||
{
|
||||
return @DumbBot( pPlayer );
|
||||
}
|
||||
samples/AngelScript/payload.as (new file): 396 lines
@@ -0,0 +1,396 @@
|
||||
// Sample script.
|
||||
// Source: https://github.com/codecat/ssbd-payload
|
||||
|
||||
array<WorldScript::PayloadBeginTrigger@> g_payloadBeginTriggers;
|
||||
array<WorldScript::PayloadTeamForcefield@> g_teamForceFields;
|
||||
|
||||
[GameMode]
|
||||
class Payload : TeamVersusGameMode
|
||||
{
|
||||
[Editable]
|
||||
UnitFeed PayloadUnit;
|
||||
|
||||
[Editable]
|
||||
UnitFeed FirstNode;
|
||||
|
||||
[Editable default=10]
|
||||
int PrepareTime;
|
||||
|
||||
[Editable default=300]
|
||||
int TimeLimit;
|
||||
|
||||
[Editable default=90]
|
||||
int TimeAddCheckpoint;
|
||||
|
||||
[Editable default=2]
|
||||
float TimeOvertime;
|
||||
|
||||
[Editable default=1000]
|
||||
int TimePayloadHeal;
|
||||
|
||||
[Editable default=1]
|
||||
int PayloadHeal;
|
||||
|
||||
PayloadBehavior@ m_payload;
|
||||
|
||||
int m_tmStarting;
|
||||
int m_tmStarted;
|
||||
int m_tmLimitCustom;
|
||||
int m_tmOvertime;
|
||||
int m_tmInOvertime;
|
||||
|
||||
PayloadHUD@ m_payloadHUD;
|
||||
PayloadClassSwitchWindow@ m_switchClass;
|
||||
|
||||
array<SValue@>@ m_switchedSidesData;
|
||||
|
||||
Payload(Scene@ scene)
|
||||
{
|
||||
super(scene);
|
||||
|
||||
m_tmRespawnCountdown = 5000;
|
||||
|
||||
@m_payloadHUD = PayloadHUD(m_guiBuilder);
|
||||
@m_switchTeam = PayloadTeamSwitchWindow(m_guiBuilder);
|
||||
@m_switchClass = PayloadClassSwitchWindow(m_guiBuilder);
|
||||
}
|
||||
|
||||
void UpdateFrame(int ms, GameInput& gameInput, MenuInput& menuInput) override
|
||||
{
|
||||
TeamVersusGameMode::UpdateFrame(ms, gameInput, menuInput);
|
||||
|
||||
m_payloadHUD.Update(ms);
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
uint64 tmNow = CurrPlaytimeLevel();
|
||||
|
||||
if (m_tmStarting == 0)
|
||||
{
|
||||
if (GetPlayersInTeam(0) > 0 && GetPlayersInTeam(1) > 0)
|
||||
{
|
||||
m_tmStarting = tmNow;
|
||||
(Network::Message("GameStarting") << m_tmStarting).SendToAll();
|
||||
}
|
||||
}
|
||||
|
||||
if (m_tmStarting > 0 && m_tmStarted == 0 && tmNow - m_tmStarting > PrepareTime * 1000)
|
||||
{
|
||||
m_tmStarted = tmNow;
|
||||
(Network::Message("GameStarted") << m_tmStarted).SendToAll();
|
||||
|
||||
for (uint i = 0; i < g_payloadBeginTriggers.length(); i++)
|
||||
{
|
||||
WorldScript@ ws = WorldScript::GetWorldScript(g_scene, g_payloadBeginTriggers[i]);
|
||||
ws.Execute();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_ended && m_tmStarted > 0)
|
||||
CheckTimeReached(ms);
|
||||
}
|
||||
|
||||
string NameForTeam(int index) override
|
||||
{
|
||||
if (index == 0)
|
||||
return "Defenders";
|
||||
else if (index == 1)
|
||||
return "Attackers";
|
||||
|
||||
return "Unknown";
|
||||
}
|
||||
|
||||
void CheckTimeReached(int dt)
|
||||
{
|
||||
// Check if time limit is not reached yet
|
||||
if (m_tmLimitCustom - (CurrPlaytimeLevel() - m_tmStarted) > 0)
|
||||
{
|
||||
// Don't need to continue checking
|
||||
m_tmOvertime = 0;
|
||||
m_tmInOvertime = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Count how long we're in overtime for later time limit fixing when we reach a checkpoint
|
||||
if (m_tmOvertime > 0)
|
||||
m_tmInOvertime += dt;
|
||||
|
||||
// Check if there are any attackers still inside
|
||||
if (m_payload.AttackersInside() > 0)
|
||||
{
|
||||
// We have overtime
|
||||
m_tmOvertime = int(TimeOvertime * 1000);
|
||||
return;
|
||||
}
|
||||
|
||||
// If we have overtime
|
||||
if (m_tmOvertime > 0)
|
||||
{
|
||||
// Decrease timer
|
||||
m_tmOvertime -= dt;
|
||||
if (m_tmOvertime <= 0)
|
||||
{
|
||||
// Overtime countdown reached, time limit reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// No overtime, so time limit is reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
|
||||
void TimeReached()
|
||||
{
|
||||
if (!Network::IsServer())
|
||||
return;
|
||||
|
||||
(Network::Message("TimeReached")).SendToAll();
|
||||
SetWinner(false);
|
||||
}
|
||||
|
||||
bool ShouldFreezeControls() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldFreezeControls();
|
||||
}
|
||||
|
||||
bool ShouldDisplayCursor() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldDisplayCursor();
|
||||
}
|
||||
|
||||
bool CanSwitchTeams() override
|
||||
{
|
||||
return m_tmStarted == 0;
|
||||
}
|
||||
|
||||
PlayerRecord@ CreatePlayerRecord() override
|
||||
{
|
||||
return PayloadPlayerRecord();
|
||||
}
|
||||
|
||||
int GetPlayerClassCount(PlayerClass playerClass, TeamVersusScore@ team)
|
||||
{
|
||||
if (team is null)
|
||||
return 0;
|
||||
|
||||
int ret = 0;
|
||||
for (uint i = 0; i < team.m_players.length(); i++)
|
||||
{
|
||||
if (team.m_players[i].peer == 255)
|
||||
continue;
|
||||
auto record = cast<PayloadPlayerRecord>(team.m_players[i]);
|
||||
if (record.playerClass == playerClass)
|
||||
ret++;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void PlayerClassesUpdated()
|
||||
{
|
||||
m_switchClass.PlayerClassesUpdated();
|
||||
}
|
||||
|
||||
void SetWinner(bool attackers)
|
||||
{
|
||||
if (attackers)
|
||||
print("Attackers win!");
|
||||
else
|
||||
print("Defenders win!");
|
||||
|
||||
m_payloadHUD.Winner(attackers);
|
||||
EndMatch();
|
||||
}
|
||||
|
||||
void DisplayPlayerName(int idt, SpriteBatch& sb, PlayerRecord@ record, PlayerHusk@ plr, vec2 pos) override
|
||||
{
|
||||
TeamVersusGameMode::DisplayPlayerName(idt, sb, record, plr, pos);
|
||||
|
||||
m_payloadHUD.DisplayPlayerName(idt, sb, cast<PayloadPlayerRecord>(record), plr, pos);
|
||||
}
|
||||
|
||||
void RenderFrame(int idt, SpriteBatch& sb) override
|
||||
{
|
||||
Player@ player = GetLocalPlayer();
|
||||
if (player !is null)
|
||||
{
|
||||
PlayerHealgun@ healgun = cast<PlayerHealgun>(player.m_currWeapon);
|
||||
if (healgun !is null)
|
||||
healgun.RenderMarkers(idt, sb);
|
||||
}
|
||||
|
||||
TeamVersusGameMode::RenderFrame(idt, sb);
|
||||
}
|
||||
|
||||
void RenderWidgets(PlayerRecord@ player, int idt, SpriteBatch& sb) override
|
||||
{
|
||||
m_payloadHUD.Draw(sb, idt);
|
||||
|
||||
TeamVersusGameMode::RenderWidgets(player, idt, sb);
|
||||
|
||||
m_switchClass.Draw(sb, idt);
|
||||
}
|
||||
|
||||
void GoNextMap() override
|
||||
{
|
||||
if (m_switchedSidesData !is null)
|
||||
{
|
||||
TeamVersusGameMode::GoNextMap();
|
||||
return;
|
||||
}
|
||||
|
||||
ChangeLevel(GetCurrentLevelFilename());
|
||||
}
|
||||
|
||||
void SpawnPlayers() override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayers();
|
||||
return;
|
||||
}
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
for (uint i = 0; i < m_switchedSidesData.length(); i += 2)
|
||||
{
|
||||
uint peer = uint(m_switchedSidesData[i].GetInteger());
|
||||
uint team = uint(m_switchedSidesData[i + 1].GetInteger());
|
||||
|
||||
TeamVersusScore@ joinScore = FindTeamScore(team);
|
||||
if (joinScore is m_teamScores[0])
|
||||
@joinScore = m_teamScores[1];
|
||||
else
|
||||
@joinScore = m_teamScores[0];
|
||||
|
||||
for (uint j = 0; j < g_players.length(); j++)
|
||||
{
|
||||
if (g_players[j].peer != peer)
|
||||
continue;
|
||||
SpawnPlayer(j, vec2(), 0, joinScore.m_team);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Save(SValueBuilder& builder) override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
builder.PushArray("teams");
|
||||
for (uint i = 0; i < g_players.length(); i++)
|
||||
{
|
||||
if (g_players[i].peer == 255)
|
||||
continue;
|
||||
builder.PushInteger(g_players[i].peer);
|
||||
builder.PushInteger(g_players[i].team);
|
||||
}
|
||||
builder.PopArray();
|
||||
}
|
||||
|
||||
TeamVersusGameMode::Save(builder);
|
||||
}
|
||||
|
||||
void Start(uint8 peer, SValue@ save, StartMode sMode) override
|
||||
{
|
||||
if (save !is null)
|
||||
@m_switchedSidesData = GetParamArray(UnitPtr(), save, "teams", false);
|
||||
|
||||
TeamVersusGameMode::Start(peer, save, sMode);
|
||||
|
||||
m_tmLimit = 0; // infinite time limit as far as VersusGameMode is concerned
|
||||
m_tmLimitCustom = TimeLimit * 1000; // 5 minutes by default
|
||||
|
||||
@m_payload = cast<PayloadBehavior>(PayloadUnit.FetchFirst().GetScriptBehavior());
|
||||
|
||||
if (m_payload is null)
|
||||
PrintError("PayloadUnit is not a PayloadBehavior!");
|
||||
|
||||
UnitPtr unitFirstNode = FirstNode.FetchFirst();
|
||||
if (unitFirstNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
if (node !is null)
|
||||
@m_payload.m_targetNode = node;
|
||||
else
|
||||
PrintError("First target node is not a PayloadNode script!");
|
||||
}
|
||||
else
|
||||
PrintError("First target node was not set!");
|
||||
|
||||
WorldScript::PayloadNode@ prevNode;
|
||||
|
||||
float totalDistance = 0.0f;
|
||||
|
||||
UnitPtr unitNode = unitFirstNode;
|
||||
while (unitNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
if (node is null)
|
||||
break;
|
||||
|
||||
unitNode = node.NextNode.FetchFirst();
|
||||
|
||||
@node.m_prevNode = prevNode;
|
||||
@node.m_nextNode = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
|
||||
if (prevNode !is null)
|
||||
totalDistance += dist(prevNode.Position, node.Position);
|
||||
|
||||
@prevNode = node;
|
||||
}
|
||||
|
||||
float currDistance = 0.0f;
|
||||
|
||||
auto distNode = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
while (distNode !is null)
|
||||
{
|
||||
if (distNode.m_prevNode is null)
|
||||
distNode.m_locationFactor = 0.0f;
|
||||
else
|
||||
{
|
||||
currDistance += dist(distNode.m_prevNode.Position, distNode.Position);
|
||||
distNode.m_locationFactor = currDistance / totalDistance;
|
||||
}
|
||||
|
||||
@distNode = distNode.m_nextNode;
|
||||
}
|
||||
|
||||
m_payloadHUD.AddCheckpoints();
|
||||
}
|
||||
|
||||
void SpawnPlayer(int i, vec2 pos = vec2(), int unitId = 0, uint team = 0) override
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayer(i, pos, unitId, team);
|
||||
|
||||
PayloadPlayerRecord@ record = cast<PayloadPlayerRecord>(g_players[i]);
|
||||
record.HandlePlayerClass();
|
||||
|
||||
if (g_players[i].local)
|
||||
{
|
||||
//TODO: This doesn't work well
|
||||
bool localAttackers = (team == HashString("player_1"));
|
||||
for (uint j = 0; j < g_teamForceFields.length(); j++)
|
||||
{
|
||||
bool hasCollision = (localAttackers != g_teamForceFields[j].Attackers);
|
||||
|
||||
auto units = g_teamForceFields[j].Units.FetchAll();
|
||||
for (uint k = 0; k < units.length(); k++)
|
||||
{
|
||||
PhysicsBody@ body = units[k].GetPhysicsBody();
|
||||
if (body is null)
|
||||
{
|
||||
PrintError("PhysicsBody for unit " + units[k].GetDebugName() + "is null");
|
||||
continue;
|
||||
}
|
||||
body.SetActive(hasCollision);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
16
samples/Ballerina/hello-world-service.bal
Normal file
16
samples/Ballerina/hello-world-service.bal
Normal file
@@ -0,0 +1,16 @@
|
||||
import ballerina.lang.messages;
|
||||
import ballerina.net.http;
|
||||
import ballerina.doc;
|
||||
|
||||
@doc:Description {value:"By default Ballerina assumes that the service is to be exposed via HTTP/1.1 using the system default port and that all requests coming to the HTTP server will be delivered to this service."}
|
||||
service<http> helloWorld {
|
||||
@doc:Description {value:"All resources are invoked with an argument of type message, the built-in reference type representing a network invocation."}
|
||||
resource sayHello (message m) {
|
||||
// Creates an empty message.
|
||||
message response = {};
|
||||
// A util method that can be used to set string payload.
|
||||
messages:setStringPayload(response, "Hello, World!");
|
||||
// Reply keyword sends the response back to the client.
|
||||
reply response;
|
||||
}
|
||||
}
|
||||
6
samples/Ballerina/hello-world.bal
Normal file
6
samples/Ballerina/hello-world.bal
Normal file
@@ -0,0 +1,6 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function main (string[] args) {
|
||||
system:println("Hello, World!");
|
||||
}
|
||||
|
||||
31
samples/Ballerina/json.bal
Normal file
31
samples/Ballerina/json.bal
Normal file
@@ -0,0 +1,31 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function main (string[] args) {
|
||||
// JSON string value.
|
||||
json j1 = "Apple";
|
||||
system:println(j1);
|
||||
|
||||
// JSON number value.
|
||||
json j2 = 5.36;
|
||||
system:println(j2);
|
||||
|
||||
// JSON true value.
|
||||
json j3 = true;
|
||||
system:println(j3);
|
||||
|
||||
// JSON false value.
|
||||
json j4 = false;
|
||||
system:println(j4);
|
||||
|
||||
// JSON null value.
|
||||
json j5 = null;
|
||||
|
||||
//JSON Objects.
|
||||
json j6 = {name:"apple", color:"red", price:j2};
|
||||
system:println(j6);
|
||||
|
||||
//JSON Arrays. They are arrays of any JSON value.
|
||||
json j7 = [1, false, null, "foo",
|
||||
{first:"John", last:"Pala"}];
|
||||
system:println(j7);
|
||||
}
|
||||
28
samples/Ballerina/var.bal
Normal file
28
samples/Ballerina/var.bal
Normal file
@@ -0,0 +1,28 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function divideBy10 (int d) (int, int) {
|
||||
return d / 10, d % 10;
|
||||
}
|
||||
|
||||
function main (string[] args) {
|
||||
//Here the variable type is inferred type from the initial value. This is same as "int k = 5";
|
||||
var k = 5;
|
||||
system:println(10 + k);
|
||||
|
||||
//Here the type of the 'strVar' is 'string'.
|
||||
var strVar = "Hello!";
|
||||
system:println(strVar);
|
||||
|
||||
//Multiple assignment with 'var' allows you to define the variable then and there.
|
||||
//Variable type is inferred from the right-hand side.
|
||||
var q, r = divideBy10(6);
|
||||
system:println("06/10: " + "quotient=" + q + " " +
|
||||
"remainder=" + r);
|
||||
|
||||
//To ignore a particular return value in a multiple assignment statement, use '_'.
|
||||
var q1, _ = divideBy10(57);
|
||||
system:println("57/10: " + "quotient=" + q1);
|
||||
|
||||
var _, r1 = divideBy10(9);
|
||||
system:println("09/10: " + "remainder=" + r1);
|
||||
}
|
||||
26
samples/Ballerina/xml.bal
Normal file
26
samples/Ballerina/xml.bal
Normal file
@@ -0,0 +1,26 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function main (string[] args) {
|
||||
|
||||
// XML element. Can only have one root element.
|
||||
xml x1 = xml `<book>The Lost World</book>`;
|
||||
system:println(x1);
|
||||
|
||||
// XML text
|
||||
xml x2 = xml `Hello, world!`;
|
||||
system:println(x2);
|
||||
|
||||
// XML comment
|
||||
xml x3 = xml `<!--I am a comment-->`;
|
||||
system:println(x3);
|
||||
|
||||
// XML processing instruction
|
||||
xml x4 = xml `<?target data?>`;
|
||||
system:println(x4);
|
||||
|
||||
// Multiple XML items can be combined to form a sequence of XML. The resulting sequence is again an XML on its own.
|
||||
xml x5 = x1 + x2 + x3 + x4;
|
||||
system:println("\nResulting XML sequence:");
|
||||
system:println(x5);
|
||||
|
||||
}
|
||||
12
samples/DataWeave/customInterpolator.dwl
Normal file
12
samples/DataWeave/customInterpolator.dwl
Normal file
@@ -0,0 +1,12 @@
|
||||
fun SQL(literals, parts) = ''
|
||||
---
|
||||
[
|
||||
SQL `SELECT * FROM table WHERE id = $(1) AND name = $('a')`,
|
||||
SQL `$('p')`,
|
||||
SQL `$('a')$('b')`,
|
||||
SQL `$('a')---$('b')`,
|
||||
SQL `---$('a')---$('b')---`,
|
||||
SQL `$('p')bbb`,
|
||||
SQL `aaa$('p')`,
|
||||
SQL `aaa$('p')bbb`
|
||||
]
|
||||
9
samples/DataWeave/directives.dwl
Normal file
9
samples/DataWeave/directives.dwl
Normal file
@@ -0,0 +1,9 @@
|
||||
%dw 2.0
|
||||
var number = 1234
|
||||
fun foo(func,name="Mariano") = func(name)
|
||||
input payload application/test arg="value"
|
||||
output application/json
|
||||
---
|
||||
{
|
||||
foo: "bar"
|
||||
}
|
||||
27
samples/DataWeave/functions.dwl
Normal file
27
samples/DataWeave/functions.dwl
Normal file
@@ -0,0 +1,27 @@
|
||||
%dw 2.0
|
||||
var x=(param1, param2) -> { "$param1": param2 }
|
||||
var y=(param1, param2 = "c") -> { "$param1": param2 }
|
||||
var toUser = (user) -> { name: user.name, lastName: user.lastName }
|
||||
fun z(param1, param2) = { "$param1": param2 }
|
||||
var a = { name: "Mariano" , toUser: ((param1, param2) -> { "$param1": param2 }) }
|
||||
var applyFirst = (array, func) -> (func(array[0]) ++ array[1 to -1])
|
||||
|
||||
var nested = (array, func) -> (a) -> (b) -> (c) -> array map func(a ++ b ++ c)
|
||||
|
||||
|
||||
fun f2(a1, a2) = ""
|
||||
fun f3(a1:String, a2:Number):String = a1
|
||||
fun f4(a1:String, a2:(a:Number) -> Number):String = a1
|
||||
---
|
||||
result: {
|
||||
a: x("a", "b"),
|
||||
b: y("a"),
|
||||
c: y("a", "b"),
|
||||
users: { (in1 map ((user) -> { user: (toUser(user) ++ user) })) },
|
||||
d: z("a", "b"),
|
||||
e: a.toUser("name","Mariano"),
|
||||
f: a.toUser("name","Mariano").name,
|
||||
f: applyFirst("mariano", (s) -> upper(s) ),
|
||||
g: [] map (s) -> upper(s),
|
||||
h: 1 f2 2
|
||||
}
|
||||
36
samples/DataWeave/literals.dwl
Normal file
36
samples/DataWeave/literals.dwl
Normal file
@@ -0,0 +1,36 @@
|
||||
%dw 2.0
|
||||
---
|
||||
{
|
||||
"boolean":{
|
||||
"true" : true,
|
||||
"false": false
|
||||
},
|
||||
"Number": {
|
||||
"int": 123,
|
||||
"decimal": 123.23
|
||||
},
|
||||
"string": {
|
||||
"singleQuote" : 'A String',
|
||||
"doubleQuote" : "A String"
|
||||
},
|
||||
"regex": /foo/,
|
||||
"date": {
|
||||
a: |2003-10-01|,
|
||||
b: |2005-045|,
|
||||
c: |2003-W14-3|,
|
||||
d: |23:57:59|,
|
||||
e: |23:57:30.700|,
|
||||
f: |23:50:30Z|,
|
||||
g: |+13:00|,
|
||||
h: |Z|,
|
||||
i: |-02:00|,
|
||||
j: |2005-06-02T15:10:16|,
|
||||
k: |2005-06-02T15:10:16Z|,
|
||||
l: |2005-06-02T15:10:16+03:00|,
|
||||
m: |P12Y7M11D|,
|
||||
n: |P12Y5M|,
|
||||
o: |P45DT9H20M8S|,
|
||||
p: |PT9H20M8S|
|
||||
}
|
||||
}
|
||||
|
||||
33
samples/DataWeave/match.dwl
Normal file
33
samples/DataWeave/match.dwl
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
// Regex Pattern Matching (Can be named or unnamed)
|
||||
a: in0.phones map $ match {
|
||||
case matches /\+(\d+)\s\((\d+)\)\s(\d+\-\d+)/ -> { country: $[0], area: $[1], number: $[2] }
|
||||
case matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: $[1], number: $[2] }
|
||||
case phone matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: phone[1], number: phone[2] }
|
||||
},
|
||||
// Type Pattern Matching (Can be named or unnamed)
|
||||
b: in0.object match {
|
||||
case is Object -> { object: $ }
|
||||
case is Number -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case y is Boolean -> { boolean: y }
|
||||
},
|
||||
// Literal Pattern Matching (Can be named or unnamed)
|
||||
c: in0.value match {
|
||||
case "Emiliano" -> { string: $ }
|
||||
case 123 -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case value: "Mariano" -> { name: value }
|
||||
},
|
||||
// Boolean Expression Pattern Matching (Always named)
|
||||
d: in0.value match {
|
||||
case x if x > 30 -> { biggerThan30: x }
|
||||
case x if x == 9 -> { nine: x }
|
||||
},
|
||||
// Default matches
|
||||
e: in0.value match {
|
||||
case "Emiliano" -> "string"
|
||||
case 3.14 -> number
|
||||
else -> "1234"
|
||||
}
|
||||
}
|
||||
2061
samples/Edje Data Collection/mild.edc
Normal file
2061
samples/Edje Data Collection/mild.edc
Normal file
File diff suppressed because it is too large
Load Diff
227
samples/INI/filenames/buildozer.spec
Normal file
227
samples/INI/filenames/buildozer.spec
Normal file
@@ -0,0 +1,227 @@
|
||||
[app]
|
||||
|
||||
# (str) Title of your application
|
||||
title = Kivy Kazam
|
||||
|
||||
# (str) Package name
|
||||
package.name = kivykazam
|
||||
|
||||
# (str) Package domain (needed for android/ios packaging)
|
||||
package.domain = org.test
|
||||
|
||||
# (str) Source code where the main.py live
|
||||
source.dir = .
|
||||
|
||||
# (list) Source files to include (let empty to include all the files)
|
||||
source.include_exts = py,png,jpg,kv,atlas
|
||||
|
||||
# (list) List of inclusions using pattern matching
|
||||
#source.include_patterns = assets/*,images/*.png
|
||||
|
||||
# (list) Source files to exclude (let empty to not exclude anything)
|
||||
#source.exclude_exts = spec
|
||||
|
||||
# (list) List of directory to exclude (let empty to not exclude anything)
|
||||
#source.exclude_dirs = tests, bin
|
||||
|
||||
# (list) List of exclusions using pattern matching
|
||||
#source.exclude_patterns = license,images/*/*.jpg
|
||||
|
||||
# (str) Application versioning (method 1)
|
||||
version = 0.1
|
||||
|
||||
# (str) Application versioning (method 2)
|
||||
# version.regex = __version__ = ['"](.*)['"]
|
||||
# version.filename = %(source.dir)s/main.py
|
||||
|
||||
# (list) Application requirements
|
||||
# comma seperated e.g. requirements = sqlite3,kivy
|
||||
requirements = kivy
|
||||
|
||||
# (str) Custom source folders for requirements
|
||||
# Sets custom source for any requirements with recipes
|
||||
# requirements.source.kivy = ../../kivy
|
||||
|
||||
# (list) Garden requirements
|
||||
#garden_requirements =
|
||||
|
||||
# (str) Presplash of the application
|
||||
#presplash.filename = %(source.dir)s/data/presplash.png
|
||||
|
||||
# (str) Icon of the application
|
||||
#icon.filename = %(source.dir)s/data/icon.png
|
||||
|
||||
# (str) Supported orientation (one of landscape, portrait or all)
|
||||
orientation = all
|
||||
|
||||
# (list) List of service to declare
|
||||
#services = NAME:ENTRYPOINT_TO_PY,NAME2:ENTRYPOINT2_TO_PY
|
||||
|
||||
#
|
||||
# OSX Specific
|
||||
#
|
||||
|
||||
#
|
||||
# author = © Copyright Info
|
||||
|
||||
#
|
||||
# Android specific
|
||||
#
|
||||
|
||||
# (bool) Indicate if the application should be fullscreen or not
|
||||
fullscreen = 1
|
||||
|
||||
# (list) Permissions
|
||||
#android.permissions = INTERNET
|
||||
|
||||
# (int) Android API to use
|
||||
#android.api = 19
|
||||
|
||||
# (int) Minimum API required
|
||||
android.minapi = 13
|
||||
|
||||
# (int) Android SDK version to use
|
||||
#android.sdk = 20
|
||||
|
||||
# (str) Android NDK version to use
|
||||
#android.ndk = 9c
|
||||
|
||||
# (bool) Use --private data storage (True) or --dir public storage (False)
|
||||
#android.private_storage = True
|
||||
|
||||
# (str) Android NDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.ndk_path =
|
||||
|
||||
# (str) Android SDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.sdk_path =
|
||||
|
||||
# (str) ANT directory (if empty, it will be automatically downloaded.)
|
||||
#android.ant_path =
|
||||
|
||||
# (str) python-for-android git clone directory (if empty, it will be automatically cloned from github)
|
||||
#android.p4a_dir =
|
||||
|
||||
# (list) python-for-android whitelist
|
||||
#android.p4a_whitelist =
|
||||
|
||||
# (bool) If True, then skip trying to update the Android sdk
|
||||
# This can be useful to avoid excess Internet downloads or save time
|
||||
# when an update is due and you just want to test/build your package
|
||||
# android.skip_update = False
|
||||
|
||||
# (str) Android entry point, default is ok for Kivy-based app
|
||||
#android.entrypoint = org.renpy.android.PythonActivity
|
||||
|
||||
# (list) List of Java .jar files to add to the libs so that pyjnius can access
|
||||
# their classes. Don't add jars that you do not need, since extra jars can slow
|
||||
# down the build process. Allows wildcards matching, for example:
|
||||
# OUYA-ODK/libs/*.jar
|
||||
#android.add_jars = foo.jar,bar.jar,path/to/more/*.jar
|
||||
|
||||
# (list) List of Java files to add to the android project (can be java or a
|
||||
# directory containing the files)
|
||||
#android.add_src =
|
||||
|
||||
# (str) python-for-android branch to use, if not master, useful to try
|
||||
# not yet merged features.
|
||||
#android.branch = master
|
||||
|
||||
# (str) OUYA Console category. Should be one of GAME or APP
|
||||
# If you leave this blank, OUYA support will not be enabled
|
||||
#android.ouya.category = GAME
|
||||
|
||||
# (str) Filename of OUYA Console icon. It must be a 732x412 png image.
|
||||
#android.ouya.icon.filename = %(source.dir)s/data/ouya_icon.png
|
||||
|
||||
# (str) XML file to include as an intent filters in <activity> tag
|
||||
#android.manifest.intent_filters =
|
||||
|
||||
# (list) Android additionnal libraries to copy into libs/armeabi
|
||||
#android.add_libs_armeabi = libs/android/*.so
|
||||
#android.add_libs_armeabi_v7a = libs/android-v7/*.so
|
||||
#android.add_libs_x86 = libs/android-x86/*.so
|
||||
#android.add_libs_mips = libs/android-mips/*.so
|
||||
|
||||
# (bool) Indicate whether the screen should stay on
|
||||
# Don't forget to add the WAKE_LOCK permission if you set this to True
|
||||
#android.wakelock = False
|
||||
|
||||
# (list) Android application meta-data to set (key=value format)
|
||||
#android.meta_data =
|
||||
|
||||
# (list) Android library project to add (will be added in the
|
||||
# project.properties automatically.)
|
||||
#android.library_references =
|
||||
|
||||
# (str) Android logcat filters to use
|
||||
#android.logcat_filters = *:S python:D
|
||||
|
||||
# (bool) Copy library instead of making a libpymodules.so
|
||||
#android.copy_libs = 1
|
||||
|
||||
#
|
||||
# iOS specific
|
||||
#
|
||||
|
||||
# (str) Path to a custom kivy-ios folder
|
||||
#ios.kivy_ios_dir = ../kivy-ios
|
||||
|
||||
# (str) Name of the certificate to use for signing the debug version
|
||||
# Get a list of available identities: buildozer ios list_identities
|
||||
#ios.codesign.debug = "iPhone Developer: <lastname> <firstname> (<hexstring>)"
|
||||
|
||||
# (str) Name of the certificate to use for signing the release version
|
||||
#ios.codesign.release = %(ios.codesign.debug)s
|
||||
|
||||
|
||||
[buildozer]
|
||||
|
||||
# (int) Log level (0 = error only, 1 = info, 2 = debug (with command output))
|
||||
log_level = 1
|
||||
|
||||
# (int) Display warning if buildozer is run as root (0 = False, 1 = True)
|
||||
warn_on_root = 1
|
||||
|
||||
# (str) Path to build artifact storage, absolute or relative to spec file
|
||||
# build_dir = ./.buildozer
|
||||
|
||||
# (str) Path to build output (i.e. .apk, .ipa) storage
|
||||
# bin_dir = ./bin
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# List as sections
|
||||
#
|
||||
# You can define all the "list" as [section:key].
|
||||
# Each line will be considered as a option to the list.
|
||||
# Let's take [app] / source.exclude_patterns.
|
||||
# Instead of doing:
|
||||
#
|
||||
#[app]
|
||||
#source.exclude_patterns = license,data/audio/*.wav,data/images/original/*
|
||||
#
|
||||
# This can be translated into:
|
||||
#
|
||||
#[app:source.exclude_patterns]
|
||||
#license
|
||||
#data/audio/*.wav
|
||||
#data/images/original/*
|
||||
#
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Profiles
|
||||
#
|
||||
# You can extend section / key with a profile
|
||||
# For example, you want to deploy a demo version of your application without
|
||||
# HD content. You could first change the title to add "(demo)" in the name
|
||||
# and extend the excluded directories to remove the HD content.
|
||||
#
|
||||
#[app@demo]
|
||||
#title = My Application (demo)
|
||||
#
|
||||
#[app:source.exclude_patterns@demo]
|
||||
#images/hd/*
|
||||
#
|
||||
# Then, invoke the command line with the "demo" profile:
|
||||
#
|
||||
#buildozer --profile demo android debug
|
||||
955
samples/JavaScript/constant_fold.mjs
Normal file
955
samples/JavaScript/constant_fold.mjs
Normal file
@@ -0,0 +1,955 @@
|
||||
// consumes <stdin> and performs constant folding
|
||||
// echo '"use strict";"_"[0],1+2;' | node constant_fold.js
|
||||
import _NodePath from '../NodePath';
|
||||
const {NodePath} = _NodePath;
|
||||
import _WalkCombinator from '../WalkCombinator';
|
||||
const {WalkCombinator} = _WalkCombinator;
|
||||
|
||||
const $CONSTEXPR = Symbol.for('$CONSTEXTR');
|
||||
const $CONSTVALUE = Symbol.for('$CONSTVALUE');
|
||||
const IS_EMPTY = path => {
|
||||
return (path.node.type === 'BlockStatement' && path.node.body.length === 0) ||
|
||||
path.node.type === 'EmptyStatement';
|
||||
};
|
||||
const IN_PRAGMA_POS = path => {
|
||||
if (path.parent && Array.isArray(path.parent.node)) {
|
||||
const siblings = path.parent.node;
|
||||
for (let i = 0; i < path.key; i++) {
|
||||
// preceded by non-pragma
|
||||
if (
|
||||
siblings[i].type !== 'ExpressionStatement' ||
|
||||
!IS_CONSTEXPR(siblings[i].expression) ||
|
||||
typeof CONSTVALUE(siblings[i].expression) !== 'string'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
const IS_PRAGMA = path => {
|
||||
if (path.parent && Array.isArray(path.parent.node)) {
|
||||
const siblings = path.parent.node;
|
||||
for (let i = 0; i < path.key + 1; i++) {
|
||||
// preceded by non-pragma
|
||||
if (
|
||||
siblings[i].type !== 'ExpressionStatement' ||
|
||||
!IS_CONSTEXPR(siblings[i].expression) ||
|
||||
typeof CONSTVALUE(siblings[i].expression) !== 'string'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
// worst case is the completion value
|
||||
const IS_NOT_COMPLETION = path => {
|
||||
while (true) {
|
||||
if (!path.parent) {
|
||||
return true;
|
||||
}
|
||||
if (
|
||||
Array.isArray(path.parent.node) &&
|
||||
path.key !== path.parent.node.length - 1
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
path = path.parent;
|
||||
while (Array.isArray(path.node)) {
|
||||
path = path.parent;
|
||||
}
|
||||
if (/Function/.test(path.node.type)) {
|
||||
return true;
|
||||
} else if (path.node.type === 'Program') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
};
|
||||
const REMOVE_IF_EMPTY = path => {
|
||||
if (IS_EMPTY(path)) REMOVE(path);
|
||||
return null;
|
||||
};
|
||||
const REPLACE_IF_EMPTY = (path, folded) => {
|
||||
if (IS_EMPTY(path)) return REPLACE(path, folded);
|
||||
return path;
|
||||
};
|
||||
const REMOVE = path => {
|
||||
if (Array.isArray(path.parent.node)) {
|
||||
path.parent.node.splice(path.key, 1);
|
||||
} else {
|
||||
path.parent.node[path.key] = null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
const REPLACE = (path, folded) => {
|
||||
const replacement = new NodePath(path.parent, folded, path.key);
|
||||
path.parent.node[path.key] = folded;
|
||||
return replacement;
|
||||
};
|
||||
// no mutation, this is an atomic value
|
||||
const NEG_ZERO = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '-',
|
||||
argument: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const INFINITY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 1,
|
||||
}),
|
||||
right: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const NEG_INFINITY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 1,
|
||||
}),
|
||||
right: NEG_ZERO,
|
||||
});
|
||||
const EMPTY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'EmptyStatement',
|
||||
});
|
||||
const NULL = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: null,
|
||||
});
|
||||
const NAN = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
right: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const UNDEFINED = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: 'void',
|
||||
argument: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
// ESTree doesn't like negative numeric literals
|
||||
// this also preserves -0
|
||||
const IS_UNARY_NEGATIVE = node => {
|
||||
if (
|
||||
node.type === 'UnaryExpression' &&
|
||||
node.operator === '-' &&
|
||||
typeof node.argument.value === 'number' &&
|
||||
node.argument.value === node.argument.value &&
|
||||
node.argument.type === 'Literal'
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const IS_CONSTEXPR = node => {
|
||||
if (typeof node !== 'object' || node === null) {
|
||||
return false;
|
||||
}
|
||||
// DONT CALCULATE THINGS MULTIPLE TIMES!!@!@#
|
||||
if (node[$CONSTEXPR]) return true;
|
||||
if (node.type === 'ArrayExpression') {
|
||||
for (let i = 0; i < node.elements.length; i++) {
|
||||
const element = node.elements[i];
|
||||
// hole == null
|
||||
if (element !== null && !IS_CONSTEXPR(element)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (node.type === 'ObjectExpression') {
|
||||
for (let i = 0; i < node.properties.length; i++) {
|
||||
const element = node.properties[i];
|
||||
if (element.kind !== 'init') return false;
|
||||
if (element.method) return false;
|
||||
let key;
|
||||
if (element.computed) {
|
||||
// be sure {["y"]:1} works
|
||||
if (!IS_CONSTEXPR(element.key)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!IS_CONSTEXPR(element.value)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (node.type === 'Literal' || IS_UNDEFINED(node) || IS_NAN(node)) {
|
||||
return true;
|
||||
}
|
||||
if (IS_UNARY_NEGATIVE(node)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const IS_NAN = node => {
|
||||
return node === NAN;
|
||||
};
|
||||
const IS_UNDEFINED = node => {
|
||||
return node === UNDEFINED;
|
||||
};
|
||||
const CONSTVALUE = node => {
|
||||
if (node[$CONSTVALUE]) {
|
||||
return node[$CONSTVALUE];
|
||||
}
|
||||
if (IS_UNDEFINED(node)) return void 0;
|
||||
if (IS_NAN(node)) return +'_';
|
||||
if (!IS_CONSTEXPR(node)) throw new Error('Not a CONSTEXPR');
|
||||
if (node.type === 'ArrayExpression') {
|
||||
let ret = [];
|
||||
ret.length = node.elements.length;
|
||||
for (let i = 0; i < node.elements.length; i++) {
|
||||
if (node.elements[i] !== null) {
|
||||
ret[i] = CONSTVALUE(node.elements[i]);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
if (node.type === 'ObjectExpression') {
|
||||
let ret = Object.create(null);
|
||||
for (let i = 0; i < node.properties.length; i++) {
|
||||
const element = node.properties[i];
|
||||
let key;
|
||||
if (element.computed) {
|
||||
key = `${CONSTVALUE(element.key)}`;
|
||||
}
|
||||
else {
|
||||
key = element.key.name;
|
||||
}
|
||||
Object.defineProperty(ret, key, {
|
||||
// duplicate keys...
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: CONSTVALUE(element.value),
|
||||
enumerable: true
|
||||
});
|
||||
}
|
||||
Object.freeze(ret);
|
||||
return ret;
|
||||
}
|
||||
if (IS_UNARY_NEGATIVE(node)) {
|
||||
return -node.argument.value;
|
||||
}
|
||||
if (node.regex !== void 0) {
|
||||
return new RegExp(node.regex.pattern, node.regex.flags);
|
||||
}
|
||||
return node.value;
|
||||
};
|
||||
const CONSTEXPRS = new Map();
|
||||
CONSTEXPRS.set(void 0, UNDEFINED);
|
||||
CONSTEXPRS.set(+'_', NAN);
|
||||
CONSTEXPRS.set(null, NULL);
|
||||
const TO_CONSTEXPR = value => {
|
||||
if (value === -Infinity) {
|
||||
return NEG_INFINITY;
|
||||
}
|
||||
if (value === Infinity) {
|
||||
return INFINITY;
|
||||
}
|
||||
let is_neg_zero = 1 / value === -Infinity;
|
||||
if (is_neg_zero) return NEG_ZERO;
|
||||
if (CONSTEXPRS.has(value)) {
|
||||
return CONSTEXPRS.get(value);
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
if (value < 0) {
|
||||
const CONSTEXPR = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
[$CONSTVALUE]: value,
|
||||
type: 'UnaryExpression',
|
||||
operator: '-',
|
||||
argument: Object.freeze({ type: 'Literal', value: -value }),
|
||||
});
|
||||
CONSTEXPRS.set(value, CONSTEXPR);
|
||||
return CONSTEXPR;
|
||||
}
|
||||
}
|
||||
if (
|
||||
value === null ||
|
||||
typeof value === 'number' ||
|
||||
typeof value === 'boolean' ||
|
||||
typeof value === 'string'
|
||||
) {
|
||||
const CONSTEXPR = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
[$CONSTVALUE]: value,
|
||||
type: 'Literal',
|
||||
value,
|
||||
});
|
||||
CONSTEXPRS.set(value, CONSTEXPR);
|
||||
return CONSTEXPR;
|
||||
}
|
||||
// have to generate new one every time :-/
|
||||
if (Array.isArray(value)) {
|
||||
return Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'ArrayExpression',
|
||||
elements: Object.freeze(value.map(TO_CONSTEXPR)),
|
||||
});
|
||||
}
|
||||
if (typeof value === 'object' && Object.getPrototypeOf(value) === Object.getPrototypeOf({}) && [...Object.getOwnPropertySymbols(value)].length === 0) {
|
||||
return Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'ObjectExpression',
|
||||
properties: Object.freeze(
|
||||
[...Object.getOwnPropertyKeys(value)].map(key => {
|
||||
if (!('value' in Object.getOwnProperty(value, key))) {
|
||||
throw Error('Not a CONSTVALUE (found a setter or getter?)');
|
||||
}
|
||||
return {
|
||||
type: 'Property',
|
||||
kind: 'init',
|
||||
method: false,
|
||||
shorthand: false,
|
||||
computed: true,
|
||||
key: {
|
||||
type: 'Literal',
|
||||
value: key
|
||||
},
|
||||
value: TO_CONSTEXPR(value[key])
|
||||
}
|
||||
})),
|
||||
});
|
||||
}
|
||||
throw Error('Not a CONSTVALUE (did you pass a RegExp?)');
|
||||
};
|
||||
|
||||
// THIS DOES NOT HANDLE NODE SPECIFIC CASES LIKE IfStatement
|
||||
const FOLD_EMPTY = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.parent &&
|
||||
Array.isArray(path.parent.node) &&
|
||||
IS_EMPTY(path)
|
||||
) {
|
||||
REMOVE(path);
|
||||
return yield;
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
|
||||
// THIS DOES NOT HANDLE NODE SPECIFIC CASES LIKE IfStatement
|
||||
const FOLD_TEMPLATE = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.type === 'TemplateLiteral'
|
||||
) {
|
||||
let updated = false;
|
||||
for (let i = 0; i < path.node.exressions.length; i++) {
|
||||
if (IS_CONSTEXPR(path.node.expressions[i])) {
|
||||
//let
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_EXPR_STMT = function*(path) {
|
||||
// TODO: enforce completion value checking
|
||||
if (path && path.node && path.node.type === 'ExpressionStatement') {
|
||||
// merge all the adjacent expression statements into sequences
|
||||
if (Array.isArray(path.parent.node)) {
|
||||
// could have nodes after it
|
||||
const siblings = path.parent.node;
|
||||
if (!IS_PRAGMA(path)) {
|
||||
if (path.key < siblings.length - 1) {
|
||||
const mergeable = [path.node];
|
||||
for (let needle = path.key + 1; needle < siblings.length; needle++) {
|
||||
if (siblings[needle].type !== 'ExpressionStatement') {
|
||||
break;
|
||||
}
|
||||
mergeable.push(siblings[needle]);
|
||||
}
|
||||
if (mergeable.length > 1) {
|
||||
siblings.splice(path.key, mergeable.length, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: {
|
||||
type: 'SequenceExpression',
|
||||
expressions: mergeable.reduce(
|
||||
(acc, es) => {
|
||||
if (es.expression.type == 'SequenceExpression') {
|
||||
return [...acc, ...es.expression.expressions];
|
||||
} else {
|
||||
return [...acc, es.expression];
|
||||
}
|
||||
},
|
||||
[]
|
||||
),
|
||||
},
|
||||
});
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (IS_NOT_COMPLETION(path) && IS_CONSTEXPR(path.node.expression)) {
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_WHILE = function*(path) {
|
||||
if (path && path.node) {
|
||||
if (path.node.type === 'DoWhileStatement') {
|
||||
console.error('FOLD_DOWHILE');
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
}
|
||||
if (path.node.type === 'WhileStatement') {
|
||||
console.error('FOLD_WHILE');
|
||||
let { test, consequent, alternate } = path.node;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (!test) {
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
}
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
}
|
||||
if (path.node.type === 'ForStatement') {
|
||||
console.error('FOLD_FOR');
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
let { init, test, update } = path.node;
|
||||
let updated = false;
|
||||
if (init && IS_CONSTEXPR(init)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['init']), null);
|
||||
}
|
||||
if (test && IS_CONSTEXPR(test)) {
|
||||
let current = CONSTVALUE(test);
|
||||
let coerced = Boolean(current);
|
||||
// remove the test if it is always true
|
||||
if (coerced === true) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['test']), null);
|
||||
} else if (coerced !== current) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['test']), TO_CONSTEXPR(coerced));
|
||||
}
|
||||
}
|
||||
if (update && IS_CONSTEXPR(update)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['update']), null);
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_IF = function*(path) {
|
||||
if (path && path.node && path.node.type === 'IfStatement') {
|
||||
let { test, consequent, alternate } = path.node;
|
||||
const is_not_completion = IS_NOT_COMPLETION(path);
|
||||
if (is_not_completion && !alternate) {
|
||||
if (IS_EMPTY(path.get(['consequent']))) {
|
||||
console.error('FOLD_IF_EMPTY_CONSEQUENT');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: test,
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
}
|
||||
if (alternate) {
|
||||
if (alternate.type === consequent.type) {
|
||||
if (consequent.type === 'ExpressionStatement') {
|
||||
console.error('FOLD_IF_BOTH_EXPRSTMT');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement', expression:
|
||||
{
|
||||
type: 'ConditionalExpression',
|
||||
test: test,
|
||||
consequent: consequent.expression,
|
||||
alternate: alternate.expression,
|
||||
}});
|
||||
return path.parent;
|
||||
}
|
||||
else if (consequent.type === 'ReturnStatement' ||
|
||||
consequent.type === 'ThrowStatement') {
|
||||
console.error('FOLD_IF_BOTH_COMPLETIONS');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement', expression:{
|
||||
type: consequent.type,
|
||||
argument: {
|
||||
type: 'ConditionalExpression',
|
||||
test: test,
|
||||
consequent: consequent.argument,
|
||||
alternate: alternate.argument,
|
||||
}}
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (is_not_completion && consequent.type === 'ExpressionStatement') {
|
||||
console.error('FOLD_IF_NON_COMPLETION_TO_&&');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: {
|
||||
type: 'BinaryExpression',
|
||||
operator: '&&',
|
||||
left: test,
|
||||
right: consequent.expression,
|
||||
}
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (test) {
|
||||
return REPLACE(path, consequent);
|
||||
}
|
||||
if (alternate) {
|
||||
return REPLACE(path, alternate);
|
||||
}
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
consequent = path.get(['consequent']);
|
||||
let updated;
|
||||
if (consequent.node !== EMPTY) {
|
||||
REPLACE_IF_EMPTY(consequent, EMPTY);
|
||||
if (consequent.parent.node[consequent.key] === EMPTY) {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
if (alternate) {
|
||||
alternate = path.get(['alternate']);
|
||||
REMOVE_IF_EMPTY(alternate);
|
||||
if (path.node.alternate === null) {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_SEQUENCE = function*(path) {
|
||||
if (path && path.node && path.node.type === 'SequenceExpression') {
|
||||
console.error('FOLD_SEQUENCE');
|
||||
// never delete the last value
|
||||
for (let i = 0; i < path.node.expressions.length - 1; i++) {
|
||||
if (IS_CONSTEXPR(path.node.expressions[i])) {
|
||||
path.node.expressions.splice(i, 1);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
if (path.node.expressions.length === 1) {
|
||||
return REPLACE(path, path.node.expressions[0]);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_LOGICAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'LogicalExpression') {
|
||||
console.error('FOLD_LOGICAL');
|
||||
let { left, right, operator } = path.node;
|
||||
if (IS_CONSTEXPR(left)) {
|
||||
left = CONSTVALUE(left);
|
||||
if (operator === '||') {
|
||||
if (left) {
|
||||
return REPLACE(path, TO_CONSTEXPR(left));
|
||||
}
|
||||
return REPLACE(path, right);
|
||||
} else if (operator === '&&') {
|
||||
if (!left) {
|
||||
return REPLACE(path, TO_CONSTEXPR(left));
|
||||
}
|
||||
return REPLACE(path, right);
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_SWITCH = function*(path) {
|
||||
if (path && path.node && path.node.type === 'SwitchStatement') {
|
||||
let { discriminant, cases } = path.node;
|
||||
// if there are no cases, just become an expression
|
||||
if (cases.length === 0 && IS_NOT_COMPLETION(path)) {
|
||||
return REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: discriminant
|
||||
});
|
||||
}
|
||||
// if the discriminant is static
|
||||
// remove any preceding non-matching static cases
|
||||
// fold any trailing cases into the matching case
|
||||
if (cases.length > 1 && IS_CONSTEXPR(discriminant)) {
|
||||
const discriminant_value = CONSTVALUE(discriminant);
|
||||
for (var i = 0; i < cases.length; i++) {
|
||||
const test = cases[i].test;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
let test_value = CONSTVALUE(test);
|
||||
if (discriminant_value === test_value) {
|
||||
let new_consequent = cases[i].consequent;
|
||||
if (i < cases.length - 1) {
|
||||
for (let fallthrough of cases.slice(i+1)) {
|
||||
new_consequent.push(...fallthrough.consequent);
|
||||
}
|
||||
}
|
||||
cases[i].consequent = new_consequent;
|
||||
REPLACE(path.get(['cases']), [cases[i]]);
|
||||
return path;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// we had a dynamic case need to bail
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_UNREACHABLE = function*(path) {
|
||||
if (path && path.node && path.parent && Array.isArray(path.parent.node)) {
|
||||
if (path.node.type === 'ReturnStatement' ||
|
||||
path.node.type === 'ContinueStatement' ||
|
||||
path.node.type === 'BreakStatement' ||
|
||||
path.node.type === 'ThrowStatement') {
|
||||
const next_key = path.key + 1;
|
||||
path.parent.node.splice(next_key, path.parent.node.length - next_key);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
const FOLD_CONDITIONAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'ConditionalExpression') {
|
||||
console.error('FOLD_CONDITIONAL');
|
||||
let { test, consequent, alternate } = path.node;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (test) {
|
||||
return REPLACE(path, consequent);
|
||||
}
|
||||
return REPLACE(path, alternate);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_BINARY = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.node.type === 'BinaryExpression' &&
|
||||
!IS_NAN(path.node)
|
||||
) {
|
||||
console.error('FOLD_BINARY');
|
||||
let { left, right, operator } = path.node;
|
||||
if (operator === '==' || operator === '!=') {
|
||||
let updated = false;
|
||||
if (IS_UNDEFINED(left)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['left']), NULL);
|
||||
}
|
||||
if (IS_UNDEFINED(right)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['right']), NULL);
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
if (path.node !== INFINITY && path.node !== NEG_INFINITY && IS_CONSTEXPR(left) && IS_CONSTEXPR(right)) {
|
||||
left = CONSTVALUE(left);
|
||||
right = CONSTVALUE(right);
|
||||
let value;
|
||||
if ((!left || typeof left !== 'object') && (!right || typeof right !== 'object')) {
|
||||
if (operator === '+') {
|
||||
value = left + right;
|
||||
} else if (operator === '-') {
|
||||
value = left - right;
|
||||
} else if (operator === '*') {
|
||||
value = left * right;
|
||||
} else if (operator === '/') {
|
||||
value = left / right;
|
||||
} else if (operator === '%') {
|
||||
value = left % right;
|
||||
} else if (operator === '==') {
|
||||
value = left == right;
|
||||
} else if (operator === '!=') {
|
||||
value = left != right;
|
||||
} else if (operator === '===') {
|
||||
value = left === right;
|
||||
} else if (operator === '!==') {
|
||||
value = left !== right;
|
||||
} else if (operator === '<') {
|
||||
value = left < right;
|
||||
} else if (operator === '<=') {
|
||||
value = left <= right;
|
||||
} else if (operator === '>') {
|
||||
value = left > right;
|
||||
} else if (operator === '>=') {
|
||||
value = left >= right;
|
||||
} else if (operator === '<<') {
|
||||
value = left << right;
|
||||
} else if (operator === '>>') {
|
||||
value = left >> right;
|
||||
} else if (operator === '>>>') {
|
||||
value = left >>> right;
|
||||
} else if (operator === '|') {
|
||||
value = left | right;
|
||||
} else if (operator === '&') {
|
||||
value = left & right;
|
||||
} else if (operator === '^') {
|
||||
value = left ^ right;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (operator === '==') value = false;
|
||||
if (operator === '===') value = false;
|
||||
if (operator === '!=') value = true;
|
||||
if (operator === '!==') value = true;
|
||||
if (operator === 'in' && typeof right === 'object' && right) {
|
||||
value = Boolean(Object.getOwnPropertyDescriptor(right, left));
|
||||
}
|
||||
}
|
||||
if (value !== void 0) {
|
||||
if (typeof value === 'string' || typeof value === 'boolean' || value === null) {
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_UNARY = function*(path) {
|
||||
if (path && path.node && path.node.type === 'UnaryExpression') {
|
||||
console.error('FOLD_UNARY');
|
||||
if (IS_CONSTEXPR(path.node)) {
|
||||
return yield path;
|
||||
}
|
||||
let { argument, operator } = path.node;
|
||||
if (IS_CONSTEXPR(argument)) {
|
||||
if (operator === 'void') {
|
||||
return REPLACE(path, UNDEFINED);
|
||||
}
|
||||
let value = CONSTVALUE(argument);
|
||||
if (operator === '-') {
|
||||
value = -value;
|
||||
} else if (operator === '+') {
|
||||
value = +value;
|
||||
} else if (operator === '~') {
|
||||
value = ~value;
|
||||
} else if (operator === '!') {
|
||||
value = !value;
|
||||
} else if (operator === 'typeof') {
|
||||
value = typeof value;
|
||||
} else if (operator === 'delete') {
|
||||
value = true;
|
||||
}
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_EVAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'CallExpression' &&
|
||||
path.node.callee.type === 'Identifier' && path.node.callee.name === 'eval') {
|
||||
console.error('FOLD_EVAL');
|
||||
if (path.node.arguments.length === 1 && path.node.arguments[0].type === 'Literal') {
|
||||
let result = esprima.parse(`${
|
||||
CONSTVALUE(path.node.arguments[0])
|
||||
}`);
|
||||
if (result.body.length === 1 && result.body[0].type === 'ExpressionStatement') {
|
||||
return REPLACE(path, result.body[0].expression);
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
const FOLD_MEMBER = function*(path) {
|
||||
if (path && path.node && path.node.type === 'MemberExpression') {
|
||||
console.error('FOLD_MEMBER');
|
||||
if (path.node.computed && path.node.property.type === 'Literal') {
|
||||
const current = `${CONSTVALUE(path.node.property)}`;
|
||||
if (typeof current === 'string' && /^[$_a-z][$_a-z\d]*$/i.test(current)) {
|
||||
path.node.computed = false;
|
||||
path.node.property = {
|
||||
type: 'Identifier',
|
||||
name: current,
|
||||
};
|
||||
return path;
|
||||
}
|
||||
}
|
||||
if (IS_CONSTEXPR(path.node.object)) {
|
||||
const value = CONSTVALUE(path.node.object);
|
||||
if (typeof value === 'string' || Array.isArray(value) || (value && typeof value === 'object')) {
|
||||
let key;
|
||||
if (IS_CONSTEXPR(path.node.property)) {
|
||||
key = `${CONSTVALUE(path.node.property)}`;
|
||||
}
|
||||
else if (!path.node.computed) {
|
||||
key = path.node.property.name;
|
||||
}
|
||||
if (key !== void 0) {
|
||||
const desc = Object.getOwnPropertyDescriptor(value, key);
|
||||
if (desc) {
|
||||
const folded = value[key];
|
||||
console.error('FOLDING', JSON.stringify(folded));
|
||||
if (IN_PRAGMA_POS(path) && typeof folded === 'string') {
|
||||
if (value.length > 1) {
|
||||
REPLACE(
|
||||
path.get(['object']),
|
||||
TO_CONSTEXPR(value.slice(key, key + 1))
|
||||
);
|
||||
REPLACE(path.get(['property']), TO_CONSTEXPR(0));
|
||||
return path;
|
||||
}
|
||||
} else {
|
||||
return REPLACE(path, TO_CONSTEXPR(value[key]));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
|
||||
const $MIN = Symbol();
|
||||
const MIN_TRUE = Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '!',
|
||||
argument: Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'Literal',
|
||||
value: 0
|
||||
})
|
||||
});
|
||||
const MIN_FALSE = Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '!',
|
||||
argument: Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'Literal',
|
||||
value: 1
|
||||
})
|
||||
});
|
||||
const MIN_REPLACEMENTS = new Map;
|
||||
MIN_REPLACEMENTS.set(true, MIN_TRUE);
|
||||
MIN_REPLACEMENTS.set(false, MIN_FALSE);
|
||||
const MIN_VALUES = function*(path) {
|
||||
if (path && path.node && !path.node[$MIN] && IS_CONSTEXPR(path.node)) {
|
||||
let value = CONSTVALUE(path.node);
|
||||
if (MIN_REPLACEMENTS.has(value)) {
|
||||
console.error('MIN_VALUE', value)
|
||||
return REPLACE(path, MIN_REPLACEMENTS.get(value));
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
|
||||
import esprima from 'esprima';
|
||||
import util from 'util';
|
||||
import escodegen from 'escodegen';
|
||||
const optimize = (src) => {
|
||||
const ROOT = new NodePath(
|
||||
null,
|
||||
esprima.parse(
|
||||
src,
|
||||
{
|
||||
// loc: true,
|
||||
// source: '<stdin>',
|
||||
}
|
||||
),
|
||||
null
|
||||
);
|
||||
// all of these are things that could affect completion value positions
|
||||
const walk_expressions = WalkCombinator.pipe(
|
||||
...[
|
||||
WalkCombinator.DEPTH_FIRST,
|
||||
{
|
||||
// We never work on Arrays
|
||||
*inputs(path) {
|
||||
if (Array.isArray(path)) return;
|
||||
return yield path;
|
||||
},
|
||||
},
|
||||
{ inputs: FOLD_UNREACHABLE },
|
||||
{ inputs: FOLD_IF },
|
||||
{ inputs: FOLD_SWITCH },
|
||||
{ inputs: FOLD_EXPR_STMT },
|
||||
{ inputs: FOLD_CONDITIONAL },
|
||||
{ inputs: FOLD_LOGICAL },
|
||||
{ inputs: FOLD_BINARY },
|
||||
{ inputs: FOLD_UNARY },
|
||||
{ inputs: FOLD_SEQUENCE },
|
||||
{ inputs: FOLD_MEMBER },
|
||||
{ inputs: FOLD_EMPTY },
|
||||
{ inputs: FOLD_WHILE },
|
||||
{ inputs: FOLD_EVAL },
|
||||
]
|
||||
).walk(ROOT);
|
||||
for (const _ of walk_expressions) {
|
||||
}
|
||||
const minify = WalkCombinator.pipe(
|
||||
...[
|
||||
WalkCombinator.DEPTH_FIRST,
|
||||
{
|
||||
// We never work on Arrays
|
||||
*inputs(path) {
|
||||
if (Array.isArray(path)) return;
|
||||
return yield path;
|
||||
},
|
||||
},
|
||||
{ inputs: MIN_VALUES },
|
||||
]
|
||||
).walk(ROOT);
|
||||
for (const _ of minify) {
|
||||
}
|
||||
return ROOT;
|
||||
}
|
||||
import mississippi from 'mississippi';
|
||||
process.stdin.pipe(
|
||||
mississippi.concat(buff => {
|
||||
const ROOT = optimize(`${buff}`)
|
||||
console.error(
|
||||
'%s',
|
||||
util.inspect(ROOT.node, {
|
||||
depth: null,
|
||||
colors: true,
|
||||
})
|
||||
);
|
||||
const out = escodegen.generate(ROOT.node);
|
||||
console.log(out);
|
||||
})
|
||||
);
|
||||
6
samples/JavaScript/entry.mjs
Normal file
6
samples/JavaScript/entry.mjs
Normal file
@@ -0,0 +1,6 @@
|
||||
import bar from './module.mjs';
|
||||
function foo() {
|
||||
return "I am foo";
|
||||
}
|
||||
export {foo};
|
||||
console.log(bar);
|
||||
5
samples/JavaScript/module.mjs
Normal file
5
samples/JavaScript/module.mjs
Normal file
@@ -0,0 +1,5 @@
|
||||
import {foo} from './entry.mjs';
|
||||
console.log(foo());
|
||||
|
||||
const bar = "I am bar.";
|
||||
export {bar as default};
|
||||
106
samples/Nearley/nearley-language-bootstrapped.ne
Normal file
106
samples/Nearley/nearley-language-bootstrapped.ne
Normal file
@@ -0,0 +1,106 @@
|
||||
# nearley grammar
|
||||
@builtin "string.ne"
|
||||
|
||||
@{%
|
||||
|
||||
function insensitive(sl) {
|
||||
var s = sl.literal;
|
||||
result = [];
|
||||
for (var i=0; i<s.length; i++) {
|
||||
var c = s.charAt(i);
|
||||
if (c.toUpperCase() !== c || c.toLowerCase() !== c) {
|
||||
result.push(new RegExp("[" + c.toLowerCase() + c.toUpperCase() + "]"));
|
||||
} else {
|
||||
result.push({literal: c});
|
||||
}
|
||||
}
|
||||
return {subexpression: [{tokens: result, postprocess: function(d) {return d.join(""); }}]};
|
||||
}
|
||||
|
||||
%}
|
||||
|
||||
final -> whit? prog whit? {% function(d) { return d[1]; } %}
|
||||
|
||||
prog -> prod {% function(d) { return [d[0]]; } %}
|
||||
| prod whit prog {% function(d) { return [d[0]].concat(d[2]); } %}
|
||||
|
||||
prod -> word whit? ("-"|"="):+ ">" whit? expression+ {% function(d) { return {name: d[0], rules: d[5]}; } %}
|
||||
| word "[" wordlist "]" whit? ("-"|"="):+ ">" whit? expression+ {% function(d) {return {macro: d[0], args: d[2], exprs: d[8]}} %}
|
||||
| "@" whit? js {% function(d) { return {body: d[2]}; } %}
|
||||
| "@" word whit word {% function(d) { return {config: d[1], value: d[3]}; } %}
|
||||
| "@include" whit? string {% function(d) {return {include: d[2].literal, builtin: false}} %}
|
||||
| "@builtin" whit? string {% function(d) {return {include: d[2].literal, builtin: true }} %}
|
||||
|
||||
expression+ -> completeexpression
|
||||
| expression+ whit? "|" whit? completeexpression {% function(d) { return d[0].concat([d[4]]); } %}
|
||||
|
||||
expressionlist -> completeexpression
|
||||
| expressionlist whit? "," whit? completeexpression {% function(d) { return d[0].concat([d[4]]); } %}
|
||||
|
||||
wordlist -> word
|
||||
| wordlist whit? "," whit? word {% function(d) { return d[0].concat([d[4]]); } %}
|
||||
|
||||
completeexpression -> expr {% function(d) { return {tokens: d[0]}; } %}
|
||||
| expr whit? js {% function(d) { return {tokens: d[0], postprocess: d[2]}; } %}
|
||||
|
||||
expr_member ->
|
||||
word {% id %}
|
||||
| "$" word {% function(d) {return {mixin: d[1]}} %}
|
||||
| word "[" expressionlist "]" {% function(d) {return {macrocall: d[0], args: d[2]}} %}
|
||||
| string "i":? {% function(d) { if (d[1]) {return insensitive(d[0]); } else {return d[0]; } } %}
|
||||
| "%" word {% function(d) {return {token: d[1]}} %}
|
||||
| charclass {% id %}
|
||||
| "(" whit? expression+ whit? ")" {% function(d) {return {'subexpression': d[2]} ;} %}
|
||||
| expr_member whit? ebnf_modifier {% function(d) {return {'ebnf': d[0], 'modifier': d[2]}; } %}
|
||||
|
||||
ebnf_modifier -> ":+" {% id %} | ":*" {% id %} | ":?" {% id %}
|
||||
|
||||
expr -> expr_member
|
||||
| expr whit expr_member {% function(d){ return d[0].concat([d[2]]); } %}
|
||||
|
||||
word -> [\w\?\+] {% function(d){ return d[0]; } %}
|
||||
| word [\w\?\+] {% function(d){ return d[0]+d[1]; } %}
|
||||
|
||||
string -> dqstring {% function(d) {return { literal: d[0] }; } %}
|
||||
#string -> "\"" charset "\"" {% function(d) { return { literal: d[1].join("") }; } %}
|
||||
#
|
||||
#charset -> null
|
||||
# | charset char {% function(d) { return d[0].concat([d[1]]); } %}
|
||||
#
|
||||
#char -> [^\\"] {% function(d) { return d[0]; } %}
|
||||
# | "\\" . {% function(d) { return JSON.parse("\""+"\\"+d[1]+"\""); } %}
|
||||
|
||||
charclass -> "." {% function(d) { return new RegExp("."); } %}
|
||||
| "[" charclassmembers "]" {% function(d) { return new RegExp("[" + d[1].join('') + "]"); } %}
|
||||
|
||||
charclassmembers -> null
|
||||
| charclassmembers charclassmember {% function(d) { return d[0].concat([d[1]]); } %}
|
||||
|
||||
charclassmember -> [^\\\]] {% function(d) { return d[0]; } %}
|
||||
| "\\" . {% function(d) { return d[0] + d[1]; } %}
|
||||
|
||||
js -> "{" "%" jscode "%" "}" {% function(d) { return d[2]; } %}
|
||||
|
||||
jscode -> null {% function() {return "";} %}
|
||||
| jscode [^%] {% function(d) {return d[0] + d[1];} %}
|
||||
| jscode "%" [^}] {% function(d) {return d[0] + d[1] + d[2]; } %}
|
||||
|
||||
# Whitespace with a comment
|
||||
whit -> whitraw
|
||||
| whitraw? comment whit?
|
||||
|
||||
# Optional whitespace with a comment
|
||||
whit? -> null
|
||||
| whit
|
||||
|
||||
# Literally a string of whitespace
|
||||
whitraw -> [\s]
|
||||
| whitraw [\s]
|
||||
|
||||
# A string of whitespace OR the empty string
|
||||
whitraw? -> null
|
||||
| whitraw
|
||||
|
||||
comment -> "#" commentchars "\n"
|
||||
commentchars -> null
|
||||
| commentchars [^\n]
|
||||
samples/OpenEdge ABL/test-win.w (new file, 230 lines)
@@ -0,0 +1,230 @@
|
||||
&ANALYZE-SUSPEND _VERSION-NUMBER AB_v10r12 GUI
|
||||
&ANALYZE-RESUME
|
||||
&Scoped-define WINDOW-NAME C-Win
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _DEFINITIONS C-Win
|
||||
/*------------------------------------------------------------------------
|
||||
|
||||
File:
|
||||
|
||||
Description:
|
||||
|
||||
Input Parameters:
|
||||
<none>
|
||||
|
||||
Output Parameters:
|
||||
<none>
|
||||
|
||||
Author:
|
||||
|
||||
Created:
|
||||
|
||||
------------------------------------------------------------------------*/
|
||||
/* This .W file was created with the Progress AppBuilder. */
|
||||
/*----------------------------------------------------------------------*/
|
||||
|
||||
/* Create an unnamed pool to store all the widgets created
|
||||
by this procedure. This is a good default which assures
|
||||
that this procedure's triggers and internal procedures
|
||||
will execute in this procedure's storage, and that proper
|
||||
cleanup will occur on deletion of the procedure. */
|
||||
|
||||
CREATE WIDGET-POOL.
|
||||
|
||||
/* *************************** Definitions ************************** */
|
||||
|
||||
/* Parameters Definitions --- */
|
||||
|
||||
/* Local Variable Definitions --- */
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
&ANALYZE-SUSPEND _UIB-PREPROCESSOR-BLOCK
|
||||
|
||||
/* ******************** Preprocessor Definitions ******************** */
|
||||
|
||||
&Scoped-define PROCEDURE-TYPE Window
|
||||
&Scoped-define DB-AWARE no
|
||||
|
||||
/* Name of designated FRAME-NAME and/or first browse and/or first query */
|
||||
&Scoped-define FRAME-NAME DEFAULT-FRAME
|
||||
|
||||
/* Custom List Definitions */
|
||||
/* List-1,List-2,List-3,List-4,List-5,List-6 */
|
||||
|
||||
/* _UIB-PREPROCESSOR-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
|
||||
/* *********************** Control Definitions ********************** */
|
||||
|
||||
/* Define the widget handle for the window */
|
||||
DEFINE VAR C-Win AS WIDGET-HANDLE NO-UNDO.
|
||||
|
||||
/* ************************ Frame Definitions *********************** */
|
||||
|
||||
DEFINE FRAME DEFAULT-FRAME
|
||||
WITH 1 DOWN NO-BOX KEEP-TAB-ORDER OVERLAY
|
||||
SIDE-LABELS NO-UNDERLINE THREE-D
|
||||
AT COL 1 ROW 1
|
||||
SIZE 80 BY 16 WIDGET-ID 100.
|
||||
|
||||
|
||||
/* *********************** Procedure Settings ************************ */
|
||||
|
||||
&ANALYZE-SUSPEND _PROCEDURE-SETTINGS
|
||||
/* Settings for THIS-PROCEDURE
|
||||
Type: Window
|
||||
Allow: Basic,Browse,DB-Fields,Window,Query
|
||||
Other Settings: COMPILE
|
||||
*/
|
||||
&ANALYZE-RESUME _END-PROCEDURE-SETTINGS
|
||||
|
||||
/* ************************* Create Window ************************** */
|
||||
|
||||
&ANALYZE-SUSPEND _CREATE-WINDOW
|
||||
IF SESSION:DISPLAY-TYPE = "GUI":U THEN
|
||||
CREATE WINDOW C-Win ASSIGN
|
||||
HIDDEN = YES
|
||||
TITLE = "<insert window title>"
|
||||
HEIGHT = 16
|
||||
WIDTH = 80
|
||||
MAX-HEIGHT = 16
|
||||
MAX-WIDTH = 80
|
||||
VIRTUAL-HEIGHT = 16
|
||||
VIRTUAL-WIDTH = 80
|
||||
RESIZE = yes
|
||||
SCROLL-BARS = no
|
||||
STATUS-AREA = no
|
||||
BGCOLOR = ?
|
||||
FGCOLOR = ?
|
||||
KEEP-FRAME-Z-ORDER = yes
|
||||
THREE-D = yes
|
||||
MESSAGE-AREA = no
|
||||
SENSITIVE = yes.
|
||||
ELSE {&WINDOW-NAME} = CURRENT-WINDOW.
|
||||
/* END WINDOW DEFINITION */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
|
||||
/* *********** Runtime Attributes and AppBuilder Settings *********** */
|
||||
|
||||
&ANALYZE-SUSPEND _RUN-TIME-ATTRIBUTES
|
||||
/* SETTINGS FOR WINDOW C-Win
|
||||
VISIBLE,,RUN-PERSISTENT */
|
||||
/* SETTINGS FOR FRAME DEFAULT-FRAME
|
||||
FRAME-NAME */
|
||||
IF SESSION:DISPLAY-TYPE = "GUI":U AND VALID-HANDLE(C-Win)
|
||||
THEN C-Win:HIDDEN = no.
|
||||
|
||||
/* _RUN-TIME-ATTRIBUTES-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* ************************ Control Triggers ************************ */
|
||||
|
||||
&Scoped-define SELF-NAME C-Win
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CONTROL C-Win C-Win
|
||||
ON END-ERROR OF C-Win /* <insert window title> */
|
||||
OR ENDKEY OF {&WINDOW-NAME} ANYWHERE DO:
|
||||
/* This case occurs when the user presses the "Esc" key.
|
||||
In a persistently run window, just ignore this. If we did not, the
|
||||
application would exit. */
|
||||
IF THIS-PROCEDURE:PERSISTENT THEN RETURN NO-APPLY.
|
||||
END.
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CONTROL C-Win C-Win
|
||||
ON WINDOW-CLOSE OF C-Win /* <insert window title> */
|
||||
DO:
|
||||
/* This event will close the window and terminate the procedure. */
|
||||
APPLY "CLOSE":U TO THIS-PROCEDURE.
|
||||
RETURN NO-APPLY.
|
||||
END.
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
&UNDEFINE SELF-NAME
|
||||
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _MAIN-BLOCK C-Win
|
||||
|
||||
|
||||
/* *************************** Main Block *************************** */
|
||||
|
||||
/* Set CURRENT-WINDOW: this will parent dialog-boxes and frames. */
|
||||
ASSIGN CURRENT-WINDOW = {&WINDOW-NAME}
|
||||
THIS-PROCEDURE:CURRENT-WINDOW = {&WINDOW-NAME}.
|
||||
|
||||
/* The CLOSE event can be used from inside or outside the procedure to */
|
||||
/* terminate it. */
|
||||
ON CLOSE OF THIS-PROCEDURE
|
||||
RUN disable_UI.
|
||||
|
||||
/* Best default for GUI applications is... */
|
||||
PAUSE 0 BEFORE-HIDE.
|
||||
|
||||
/* Now enable the interface and wait for the exit condition. */
|
||||
/* (NOTE: handle ERROR and END-KEY so cleanup code will always fire. */
|
||||
MAIN-BLOCK:
|
||||
DO ON ERROR UNDO MAIN-BLOCK, LEAVE MAIN-BLOCK
|
||||
ON END-KEY UNDO MAIN-BLOCK, LEAVE MAIN-BLOCK:
|
||||
RUN enable_UI.
|
||||
IF NOT THIS-PROCEDURE:PERSISTENT THEN
|
||||
WAIT-FOR CLOSE OF THIS-PROCEDURE.
|
||||
END.
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
|
||||
/* ********************** Internal Procedures *********************** */
|
||||
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _PROCEDURE disable_UI C-Win _DEFAULT-DISABLE
|
||||
PROCEDURE disable_UI :
|
||||
/*------------------------------------------------------------------------------
|
||||
Purpose: DISABLE the User Interface
|
||||
Parameters: <none>
|
||||
Notes: Here we clean-up the user-interface by deleting
|
||||
dynamic widgets we have created and/or hide
|
||||
frames. This procedure is usually called when
|
||||
we are ready to "clean-up" after running.
|
||||
------------------------------------------------------------------------------*/
|
||||
/* Delete the WINDOW we created */
|
||||
IF SESSION:DISPLAY-TYPE = "GUI":U AND VALID-HANDLE(C-Win)
|
||||
THEN DELETE WIDGET C-Win.
|
||||
IF THIS-PROCEDURE:PERSISTENT THEN DELETE PROCEDURE THIS-PROCEDURE.
|
||||
END PROCEDURE.
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _PROCEDURE enable_UI C-Win _DEFAULT-ENABLE
|
||||
PROCEDURE enable_UI :
|
||||
/*------------------------------------------------------------------------------
|
||||
Purpose: ENABLE the User Interface
|
||||
Parameters: <none>
|
||||
Notes: Here we display/view/enable the widgets in the
|
||||
user-interface. In addition, OPEN all queries
|
||||
associated with each FRAME and BROWSE.
|
||||
These statements here are based on the "Other
|
||||
Settings" section of the widget Property Sheets.
|
||||
------------------------------------------------------------------------------*/
|
||||
VIEW FRAME DEFAULT-FRAME IN WINDOW C-Win.
|
||||
{&OPEN-BROWSERS-IN-QUERY-DEFAULT-FRAME}
|
||||
VIEW C-Win.
|
||||
END PROCEDURE.
|
||||
|
||||
/* _UIB-CODE-BLOCK-END */
|
||||
&ANALYZE-RESUME
|
||||
|
||||
samples/Visual Basic/VBAllInOne.vb (new file, 1093 lines; diff suppressed because it is too large)
samples/XML/NDepends_Example.ndproj (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
||||
<NDepend AppName="ExampleNDApp" Platform="DotNet">
|
||||
<OutputDir KeepHistoric="True" KeepXmlFiles="True">c:\temp</OutputDir>
|
||||
<Assemblies />
|
||||
<FrameworkAssemblies />
|
||||
<Dirs>
|
||||
<Dir>C:\Windows\Microsoft.NET\Framework\v4.0.30319</Dir>
|
||||
<Dir>C:\Windows\Microsoft.NET\Framework\v4.0.30319\WPF</Dir>
|
||||
</Dirs>
|
||||
<Report Kind="0" SectionsEnabled="12287" XslPath="" Flags="64512">
|
||||
<Section Enabled="True">Application Metrics</Section>
|
||||
<Section Enabled="True">.NET Assemblies Metrics</Section>
|
||||
<Section Enabled="True">Treemap Metric View</Section>
|
||||
<Section Enabled="True">.NET Assemblies Abstractness vs. Instability</Section>
|
||||
<Section Enabled="True">.NET Assemblies Dependencies</Section>
|
||||
<Section Enabled="True">.NET Assemblies Dependency Graph</Section>
|
||||
<Section Enabled="True">.NET Assemblies Build Order</Section>
|
||||
<Section Enabled="True">Analysis Log</Section>
|
||||
<Section Enabled="True">CQL Rules Violated</Section>
|
||||
<Section Enabled="True">Types Metrics</Section>
|
||||
<Section Enabled="False">Types Dependencies</Section>
|
||||
</Report>
|
||||
<BuildComparisonSetting ProjectMode="DontCompare" BuildMode="MostRecentAnalysisResultAvailable" ProjectFileToCompareWith="" BuildFileToCompareWith="" NDaysAgo="1" />
|
||||
<BaselineInUISetting ProjectMode="DontCompare" BuildMode="MostRecentAnalysisResultAvailable" ProjectFileToCompareWith="" BuildFileToCompareWith="" NDaysAgo="1" />
|
||||
<CoverageFiles UncoverableAttribute="" />
|
||||
<SourceFileRebasing FromPath="" ToPath="" />
|
||||
<Queries>
|
||||
<Group Name="Code Quality" Active="True" ShownInReport="False">
|
||||
<Query Active="True" DisplayList="True" DisplayStat="True" DisplaySelectionView="False" IsCriticalRule="False"><![CDATA[// <Name>Discard generated and designer Methods from JustMyCode</Name>
|
||||
// --- Make sure to make this query richer to discard generated methods from NDepend rules results ---
|
||||
notmycode
|
||||
|
||||
//
|
||||
// First define source files paths to discard
|
||||
//
|
||||
from a in Application.Assemblies
|
||||
where a.SourceFileDeclAvailable
|
||||
let asmSourceFilesPaths = a.SourceDecls.Select(s => s.SourceFile.FilePath)
|
||||
|
||||
let sourceFilesPathsToDiscard = (
|
||||
from filePath in asmSourceFilesPaths
|
||||
let filePathLower= filePath.ToString().ToLower()
|
||||
where
|
||||
filePathLower.EndsWithAny(
|
||||
".g.cs", // Popular pattern to name generated files.
|
||||
".g.vb",
|
||||
".xaml", // notmycode WPF xaml code
|
||||
".designer.cs", // notmycode C# Windows Forms designer code
|
||||
".designer.vb") // notmycode VB.NET Windows Forms designer code
|
||||
||
|
||||
// notmycode methods in source files in a directory containing generated
|
||||
filePathLower.Contains("generated")
|
||||
select filePath
|
||||
).ToHashSet()
|
||||
|
||||
//
|
||||
// Second: discard methods in sourceFilesPathsToDiscard
|
||||
//
|
||||
from m in a.ChildMethods
|
||||
where (m.SourceFileDeclAvailable &&
|
||||
sourceFilesPathsToDiscard.Contains(m.SourceDecls.First().SourceFile.FilePath)) ||
|
||||
// Generated methods might be tagged with this attribute
|
||||
m.HasAttribute ("System.CodeDom.Compiler.GeneratedCodeAttribute".AllowNoMatch())
|
||||
select new { m, m.NbLinesOfCode }]]></Query>
|
||||
<Query Active="True" DisplayList="True" DisplayStat="True" DisplaySelectionView="False" IsCriticalRule="False"><![CDATA[// <Name>Discard generated Fields from JustMyCode</Name>
|
||||
// --- Make sure to make this query richer to discard generated fields from NDepend rules results ---
|
||||
notmycode
|
||||
from f in Application.Fields where
|
||||
f.HasAttribute ("System.CodeDom.Compiler.GeneratedCodeAttribute".AllowNoMatch()) ||
|
||||
|
||||
// Eliminate "components" generated in Windows Form Conrol context
|
||||
f.Name == "components" && f.ParentType.DeriveFrom("System.Windows.Forms.Control".AllowNoMatch())
|
||||
select f]]></Query>
|
||||
</Group>
|
||||
</Queries>
|
||||
<WarnFilter />
|
||||
</NDepend>
|
||||
samples/XML/chrome.natvis (new file, 183 lines)
@@ -0,0 +1,183 @@
|
||||
<?xml version="1.0" encoding="utf-8" ?>
|
||||
<!--
|
||||
Copyright 2015 The Chromium Authors. All rights reserved.
|
||||
|
||||
https://cs.chromium.org/chromium/src/tools/win/DebugVisualizers/chrome.natvis
|
||||
-->
|
||||
<AutoVisualizer
|
||||
xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
|
||||
<Type Name="gfx::Point">
|
||||
<AlternativeType Name="gfx::PointF"/>
|
||||
<DisplayString>({x_}, {y_})</DisplayString>
|
||||
</Type>
|
||||
<Type Name="gfx::Size">
|
||||
<AlternativeType Name="gfx::SizeF"/>
|
||||
<DisplayString>({width_}, {height_})</DisplayString>
|
||||
</Type>
|
||||
<Type Name="gfx::Rect">
|
||||
<AlternativeType Name="gfx::RectF"/>
|
||||
<DisplayString>({origin_.x_}, {origin_.y_}) x ({size_.width_}, {size_.height_})</DisplayString>
|
||||
</Type>
|
||||
<Type Name="scoped_refptr<*>">
|
||||
<DisplayString Condition="ptr_ == 0">null</DisplayString>
|
||||
<DisplayString>[{((base::subtle::RefCountedBase*)ptr_)->ref_count_}] {(void*)ptr_} {*ptr_}</DisplayString>
|
||||
<Expand>
|
||||
<Item Name="Ptr">ptr_</Item>
|
||||
<Item Name="RefCount">((base::subtle::RefCountedBase*)ptr_)->ref_count_</Item>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="base::Optional<*>">
|
||||
<DisplayString Condition="storage_.is_null_">(null)</DisplayString>
|
||||
<DisplayString>{storage_.value_}</DisplayString>
|
||||
</Type>
|
||||
<Type Name="base::RefCounted<*>">
|
||||
<DisplayString>RefCount: {ref_count_}</DisplayString>
|
||||
<Expand>
|
||||
<Item Name="RefCount">ref_count_</Item>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="IPC::Message::Header">
|
||||
<DisplayString>{{Routing: {routing}, Type: {type}}}</DisplayString>
|
||||
<Expand>
|
||||
<Item Name="RoutingId">routing</Item>
|
||||
<Item Name="Type">type</Item>
|
||||
<Synthetic Name="Priority"
|
||||
Condition="(flags & IPC::Message::PRIORITY_MASK) ==
|
||||
IPC::Message::PRIORITY_LOW">
|
||||
<DisplayString>Low</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Priority"
|
||||
Condition="(flags & IPC::Message::PRIORITY_MASK) ==
|
||||
IPC::Message::PRIORITY_NORMAL">
|
||||
<DisplayString>Normal</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Priority"
|
||||
Condition="(flags & IPC::Message::PRIORITY_MASK) ==
|
||||
IPC::Message::PRIORITY_HIGH">
|
||||
<DisplayString>High</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Sync"
|
||||
Condition="(flags & IPC::Message::SYNC_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Sync"
|
||||
Condition="(flags & IPC::Message::SYNC_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Reply"
|
||||
Condition="(flags & IPC::Message::REPLY_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Reply"
|
||||
Condition="(flags & IPC::Message::REPLY_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="ReplyError"
|
||||
Condition="(flags & IPC::Message::REPLY_ERROR_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="ReplyError"
|
||||
Condition="(flags & IPC::Message::REPLY_ERROR_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Unblock"
|
||||
Condition="(flags & IPC::Message::UNBLOCK_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Unblock"
|
||||
Condition="(flags & IPC::Message::UNBLOCK_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="PumpingMessages"
|
||||
Condition="(flags & IPC::Message::PUMPING_MSGS_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="PumpingMessages"
|
||||
Condition="(flags & IPC::Message::PUMPING_MSGS_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="HasSentTime"
|
||||
Condition="(flags & IPC::Message::HAS_SENT_TIME_BIT) != 0">
|
||||
<DisplayString>true</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="HasSentTime"
|
||||
Condition="(flags & IPC::Message::HAS_SENT_TIME_BIT) == 0">
|
||||
<DisplayString>false</DisplayString>
|
||||
</Synthetic>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="IPC::Message">
|
||||
<DisplayString>{{size = {header_size_+capacity_after_header_}}}</DisplayString>
|
||||
<Expand>
|
||||
<ExpandedItem>*((IPC::Message::Header*)header_),nd</ExpandedItem>
|
||||
<Item Name="Payload">(void*)((char*)header_ + header_size_)</Item>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="base::TimeDelta">
|
||||
<DisplayString>{delta_}</DisplayString>
|
||||
<Expand>
|
||||
<Synthetic Name="Days">
|
||||
<DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerDay)}</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Hours">
|
||||
<DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerHour)}</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Minutes">
|
||||
<DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerMinute)}</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Seconds">
|
||||
<DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerSecond)}</DisplayString>
|
||||
</Synthetic>
|
||||
<Synthetic Name="Milliseconds">
|
||||
<DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerMillisecond)}</DisplayString>
|
||||
</Synthetic>
|
||||
<Item Name="Microseconds">delta_</Item>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="GURL">
|
||||
<DisplayString>{spec_}</DisplayString>
|
||||
</Type>
|
||||
<Type Name="base::ManualConstructor<*>">
|
||||
<!-- $T1 expands to the first "*" in the name which is the template
|
||||
type. Use that to cast to the correct value. -->
|
||||
<DisplayString>{*($T1*)space_.data_}</DisplayString>
|
||||
<Expand>
|
||||
<ExpandedItem>*($T1*)space_.data_</ExpandedItem>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="base::internal::flat_tree<*>">
|
||||
<AlternativeType Name="base::flat_set<*>"/>
|
||||
<DisplayString>{impl_.body_}</DisplayString>
|
||||
<Expand>
|
||||
<ExpandedItem>impl_.body_</ExpandedItem>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="base::flat_map<*>">
|
||||
<DisplayString>{impl_.body_}</DisplayString>
|
||||
<Expand>
|
||||
<ExpandedItem>impl_.body_</ExpandedItem>
|
||||
</Expand>
|
||||
</Type>
|
||||
<Type Name="base::Value">
|
||||
<DisplayString Condition="type_ == NONE">NONE</DisplayString>
|
||||
<DisplayString Condition="type_ == BOOLEAN">BOOLEAN {bool_value_}</DisplayString>
|
||||
<DisplayString Condition="type_ == INTEGER">INTEGER {int_value_}</DisplayString>
|
||||
<DisplayString Condition="type_ == DOUBLE">DOUBLE {double_value_}</DisplayString>
|
||||
<DisplayString Condition="type_ == STRING">STRING {string_value_}</DisplayString>
|
||||
<DisplayString Condition="type_ == BINARY">BINARY {binary_value_}</DisplayString>
|
||||
<DisplayString Condition="type_ == DICTIONARY">DICTIONARY {dict_}</DisplayString>
|
||||
<DisplayString Condition="type_ == LIST">LIST {list_}</DisplayString>
|
||||
<Expand>
|
||||
<Item Name="[type]">type_</Item>
|
||||
<Item Condition="type_ == BOOLEAN" Name="[boolean]">bool_value_</Item>
|
||||
<Item Condition="type_ == INTEGER" Name="[integer]">int_value_</Item>
|
||||
<Item Condition="type_ == DOUBLE" Name="[double]">double_value_</Item>
|
||||
<Item Condition="type_ == STRING" Name="[string]">string_value_</Item>
|
||||
<Item Condition="type_ == BINARY" Name="[binary]">binary_value_</Item>
|
||||
<!-- Put the members for dictionary and list directly inline without
|
||||
requiring a separate expansion to view. -->
|
||||
<ExpandedItem Condition="type_ == DICTIONARY">dict_</ExpandedItem>
|
||||
<ExpandedItem Condition="type_ == LIST">list_</ExpandedItem>
|
||||
</Expand>
|
||||
</Type>
|
||||
</AutoVisualizer>
|
||||
samples/XML/cloudconfig.cscfg (new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0"?>
<ServiceConfiguration serviceName="MyDef" xmlns="http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceConfiguration">
  <Role name="My.Web">
    <Instances count="1" />
    <ConfigurationSettings>
      <Setting name="DiagnosticsConnectionString" value="UseDevelopmentStorage=true" />
    </ConfigurationSettings>
  </Role>
</ServiceConfiguration>
samples/XML/clouddef.csdef (new file, 11 lines)
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<ServiceDefinition name="MyDef" xmlns="http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition">
  <WebRole name="My.Web">
    <InputEndpoints>
      <InputEndpoint name="HttpIn" protocol="http" port="80" />
    </InputEndpoints>
    <ConfigurationSettings>
      <Setting name="DiagnosticsConnectionString" />
    </ConfigurationSettings>
  </WebRole>
</ServiceDefinition>
samples/XML/configdef.cscfg (new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0"?>
<ServiceConfiguration serviceName="MyDef" xmlns="http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceConfiguration">
  <Role name="My.Web">
    <Instances count="1" />
    <ConfigurationSettings>
      <Setting name="DiagnosticsConnectionString" value="UseDevelopmentStorage=true" />
    </ConfigurationSettings>
  </Role>
</ServiceConfiguration>
samples/XML/dependency-example.depproj (new file, 14 lines)
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" DefaultTargets="Build">
  <Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.props))\dir.props" />
  <PropertyGroup>
    <AssemblyVersion>3.9.0.0</AssemblyVersion>
    <OutputType>Library</OutputType>
    <PackageTargetFramework>dotnet5.1</PackageTargetFramework>
    <NuGetTargetMoniker>.NETPlatform,Version=v5.1</NuGetTargetMoniker>
  </PropertyGroup>
  <ItemGroup>
    <None Include="project.json" />
  </ItemGroup>
  <Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), dir.targets))\dir.targets" />
</Project>
samples/XML/example-sharedproj.shproj (new file, 11 lines)
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <ProjectGuid>{86244B26-C4AE-4F69-9315-B6148C0FE270}</ProjectGuid>
  </PropertyGroup>
  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\CodeSharing\Microsoft.CodeSharing.Common.Default.props" />
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\CodeSharing\Microsoft.CodeSharing.Common.props" />
  <Import Project="SharedProject.projitems" Label="Shared" />
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\CodeSharing\Microsoft.CodeSharing.CSharp.targets" />
</Project>
samples/XML/example.ccproj (new file, 38 lines)
@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
    <ProductVersion>1.0.0</ProductVersion>
    <ProjectGuid>{0beae469-c1c6-4648-a2e5-0ae0ea9efffa}</ProjectGuid>
    <OutputType>Library</OutputType>
    <AppDesignerFolder>Properties</AppDesignerFolder>
    <RootNamespace>MyDef</RootNamespace>
    <AssemblyName>MyDef</AssemblyName>
    <StartDevelopmentStorage>True</StartDevelopmentStorage>
    <Name>My</Name>
  </PropertyGroup>
  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
    <DebugSymbols>true</DebugSymbols>
    <DebugType>full</DebugType>
    <Optimize>false</Optimize>
    <OutputPath>bin\Debug\</OutputPath>
    <DefineConstants>DEBUG;TRACE</DefineConstants>
    <ErrorReport>prompt</ErrorReport>
    <WarningLevel>4</WarningLevel>
  </PropertyGroup>
  <!-- Items for the project -->
  <ItemGroup>
    <ServiceDefinition Include="ServiceDefinition.csdef" />
    <ServiceConfiguration Include="ServiceConfiguration.cscfg" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\My.Web\My.Web.csproj">
      <Name>My.Web</Name>
      <Project>{1515c2c3-0b57-422c-a6f9-0891b86fb7d3}</Project>
      <Private>True</Private>
      <RoleType>Web</RoleType>
      <RoleName>My.Web</RoleName>
    </ProjectReference>
  </ItemGroup>
</Project>
samples/XML/msbuild-example.proj (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<Import Project="$(MSBuildExtensionsPath)\MSBuildCommunityTasks\MSBuild.Community.Tasks.Targets"/>
|
||||
<UsingTask TaskName="Microsoft.Build.Tasks.XmlPeek" AssemblyName="Microsoft.Build.Tasks.v4.0, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a"/>
|
||||
<UsingTask TaskName="Microsoft.Build.Tasks.XmlPoke" AssemblyName="Microsoft.Build.Tasks.v4.0, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a"/>
|
||||
<PropertyGroup>
|
||||
<SolutionRoot>$(MSBuildProjectDirectory)\..</SolutionRoot>
|
||||
<ProjectRoot>$(SolutionRoot)\Src\Bowerbird.Website</ProjectRoot>
|
||||
<ArtifactsDir>$(SolutionRoot)\Release</ArtifactsDir>
|
||||
<CurrentBuildDateStamp>$([System.DateTime]::Now.ToString("yyyyMMdd"))</CurrentBuildDateStamp>
|
||||
<CurrentBuildTimeStamp>$([System.DateTime]::Now.ToString("hhmm"))</CurrentBuildTimeStamp>
|
||||
<CurrentBuildDir>$(ArtifactsDir)\$(CurrentBuildDateStamp)-$(Configuration)</CurrentBuildDir>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<VersionMajor>0</VersionMajor>
|
||||
<VersionMinor>1</VersionMinor>
|
||||
<VersionPatch>0</VersionPatch>
|
||||
<VersionPreRelease></VersionPreRelease>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<WebConfig>$(CurrentBuildDir)\Web.config</WebConfig>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageFiles Include="$(ProjectRoot)\**\*.*"
|
||||
Exclude="$(ProjectRoot)\bin\*.pdb;
|
||||
$(ProjectRoot)\bin\*.xml;
|
||||
$(ProjectRoot)\Logs\**\*.*;
|
||||
$(ProjectRoot)\obj\**\*.*;
|
||||
$(ProjectRoot)\test\**\*.*;
|
||||
$(ProjectRoot)\media\**\*.*;
|
||||
$(ProjectRoot)\**\*.orig;
|
||||
$(ProjectRoot)\*.config;
|
||||
$(ProjectRoot)\*.xml;
|
||||
$(ProjectRoot)\**\*.csproj;
|
||||
$(ProjectRoot)\*.csproj.user;">
|
||||
</PackageFiles>
|
||||
<ConfigFiles Include="$(ProjectRoot)\Web.config" >
|
||||
</ConfigFiles>
|
||||
</ItemGroup>
|
||||
<Target Name="UpdateWebConfig" Condition=" '$(CurrentBuildDateStamp)' != '' ">
|
||||
<XmlPoke Namespaces="<Namespace Prefix='msb' Uri='http://schemas.microsoft.com/developer/msbuild/2003'/>"
|
||||
XmlInputPath="$(WebConfig)"
|
||||
Query="//add[@key='staticContentIncrement']/@value"
|
||||
Value="$(CurrentBuildDateStamp)-$(CurrentBuildTimeStamp)" />
|
||||
</Target>
|
||||
<Target Name="CreateOutputDir">
|
||||
<Message Text="Creating Directory $(CurrentBuildDir)" />
|
||||
<RemoveDir Directories="$(CurrentBuildDir)" />
|
||||
<Delete Files="$(CurrentBuildDir)" />
|
||||
<MakeDir Directories="$(CurrentBuildDir)" />
|
||||
</Target>
|
||||
<Target Name="BuildMediaDirectories">
|
||||
<MakeDir Directories="$(CurrentBuildDir)\media" />
|
||||
</Target>
|
||||
<Target Name="ConfigSettingsMessages">
|
||||
<Message Text="Configuration is $(Configuration)" />
|
||||
<Message Text="BuildNumber is $(BuildNumber)" />
|
||||
<Message Text="ProjectRoot is $(ProjectRoot)" />
|
||||
<Message Text="CurrentBuildDir is $(CurrentBuildDir)" />
|
||||
</Target>
|
||||
<Target Name="BuildSolution">
|
||||
<MSBuild Projects="$(SolutionRoot)\Bowerbird.sln" Targets="Build" Properties="Configuration=$(Configuration)" />
|
||||
</Target>
|
||||
<Target Name="CopyFilesToReleaseDir">
|
||||
<Copy SourceFiles="@(PackageFiles)" DestinationFiles="@(PackageFiles->'$(CurrentBuildDir)\%(RecursiveDir)%(Filename)%(Extension)')" />
|
||||
<Copy SourceFiles="@(ConfigFiles)" DestinationFiles="$(CurrentBuildDir)\web.config" />
|
||||
</Target>
|
||||
<Target Name="ZipUpReleaseFiles">
|
||||
<ItemGroup>
|
||||
<ZipFiles Include="$(CurrentBuildDir)\**\*.*" Exclude="*.zip" />
|
||||
</ItemGroup>
|
||||
<Zip Files="@(ZipFiles)" WorkingDirectory="$(CurrentBuildDir)\$(Configuration)\" ZipFileName="$(CurrentBuildDateStamp)-$(Configuration).zip" ZipLevel="9" />
|
||||
</Target>
|
||||
<Target Name="CopyZipToReleaseDir" DependsOnTargets="ZipUpReleaseFiles">
|
||||
<Copy SourceFiles="$(MSBuildProjectDirectory)\$(CurrentBuildDateStamp)-$(Configuration).zip" DestinationFiles="$(ArtifactsDir)\$(CurrentBuildDateStamp)-$(Configuration).zip" />
|
||||
<Delete Files="$(MSBuildProjectDirectory)\$(CurrentBuildDateStamp)-$(Configuration).zip" />
|
||||
</Target>
|
||||
<Target Name="Build" DependsOnTargets="CreateOutputDir">
|
||||
<CallTarget Targets="BuildMediaDirectories"/>
|
||||
<CallTarget Targets="ConfigSettingsMessages"/>
|
||||
<CallTarget Targets="BuildSolution"/>
|
||||
<CallTarget Targets="CopyFilesToReleaseDir"/>
|
||||
<CallTarget Targets="UpdateWebConfig" />
|
||||
<CallTarget Targets="CopyZipToReleaseDir"/>
|
||||
</Target>
|
||||
</Project>
|
||||
samples/YAML/filenames/.clang-tidy (new file, 30 lines)
@@ -0,0 +1,30 @@
---
Checks: 'clang-diagnostic-*,clang-analyzer-*'
WarningsAsErrors: ''
HeaderFilterRegex: ''
AnalyzeTemporaryDtors: false
FormatStyle: none
User: linguist-user
CheckOptions:
  - key: google-readability-braces-around-statements.ShortStatementLines
    value: '1'
  - key: google-readability-function-size.StatementThreshold
    value: '800'
  - key: google-readability-namespace-comments.ShortNamespaceLines
    value: '10'
  - key: google-readability-namespace-comments.SpacesBeforeComments
    value: '2'
  - key: modernize-loop-convert.MaxCopySize
    value: '16'
  - key: modernize-loop-convert.MinConfidence
    value: reasonable
  - key: modernize-loop-convert.NamingStyle
    value: CamelCase
  - key: modernize-pass-by-value.IncludeStyle
    value: llvm
  - key: modernize-replace-auto-ptr.IncludeStyle
    value: llvm
  - key: modernize-use-nullptr.NullMacros
    value: 'NULL'
...
samples/wdl/hello.wdl (new file, 21 lines)
@@ -0,0 +1,21 @@
# Sample originally from https://github.com/broadinstitute/centaur

task hello {
  String addressee
  command {
    echo "Hello ${addressee}!"
  }
  output {
    String salutation = read_string(stdout())
  }
  runtime {
    docker: "ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950"
  }
}

workflow wf_hello {
  call hello
  output {
    hello.salutation
  }
}
samples/wdl/ifs_in_scatters.wdl (new file, 44 lines)
@@ -0,0 +1,44 @@
# Sample originally from https://github.com/broadinstitute/centaur

task validate_int {
  Int i
  command {
    echo $(( ${i} % 2 ))
  }
  output {
    Boolean validation = read_int(stdout()) == 1
  }
  runtime {
    docker: "ubuntu:latest"
  }
}

task mirror {
  Int i
  command {
    echo ${i}
  }
  output {
    Int out = read_int(stdout())
  }
  runtime {
    docker: "ubuntu:latest"
  }
}

workflow ifs_in_scatters {
  Array[Int] numbers = range(5)

  scatter (n in numbers) {

    call validate_int { input: i = n }
    if (validate_int.validation) {
      Int incremented = n + 1
      call mirror { input: i = incremented }
    }
  }

  output {
    Array[Int?] mirrors = mirror.out
  }
}
samples/wdl/passingfiles.wdl (new file, 42 lines)
@@ -0,0 +1,42 @@
# Sample originally from https://github.com/broadinstitute/centaur

##
# Check that we can:
# - Create a file from a task and feed it into subsequent commands.
# - Create a file output by interpolating a file name
# - Use engine functions on an interpolated file name
##

task mkFile {
  command {
    echo "small file contents" > out.txt
  }
  output { File out = "out.txt" }
  runtime { docker: "ubuntu:latest" }
}

task consumeFile {
  File in_file
  String out_name

  command {
    cat ${in_file} > ${out_name}
  }
  runtime {
    docker: "ubuntu:latest"
  }
  output {
    File out_interpolation = "${out_name}"
    String contents = read_string("${out_name}")
    String contentsAlt = read_string(out_interpolation)
  }
}

workflow filepassing {
  call mkFile
  call consumeFile {input: in_file=mkFile.out, out_name = "myFileName.abc.txt" }
  output {
    consumeFile.contents
    consumeFile.contentsAlt
  }
}
@@ -99,6 +99,6 @@ else
  `script/licensed --module "#{repo_new}"`
end

-log "Updating grammar documentation in vendor/REAEDME.md"
+log "Updating grammar documentation in vendor/README.md"
`bundle exec rake samples`
`script/list-grammars`
@@ -99,4 +99,8 @@ class GrammarList
end

list = GrammarList.new
-list.update_readme()
+if ARGV.include? "--print"
+  puts list.to_markdown
+else
+  list.update_readme
+end
test/fixtures/Perl/Module.pm (new file, vendored, 8 lines)
@@ -0,0 +1,8 @@
use 5.006;
use strict;

=head1

module

=cut
@@ -43,6 +43,7 @@ class TestGrammars < Minitest::Test
|
||||
"82c356d6ecb143a8a20e1658b0d6a2d77ea8126f", # idl.tmbundle
|
||||
"9dafd4e2a79cb13a6793b93877a254bc4d351e74", # sublime-text-ox
|
||||
"8e111741d97ba2e27b3d18a309d426b4a37e604f", # sublime-varnish
|
||||
"23d2538e33ce62d58abda2c039364b92f64ea6bc", # sublime-angelscript
|
||||
].freeze
|
||||
|
||||
# List of allowed SPDX license names
|
||||
@@ -90,7 +91,7 @@ class TestGrammars < Minitest::Test
|
||||
message << unlisted_submodules.sort.join("\n")
|
||||
end
|
||||
|
||||
assert nonexistent_submodules.empty? && unlisted_submodules.empty?, message
|
||||
assert nonexistent_submodules.empty? && unlisted_submodules.empty?, message.sub(/\.\Z/, "")
|
||||
end
|
||||
|
||||
def test_local_scopes_are_in_sync
|
||||
@@ -106,18 +107,24 @@ class TestGrammars < Minitest::Test
|
||||
end
|
||||
end
|
||||
|
||||
def test_readme_file_is_in_sync
|
||||
current_data = File.read("#{ROOT}/vendor/README.md").to_s.sub(/\A.+?<!--.+?-->\n/ms, "")
|
||||
updated_data = `script/list-grammars --print`
|
||||
assert_equal current_data, updated_data, "Grammar list is out-of-date. Run `script/list-grammars`"
|
||||
end
|
||||
|
||||
def test_submodules_have_recognized_licenses
|
||||
unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::FSProject.new(k).license_file }
|
||||
unrecognized.reject! { |k,v| PROJECT_WHITELIST.include?(k) }
|
||||
message = "The following submodules have unrecognized licenses:\n* #{unrecognized.keys.join("\n* ")}\n"
|
||||
message << "Please ensure that the project's LICENSE file contains the full text of the license."
|
||||
message << "Please ensure that the project's LICENSE file contains the full text of the license"
|
||||
assert_equal Hash.new, unrecognized, message
|
||||
end
|
||||
|
||||
def test_submodules_have_licenses
|
||||
unlicensed = submodule_licenses.select { |k,v| v.nil? }.reject { |k,v| PROJECT_WHITELIST.include?(k) }
|
||||
message = "The following submodules don't have licenses:\n* #{unlicensed.keys.join("\n* ")}\n"
|
||||
message << "Please ensure that the project has a LICENSE file, and that the LICENSE file contains the full text of the license."
|
||||
message << "Please ensure that the project has a LICENSE file, and that the LICENSE file contains the full text of the license"
|
||||
assert_equal Hash.new, unlicensed, message
|
||||
end
|
||||
|
||||
@@ -127,14 +134,14 @@ class TestGrammars < Minitest::Test
|
||||
HASH_WHITELIST.include?(v) }
|
||||
.map { |k,v| "#{k}: #{v}"}
|
||||
message = "The following submodules have unapproved licenses:\n* #{unapproved.join("\n* ")}\n"
|
||||
message << "The license must be added to the LICENSE_WHITELIST in /test/test_grammars.rb once approved."
|
||||
message << "The license must be added to the LICENSE_WHITELIST in /test/test_grammars.rb once approved"
|
||||
assert_equal [], unapproved, message
|
||||
end
|
||||
|
||||
def test_whitelisted_submodules_dont_have_licenses
|
||||
licensed = submodule_licenses.reject { |k,v| v.nil? }.select { |k,v| PROJECT_WHITELIST.include?(k) }
|
||||
message = "The following whitelisted submodules have a license:\n* #{licensed.keys.join("\n* ")}\n"
|
||||
message << "Please remove them from the project whitelist."
|
||||
message << "Please remove them from the project whitelist"
|
||||
assert_equal Hash.new, licensed, message
|
||||
end
|
||||
|
||||
@@ -142,7 +149,7 @@ class TestGrammars < Minitest::Test
|
||||
used_hashes = submodule_licenses.values.reject { |v| v.nil? || LICENSE_WHITELIST.include?(v) }
|
||||
unused_hashes = HASH_WHITELIST - used_hashes
|
||||
message = "The following whitelisted license hashes are unused:\n* #{unused_hashes.join("\n* ")}\n"
|
||||
message << "Please remove them from the hash whitelist."
|
||||
message << "Please remove them from the hash whitelist"
|
||||
assert_equal Array.new, unused_hashes, message
|
||||
end
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
require_relative "./helper"
|
||||
|
||||
class TestHeuristcs < Minitest::Test
|
||||
class TestHeuristics < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def fixture(name)
|
||||
@@ -44,6 +44,13 @@ class TestHeuristcs < Minitest::Test
|
||||
assert_equal Language["Objective-C"], match
|
||||
end
|
||||
|
||||
def test_as_by_heuristics
|
||||
assert_heuristics({
|
||||
"ActionScript" => all_fixtures("ActionScript", "*.as"),
|
||||
"AngelScript" => all_fixtures("AngelScript", "*.as")
|
||||
})
|
||||
end
|
||||
|
||||
# Candidate languages = ["AGS Script", "AsciiDoc", "Public Key"]
|
||||
def test_asc_by_heuristics
|
||||
assert_heuristics({
|
||||
@@ -230,14 +237,6 @@ class TestHeuristcs < Minitest::Test
|
||||
})
|
||||
end
|
||||
|
||||
# Candidate languages = ["Pod", "Perl"]
|
||||
def test_pod_by_heuristics
|
||||
assert_heuristics({
|
||||
"Perl" => all_fixtures("Perl", "*.pod"),
|
||||
"Pod" => all_fixtures("Pod", "*.pod")
|
||||
})
|
||||
end
|
||||
|
||||
# Candidate languages = ["IDL", "Prolog", "QMake", "INI"]
|
||||
def test_pro_by_heuristics
|
||||
assert_heuristics({
|
||||
|
||||
Submodule vendor/CodeMirror updated: 974b698fac...97290a687e
vendor/README.md (vendored, 21 lines changed)
@@ -9,11 +9,13 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **ABNF:** [sanssecours/ABNF.tmbundle](https://github.com/sanssecours/ABNF.tmbundle)
|
||||
- **ActionScript:** [simongregory/actionscript3-tmbundle](https://github.com/simongregory/actionscript3-tmbundle)
|
||||
- **Ada:** [textmate/ada.tmbundle](https://github.com/textmate/ada.tmbundle)
|
||||
- **Adobe Font Metrics:** [Alhadis/language-fontforge](https://github.com/Alhadis/language-fontforge)
|
||||
- **Agda:** [mokus0/Agda.tmbundle](https://github.com/mokus0/Agda.tmbundle)
|
||||
- **AGS Script:** [textmate/c.tmbundle](https://github.com/textmate/c.tmbundle)
|
||||
- **Alloy:** [macekond/Alloy.tmbundle](https://github.com/macekond/Alloy.tmbundle)
|
||||
- **Alpine Abuild:** [atom/language-shellscript](https://github.com/atom/language-shellscript)
|
||||
- **AMPL:** [ampl/sublime-ampl](https://github.com/ampl/sublime-ampl)
|
||||
- **AngelScript:** [wronex/sublime-angelscript](https://github.com/wronex/sublime-angelscript)
|
||||
- **Ant Build System:** [textmate/ant.tmbundle](https://github.com/textmate/ant.tmbundle)
|
||||
- **ANTLR:** [textmate/antlr.tmbundle](https://github.com/textmate/antlr.tmbundle)
|
||||
- **ApacheConf:** [textmate/apache.tmbundle](https://github.com/textmate/apache.tmbundle)
|
||||
@@ -32,6 +34,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **AutoHotkey:** [ahkscript/SublimeAutoHotkey](https://github.com/ahkscript/SublimeAutoHotkey)
|
||||
- **AutoIt:** [AutoIt/SublimeAutoItScript](https://github.com/AutoIt/SublimeAutoItScript)
|
||||
- **Awk:** [github-linguist/awk-sublime](https://github.com/github-linguist/awk-sublime)
|
||||
- **Ballerina:** [ballerinalang/plugin-vscode](https://github.com/ballerinalang/plugin-vscode)
|
||||
- **Batchfile:** [mmims/language-batchfile](https://github.com/mmims/language-batchfile)
|
||||
- **Befunge:** [johanasplund/sublime-befunge](https://github.com/johanasplund/sublime-befunge)
|
||||
- **Bison:** [textmate/bison.tmbundle](https://github.com/textmate/bison.tmbundle)
|
||||
@@ -84,6 +87,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **D:** [textmate/d.tmbundle](https://github.com/textmate/d.tmbundle)
|
||||
- **D-ObjDump:** [nanoant/assembly.tmbundle](https://github.com/nanoant/assembly.tmbundle)
|
||||
- **Dart:** [dart-atom/dartlang](https://github.com/dart-atom/dartlang)
|
||||
- **DataWeave:** [mulesoft-labs/data-weave-tmLanguage](https://github.com/mulesoft-labs/data-weave-tmLanguage)
|
||||
- **desktop:** [Mailaender/desktop.tmbundle](https://github.com/Mailaender/desktop.tmbundle)
|
||||
- **Diff:** [textmate/diff.tmbundle](https://github.com/textmate/diff.tmbundle)
|
||||
- **DM:** [PJB3005/atomic-dreams](https://github.com/PJB3005/atomic-dreams)
|
||||
@@ -97,6 +101,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **eC:** [ecere/ec.tmbundle](https://github.com/ecere/ec.tmbundle)
|
||||
- **Ecere Projects:** [textmate/json.tmbundle](https://github.com/textmate/json.tmbundle)
|
||||
- **ECLiPSe:** [alnkpa/sublimeprolog](https://github.com/alnkpa/sublimeprolog)
|
||||
- **Edje Data Collection:** [textmate/json.tmbundle](https://github.com/textmate/json.tmbundle)
|
||||
- **edn:** [atom/language-clojure](https://github.com/atom/language-clojure)
|
||||
- **Eiffel:** [textmate/eiffel.tmbundle](https://github.com/textmate/eiffel.tmbundle)
|
||||
- **EJS:** [gregory-m/ejs-tmbundle](https://github.com/gregory-m/ejs-tmbundle)
|
||||
@@ -123,6 +128,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **Genshi:** [genshi.edgewall.org/query](https://genshi.edgewall.org/query)
|
||||
- **Gentoo Ebuild:** [atom/language-shellscript](https://github.com/atom/language-shellscript)
|
||||
- **Gentoo Eclass:** [atom/language-shellscript](https://github.com/atom/language-shellscript)
|
||||
- **Gerber Image:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
|
||||
- **Gettext Catalog:** [textmate/gettext.tmbundle](https://github.com/textmate/gettext.tmbundle)
|
||||
- **Gherkin:** [cucumber/cucumber-tmbundle](https://github.com/cucumber/cucumber-tmbundle)
|
||||
- **GLSL:** [euler0/sublime-glsl](https://github.com/euler0/sublime-glsl)
|
||||
@@ -151,7 +157,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **HTML+Django:** [textmate/python-django.tmbundle](https://github.com/textmate/python-django.tmbundle)
|
||||
- **HTML+ECR:** [atom-crystal/language-crystal](https://github.com/atom-crystal/language-crystal)
|
||||
- **HTML+EEX:** [elixir-lang/elixir-tmbundle](https://github.com/elixir-lang/elixir-tmbundle)
|
||||
- **HTML+ERB:** [aroben/ruby.tmbundle](https://github.com/aroben/ruby.tmbundle)
|
||||
- **HTML+ERB:** [atom/language-ruby](https://github.com/atom/language-ruby)
|
||||
- **HTML+PHP:** [textmate/php.tmbundle](https://github.com/textmate/php.tmbundle)
|
||||
- **HTTP:** [httpspec/sublime-highlighting](https://github.com/httpspec/sublime-highlighting)
|
||||
- **IDL:** [mgalloy/idl.tmbundle](https://github.com/mgalloy/idl.tmbundle)
|
||||
@@ -178,8 +184,8 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **JSX:** [github-linguist/language-babel](https://github.com/github-linguist/language-babel)
|
||||
- **Julia:** [JuliaEditorSupport/Julia.tmbundle](https://github.com/JuliaEditorSupport/Julia.tmbundle)
|
||||
- **Jupyter Notebook:** [textmate/json.tmbundle](https://github.com/textmate/json.tmbundle)
|
||||
- **KiCad Board:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
|
||||
- **KiCad Layout:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
|
||||
- **KiCad Legacy Layout:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
|
||||
- **KiCad Schematic:** [Alhadis/language-pcb](https://github.com/Alhadis/language-pcb)
|
||||
- **Kit:** [textmate/html.tmbundle](https://github.com/textmate/html.tmbundle)
|
||||
- **Kotlin:** [vkostyukov/kotlin-sublime-package](https://github.com/vkostyukov/kotlin-sublime-package)
|
||||
@@ -215,7 +221,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **Mercury:** [sebgod/mercury-tmlanguage](https://github.com/sebgod/mercury-tmlanguage)
|
||||
- **Meson:** [TingPing/language-meson](https://github.com/TingPing/language-meson)
|
||||
- **Metal:** [textmate/c.tmbundle](https://github.com/textmate/c.tmbundle)
|
||||
- **Mirah:** [aroben/ruby.tmbundle](https://github.com/aroben/ruby.tmbundle)
|
||||
- **Mirah:** [atom/language-ruby](https://github.com/atom/language-ruby)
|
||||
- **Modelica:** [BorisChumichev/modelicaSublimeTextPackage](https://github.com/BorisChumichev/modelicaSublimeTextPackage)
|
||||
- **Modula-2:** [harogaston/Sublime-Modula-2](https://github.com/harogaston/Sublime-Modula-2)
|
||||
- **Monkey:** [gingerbeardman/monkey.tmbundle](https://github.com/gingerbeardman/monkey.tmbundle)
|
||||
@@ -225,6 +231,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **MTML:** [textmate/html.tmbundle](https://github.com/textmate/html.tmbundle)
|
||||
- **mupad:** [ccreutzig/sublime-MuPAD](https://github.com/ccreutzig/sublime-MuPAD)
|
||||
- **NCL:** [rpavlick/language-ncl](https://github.com/rpavlick/language-ncl)
|
||||
- **Nearley:** [Hardmath123/sublime-nearley](https://github.com/Hardmath123/sublime-nearley)
|
||||
- **Nemerle:** [textmate/nemerle.tmbundle](https://github.com/textmate/nemerle.tmbundle)
|
||||
- **nesC:** [cdwilson/nesC.tmbundle](https://github.com/cdwilson/nesC.tmbundle)
|
||||
- **NetLinx:** [amclain/sublime-netlinx](https://github.com/amclain/sublime-netlinx)
|
||||
@@ -298,14 +305,14 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **Ren'Py:** [williamd1k0/language-renpy](https://github.com/williamd1k0/language-renpy)
|
||||
- **reStructuredText:** [Lukasa/language-restructuredtext](https://github.com/Lukasa/language-restructuredtext)
|
||||
- **REXX:** [mblocker/rexx-sublime](https://github.com/mblocker/rexx-sublime)
|
||||
- **RHTML:** [aroben/ruby.tmbundle](https://github.com/aroben/ruby.tmbundle)
|
||||
- **RHTML:** [atom/language-ruby](https://github.com/atom/language-ruby)
|
||||
- **Ring:** [MahmoudFayed/atom-language-ring](https://github.com/MahmoudFayed/atom-language-ring)
|
||||
- **RMarkdown:** [atom/language-gfm](https://github.com/atom/language-gfm)
|
||||
- **RobotFramework:** [shellderp/sublime-robot-plugin](https://github.com/shellderp/sublime-robot-plugin)
|
||||
- **Roff:** [Alhadis/language-roff](https://github.com/Alhadis/language-roff)
|
||||
- **Rouge:** [atom/language-clojure](https://github.com/atom/language-clojure)
|
||||
- **RPM Spec:** [waveclaw/language-rpm-spec](https://github.com/waveclaw/language-rpm-spec)
|
||||
- **Ruby:** [aroben/ruby.tmbundle](https://github.com/aroben/ruby.tmbundle)
|
||||
- **Ruby:** [atom/language-ruby](https://github.com/atom/language-ruby)
|
||||
- **RUNOFF:** [Alhadis/language-roff](https://github.com/Alhadis/language-roff)
|
||||
- **Rust:** [zargony/atom-language-rust](https://github.com/zargony/atom-language-rust)
|
||||
- **Sage:** [MagicStack/MagicPython](https://github.com/MagicStack/MagicPython)
|
||||
@@ -359,7 +366,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **Twig:** [Anomareh/PHP-Twig.tmbundle](https://github.com/Anomareh/PHP-Twig.tmbundle)
|
||||
- **TXL:** [MikeHoffert/Sublime-Text-TXL-syntax](https://github.com/MikeHoffert/Sublime-Text-TXL-syntax)
|
||||
- **Type Language:** [goodmind/language-typelanguage](https://github.com/goodmind/language-typelanguage)
|
||||
- **TypeScript:** [Microsoft/TypeScript-Sublime-Plugin](https://github.com/Microsoft/TypeScript-Sublime-Plugin)
|
||||
- **TypeScript:** [Microsoft/TypeScript-TmLanguage](https://github.com/Microsoft/TypeScript-TmLanguage)
|
||||
- **Unified Parallel C:** [textmate/c.tmbundle](https://github.com/textmate/c.tmbundle)
|
||||
- **Unity3D Asset:** [atom/language-yaml](https://github.com/atom/language-yaml)
|
||||
- **Unix Assembly:** [Nessphoro/sublimeassembly](https://github.com/Nessphoro/sublimeassembly)
|
||||
@@ -376,6 +383,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **Vue:** [vuejs/vue-syntax-highlight](https://github.com/vuejs/vue-syntax-highlight)
|
||||
- **Wavefront Material:** [Alhadis/language-wavefront](https://github.com/Alhadis/language-wavefront)
|
||||
- **Wavefront Object:** [Alhadis/language-wavefront](https://github.com/Alhadis/language-wavefront)
|
||||
- **wdl:** [broadinstitute/wdl-sublime-syntax-highlighter](https://github.com/broadinstitute/wdl-sublime-syntax-highlighter)
|
||||
- **Web Ontology Language:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
|
||||
- **WebAssembly:** [Alhadis/language-webassembly](https://github.com/Alhadis/language-webassembly)
|
||||
- **WebIDL:** [andik/IDL-Syntax](https://github.com/andik/IDL-Syntax)
|
||||
@@ -387,6 +395,7 @@ This is a list of grammars that Linguist selects to provide syntax highlighting
|
||||
- **XCompose:** [samcv/language-xcompose](https://github.com/samcv/language-xcompose)
|
||||
- **XML:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
|
||||
- **Xojo:** [angryant0007/VBDotNetSyntax](https://github.com/angryant0007/VBDotNetSyntax)
|
||||
- **XPages:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
|
||||
- **XPM:** [textmate/c.tmbundle](https://github.com/textmate/c.tmbundle)
|
||||
- **XProc:** [textmate/xml.tmbundle](https://github.com/textmate/xml.tmbundle)
|
||||
- **XQuery:** [wcandillon/language-jsoniq](https://github.com/wcandillon/language-jsoniq)
|
||||
|
||||

Submodule vendor/grammars/Docker.tmbundle updated: 378d7eb156...41b5d53ca4
Submodule vendor/grammars/MagicPython updated: d39070f621...935abefebf
Submodule vendor/grammars/Modelica updated: 9e12e9d23c...c64d74b009
Submodule vendor/grammars/Sublime-SQF-Language updated: ee30a860e0...2a2e9223a0
Submodule vendor/grammars/SublimePuppet updated: b523a061c1...97484d7f9b
Submodule vendor/grammars/Terraform.tmLanguage updated: 3ca5d5d3de...2ccc988046
Submodule vendor/grammars/TypeScript-TmLanguage added at 4b614e2efd
Submodule vendor/grammars/VBDotNetSyntax updated: 4c14dda6f7...efaa4187fe
Submodule vendor/grammars/atom-language-p4 updated: 999e3af389...a349af898d
Submodule vendor/grammars/atom-language-purescript updated: c590de5bcb...9d7e24f2f1
Submodule vendor/grammars/atom-language-rust updated: 2d9f9899be...59893b659a
Submodule vendor/grammars/chapel-tmbundle updated: d1f8d3555b...fe50e0bf1e
Submodule vendor/grammars/dartlang updated: 74a079f999...18fa15059b
Submodule vendor/grammars/data-weave-tmLanguage added at ad3c9b39ec
Submodule vendor/grammars/elixir-tmbundle updated: 1b4315ffd8...f7cd93d6b4
Submodule vendor/grammars/language-ballerina added at 91c724bec6
Submodule vendor/grammars/language-blade updated: ac908bc124...5b2611c46b
Submodule vendor/grammars/language-coffee-script updated: 7c4025c386...b0465e3e80
Submodule vendor/grammars/language-crystal updated: 0e3da8fc21...6e966bb589
Submodule vendor/grammars/language-csharp updated: 2d5f04229a...364ba38ee2
Submodule vendor/grammars/language-csound updated: ca43af5995...b24783820b
Submodule vendor/grammars/language-css updated: 8d8c6c1a0a...f4e032d6f3
Submodule vendor/grammars/language-emacs-lisp updated: 1be3d89bbe...ee4168aeac
Submodule vendor/grammars/language-fontforge updated: f513ccabea...c96542a1e7
Submodule vendor/grammars/language-gfm updated: 866e7fb7df...76ddd7e2fd
Submodule vendor/grammars/language-haml updated: 9abf167c37...4e7460af03
Submodule vendor/grammars/language-haskell updated: d81dbcb6c4...c8778adf83
Submodule vendor/grammars/language-javascript updated: 4740b9fc1c...58cb09d773
Submodule vendor/grammars/language-jison updated: 632867ac57...0e897be16c
Submodule vendor/grammars/language-less updated: 733b900b63...c244adad61
Submodule vendor/grammars/language-meson updated: cf2644ddf4...4846c73f01
Submodule vendor/grammars/language-pan updated: 0e362fd246...47914b9872
Some files were not shown because too many files have changed in this diff.