Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00

Compare commits (85 commits)
| SHA1 |
|---|
| d761658f8b |
| 3719214aba |
| 47b109be36 |
| 1ec4db97c2 |
| 9fe5fe0de2 |
| b36ea7ac9d |
| 625b06c30d |
| 28bce533b2 |
| 93ec1922cb |
| 5d09fb67dd |
| 93dcb61742 |
| 3a03594685 |
| 5ce2c254f9 |
| d7814c4899 |
| 50c08bf29e |
| 34928baee6 |
| 27bb41aa4d |
| 1415f4b52d |
| ae8ffcad22 |
| f43633bf10 |
| a604de9846 |
| 3e224e0039 |
| 15b04f86c3 |
| 42af436c20 |
| 2b08c66f0b |
| f98ab593fb |
| f951ec07de |
| e9ac71590f |
| 210cd19876 |
| f473c555ac |
| 48e4394d87 |
| e1ce88920d |
| 675cee1d72 |
| 1c4baf6dc2 |
| 8f2820e9cc |
| 04c268e535 |
| ec749b3f8d |
| 08b63e7033 |
| 7867b946b9 |
| a4d12cc8e4 |
| a1165b74b1 |
| 0fa1fa5581 |
| d8b91bd5c4 |
| 9b941a34f0 |
| 9d8392dab8 |
| 2c78dd2c66 |
| 3988f3e7a7 |
| d9a4e831b4 |
| 45c27f26a2 |
| 0fbc29bf68 |
| 5569d2056d |
| be262d0b4f |
| 33ce2d7264 |
| c486f56204 |
| 9f3b7d0ba5 |
| 79f20e8057 |
| cd30c7613c |
| 5aa53c0711 |
| c17cdca896 |
| ecdae83364 |
| 31aafa2c78 |
| 8a911b8ff3 |
| 9233f1d17f |
| 77eb36a982 |
| 4e6e58a099 |
| c87976330f |
| 0e9109c3fc |
| 12f9295dd7 |
| 581723748b |
| 0980e304b1 |
| d46a529b6a |
| 1d2ec4dbc3 |
| 829eea0139 |
| 78b2853d70 |
| 202f3c08cd |
| b958779e3d |
| 00dc775daf |
| 009a4e67b6 |
| faaa4470af |
| 2a320cb988 |
| 74931d1bd5 |
| 3ca93a84b9 |
| aa27f18ea6 |
| d3e2ea3f71 |
| 359699c454 |
.gitignore (vendored): 1 change

```diff
@@ -1,3 +1,4 @@
+*.gem
 /Gemfile.lock
 .bundle/
 .idea
```
.gitmodules (vendored): 54 changes

```diff
@@ -130,9 +130,6 @@
 [submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"]
 path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL
 url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL
-[submodule "vendor/grammars/sublime-rust"]
-path = vendor/grammars/sublime-rust
-url = https://github.com/jhasse/sublime-rust
 [submodule "vendor/grammars/sublime-befunge"]
 path = vendor/grammars/sublime-befunge
 url = https://github.com/johanasplund/sublime-befunge
@@ -247,9 +244,6 @@
 [submodule "vendor/grammars/cpp-qt.tmbundle"]
 path = vendor/grammars/cpp-qt.tmbundle
 url = https://github.com/textmate/cpp-qt.tmbundle
-[submodule "vendor/grammars/css.tmbundle"]
-path = vendor/grammars/css.tmbundle
-url = https://github.com/textmate/css.tmbundle
 [submodule "vendor/grammars/d.tmbundle"]
 path = vendor/grammars/d.tmbundle
 url = https://github.com/textmate/d.tmbundle
@@ -325,9 +319,6 @@
 [submodule "vendor/grammars/nemerle.tmbundle"]
 path = vendor/grammars/nemerle.tmbundle
 url = https://github.com/textmate/nemerle.tmbundle
-[submodule "vendor/grammars/ninja.tmbundle"]
-path = vendor/grammars/ninja.tmbundle
-url = https://github.com/textmate/ninja.tmbundle
 [submodule "vendor/grammars/objective-c.tmbundle"]
 path = vendor/grammars/objective-c.tmbundle
 url = https://github.com/textmate/objective-c.tmbundle
@@ -355,9 +346,6 @@
 [submodule "vendor/grammars/r.tmbundle"]
 path = vendor/grammars/r.tmbundle
 url = https://github.com/textmate/r.tmbundle
-[submodule "vendor/grammars/ruby-haml.tmbundle"]
-path = vendor/grammars/ruby-haml.tmbundle
-url = https://github.com/textmate/ruby-haml.tmbundle
 [submodule "vendor/grammars/scheme.tmbundle"]
 path = vendor/grammars/scheme.tmbundle
 url = https://github.com/textmate/scheme.tmbundle
@@ -449,9 +437,6 @@
 [submodule "vendor/grammars/Sublime-Nit"]
 path = vendor/grammars/Sublime-Nit
 url = https://github.com/R4PaSs/Sublime-Nit
-[submodule "vendor/grammars/language-hy"]
-path = vendor/grammars/language-hy
-url = https://github.com/rwtolbert/language-hy
 [submodule "vendor/grammars/Racket"]
 path = vendor/grammars/Racket
 url = https://github.com/soegaard/racket-highlight-for-github
@@ -629,9 +614,6 @@
 [submodule "vendor/grammars/language-yang"]
 path = vendor/grammars/language-yang
 url = https://github.com/DzonyKalafut/language-yang.git
-[submodule "vendor/grammars/perl6fe"]
-path = vendor/grammars/perl6fe
-url = https://github.com/MadcapJake/language-perl6fe.git
 [submodule "vendor/grammars/language-less"]
 path = vendor/grammars/language-less
 url = https://github.com/atom/language-less.git
@@ -776,9 +758,6 @@
 [submodule "vendor/grammars/vhdl"]
 path = vendor/grammars/vhdl
 url = https://github.com/textmate/vhdl.tmbundle
-[submodule "vendor/grammars/xquery"]
-path = vendor/grammars/xquery
-url = https://github.com/textmate/xquery.tmbundle
 [submodule "vendor/grammars/language-rpm-spec"]
 path = vendor/grammars/language-rpm-spec
 url = https://github.com/waveclaw/language-rpm-spec
@@ -803,3 +782,36 @@
 [submodule "vendor/grammars/EBNF.tmbundle"]
 path = vendor/grammars/EBNF.tmbundle
 url = https://github.com/sanssecours/EBNF.tmbundle
+[submodule "vendor/grammars/language-haml"]
+path = vendor/grammars/language-haml
+url = https://github.com/ezekg/language-haml
+[submodule "vendor/grammars/language-ninja"]
+path = vendor/grammars/language-ninja
+url = https://github.com/khyo/language-ninja
+[submodule "vendor/grammars/language-fontforge"]
+path = vendor/grammars/language-fontforge
+url = https://github.com/Alhadis/language-fontforge
+[submodule "vendor/grammars/language-gn"]
+path = vendor/grammars/language-gn
+url = https://github.com/devoncarew/language-gn
+[submodule "vendor/grammars/rascal-syntax-highlighting"]
+path = vendor/grammars/rascal-syntax-highlighting
+url = https://github.com/usethesource/rascal-syntax-highlighting
+[submodule "vendor/grammars/atom-language-perl6"]
+path = vendor/grammars/atom-language-perl6
+url = https://github.com/perl6/atom-language-perl6
+[submodule "vendor/grammars/reason"]
+path = vendor/grammars/reason
+url = https://github.com/facebook/reason
+[submodule "vendor/grammars/language-xcompose"]
+path = vendor/grammars/language-xcompose
+url = https://github.com/samcv/language-xcompose
+[submodule "vendor/grammars/SublimeEthereum"]
+path = vendor/grammars/SublimeEthereum
+url = https://github.com/davidhq/SublimeEthereum.git
+[submodule "vendor/grammars/atom-language-rust"]
+path = vendor/grammars/atom-language-rust
+url = https://github.com/zargony/atom-language-rust
+[submodule "vendor/grammars/language-css"]
+path = vendor/grammars/language-css
+url = https://github.com/atom/language-css
```
```diff
@@ -17,7 +17,7 @@ To add support for a new extension:
 In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:
 
 0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
-0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@arfon** or **@bkeepers** to help with this) to ensure we're not misclassifying files.
+0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
 0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
 
 
@@ -36,7 +36,7 @@ To add support for a new language:
 In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:
 
 0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
-0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@arfon** or **@bkeepers** to help with this) to ensure we're not misclassifying files.
+0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
 0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
 
 Remember, the goal here is to try and avoid false positives!
@@ -79,9 +79,12 @@ Here's our current build status: [
 
 - **@Alhadis**
+- **@brandonblack** (GitHub staff)
 - **@larsbrinkhoff**
+- **@lildude** (GitHub staff)
+- **@lizzhale** (GitHub staff)
+- **@mikemcquaid** (GitHub staff)
 - **@pchaigno**
 
 As Linguist is a production dependency for GitHub we have a couple of workflow restrictions:
```
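For context, the heuristics file referenced above is a list of `disambiguate` blocks keyed by extension; the sketch below is illustrative only, modeled on the `.cls` rule added later in this comparison. The `.foo` extension, the regular expressions, and the language names are placeholders, not real rules.

```ruby
# Illustrative sketch of a Linguist heuristic, following the pattern used in
# lib/linguist/heuristics.rb elsewhere in this comparison. This is not an
# actual rule: ".foo" and both language names are hypothetical.
disambiguate ".foo" do |data|
  if /^\s*%/.match(data)            # e.g. a comment style unique to one language
    Language["SomeLanguage"]        # hypothetical name, for illustration only
  elsif /^\s*#include\b/.match(data)
    Language["OtherLanguage"]       # hypothetical name, for illustration only
  end
end
```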
```diff
@@ -1,5 +1,7 @@
 #!/usr/bin/env ruby
 
+$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
+
 require 'linguist'
 require 'rugged'
 require 'optparse'
@@ -102,10 +104,16 @@ def git_linguist(args)
   commit = nil
 
   parser = OptionParser.new do |opts|
-    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
+    opts.banner = <<-HELP
+Linguist v#{Linguist::VERSION}
+Detect language type and determine language breakdown for a given Git repository.
+
+Usage:
+  git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
+    HELP
 
     opts.on("-f", "--force", "Force a full rescan") { incremental = false }
-    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
+    opts.on("-c", "--commit=COMMIT", "Commit to index") { |v| commit = v}
   end
 
   parser.parse!(args)
```
bin/linguist: 35 changes

```diff
@@ -1,29 +1,37 @@
 #!/usr/bin/env ruby
 
-# linguist — detect language type for a file, or, given a directory, determine language breakdown
-# usage: linguist <path> [<--breakdown>]
-#
+$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
+
 require 'linguist'
 require 'rugged'
+require 'json'
+require 'optparse'
 
 path = ARGV[0] || Dir.pwd
 
-# special case if not given a directory but still given the --breakdown option
+# special case if not given a directory
+# but still given the --breakdown or --json options/
 if path == "--breakdown"
   path = Dir.pwd
   breakdown = true
+elsif path == "--json"
+  path = Dir.pwd
+  json_breakdown = true
 end
 
 ARGV.shift
 breakdown = true if ARGV[0] == "--breakdown"
+json_breakdown = true if ARGV[0] == "--json"
 
 if File.directory?(path)
   rugged = Rugged::Repository.new(path)
   repo = Linguist::Repository.new(rugged, rugged.head.target_id)
-  repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
-    percentage = ((size / repo.size.to_f) * 100)
-    percentage = sprintf '%.2f' % percentage
-    puts "%-7s %s" % ["#{percentage}%", language]
+  if !json_breakdown
+    repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
+      percentage = ((size / repo.size.to_f) * 100)
+      percentage = sprintf '%.2f' % percentage
+      puts "%-7s %s" % ["#{percentage}%", language]
+    end
   end
   if breakdown
     puts
@@ -35,6 +43,8 @@ if File.directory?(path)
       end
       puts
     end
+  elsif json_breakdown
+    puts JSON.dump(repo.breakdown_by_file)
   end
 elsif File.file?(path)
   blob = Linguist::FileBlob.new(path, Dir.pwd)
@@ -63,5 +73,12 @@ elsif File.file?(path)
     puts " appears to be a vendored file"
   end
 else
-  abort "usage: linguist <path>"
+  abort <<-HELP
+Linguist v#{Linguist::VERSION}
+Detect language type for a file, or, given a directory, determine language breakdown.
+
+Usage: linguist <path>
+       linguist <path> [--breakdown] [--json]
+       linguist [--breakdown] [--json]
+  HELP
 end
```
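The new `--json` path boils down to a few library calls. A minimal standalone sketch, assuming a Git repository at `path` and using only the APIs that already appear in this script:

```ruby
require 'linguist'
require 'rugged'
require 'json'

path = ARGV[0] || Dir.pwd

# Same calls bin/linguist makes: open the repo, analyse HEAD, report languages.
rugged = Rugged::Repository.new(path)
repo   = Linguist::Repository.new(rugged, rugged.head.target_id)

# Equivalent of the --json output: a per-file breakdown as JSON.
puts JSON.dump(repo.breakdown_by_file)

# Equivalent of the default table: percentage per language, largest first.
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
  puts "%-7s %s" % ["#{(size * 100.0 / repo.size).round(2)}%", language]
end
```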
```diff
@@ -16,7 +16,7 @@ Gem::Specification.new do |s|
   s.add_dependency 'charlock_holmes', '~> 0.7.3'
   s.add_dependency 'escape_utils', '~> 1.1.0'
   s.add_dependency 'mime-types', '>= 1.19'
-  s.add_dependency 'rugged', '>= 0.23.0b'
+  s.add_dependency 'rugged', '0.25.1.1'
 
   s.add_development_dependency 'minitest', '>= 5.0'
   s.add_development_dependency 'mocha'
@@ -27,5 +27,4 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'color-proximity', '~> 0.2.1'
   s.add_development_dependency 'licensed'
   s.add_development_dependency 'licensee', '>= 8.6.0'
-
 end
```
grammars.yml: 51 changes

```diff
@@ -1,9 +1,9 @@
 ---
-http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
-- text.xml.genshi
 https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
 - source.systemverilog
 - source.ucfconstraints
+https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
+- text.xml.genshi
 vendor/grammars/ABNF.tmbundle:
 - source.abnf
 vendor/grammars/Agda.tmbundle:
@@ -113,7 +113,9 @@ vendor/grammars/SublimeBrainfuck:
 - source.bf
 vendor/grammars/SublimeClarion:
 - source.clarion
-vendor/grammars/SublimeGDB:
+vendor/grammars/SublimeEthereum:
+- source.solidity
+vendor/grammars/SublimeGDB/:
 - source.disasm
 - source.gdb
 - source.gdb.session
@@ -178,8 +180,15 @@ vendor/grammars/atom-language-1c-bsl:
 - source.sdbl
 vendor/grammars/atom-language-clean:
 - source.clean
+vendor/grammars/atom-language-perl6:
+- source.meta-info
+- source.perl6fe
+- source.quoting.perl6fe
+- source.regexp.perl6fe
 vendor/grammars/atom-language-purescript:
 - source.purescript
+vendor/grammars/atom-language-rust:
+- source.rust
 vendor/grammars/atom-language-srt:
 - text.srt
 vendor/grammars/atom-language-stan:
@@ -225,8 +234,6 @@ vendor/grammars/cpp-qt.tmbundle:
 - source.qmake
 vendor/grammars/creole:
 - text.html.creole
-vendor/grammars/css.tmbundle:
-- source.css
 vendor/grammars/cucumber-tmbundle:
 - source.ruby.rspec.cucumber.steps
 - text.gherkin.feature
@@ -360,12 +367,23 @@ vendor/grammars/language-csound:
 - source.csound
 - source.csound-document
 - source.csound-score
+vendor/grammars/language-css:
+- source.css
 vendor/grammars/language-emacs-lisp:
 - source.emacs.lisp
+vendor/grammars/language-fontforge:
+- source.fontforge
+- source.opentype
+- text.sfd
 vendor/grammars/language-gfm:
 - source.gfm
+vendor/grammars/language-gn:
+- source.gn
 vendor/grammars/language-graphql:
 - source.graphql
+vendor/grammars/language-haml:
+- text.haml
+- text.hamlc
 vendor/grammars/language-haskell:
 - hint.haskell
 - hint.message.haskell
@@ -375,13 +393,10 @@ vendor/grammars/language-haskell:
 - source.haskell
 - source.hsc2hs
 - text.tex.latex.haskell
-vendor/grammars/language-hy:
-- source.hy
 vendor/grammars/language-inform7:
 - source.inform7
 vendor/grammars/language-javascript:
 - source.js
-- source.js.embedded.html
 - source.js.regexp
 - source.js.regexp.replacement
 vendor/grammars/language-jsoniq:
@@ -393,6 +408,8 @@ vendor/grammars/language-maxscript:
 - source.maxscript
 vendor/grammars/language-ncl:
 - source.ncl
+vendor/grammars/language-ninja:
+- source.ninja
 vendor/grammars/language-povray:
 - source.pov-ray sdl
 vendor/grammars/language-python:
@@ -426,6 +443,8 @@ vendor/grammars/language-wavefront:
 - source.wavefront.obj
 vendor/grammars/language-xbase:
 - source.harbour
+vendor/grammars/language-xcompose:
+- config.xcompose
 vendor/grammars/language-yaml:
 - source.yaml
 vendor/grammars/language-yang:
@@ -474,8 +493,6 @@ vendor/grammars/nemerle.tmbundle:
 - source.nemerle
 vendor/grammars/nesC:
 - source.nesc
-vendor/grammars/ninja.tmbundle:
-- source.ninja
 vendor/grammars/nix:
 - source.nix
 vendor/grammars/nu.tmbundle:
@@ -505,10 +522,6 @@ vendor/grammars/pawn-sublime-language:
 vendor/grammars/perl.tmbundle:
 - source.perl
 - source.perl.6
-vendor/grammars/perl6fe:
-- source.meta-info
-- source.perl6fe
-- source.regexp.perl6fe
 vendor/grammars/php-smarty.tmbundle:
 - text.html.smarty
 vendor/grammars/php.tmbundle:
@@ -531,8 +544,10 @@ vendor/grammars/python-django.tmbundle:
 vendor/grammars/r.tmbundle:
 - source.r
 - text.tex.latex.rd
-vendor/grammars/ruby-haml.tmbundle:
-- text.haml
+vendor/grammars/rascal-syntax-highlighting:
+- source.rascal
+vendor/grammars/reason:
+- source.reason
 vendor/grammars/ruby-slim.tmbundle:
 - text.slim
 vendor/grammars/ruby.tmbundle:
@@ -600,8 +615,6 @@ vendor/grammars/sublime-rexx:
 - source.rexx
 vendor/grammars/sublime-robot-plugin:
 - text.robot
-vendor/grammars/sublime-rust:
-- source.rust
 vendor/grammars/sublime-spintools:
 - source.regexp.spin
 - source.spin
@@ -653,7 +666,5 @@ vendor/grammars/xc.tmbundle:
 vendor/grammars/xml.tmbundle:
 - text.xml
 - text.xml.xsl
-vendor/grammars/xquery:
-- source.xquery
 vendor/grammars/zephir-sublime:
 - source.php.zephir
```
```diff
@@ -59,8 +59,9 @@ class << Linguist
   # Strategies are called in turn until a single Language is returned.
   STRATEGIES = [
     Linguist::Strategy::Modeline,
-    Linguist::Shebang,
     Linguist::Strategy::Filename,
+    Linguist::Shebang,
+    Linguist::Strategy::Extension,
     Linguist::Heuristics,
     Linguist::Classifier
   ]
```
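The reordering matters because the strategies run in sequence until one of them narrows the candidates to a single language. A minimal sketch of exercising the chain, assuming a readable file on disk and using only the calls that appear elsewhere in this comparison (`Linguist::FileBlob.new` and `Linguist.detect`):

```ruby
require 'linguist'

# Linguist.detect walks the STRATEGIES list above in order
# (Modeline, Filename, Shebang, Extension, Heuristics, Classifier)
# until a single Language remains, or returns nil.
blob     = Linguist::FileBlob.new("bin/linguist", Dir.pwd)
language = Linguist.detect(blob)
puts language ? language.name : "unknown"
```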
```diff
@@ -63,7 +63,7 @@ module Linguist
     #
     # Returns an Array
     def extensions
-      _, *segments = name.downcase.split(".")
+      _, *segments = name.downcase.split(".", -1)
 
       segments.map.with_index do |segment, index|
         "." + segments[index..-1].join(".")
```
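A small worked example of the candidate-extension expansion above (the filenames are illustrative):

```ruby
# For a blob named "sparkle.d.ts":
#   "sparkle.d.ts".downcase.split(".", -1)  # => ["sparkle", "d", "ts"]
# so segments == ["d", "ts"] and the method yields every suffix as an extension.
segments = ["d", "ts"]
p segments.map.with_index { |_, i| "." + segments[i..-1].join(".") }
# => [".d.ts", ".ts"]

# The added -1 argument keeps trailing empty strings, so a name ending in a
# bare dot (e.g. "weird.") still produces a candidate extension (".") instead
# of none:
p "weird.".split(".", -1)   # => ["weird", ""]
p "weird.".split(".")       # => ["weird"]
```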
```diff
@@ -3,7 +3,7 @@ module Linguist
   # Public: Is the blob a generated file?
   #
   # name - String filename
-  # data - String blob data. A block also maybe passed in for lazy
+  # data - String blob data. A block also may be passed in for lazy
   #        loading. This behavior is deprecated and you should always
   #        pass in a String.
   #
@@ -70,6 +70,7 @@ module Linguist
       compiled_cython_file? ||
       generated_go? ||
       generated_protocol_buffer? ||
+      generated_javascript_protocol_buffer? ||
       generated_apache_thrift? ||
       generated_jni_header? ||
       vcr_cassette? ||
@@ -77,7 +78,10 @@ module Linguist
       generated_unity3d_meta? ||
       generated_racc? ||
       generated_jflex? ||
-      generated_grammarkit?
+      generated_grammarkit? ||
+      generated_roxygen2? ||
+      generated_jison? ||
+      generated_yarn_lock?
     end
 
     # Internal: Is the blob an Xcode file?
@@ -275,16 +279,25 @@ module Linguist
       return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
     end
 
-    APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
+    # Internal: Is the blob a Javascript source file generated by the
+    # Protocol Buffer compiler?
+    #
+    # Returns true of false.
+    def generated_javascript_protocol_buffer?
+      return false unless extname == ".js"
+      return false unless lines.count > 6
+
+      return lines[5].include?("GENERATED CODE -- DO NOT EDIT!")
+    end
+
+    APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp', '.php']
 
     # Internal: Is the blob generated by Apache Thrift compiler?
     #
     # Returns true or false
     def generated_apache_thrift?
       return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
-      return false unless lines.count > 1
-
-      return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
+      return lines.first(6).any? { |l| l.include?("Autogenerated by Thrift Compiler") }
     end
 
     # Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
@@ -313,7 +326,7 @@ module Linguist
       !!name.match(/vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/)
     end
 
-    # Internal: Is the blob a generated npm shrinkwrap file.
+    # Internal: Is the blob a generated npm shrinkwrap file?
     #
     # Returns true or false.
     def npm_shrinkwrap?
@@ -335,7 +348,7 @@ module Linguist
       !!name.match(/composer\.lock/)
     end
 
-    # Internal: Is the blob a generated by Zephir
+    # Internal: Is the blob generated by Zephir?
     #
     # Returns true or false.
     def generated_by_zephir?
@@ -435,5 +448,46 @@ module Linguist
       return false unless lines.count > 1
       return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
     end
+
+    # Internal: Is this a roxygen2-generated file?
+    #
+    # A roxygen2-generated file typically contain:
+    # % Generated by roxygen2: do not edit by hand
+    # on the first line.
+    #
+    # Return true or false
+    def generated_roxygen2?
+      return false unless extname == '.Rd'
+      return false unless lines.count > 1
+
+      return lines[0].include?("% Generated by roxygen2: do not edit by hand")
+    end
+
+    # Internal: Is this a Jison-generated file?
+    #
+    # Jison-generated parsers typically contain:
+    # /* parser generated by jison
+    # on the first line.
+    #
+    # Jison-generated lexers typically contain:
+    # /* generated by jison-lex
+    # on the first line.
+    #
+    # Return true or false
+    def generated_jison?
+      return false unless extname == '.js'
+      return false unless lines.count > 1
+      return lines[0].start_with?("/* parser generated by jison ") ||
+             lines[0].start_with?("/* generated by jison-lex ")
+    end
+
+    # Internal: Is the blob a generated yarn lockfile?
+    #
+    # Returns true or false.
+    def generated_yarn_lock?
+      return false unless name.match(/yarn\.lock/)
+      return false unless lines.count > 0
+      return lines[0].include?("# THIS IS AN AUTOGENERATED FILE")
+    end
   end
 end
```
|||||||
@@ -110,6 +110,12 @@ module Linguist
|
|||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
disambiguate ".cls" do |data|
|
||||||
|
if /\\\w+{/.match(data)
|
||||||
|
Language["TeX"]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
disambiguate ".cs" do |data|
|
disambiguate ".cs" do |data|
|
||||||
if /![\w\s]+methodsFor: /.match(data)
|
if /![\w\s]+methodsFor: /.match(data)
|
||||||
Language["Smalltalk"]
|
Language["Smalltalk"]
|
||||||
@@ -272,7 +278,7 @@ module Linguist
|
|||||||
disambiguate ".mod" do |data|
|
disambiguate ".mod" do |data|
|
||||||
if data.include?('<!ENTITY ')
|
if data.include?('<!ENTITY ')
|
||||||
Language["XML"]
|
Language["XML"]
|
||||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
elsif /^\s*MODULE [\w\.]+;/i.match(data) || /^\s*END [\w\.]+;/i.match(data)
|
||||||
Language["Modula-2"]
|
Language["Modula-2"]
|
||||||
else
|
else
|
||||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||||
@@ -320,7 +326,7 @@ module Linguist
|
|||||||
end
|
end
|
||||||
|
|
||||||
disambiguate ".pl" do |data|
|
disambiguate ".pl" do |data|
|
||||||
if /^[^#]+:-/.match(data)
|
if /^[^#]*:-/.match(data)
|
||||||
Language["Prolog"]
|
Language["Prolog"]
|
||||||
elsif /use strict|use\s+v?5\./.match(data)
|
elsif /use strict|use\s+v?5\./.match(data)
|
||||||
Language["Perl"]
|
Language["Perl"]
|
||||||
|
|||||||
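The `.pl` change is easy to miss: with `+`, at least one non-`#` character had to precede the `:-`, so a Prolog file whose first directive begins a line with `:-` was not matched. A small check with an illustrative input:

```ruby
old_rule = /^[^#]+:-/
new_rule = /^[^#]*:-/

# A Prolog directive starting at column 0 (illustrative sample).
prolog = ":- module(hello, [greet/0])."

puts old_rule.match?(prolog) ? "old: Prolog" : "old: no match"   # => old: no match
puts new_rule.match?(prolog) ? "new: Prolog" : "new: no match"   # => new: Prolog
```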
```diff
@@ -11,6 +11,7 @@ require 'linguist/samples'
 require 'linguist/file_blob'
 require 'linguist/blob_helper'
 require 'linguist/strategy/filename'
+require 'linguist/strategy/extension'
 require 'linguist/strategy/modeline'
 require 'linguist/shebang'
 
@@ -90,17 +91,6 @@ module Linguist
       language
     end
 
-    # Public: Detects the Language of the blob.
-    #
-    # blob - an object that includes the Linguist `BlobHelper` interface;
-    #        see Linguist::LazyBlob and Linguist::FileBlob for examples
-    #
-    # Returns Language or nil.
-    def self.detect(blob)
-      warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
-      Linguist.detect(blob)
-    end
-
     # Public: Get all Languages
     #
     # Returns an Array of Languages
@@ -140,46 +130,46 @@ module Linguist
 
     # Public: Look up Languages by filename.
     #
+    # The behaviour of this method recently changed.
+    # See the second example below.
+    #
     # filename - The path String.
     #
     # Examples
     #
+    #   Language.find_by_filename('Cakefile')
+    #   # => [#<Language name="CoffeeScript">]
     #   Language.find_by_filename('foo.rb')
-    #   # => [#<Language name="Ruby">]
+    #   # => []
     #
     # Returns all matching Languages or [] if none were found.
     def self.find_by_filename(filename)
       basename = File.basename(filename)
-      # find the first extension with language definitions
-      extname = FileBlob.new(filename).extensions.detect do |e|
-        !@extension_index[e].empty?
-      end
-
-      (@filename_index[basename] + @extension_index[extname]).compact.uniq
+      @filename_index[basename]
     end
 
     # Public: Look up Languages by file extension.
     #
-    # extname - The extension String.
+    # The behaviour of this method recently changed.
+    # See the second example below.
+    #
+    # filename - The path String.
     #
     # Examples
     #
-    #   Language.find_by_extension('.rb')
+    #   Language.find_by_extension('dummy.rb')
     #   # => [#<Language name="Ruby">]
-    #
     #   Language.find_by_extension('rb')
-    #   # => [#<Language name="Ruby">]
+    #   # => []
     #
     # Returns all matching Languages or [] if none were found.
-    def self.find_by_extension(extname)
-      extname = ".#{extname}" unless extname.start_with?(".")
-      @extension_index[extname.downcase]
-    end
-
-    # DEPRECATED
-    def self.find_by_shebang(data)
-      @interpreter_index[Shebang.interpreter(data)]
+    def self.find_by_extension(filename)
+      # find the first extension with language definitions
+      extname = FileBlob.new(filename.downcase).extensions.detect do |e|
+        !@extension_index[e].empty?
+      end
+
+      @extension_index[extname]
     end
 
     # Public: Look up Languages by interpreter.
@@ -225,7 +215,14 @@ module Linguist
     # Returns the Language or nil if none was found.
     def self.[](name)
       return nil if name.to_s.empty?
-      name && (@index[name.downcase] || @index[name.split(',').first.downcase])
+
+      lang = @index[name.downcase]
+      return lang if lang
+
+      name = name.split(',').first
+      return nil if name.to_s.empty?
+
+      @index[name.downcase]
     end
 
     # Public: A List of popular languages
@@ -259,18 +256,6 @@ module Linguist
       @colors ||= all.select(&:color).sort_by { |lang| lang.name.downcase }
     end
 
-    # Public: A List of languages compatible with Ace.
-    #
-    # TODO: Remove this method in a 5.x release. Every language now needs an ace_mode
-    # key, so this function isn't doing anything unique anymore.
-    #
-    # Returns an Array of Languages.
-    def self.ace_modes
-      warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set."
-      warn caller
-      @ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase }
-    end
-
     # Internal: Initialize a new Language
     #
     # attributes - A hash of attributes
@@ -287,7 +272,7 @@ module Linguist
     @color = attributes[:color]
 
     # Set aliases
-    @aliases = [default_alias_name] + (attributes[:aliases] || [])
+    @aliases = [default_alias] + (attributes[:aliases] || [])
 
     # Load the TextMate scope name or try to guess one
     @tm_scope = attributes[:tm_scope] || begin
@@ -305,9 +290,6 @@ module Linguist
     @codemirror_mime_type = attributes[:codemirror_mime_type]
     @wrap = attributes[:wrap] || false
 
-    # Set legacy search term
-    @search_term = attributes[:search_term] || default_alias_name
-
     # Set the language_id
     @language_id = attributes[:language_id]
 
@@ -362,17 +344,6 @@ module Linguist
   # Returns an Array of String names
   attr_reader :aliases
 
-  # Deprecated: Get code search term
-  #
-  # Examples
-  #
-  #   # => "ruby"
-  #   # => "python"
-  #   # => "perl"
-  #
-  # Returns the name String
-  attr_reader :search_term
-
   # Public: Get language_id (used in GitHub search)
   #
   # Examples
@@ -457,22 +428,6 @@ module Linguist
   # Returns the extensions Array
   attr_reader :filenames
 
-  # Deprecated: Get primary extension
-  #
-  # Defaults to the first extension but can be overridden
-  # in the languages.yml.
-  #
-  # The primary extension can not be nil. Tests should verify this.
-  #
-  # This method is only used by app/helpers/gists_helper.rb for creating
-  # the language dropdown. It really should be using `name` instead.
-  # Would like to drop primary extension.
-  #
-  # Returns the extension String.
-  def primary_extension
-    extensions.first
-  end
-
   # Public: Get URL escaped name.
   #
   # Examples
@@ -486,12 +441,13 @@ module Linguist
     EscapeUtils.escape_url(name).gsub('+', '%20')
   end
 
-  # Internal: Get default alias name
+  # Public: Get default alias name
   #
   # Returns the alias name String
-  def default_alias_name
+  def default_alias
     name.downcase.gsub(/\s/, '-')
   end
+  alias_method :default_alias_name, :default_alias
 
   # Public: Get Language group
   #
@@ -606,7 +562,6 @@ module Linguist
       :wrap => options['wrap'],
       :group_name => options['group'],
       :searchable => options.fetch('searchable', true),
-      :search_term => options['search_term'],
      :language_id => options['language_id'],
      :extensions => Array(options['extensions']),
      :interpreters => options['interpreters'].sort,
```
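The new `Language.[]` body keeps the comma-separated fallback but skips the second hash lookup when the exact name hits, and `default_alias_name` survives only as an alias of the renamed `default_alias`. A rough usage sketch, assuming the stock languages.yml is loaded:

```ruby
require 'linguist'

ruby = Linguist::Language['Ruby']
puts ruby.default_alias                        # => "ruby" (name downcased, spaces dashed)

# Exact (case-insensitive) match wins first...
puts Linguist::Language['ruby'].name           # => "Ruby"
# ...otherwise the part before the first comma is retried.
puts Linguist::Language['Ruby, Perl'].name     # => "Ruby"
p    Linguist::Language['no such language']    # => nil
```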
@@ -14,12 +14,10 @@
|
|||||||
# listed alphabetically)
|
# listed alphabetically)
|
||||||
# interpreters - An Array of associated interpreters
|
# interpreters - An Array of associated interpreters
|
||||||
# searchable - Boolean flag to enable searching (defaults to true)
|
# searchable - Boolean flag to enable searching (defaults to true)
|
||||||
# search_term - Deprecated: Some languages may be indexed under a
|
|
||||||
# different alias. Avoid defining new exceptions.
|
|
||||||
# language_id - Integer used as a language-name-independent indexed field so that we can rename
|
# language_id - Integer used as a language-name-independent indexed field so that we can rename
|
||||||
# languages in Linguist without reindexing all the code on GitHub. Must not be
|
# languages in Linguist without reindexing all the code on GitHub. Must not be
|
||||||
# changed for existing languages without the explicit permission of GitHub staff.
|
# changed for existing languages without the explicit permission of GitHub staff.
|
||||||
# color - CSS hex color to represent the language. Only used if type is "programming" or "prose"
|
# color - CSS hex color to represent the language. Only used if type is "programming" or "prose".
|
||||||
# tm_scope - The TextMate scope that represents this programming
|
# tm_scope - The TextMate scope that represents this programming
|
||||||
# language. This should match one of the scopes listed in
|
# language. This should match one of the scopes listed in
|
||||||
# the grammars.yml file. Use "none" if there is no grammar
|
# the grammars.yml file. Use "none" if there is no grammar
|
||||||
@@ -121,7 +119,6 @@ ASN.1:
|
|||||||
ASP:
|
ASP:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#6a40fd"
|
color: "#6a40fd"
|
||||||
search_term: aspx-vb
|
|
||||||
tm_scope: text.html.asp
|
tm_scope: text.html.asp
|
||||||
aliases:
|
aliases:
|
||||||
- aspx
|
- aspx
|
||||||
@@ -154,7 +151,6 @@ ActionScript:
|
|||||||
type: programming
|
type: programming
|
||||||
tm_scope: source.actionscript.3
|
tm_scope: source.actionscript.3
|
||||||
color: "#882B0F"
|
color: "#882B0F"
|
||||||
search_term: as3
|
|
||||||
aliases:
|
aliases:
|
||||||
- actionscript 3
|
- actionscript 3
|
||||||
- actionscript3
|
- actionscript3
|
||||||
@@ -291,7 +287,6 @@ AspectJ:
|
|||||||
Assembly:
|
Assembly:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#6E4C13"
|
color: "#6E4C13"
|
||||||
search_term: nasm
|
|
||||||
aliases:
|
aliases:
|
||||||
- nasm
|
- nasm
|
||||||
extensions:
|
extensions:
|
||||||
@@ -349,7 +344,6 @@ Awk:
|
|||||||
language_id: 28
|
language_id: 28
|
||||||
Batchfile:
|
Batchfile:
|
||||||
type: programming
|
type: programming
|
||||||
search_term: bat
|
|
||||||
aliases:
|
aliases:
|
||||||
- bat
|
- bat
|
||||||
- batch
|
- batch
|
||||||
@@ -474,7 +468,6 @@ C#:
|
|||||||
codemirror_mode: clike
|
codemirror_mode: clike
|
||||||
codemirror_mime_type: text/x-csharp
|
codemirror_mime_type: text/x-csharp
|
||||||
tm_scope: source.cs
|
tm_scope: source.cs
|
||||||
search_term: csharp
|
|
||||||
color: "#178600"
|
color: "#178600"
|
||||||
aliases:
|
aliases:
|
||||||
- csharp
|
- csharp
|
||||||
@@ -489,7 +482,6 @@ C++:
|
|||||||
ace_mode: c_cpp
|
ace_mode: c_cpp
|
||||||
codemirror_mode: clike
|
codemirror_mode: clike
|
||||||
codemirror_mime_type: text/x-c++src
|
codemirror_mime_type: text/x-c++src
|
||||||
search_term: cpp
|
|
||||||
color: "#f34b7d"
|
color: "#f34b7d"
|
||||||
aliases:
|
aliases:
|
||||||
- cpp
|
- cpp
|
||||||
@@ -507,6 +499,7 @@ C++:
|
|||||||
- ".inc"
|
- ".inc"
|
||||||
- ".inl"
|
- ".inl"
|
||||||
- ".ipp"
|
- ".ipp"
|
||||||
|
- ".re"
|
||||||
- ".tcc"
|
- ".tcc"
|
||||||
- ".tpp"
|
- ".tpp"
|
||||||
language_id: 43
|
language_id: 43
|
||||||
@@ -719,7 +712,6 @@ ColdFusion:
|
|||||||
type: programming
|
type: programming
|
||||||
ace_mode: coldfusion
|
ace_mode: coldfusion
|
||||||
color: "#ed2cd6"
|
color: "#ed2cd6"
|
||||||
search_term: cfm
|
|
||||||
aliases:
|
aliases:
|
||||||
- cfm
|
- cfm
|
||||||
- cfml
|
- cfml
|
||||||
@@ -733,7 +725,6 @@ ColdFusion CFC:
|
|||||||
type: programming
|
type: programming
|
||||||
group: ColdFusion
|
group: ColdFusion
|
||||||
ace_mode: coldfusion
|
ace_mode: coldfusion
|
||||||
search_term: cfc
|
|
||||||
aliases:
|
aliases:
|
||||||
- cfc
|
- cfc
|
||||||
extensions:
|
extensions:
|
||||||
@@ -854,16 +845,6 @@ Csound Score:
|
|||||||
tm_scope: source.csound-score
|
tm_scope: source.csound-score
|
||||||
ace_mode: text
|
ace_mode: text
|
||||||
language_id: 75
|
language_id: 75
|
||||||
Cucumber:
|
|
||||||
type: programming
|
|
||||||
extensions:
|
|
||||||
- ".feature"
|
|
||||||
tm_scope: text.gherkin.feature
|
|
||||||
aliases:
|
|
||||||
- gherkin
|
|
||||||
ace_mode: text
|
|
||||||
color: "#5B2063"
|
|
||||||
language_id: 76
|
|
||||||
Cuda:
|
Cuda:
|
||||||
type: programming
|
type: programming
|
||||||
extensions:
|
extensions:
|
||||||
@@ -956,7 +937,6 @@ DTrace:
|
|||||||
language_id: 85
|
language_id: 85
|
||||||
Darcs Patch:
|
Darcs Patch:
|
||||||
type: data
|
type: data
|
||||||
search_term: dpatch
|
|
||||||
aliases:
|
aliases:
|
||||||
- dpatch
|
- dpatch
|
||||||
extensions:
|
extensions:
|
||||||
@@ -1141,9 +1121,10 @@ Emacs Lisp:
|
|||||||
- ".gnus"
|
- ".gnus"
|
||||||
- ".spacemacs"
|
- ".spacemacs"
|
||||||
- ".viper"
|
- ".viper"
|
||||||
- "Project.ede"
|
- Cask
|
||||||
- "_emacs"
|
- Project.ede
|
||||||
- "abbrev_defs"
|
- _emacs
|
||||||
|
- abbrev_defs
|
||||||
extensions:
|
extensions:
|
||||||
- ".el"
|
- ".el"
|
||||||
- ".emacs"
|
- ".emacs"
|
||||||
@@ -1187,7 +1168,6 @@ Erlang:
|
|||||||
F#:
|
F#:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#b845fc"
|
color: "#b845fc"
|
||||||
search_term: fsharp
|
|
||||||
aliases:
|
aliases:
|
||||||
- fsharp
|
- fsharp
|
||||||
extensions:
|
extensions:
|
||||||
@@ -1208,23 +1188,6 @@ FLUX:
|
|||||||
tm_scope: none
|
tm_scope: none
|
||||||
ace_mode: text
|
ace_mode: text
|
||||||
language_id: 106
|
language_id: 106
|
||||||
FORTRAN:
|
|
||||||
type: programming
|
|
||||||
color: "#4d41b1"
|
|
||||||
extensions:
|
|
||||||
- ".f90"
|
|
||||||
- ".f"
|
|
||||||
- ".f03"
|
|
||||||
- ".f08"
|
|
||||||
- ".f77"
|
|
||||||
- ".f95"
|
|
||||||
- ".for"
|
|
||||||
- ".fpp"
|
|
||||||
tm_scope: source.fortran.modern
|
|
||||||
ace_mode: text
|
|
||||||
codemirror_mode: fortran
|
|
||||||
codemirror_mime_type: text/x-fortran
|
|
||||||
language_id: 107
|
|
||||||
Factor:
|
Factor:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#636746"
|
color: "#636746"
|
||||||
@@ -1294,6 +1257,23 @@ Forth:
|
|||||||
codemirror_mode: forth
|
codemirror_mode: forth
|
||||||
codemirror_mime_type: text/x-forth
|
codemirror_mime_type: text/x-forth
|
||||||
language_id: 114
|
language_id: 114
|
||||||
|
Fortran:
|
||||||
|
type: programming
|
||||||
|
color: "#4d41b1"
|
||||||
|
extensions:
|
||||||
|
- ".f90"
|
||||||
|
- ".f"
|
||||||
|
- ".f03"
|
||||||
|
- ".f08"
|
||||||
|
- ".f77"
|
||||||
|
- ".f95"
|
||||||
|
- ".for"
|
||||||
|
- ".fpp"
|
||||||
|
tm_scope: source.fortran.modern
|
||||||
|
ace_mode: text
|
||||||
|
codemirror_mode: fortran
|
||||||
|
codemirror_mime_type: text/x-fortran
|
||||||
|
language_id: 107
|
||||||
FreeMarker:
|
FreeMarker:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#0050b2"
|
color: "#0050b2"
|
||||||
@@ -1339,15 +1319,6 @@ GAP:
|
|||||||
tm_scope: source.gap
|
tm_scope: source.gap
|
||||||
ace_mode: text
|
ace_mode: text
|
||||||
language_id: 119
|
language_id: 119
|
||||||
GAS:
|
|
||||||
type: programming
|
|
||||||
group: Assembly
|
|
||||||
extensions:
|
|
||||||
- ".s"
|
|
||||||
- ".ms"
|
|
||||||
tm_scope: source.assembly
|
|
||||||
ace_mode: assembly_x86
|
|
||||||
language_id: 120
|
|
||||||
GCC Machine Description:
|
GCC Machine Description:
|
||||||
type: programming
|
type: programming
|
||||||
extensions:
|
extensions:
|
||||||
@@ -1393,6 +1364,18 @@ GLSL:
|
|||||||
- ".vshader"
|
- ".vshader"
|
||||||
ace_mode: glsl
|
ace_mode: glsl
|
||||||
language_id: 124
|
language_id: 124
|
||||||
|
GN:
|
||||||
|
type: data
|
||||||
|
extensions:
|
||||||
|
- ".gn"
|
||||||
|
- ".gni"
|
||||||
|
interpreters:
|
||||||
|
- gn
|
||||||
|
tm_scope: source.gn
|
||||||
|
ace_mode: python
|
||||||
|
codemirror_mode: python
|
||||||
|
codemirror_mime_type: text/x-python
|
||||||
|
language_id: 302957008
|
||||||
Game Maker Language:
|
Game Maker Language:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#8fb200"
|
color: "#8fb200"
|
||||||
@@ -1403,6 +1386,14 @@ Game Maker Language:
|
|||||||
codemirror_mode: clike
|
codemirror_mode: clike
|
||||||
codemirror_mime_type: text/x-c++src
|
codemirror_mime_type: text/x-c++src
|
||||||
language_id: 125
|
language_id: 125
|
||||||
|
Genie:
|
||||||
|
type: programming
|
||||||
|
ace_mode: text
|
||||||
|
extensions:
|
||||||
|
- ".gs"
|
||||||
|
color: "#fb855d"
|
||||||
|
tm_scope: none
|
||||||
|
language_id: 792408528
|
||||||
Genshi:
|
Genshi:
|
||||||
type: programming
|
type: programming
|
||||||
extensions:
|
extensions:
|
||||||
@@ -1437,7 +1428,6 @@ Gentoo Eclass:
|
|||||||
language_id: 128
|
language_id: 128
|
||||||
Gettext Catalog:
|
Gettext Catalog:
|
||||||
type: prose
|
type: prose
|
||||||
search_term: pot
|
|
||||||
searchable: false
|
searchable: false
|
||||||
aliases:
|
aliases:
|
||||||
- pot
|
- pot
|
||||||
@@ -1447,6 +1437,16 @@ Gettext Catalog:
|
|||||||
tm_scope: source.po
|
tm_scope: source.po
|
||||||
ace_mode: text
|
ace_mode: text
|
||||||
language_id: 129
|
language_id: 129
|
||||||
|
Gherkin:
|
||||||
|
type: programming
|
||||||
|
extensions:
|
||||||
|
- ".feature"
|
||||||
|
tm_scope: text.gherkin.feature
|
||||||
|
aliases:
|
||||||
|
- cucumber
|
||||||
|
ace_mode: text
|
||||||
|
color: "#5B2063"
|
||||||
|
language_id: 76
|
||||||
Glyph:
|
Glyph:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#e4cc98"
|
color: "#e4cc98"
|
||||||
@@ -1473,6 +1473,8 @@ Gnuplot:
|
|||||||
Go:
|
Go:
|
||||||
type: programming
|
type: programming
|
||||||
color: "#375eab"
|
color: "#375eab"
|
||||||
|
aliases:
|
||||||
|
- golang
|
||||||
extensions:
|
extensions:
|
||||||
- ".go"
|
- ".go"
|
||||||
ace_mode: golang
|
ace_mode: golang
|
||||||
@@ -1548,45 +1550,6 @@ Graphviz (DOT):
|
|||||||
- ".gv"
|
- ".gv"
|
||||||
ace_mode: text
|
ace_mode: text
|
||||||
language_id: 140
|
language_id: 140
|
||||||
Groff:
|
|
||||||
type: markup
|
|
||||||
color: "#ecdebe"
|
|
||||||
extensions:
|
|
||||||
- ".man"
|
|
||||||
- ".1"
|
|
||||||
- ".1in"
|
|
||||||
- ".1m"
|
|
||||||
- ".1x"
|
|
||||||
- ".2"
|
|
||||||
- ".3"
|
|
||||||
- ".3in"
|
|
||||||
- ".3m"
|
|
||||||
- ".3qt"
|
|
||||||
- ".3x"
|
|
||||||
- ".4"
|
|
||||||
- ".5"
|
|
||||||
- ".6"
|
|
||||||
- ".7"
|
|
||||||
- ".8"
|
|
||||||
- ".9"
|
|
||||||
- ".l"
|
|
||||||
- ".me"
|
|
||||||
- ".ms"
|
|
||||||
- ".n"
|
|
||||||
- ".rno"
|
|
||||||
- ".roff"
|
|
||||||
- ".tmac"
|
|
||||||
filenames:
|
|
||||||
- mmn
|
|
||||||
- mmt
|
|
||||||
tm_scope: text.roff
|
|
||||||
aliases:
|
|
||||||
- nroff
|
|
||||||
- troff
|
|
||||||
ace_mode: text
|
|
||||||
codemirror_mode: troff
|
|
||||||
codemirror_mime_type: text/troff
|
|
||||||
language_id: 141
|
|
||||||
Groovy:
|
Groovy:
|
||||||
type: programming
|
type: programming
|
||||||
ace_mode: groovy
|
ace_mode: groovy
|
||||||
@@ -1659,13 +1622,16 @@ HTML+Django:
   tm_scope: text.html.django
   group: HTML
   extensions:
-  - ".mustache"
   - ".jinja"
+  - ".mustache"
+  - ".njk"
   aliases:
   - django
   - html+django/jinja
   - html+jinja
   - htmldjango
+  - njk
+  - nunjucks
   ace_mode: django
   codemirror_mode: django
   codemirror_mime_type: text/x-django
@@ -1798,7 +1764,7 @@ Hy:
   - ".hy"
   aliases:
   - hylang
-  tm_scope: source.hy
+  tm_scope: none
   language_id: 159
 HyPhy:
   type: programming
@@ -1844,7 +1810,6 @@ INI:
   language_id: 163
 IRC log:
   type: data
-  search_term: irc
   aliases:
   - irc
   - irc logs
@@ -1959,6 +1924,8 @@ JSON5:
   type: data
   extensions:
   - ".json5"
+  filenames:
+  - ".babelrc"
   tm_scope: source.js
   ace_mode: javascript
   codemirror_mode: javascript
@@ -1992,17 +1959,6 @@ JSX:
   codemirror_mode: jsx
   codemirror_mime_type: text/jsx
   language_id: 178
-Jade:
-  group: HTML
-  type: markup
-  extensions:
-  - ".jade"
-  - ".pug"
-  tm_scope: text.jade
-  ace_mode: jade
-  codemirror_mode: pug
-  codemirror_mime_type: text/x-pug
-  language_id: 179
 Jasmin:
   type: programming
   ace_mode: java
@@ -2022,7 +1978,6 @@ Java:
 Java Server Pages:
   type: programming
   group: Java
-  search_term: jsp
   aliases:
   - jsp
   extensions:
@@ -2279,7 +2234,6 @@ Literate CoffeeScript:
   group: CoffeeScript
   ace_mode: text
   wrap: true
-  search_term: litcoffee
   aliases:
   - litcoffee
   extensions:
@@ -2288,7 +2242,6 @@ Literate CoffeeScript:
 Literate Haskell:
   type: programming
   group: Haskell
-  search_term: lhs
   aliases:
   - lhaskell
   - lhs
@@ -2550,7 +2503,6 @@ Max:
   aliases:
   - max/msp
   - maxmsp
-  search_term: max/msp
   extensions:
   - ".maxpat"
   - ".maxhelp"
@@ -2602,7 +2554,6 @@ MiniD:
   language_id: 231
 Mirah:
   type: programming
-  search_term: mirah
   color: "#c7a938"
   extensions:
   - ".druby"
@@ -2759,7 +2710,7 @@ Nginx:
   codemirror_mime_type: text/x-nginx-conf
   color: "#9469E9"
   language_id: 248
-Nimrod:
+Nim:
   type: programming
   color: "#37775b"
   extensions:
@@ -2954,6 +2905,15 @@ OpenSCAD:
   tm_scope: none
   ace_mode: scad
   language_id: 266
+OpenType Feature File:
+  type: data
+  aliases:
+  - AFDKO
+  extensions:
+  - ".fea"
+  tm_scope: source.opentype
+  ace_mode: text
+  language_id: 374317347
 Org:
   type: prose
   wrap: true
@@ -3017,6 +2977,8 @@ PHP:
   - ".phps"
   - ".phpt"
   filenames:
+  - ".php_cs"
+  - ".php_cs.dist"
   - Phakefile
   interpreters:
   - php
@@ -3171,8 +3133,8 @@ Perl6:
   language_id: 283
 Pic:
   type: markup
-  group: Groff
-  tm_scope: "source.pic"
+  group: Roff
+  tm_scope: source.pic
   extensions:
   - ".pic"
   - ".chem"
@@ -3325,6 +3287,17 @@ Public Key:
   codemirror_mode: asciiarmor
   codemirror_mime_type: application/pgp
   language_id: 298
+Pug:
+  group: HTML
+  type: markup
+  extensions:
+  - ".jade"
+  - ".pug"
+  tm_scope: text.jade
+  ace_mode: jade
+  codemirror_mode: pug
+  codemirror_mime_type: text/x-pug
+  language_id: 179
 Puppet:
   type: programming
   color: "#302B6D"
@@ -3376,7 +3349,9 @@ Python:
   - ".cgi"
   - ".fcgi"
   - ".gyp"
+  - ".gypi"
   - ".lmi"
+  - ".py3"
   - ".pyde"
   - ".pyp"
   - ".pyt"
@@ -3387,6 +3362,7 @@ Python:
   - ".wsgi"
   - ".xpy"
   filenames:
+  - ".gclient"
   - BUCK
   - BUILD
   - SConscript
@@ -3554,7 +3530,7 @@ Racket:
   tm_scope: source.racket
   ace_mode: lisp
   language_id: 316
-Ragel in Ruby Host:
+Ragel:
   type: programming
   color: "#9d5200"
   extensions:
@@ -3565,9 +3541,16 @@ Ragel in Ruby Host:
   tm_scope: none
   ace_mode: text
   language_id: 317
+Rascal:
+  type: programming
+  color: "#fffaa0"
+  extensions:
+  - ".rsc"
+  tm_scope: source.rascal
+  ace_mode: text
+  language_id: 173616037
 Raw token data:
   type: data
-  search_term: raw
   aliases:
   - raw
   extensions:
@@ -3575,6 +3558,19 @@ Raw token data:
   tm_scope: none
   ace_mode: text
   language_id: 318
+Reason:
+  type: programming
+  group: OCaml
+  ace_mode: rust
+  codemirror_mode: rust
+  codemirror_mime_type: text/x-rustsrc
+  extensions:
+  - ".re"
+  - ".rei"
+  interpreters:
+  - ocaml
+  tm_scope: source.reason
+  language_id: 869538413
 Rebol:
   type: programming
   color: "#358a5b"
@@ -3630,6 +3626,44 @@ RobotFramework:
   tm_scope: text.robot
   ace_mode: text
   language_id: 324
+Roff:
+  type: markup
+  color: "#ecdebe"
+  extensions:
+  - ".man"
+  - ".1"
+  - ".1in"
+  - ".1m"
+  - ".1x"
+  - ".2"
+  - ".3"
+  - ".3in"
+  - ".3m"
+  - ".3qt"
+  - ".3x"
+  - ".4"
+  - ".5"
+  - ".6"
+  - ".7"
+  - ".8"
+  - ".9"
+  - ".l"
+  - ".me"
+  - ".ms"
+  - ".n"
+  - ".rno"
+  - ".roff"
+  - ".tmac"
+  filenames:
+  - mmn
+  - mmt
+  tm_scope: text.roff
+  aliases:
+  - nroff
+  ace_mode: text
+  codemirror_mode: troff
+  codemirror_mime_type: text/troff
+  language_id: 141
 Rouge:
   type: programming
   ace_mode: clojure
@@ -3685,6 +3719,7 @@ Ruby:
   - Berksfile
   - Brewfile
   - Buildfile
+  - Dangerfile
   - Deliverfile
   - Fastfile
   - Gemfile
@@ -3778,6 +3813,7 @@ SQL:
   - ".cql"
   - ".ddl"
   - ".inc"
+  - ".mysql"
   - ".prc"
   - ".tab"
   - ".udf"
@@ -3913,7 +3949,6 @@ Self:
   language_id: 345
 Shell:
   type: programming
-  search_term: bash
   color: "#89e051"
   aliases:
   - sh
@@ -4026,6 +4061,13 @@ SourcePawn:
   tm_scope: source.sp
   ace_mode: text
   language_id: 354
+Spline Font Database:
+  type: data
+  extensions:
+  - ".sfd"
+  tm_scope: text.sfd
+  ace_mode: yaml
+  language_id: 767169629
 Squirrel:
   type: programming
   color: "#800000"
@@ -4256,11 +4298,15 @@ Text:
   - ".no"
   filenames:
   - COPYING
+  - FONTLOG
   - INSTALL
+  - INSTALL.mysql
   - LICENSE
+  - LICENSE.mysql
   - NEWS
   - README.1ST
   - README.me
+  - README.mysql
   - click.me
   - delete.me
   - keep.me
@@ -4351,6 +4397,15 @@ Unity3D Asset:
   - ".unity"
   tm_scope: source.yaml
   language_id: 380
+Unix Assembly:
+  type: programming
+  group: Assembly
+  extensions:
+  - ".s"
+  - ".ms"
+  tm_scope: source.assembly
+  ace_mode: assembly_x86
+  language_id: 120
 Uno:
   type: programming
   extensions:
@@ -4423,13 +4478,13 @@ Verilog:
   codemirror_mode: verilog
   codemirror_mime_type: text/x-verilog
   language_id: 387
-VimL:
+Vim script:
   type: programming
   color: "#199f4b"
-  search_term: vim
   tm_scope: source.viml
   aliases:
   - vim
+  - viml
   - nvim
   extensions:
   - ".vim"
@@ -4538,6 +4593,15 @@ XC:
   codemirror_mode: clike
   codemirror_mime_type: text/x-csrc
   language_id: 398
+XCompose:
+  type: data
+  filenames:
+  - ".XCompose"
+  - "XCompose"
+  - "xcompose"
+  tm_scope: 'config.xcompose'
+  ace_mode: text
+  language_id: 225167241
 XML:
   type: data
   ace_mode: xml
@@ -4731,6 +4795,7 @@ YAML:
   - ".syntax"
   - ".yaml"
   - ".yaml-tmlanguage"
+  - ".yml.mysql"
   filenames:
   - ".clang-format"
   ace_mode: yaml
@@ -4782,7 +4847,6 @@ desktop:
 eC:
   type: programming
   color: "#913960"
-  search_term: ec
   extensions:
   - ".ec"
   - ".eh"
@@ -4832,7 +4896,6 @@ ooc:
 reStructuredText:
   type: prose
   wrap: true
-  search_term: rst
   aliases:
   - rst
   extensions:
@@ -26,4 +26,4 @@
 - Shell
 - Swift
 - TeX
-- VimL
+- Vim script
lib/linguist/strategy/extension.rb (new file, 10 lines)
@@ -0,0 +1,10 @@
module Linguist
  module Strategy
    # Detects language based on extension
    class Extension
      def self.call(blob, _)
        Language.find_by_extension(blob.name.to_s)
      end
    end
  end
end
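As a rough usage sketch of the new strategy (not part of the diff): it is a plain callable that receives a blob-like object plus a candidate list it ignores, and defers to Language.find_by_extension. FileBlob is the gem's file-backed blob; the path below is hypothetical.

require "linguist"

# Exercise the Extension strategy directly; only blob.name is consulted here.
blob = Linguist::FileBlob.new("samples/GN/clang.gni") # hypothetical path
languages = Linguist::Strategy::Extension.call(blob, [])
puts Array(languages).map(&:name).inspect # expected to include "GN"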
@@ -1,9 +1,10 @@
 module Linguist
   module Strategy
-    # Detects language based on filename and/or extension
+    # Detects language based on filename
     class Filename
       def self.call(blob, _)
-        Language.find_by_filename(blob.name.to_s)
+        name = blob.name.to_s
+        Language.find_by_filename(name)
       end
     end
   end
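For context, filename- and extension-based strategies are meant to be tried in order until one of them yields an unambiguous match. The chain below is an illustrative sketch under that assumption, not the gem's actual strategy list or resolution rules.

require "linguist"

# Hypothetical detection chain: exact filename match first, then extension.
STRATEGIES = [Linguist::Strategy::Filename, Linguist::Strategy::Extension]

def guess_language(path)
  blob = Linguist::FileBlob.new(path)
  STRATEGIES.each do |strategy|
    matches = Array(strategy.call(blob, []))
    return matches.first if matches.length == 1
  end
  nil
end

puts guess_language("Cask")&.name     # filename-based lookup
puts guess_language("BUILD.gn")&.name # extension-based lookup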
@@ -50,6 +50,9 @@
 # Go dependencies
 - Godeps/_workspace/
 
+# GNU indent profiles
+- .indent.pro
+
 # Minified JavaScript and CSS
 - (\.|-)min\.(js|css)$
 
@@ -235,6 +238,12 @@
 # BuddyBuild
 - BuddyBuildSDK.framework/
 
+# Realm
+- Realm.framework
+
+# RealmSwift
+- RealmSwift.framework
+
 # git config files
 - gitattributes$
 - gitignore$
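The entries in the two hunks above are regular expressions matched against repository paths to mark files as vendored. A small illustrative sketch of combining a few of them and testing made-up paths (this helper is not the gem's internal implementation):

# A few patterns from the list above; Linguist evaluates each entry as a regexp.
patterns = [
  'Godeps/_workspace/',
  '.indent.pro',
  '(\.|-)min\.(js|css)$',
  'Realm.framework',
]
vendored = Regexp.new(patterns.join("|"))

# Made-up paths for illustration only.
%w[lib/jquery.min.js Godeps/_workspace/src/foo.go app/models/user.rb].each do |path|
  puts "#{path}: #{vendored.match?(path) ? "vendored" : "kept"}"
end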
@@ -1,3 +1,3 @@
 module Linguist
-  VERSION = "4.8.16"
+  VERSION = "5.0.4"
 end
samples/C++/bug1163046.--skeleton.re (new file, 46 lines)
@@ -0,0 +1,46 @@
#include <iostream>

#define YYCTYPE unsigned char
#define YYCURSOR cursor
#define YYLIMIT cursor
#define YYMARKER marker
#define YYFILL(n)

bool scan(const char *text)
{
    YYCTYPE *start = (YYCTYPE *)text;
    YYCTYPE *cursor = (YYCTYPE *)text;
    YYCTYPE *marker = (YYCTYPE *)text;
next:
    YYCTYPE *token = cursor;
    /*!re2c
        '(This file must be converted with BinHex 4.0)'
        {
            if (token == start || *(token - 1) == '\n')
                return true; else goto next;
        }
        [\001-\377]
        { goto next; }
        [\000]
        { return false; }
    */
    return false;
}

#define do_scan(str, expect) \
    res = scan(str) == expect ? 0 : 1; \
    std::cerr << str << "\t-\t" << (res ? "fail" : "ok") << std::endl; \
    result += res

/*!max:re2c */

int main(int,void**)
{
    int res, result = 0;
    do_scan("(This file must be converted with BinHex 4.0)", 1);
    do_scan("x(This file must be converted with BinHex 4.0)", 0);
    do_scan("(This file must be converted with BinHex 4.0)x", 1);
    do_scan("x(This file must be converted with BinHex 4.0)x", 0);

    return result;
}
239
samples/C++/cnokw.re
Normal file
239
samples/C++/cnokw.re
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
#include <stdlib.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <string.h>
|
||||||
|
|
||||||
|
#define ADDEQ 257
|
||||||
|
#define ANDAND 258
|
||||||
|
#define ANDEQ 259
|
||||||
|
#define ARRAY 260
|
||||||
|
#define ASM 261
|
||||||
|
#define AUTO 262
|
||||||
|
#define BREAK 263
|
||||||
|
#define CASE 264
|
||||||
|
#define CHAR 265
|
||||||
|
#define CONST 266
|
||||||
|
#define CONTINUE 267
|
||||||
|
#define DECR 268
|
||||||
|
#define DEFAULT 269
|
||||||
|
#define DEREF 270
|
||||||
|
#define DIVEQ 271
|
||||||
|
#define DO 272
|
||||||
|
#define DOUBLE 273
|
||||||
|
#define ELLIPSIS 274
|
||||||
|
#define ELSE 275
|
||||||
|
#define ENUM 276
|
||||||
|
#define EQL 277
|
||||||
|
#define EXTERN 278
|
||||||
|
#define FCON 279
|
||||||
|
#define FLOAT 280
|
||||||
|
#define FOR 281
|
||||||
|
#define FUNCTION 282
|
||||||
|
#define GEQ 283
|
||||||
|
#define GOTO 284
|
||||||
|
#define ICON 285
|
||||||
|
#define ID 286
|
||||||
|
#define IF 287
|
||||||
|
#define INCR 288
|
||||||
|
#define INT 289
|
||||||
|
#define LEQ 290
|
||||||
|
#define LONG 291
|
||||||
|
#define LSHIFT 292
|
||||||
|
#define LSHIFTEQ 293
|
||||||
|
#define MODEQ 294
|
||||||
|
#define MULEQ 295
|
||||||
|
#define NEQ 296
|
||||||
|
#define OREQ 297
|
||||||
|
#define OROR 298
|
||||||
|
#define POINTER 299
|
||||||
|
#define REGISTER 300
|
||||||
|
#define RETURN 301
|
||||||
|
#define RSHIFT 302
|
||||||
|
#define RSHIFTEQ 303
|
||||||
|
#define SCON 304
|
||||||
|
#define SHORT 305
|
||||||
|
#define SIGNED 306
|
||||||
|
#define SIZEOF 307
|
||||||
|
#define STATIC 308
|
||||||
|
#define STRUCT 309
|
||||||
|
#define SUBEQ 310
|
||||||
|
#define SWITCH 311
|
||||||
|
#define TYPEDEF 312
|
||||||
|
#define UNION 313
|
||||||
|
#define UNSIGNED 314
|
||||||
|
#define VOID 315
|
||||||
|
#define VOLATILE 316
|
||||||
|
#define WHILE 317
|
||||||
|
#define XOREQ 318
|
||||||
|
#define EOI 319
|
||||||
|
|
||||||
|
typedef unsigned int uint;
|
||||||
|
typedef unsigned char uchar;
|
||||||
|
|
||||||
|
#define BSIZE 8192
|
||||||
|
|
||||||
|
#define YYCTYPE uchar
|
||||||
|
#define YYCURSOR cursor
|
||||||
|
#define YYLIMIT s->lim
|
||||||
|
#define YYMARKER s->ptr
|
||||||
|
#define YYFILL(n) {cursor = fill(s, cursor);}
|
||||||
|
|
||||||
|
#define RET(i) {s->cur = cursor; return i;}
|
||||||
|
|
||||||
|
typedef struct Scanner {
|
||||||
|
int fd;
|
||||||
|
uchar *bot, *tok, *ptr, *cur, *pos, *lim, *top, *eof;
|
||||||
|
uint line;
|
||||||
|
} Scanner;
|
||||||
|
|
||||||
|
uchar *fill(Scanner *s, uchar *cursor){
|
||||||
|
if(!s->eof){
|
||||||
|
uint cnt = s->tok - s->bot;
|
||||||
|
if(cnt){
|
||||||
|
memcpy(s->bot, s->tok, s->lim - s->tok);
|
||||||
|
s->tok = s->bot;
|
||||||
|
s->ptr -= cnt;
|
||||||
|
cursor -= cnt;
|
||||||
|
s->pos -= cnt;
|
||||||
|
s->lim -= cnt;
|
||||||
|
}
|
||||||
|
if((s->top - s->lim) < BSIZE){
|
||||||
|
uchar *buf = (uchar*) malloc(((s->lim - s->bot) + BSIZE)*sizeof(uchar));
|
||||||
|
memcpy(buf, s->tok, s->lim - s->tok);
|
||||||
|
s->tok = buf;
|
||||||
|
s->ptr = &buf[s->ptr - s->bot];
|
||||||
|
cursor = &buf[cursor - s->bot];
|
||||||
|
s->pos = &buf[s->pos - s->bot];
|
||||||
|
s->lim = &buf[s->lim - s->bot];
|
||||||
|
s->top = &s->lim[BSIZE];
|
||||||
|
free(s->bot);
|
||||||
|
s->bot = buf;
|
||||||
|
}
|
||||||
|
if((cnt = read(s->fd, (char*) s->lim, BSIZE)) != BSIZE){
|
||||||
|
s->eof = &s->lim[cnt]; *(s->eof)++ = '\n';
|
||||||
|
}
|
||||||
|
s->lim += cnt;
|
||||||
|
}
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
int scan(Scanner *s){
|
||||||
|
uchar *cursor = s->cur;
|
||||||
|
std:
|
||||||
|
s->tok = cursor;
|
||||||
|
/*!re2c
|
||||||
|
any = [\000-\377];
|
||||||
|
O = [0-7];
|
||||||
|
D = [0-9];
|
||||||
|
L = [a-zA-Z_];
|
||||||
|
H = [a-fA-F0-9];
|
||||||
|
E = [Ee] [+-]? D+;
|
||||||
|
FS = [fFlL];
|
||||||
|
IS = [uUlL]*;
|
||||||
|
ESC = [\\] ([abfnrtv?'"\\] | "x" H+ | O+);
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*!re2c
|
||||||
|
"/*" { goto comment; }
|
||||||
|
|
||||||
|
L (L|D)* { RET(ID); }
|
||||||
|
|
||||||
|
("0" [xX] H+ IS?) | ("0" D+ IS?) | (D+ IS?) |
|
||||||
|
(['] (ESC|any\[\n\\'])* ['])
|
||||||
|
{ RET(ICON); }
|
||||||
|
|
||||||
|
(D+ E FS?) | (D* "." D+ E? FS?) | (D+ "." D* E? FS?)
|
||||||
|
{ RET(FCON); }
|
||||||
|
|
||||||
|
(["] (ESC|any\[\n\\"])* ["])
|
||||||
|
{ RET(SCON); }
|
||||||
|
|
||||||
|
"..." { RET(ELLIPSIS); }
|
||||||
|
">>=" { RET(RSHIFTEQ); }
|
||||||
|
"<<=" { RET(LSHIFTEQ); }
|
||||||
|
"+=" { RET(ADDEQ); }
|
||||||
|
"-=" { RET(SUBEQ); }
|
||||||
|
"*=" { RET(MULEQ); }
|
||||||
|
"/=" { RET(DIVEQ); }
|
||||||
|
"%=" { RET(MODEQ); }
|
||||||
|
"&=" { RET(ANDEQ); }
|
||||||
|
"^=" { RET(XOREQ); }
|
||||||
|
"|=" { RET(OREQ); }
|
||||||
|
">>" { RET(RSHIFT); }
|
||||||
|
"<<" { RET(LSHIFT); }
|
||||||
|
"++" { RET(INCR); }
|
||||||
|
"--" { RET(DECR); }
|
||||||
|
"->" { RET(DEREF); }
|
||||||
|
"&&" { RET(ANDAND); }
|
||||||
|
"||" { RET(OROR); }
|
||||||
|
"<=" { RET(LEQ); }
|
||||||
|
">=" { RET(GEQ); }
|
||||||
|
"==" { RET(EQL); }
|
||||||
|
"!=" { RET(NEQ); }
|
||||||
|
";" { RET(';'); }
|
||||||
|
"{" { RET('{'); }
|
||||||
|
"}" { RET('}'); }
|
||||||
|
"," { RET(','); }
|
||||||
|
":" { RET(':'); }
|
||||||
|
"=" { RET('='); }
|
||||||
|
"(" { RET('('); }
|
||||||
|
")" { RET(')'); }
|
||||||
|
"[" { RET('['); }
|
||||||
|
"]" { RET(']'); }
|
||||||
|
"." { RET('.'); }
|
||||||
|
"&" { RET('&'); }
|
||||||
|
"!" { RET('!'); }
|
||||||
|
"~" { RET('~'); }
|
||||||
|
"-" { RET('-'); }
|
||||||
|
"+" { RET('+'); }
|
||||||
|
"*" { RET('*'); }
|
||||||
|
"/" { RET('/'); }
|
||||||
|
"%" { RET('%'); }
|
||||||
|
"<" { RET('<'); }
|
||||||
|
">" { RET('>'); }
|
||||||
|
"^" { RET('^'); }
|
||||||
|
"|" { RET('|'); }
|
||||||
|
"?" { RET('?'); }
|
||||||
|
|
||||||
|
|
||||||
|
[ \t\v\f]+ { goto std; }
|
||||||
|
|
||||||
|
"\n"
|
||||||
|
{
|
||||||
|
if(cursor == s->eof) RET(EOI);
|
||||||
|
s->pos = cursor; s->line++;
|
||||||
|
goto std;
|
||||||
|
}
|
||||||
|
|
||||||
|
any
|
||||||
|
{
|
||||||
|
printf("unexpected character: %c\n", *s->tok);
|
||||||
|
goto std;
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
comment:
|
||||||
|
/*!re2c
|
||||||
|
"*/" { goto std; }
|
||||||
|
"\n"
|
||||||
|
{
|
||||||
|
if(cursor == s->eof) RET(EOI);
|
||||||
|
s->tok = s->pos = cursor; s->line++;
|
||||||
|
goto comment;
|
||||||
|
}
|
||||||
|
any { goto comment; }
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
|
||||||
|
main(){
|
||||||
|
Scanner in;
|
||||||
|
int t;
|
||||||
|
memset((char*) &in, 0, sizeof(in));
|
||||||
|
in.fd = 0;
|
||||||
|
while((t = scan(&in)) != EOI){
|
||||||
|
/*
|
||||||
|
printf("%d\t%.*s\n", t, in.cur - in.tok, in.tok);
|
||||||
|
printf("%d\n", t);
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
close(in.fd);
|
||||||
|
}
|
||||||
63
samples/C++/cvsignore.re
Normal file
63
samples/C++/cvsignore.re
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
|
||||||
|
#define YYFILL(n) if (cursor >= limit) break;
|
||||||
|
#define YYCTYPE char
|
||||||
|
#define YYCURSOR cursor
|
||||||
|
#define YYLIMIT limit
|
||||||
|
#define YYMARKER marker
|
||||||
|
|
||||||
|
/*!re2c
|
||||||
|
any = (.|"\n");
|
||||||
|
value = (":" (.\"$")+)?;
|
||||||
|
cvsdat = "Date";
|
||||||
|
cvsid = "Id";
|
||||||
|
cvslog = "Log";
|
||||||
|
cvsrev = "Revision";
|
||||||
|
cvssrc = "Source";
|
||||||
|
*/
|
||||||
|
|
||||||
|
#define APPEND(text) \
|
||||||
|
append(output, outsize, text, sizeof(text) - sizeof(YYCTYPE))
|
||||||
|
|
||||||
|
inline void append(YYCTYPE *output, size_t & outsize, const YYCTYPE * text, size_t len)
|
||||||
|
{
|
||||||
|
memcpy(output + outsize, text, len);
|
||||||
|
outsize += (len / sizeof(YYCTYPE));
|
||||||
|
}
|
||||||
|
|
||||||
|
void scan(YYCTYPE *pText, size_t *pSize, int *pbChanged)
|
||||||
|
{
|
||||||
|
// rule
|
||||||
|
// scan lines
|
||||||
|
// find $ in lines
|
||||||
|
// compact $<keyword>: .. $ to $<keyword>$
|
||||||
|
|
||||||
|
YYCTYPE *output;
|
||||||
|
const YYCTYPE *cursor, *limit, *marker;
|
||||||
|
|
||||||
|
cursor = marker = output = *pText;
|
||||||
|
|
||||||
|
size_t insize = *pSize;
|
||||||
|
size_t outsize = 0;
|
||||||
|
|
||||||
|
limit = cursor + insize;
|
||||||
|
|
||||||
|
while(1) {
|
||||||
|
loop:
|
||||||
|
/*!re2c
|
||||||
|
|
||||||
|
"$" cvsdat value "$" { APPEND(L"$" L"Date$"); goto loop; }
|
||||||
|
"$" cvsid value "$" { APPEND(L"$" L"Id$"); goto loop; }
|
||||||
|
"$" cvslog value "$" { APPEND(L"$" L"Log$"); goto loop; }
|
||||||
|
"$" cvsrev value "$" { APPEND(L"$" L"Revision$"); goto loop; }
|
||||||
|
"$" cvssrc value "$" { APPEND(L"$" L"Source$"); goto loop; }
|
||||||
|
any { output[outsize++] = cursor[-1]; if (cursor >= limit) break; goto loop; }
|
||||||
|
|
||||||
|
*/
|
||||||
|
}
|
||||||
|
output[outsize] = '\0';
|
||||||
|
|
||||||
|
// set the new size
|
||||||
|
*pSize = outsize;
|
||||||
|
|
||||||
|
*pbChanged = (insize == outsize) ? 0 : 1;
|
||||||
|
}
|
||||||
13
samples/C++/simple.re
Normal file
13
samples/C++/simple.re
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
#define NULL ((char*) 0)
|
||||||
|
char *scan(char *p){
|
||||||
|
char *q;
|
||||||
|
#define YYCTYPE char
|
||||||
|
#define YYCURSOR p
|
||||||
|
#define YYLIMIT p
|
||||||
|
#define YYMARKER q
|
||||||
|
#define YYFILL(n)
|
||||||
|
/*!re2c
|
||||||
|
[0-9]+ {return YYCURSOR;}
|
||||||
|
[\000-\377] {return NULL;}
|
||||||
|
*/
|
||||||
|
}
|
||||||
samples/Emacs Lisp/filenames/Cask (new file, 9 lines)
@@ -0,0 +1,9 @@
(package "composer" "0.0.7" "Interface to PHP Composer")
(source "melpa" "https://melpa.org/packages/")

(package-file "composer.el")

(depends-on "f")
(depends-on "s")
(depends-on "request")
(depends-on "seq")
59
samples/GN/BUILD.2.gn
Normal file
59
samples/GN/BUILD.2.gn
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
# Copyright 2016 the V8 project authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import("../gni/isolate.gni")
|
||||||
|
|
||||||
|
group("gn_all") {
|
||||||
|
testonly = true
|
||||||
|
|
||||||
|
if (v8_test_isolation_mode != "noop") {
|
||||||
|
deps = [
|
||||||
|
":check-static-initializers_run",
|
||||||
|
":jsfunfuzz_run",
|
||||||
|
":run-deopt-fuzzer_run",
|
||||||
|
":run-gcmole_run",
|
||||||
|
":run-valgrind_run",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
v8_isolate_run("check-static-initializers") {
|
||||||
|
deps = [
|
||||||
|
"..:d8_run",
|
||||||
|
]
|
||||||
|
|
||||||
|
isolate = "check-static-initializers.isolate"
|
||||||
|
}
|
||||||
|
|
||||||
|
v8_isolate_run("jsfunfuzz") {
|
||||||
|
deps = [
|
||||||
|
"..:d8_run",
|
||||||
|
]
|
||||||
|
|
||||||
|
isolate = "jsfunfuzz/jsfunfuzz.isolate"
|
||||||
|
}
|
||||||
|
|
||||||
|
v8_isolate_run("run-deopt-fuzzer") {
|
||||||
|
deps = [
|
||||||
|
"..:d8_run",
|
||||||
|
]
|
||||||
|
|
||||||
|
isolate = "run-deopt-fuzzer.isolate"
|
||||||
|
}
|
||||||
|
|
||||||
|
v8_isolate_run("run-gcmole") {
|
||||||
|
deps = [
|
||||||
|
"..:d8_run",
|
||||||
|
]
|
||||||
|
|
||||||
|
isolate = "gcmole/run-gcmole.isolate"
|
||||||
|
}
|
||||||
|
|
||||||
|
v8_isolate_run("run-valgrind") {
|
||||||
|
deps = [
|
||||||
|
"..:d8_run",
|
||||||
|
]
|
||||||
|
|
||||||
|
isolate = "run-valgrind.isolate"
|
||||||
|
}
|
||||||
samples/GN/BUILD.3.gn (new file, 1646 lines): diff suppressed because it is too large
samples/GN/BUILD.gn (new file, 2583 lines): diff suppressed because it is too large
samples/GN/android-rules.gni (new file, 2781 lines): diff suppressed because it is too large
samples/GN/clang.gni (new file, 13 lines)
@@ -0,0 +1,13 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/toolchain/toolchain.gni")

declare_args() {
  # Indicates if the build should use the Chrome-specific plugins for enforcing
  # coding guidelines, etc. Only used when compiling with Clang.
  clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang

  clang_base_path = "//third_party/llvm-build/Release+Asserts"
}
samples/GN/filenames/.gn (new file, 25 lines)
@@ -0,0 +1,25 @@
# This file is used by the GN meta build system to find the root of the source
# tree and to set startup options. For documentation on the values set in this
# file, run "gn help dotfile" at the command line.

import("//build/dotfile_settings.gni")

# The location of the build configuration file.
buildconfig = "//build/config/BUILDCONFIG.gn"

# The secondary source root is a parallel directory tree where
# GN build files are placed when they can not be placed directly
# in the source tree, e.g. for third party source trees.
secondary_source = "//build/secondary/"

# These are the targets to check headers for by default. The files in targets
# matching these patterns (see "gn help label_pattern" for format) will have
# their includes checked for proper dependencies when you run either
# "gn check" or "gn gen --check".
check_targets = []

# These are the list of GN files that run exec_script. This whitelist exists
# to force additional review for new uses of exec_script, which is strongly
# discouraged except for gypi_to_gn calls.
exec_script_whitelist =
    build_dotfile_settings.exec_script_whitelist + [ "//test/test262/BUILD.gn" ]
503
samples/GN/gcc_toolchain.gni
Normal file
503
samples/GN/gcc_toolchain.gni
Normal file
@@ -0,0 +1,503 @@
|
|||||||
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import("//build/config/android/config.gni")
|
||||||
|
import("//build/config/clang/clang.gni")
|
||||||
|
import("//build/config/nacl/config.gni")
|
||||||
|
import("//build/config/sanitizers/sanitizers.gni")
|
||||||
|
import("//build/config/v8_target_cpu.gni")
|
||||||
|
import("//build/toolchain/cc_wrapper.gni")
|
||||||
|
import("//build/toolchain/goma.gni")
|
||||||
|
import("//build/toolchain/toolchain.gni")
|
||||||
|
|
||||||
|
# This template defines a toolchain for something that works like gcc
|
||||||
|
# (including clang).
|
||||||
|
#
|
||||||
|
# It requires the following variables specifying the executables to run:
|
||||||
|
# - ar
|
||||||
|
# - cc
|
||||||
|
# - cxx
|
||||||
|
# - ld
|
||||||
|
#
|
||||||
|
# Optional parameters that control the tools:
|
||||||
|
#
|
||||||
|
# - extra_cflags
|
||||||
|
# Extra flags to be appended when compiling C files (but not C++ files).
|
||||||
|
# - extra_cppflags
|
||||||
|
# Extra flags to be appended when compiling both C and C++ files. "CPP"
|
||||||
|
# stands for "C PreProcessor" in this context, although it can be
|
||||||
|
# used for non-preprocessor flags as well. Not to be confused with
|
||||||
|
# "CXX" (which follows).
|
||||||
|
# - extra_cxxflags
|
||||||
|
# Extra flags to be appended when compiling C++ files (but not C files).
|
||||||
|
# - extra_ldflags
|
||||||
|
# Extra flags to be appended when linking
|
||||||
|
#
|
||||||
|
# - libs_section_prefix
|
||||||
|
# - libs_section_postfix
|
||||||
|
# The contents of these strings, if specified, will be placed around
|
||||||
|
# the libs section of the linker line. It allows one to inject libraries
|
||||||
|
# at the beginning and end for all targets in a toolchain.
|
||||||
|
# - solink_libs_section_prefix
|
||||||
|
# - solink_libs_section_postfix
|
||||||
|
# Same as libs_section_{pre,post}fix except used for solink instead of link.
|
||||||
|
# - link_outputs
|
||||||
|
# The content of this array, if specified, will be added to the list of
|
||||||
|
# outputs from the link command. This can be useful in conjunction with
|
||||||
|
# the post_link parameter.
|
||||||
|
# - post_link
|
||||||
|
# The content of this string, if specified, will be run as a separate
|
||||||
|
# command following the the link command.
|
||||||
|
# - deps
|
||||||
|
# Just forwarded to the toolchain definition.
|
||||||
|
# - executable_extension
|
||||||
|
# If this string is specified it will be used for the file extension
|
||||||
|
# for an executable, rather than using no extension; targets will
|
||||||
|
# still be able to override the extension using the output_extension
|
||||||
|
# variable.
|
||||||
|
# - rebuild_define
|
||||||
|
# The contents of this string, if specified, will be passed as a #define
|
||||||
|
# to the toolchain. It can be used to force recompiles whenever a
|
||||||
|
# toolchain is updated.
|
||||||
|
# - shlib_extension
|
||||||
|
# If this string is specified it will be used for the file extension
|
||||||
|
# for a shared library, rather than default value specified in
|
||||||
|
# toolchain.gni
|
||||||
|
# - strip
|
||||||
|
# Location of the strip executable. When specified, strip will be run on
|
||||||
|
# all shared libraries and executables as they are built. The pre-stripped
|
||||||
|
# artifacts will be put in lib.unstripped/ and exe.unstripped/.
|
||||||
|
template("gcc_toolchain") {
|
||||||
|
toolchain(target_name) {
|
||||||
|
assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
|
||||||
|
assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
|
||||||
|
assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
|
||||||
|
assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
|
||||||
|
|
||||||
|
# This define changes when the toolchain changes, forcing a rebuild.
|
||||||
|
# Nothing should ever use this define.
|
||||||
|
if (defined(invoker.rebuild_define)) {
|
||||||
|
rebuild_string = "-D" + invoker.rebuild_define + " "
|
||||||
|
} else {
|
||||||
|
rebuild_string = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# GN's syntax can't handle more than one scope dereference at once, like
|
||||||
|
# "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
|
||||||
|
# args so we can do "invoker_toolchain_args.foo".
|
||||||
|
assert(defined(invoker.toolchain_args),
|
||||||
|
"Toolchains must specify toolchain_args")
|
||||||
|
invoker_toolchain_args = invoker.toolchain_args
|
||||||
|
assert(defined(invoker_toolchain_args.current_cpu),
|
||||||
|
"toolchain_args must specify a current_cpu")
|
||||||
|
assert(defined(invoker_toolchain_args.current_os),
|
||||||
|
"toolchain_args must specify a current_os")
|
||||||
|
|
||||||
|
# When invoking this toolchain not as the default one, these args will be
|
||||||
|
# passed to the build. They are ignored when this is the default toolchain.
|
||||||
|
toolchain_args = {
|
||||||
|
# Populate toolchain args from the invoker.
|
||||||
|
forward_variables_from(invoker_toolchain_args, "*")
|
||||||
|
|
||||||
|
# The host toolchain value computed by the default toolchain's setup
|
||||||
|
# needs to be passed through unchanged to all secondary toolchains to
|
||||||
|
# ensure that it's always the same, regardless of the values that may be
|
||||||
|
# set on those toolchains.
|
||||||
|
host_toolchain = host_toolchain
|
||||||
|
|
||||||
|
if (!defined(invoker_toolchain_args.v8_current_cpu)) {
|
||||||
|
v8_current_cpu = invoker_toolchain_args.current_cpu
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# When the invoker has explicitly overridden use_goma or cc_wrapper in the
|
||||||
|
# toolchain args, use those values, otherwise default to the global one.
|
||||||
|
# This works because the only reasonable override that toolchains might
|
||||||
|
# supply for these values are to force-disable them.
|
||||||
|
if (defined(toolchain_args.use_goma)) {
|
||||||
|
toolchain_uses_goma = toolchain_args.use_goma
|
||||||
|
} else {
|
||||||
|
toolchain_uses_goma = use_goma
|
||||||
|
}
|
||||||
|
if (defined(toolchain_args.cc_wrapper)) {
|
||||||
|
toolchain_cc_wrapper = toolchain_args.cc_wrapper
|
||||||
|
} else {
|
||||||
|
toolchain_cc_wrapper = cc_wrapper
|
||||||
|
}
|
||||||
|
|
||||||
|
# Compute the compiler prefix.
|
||||||
|
if (toolchain_uses_goma) {
|
||||||
|
assert(toolchain_cc_wrapper == "",
|
||||||
|
"Goma and cc_wrapper can't be used together.")
|
||||||
|
compiler_prefix = "$goma_dir/gomacc "
|
||||||
|
} else if (toolchain_cc_wrapper != "") {
|
||||||
|
compiler_prefix = toolchain_cc_wrapper + " "
|
||||||
|
} else {
|
||||||
|
compiler_prefix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
cc = compiler_prefix + invoker.cc
|
||||||
|
cxx = compiler_prefix + invoker.cxx
|
||||||
|
ar = invoker.ar
|
||||||
|
ld = invoker.ld
|
||||||
|
if (defined(invoker.readelf)) {
|
||||||
|
readelf = invoker.readelf
|
||||||
|
} else {
|
||||||
|
readelf = "readelf"
|
||||||
|
}
|
||||||
|
if (defined(invoker.nm)) {
|
||||||
|
nm = invoker.nm
|
||||||
|
} else {
|
||||||
|
nm = "nm"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.shlib_extension)) {
|
||||||
|
default_shlib_extension = invoker.shlib_extension
|
||||||
|
} else {
|
||||||
|
default_shlib_extension = shlib_extension
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.executable_extension)) {
|
||||||
|
default_executable_extension = invoker.executable_extension
|
||||||
|
} else {
|
||||||
|
default_executable_extension = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Bring these into our scope for string interpolation with default values.
|
||||||
|
if (defined(invoker.libs_section_prefix)) {
|
||||||
|
libs_section_prefix = invoker.libs_section_prefix
|
||||||
|
} else {
|
||||||
|
libs_section_prefix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.libs_section_postfix)) {
|
||||||
|
libs_section_postfix = invoker.libs_section_postfix
|
||||||
|
} else {
|
||||||
|
libs_section_postfix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.solink_libs_section_prefix)) {
|
||||||
|
solink_libs_section_prefix = invoker.solink_libs_section_prefix
|
||||||
|
} else {
|
||||||
|
solink_libs_section_prefix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.solink_libs_section_postfix)) {
|
||||||
|
solink_libs_section_postfix = invoker.solink_libs_section_postfix
|
||||||
|
} else {
|
||||||
|
solink_libs_section_postfix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
|
||||||
|
extra_cflags = " " + invoker.extra_cflags
|
||||||
|
} else {
|
||||||
|
extra_cflags = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
|
||||||
|
extra_cppflags = " " + invoker.extra_cppflags
|
||||||
|
} else {
|
||||||
|
extra_cppflags = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
|
||||||
|
extra_cxxflags = " " + invoker.extra_cxxflags
|
||||||
|
} else {
|
||||||
|
extra_cxxflags = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
|
||||||
|
extra_ldflags = " " + invoker.extra_ldflags
|
||||||
|
} else {
|
||||||
|
extra_ldflags = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# These library switches can apply to all tools below.
|
||||||
|
lib_switch = "-l"
|
||||||
|
lib_dir_switch = "-L"
|
||||||
|
|
||||||
|
# Object files go in this directory.
|
||||||
|
object_subdir = "{{target_out_dir}}/{{label_name}}"
|
||||||
|
|
||||||
|
tool("cc") {
|
||||||
|
depfile = "{{output}}.d"
|
||||||
|
command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
|
||||||
|
depsformat = "gcc"
|
||||||
|
description = "CC {{output}}"
|
||||||
|
outputs = [
|
||||||
|
# The whitelist file is also an output, but ninja does not
|
||||||
|
# currently support multiple outputs for tool("cc").
|
||||||
|
"$object_subdir/{{source_name_part}}.o",
|
||||||
|
]
|
||||||
|
if (enable_resource_whitelist_generation) {
|
||||||
|
compile_wrapper =
|
||||||
|
rebase_path("//build/toolchain/gcc_compile_wrapper.py",
|
||||||
|
root_build_dir)
|
||||||
|
command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("cxx") {
|
||||||
|
depfile = "{{output}}.d"
|
||||||
|
command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
|
||||||
|
depsformat = "gcc"
|
||||||
|
description = "CXX {{output}}"
|
||||||
|
outputs = [
|
||||||
|
# The whitelist file is also an output, but ninja does not
|
||||||
|
# currently support multiple outputs for tool("cxx").
|
||||||
|
"$object_subdir/{{source_name_part}}.o",
|
||||||
|
]
|
||||||
|
if (enable_resource_whitelist_generation) {
|
||||||
|
compile_wrapper =
|
||||||
|
rebase_path("//build/toolchain/gcc_compile_wrapper.py",
|
||||||
|
root_build_dir)
|
||||||
|
command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("asm") {
|
||||||
|
# For GCC we can just use the C compiler to compile assembly.
|
||||||
|
depfile = "{{output}}.d"
|
||||||
|
command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
|
||||||
|
depsformat = "gcc"
|
||||||
|
description = "ASM {{output}}"
|
||||||
|
outputs = [
|
||||||
|
"$object_subdir/{{source_name_part}}.o",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("alink") {
|
||||||
|
rspfile = "{{output}}.rsp"
|
||||||
|
whitelist_flag = " "
|
||||||
|
if (enable_resource_whitelist_generation) {
|
||||||
|
whitelist_flag = " --resource-whitelist=\"{{output}}.whitelist\""
|
||||||
|
}
|
||||||
|
|
||||||
|
# This needs a Python script to avoid using simple sh features in this
|
||||||
|
# command, in case the host does not use a POSIX shell (e.g. compiling
|
||||||
|
# POSIX-like toolchains such as NaCl on Windows).
|
||||||
|
ar_wrapper =
|
||||||
|
rebase_path("//build/toolchain/gcc_ar_wrapper.py", root_build_dir)
|
||||||
|
command = "$python_path \"$ar_wrapper\"$whitelist_flag --output={{output}} --ar=\"$ar\" {{arflags}} rcsD @\"$rspfile\""
|
||||||
|
description = "AR {{output}}"
|
||||||
|
rspfile_content = "{{inputs}}"
|
||||||
|
outputs = [
|
||||||
|
"{{output_dir}}/{{target_output_name}}{{output_extension}}",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Shared libraries go in the target out directory by default so we can
|
||||||
|
# generate different targets with the same name and not have them collide.
|
||||||
|
default_output_dir = "{{target_out_dir}}"
|
||||||
|
default_output_extension = ".a"
|
||||||
|
output_prefix = "lib"
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("solink") {
|
||||||
|
soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
|
||||||
|
sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir.
|
||||||
|
rspfile = sofile + ".rsp"
|
||||||
|
pool = "//build/toolchain:link_pool($default_toolchain)"
|
||||||
|
whitelist_flag = " "
|
||||||
|
if (enable_resource_whitelist_generation) {
|
||||||
|
whitelist_file = "$sofile.whitelist"
|
||||||
|
whitelist_flag = " --resource-whitelist=\"$whitelist_file\""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
|
||||||
|
} else {
|
||||||
|
unstripped_sofile = sofile
|
||||||
|
}
|
||||||
|
|
||||||
|
# These variables are not built into GN but are helpers that
|
||||||
|
# implement (1) linking to produce a .so, (2) extracting the symbols
|
||||||
|
# from that file (3) if the extracted list differs from the existing
|
||||||
|
# .TOC file, overwrite it, otherwise, don't change it.
|
||||||
|
tocfile = sofile + ".TOC"
|
||||||
|
|
||||||
|
link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
|
||||||
|
|
||||||
|
assert(defined(readelf), "to solink you must have a readelf")
|
||||||
|
assert(defined(nm), "to solink you must have an nm")
|
||||||
|
strip_switch = ""
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
strip_switch = "--strip=${invoker.strip}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# This needs a Python script to avoid using a complex shell command
|
||||||
|
# requiring sh control structures, pipelines, and POSIX utilities.
|
||||||
|
# The host might not have a POSIX shell and utilities (e.g. Windows).
|
||||||
|
solink_wrapper = rebase_path("//build/toolchain/gcc_solink_wrapper.py")
|
||||||
|
command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\" --output=\"$sofile\"$whitelist_flag -- $link_command"
|
||||||
|
|
||||||
|
rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
|
||||||
|
|
||||||
|
description = "SOLINK $sofile"
|
||||||
|
|
||||||
|
# Use this for {{output_extension}} expansions unless a target manually
|
||||||
|
# overrides it (in which case {{output_extension}} will be what the target
|
||||||
|
# specifies).
|
||||||
|
default_output_extension = default_shlib_extension
|
||||||
|
|
||||||
|
default_output_dir = "{{root_out_dir}}"
|
||||||
|
if (shlib_subdir != ".") {
|
||||||
|
default_output_dir += "/$shlib_subdir"
|
||||||
|
}
|
||||||
|
|
||||||
|
output_prefix = "lib"
|
||||||
|
|
||||||
|
# Since the above commands only updates the .TOC file when it changes, ask
|
||||||
|
# Ninja to check if the timestamp actually changed to know if downstream
|
||||||
|
# dependencies should be recompiled.
|
||||||
|
restat = true
|
||||||
|
|
||||||
|
# Tell GN about the output files. It will link to the sofile but use the
|
||||||
|
# tocfile for dependency management.
|
||||||
|
outputs = [
|
||||||
|
sofile,
|
||||||
|
tocfile,
|
||||||
|
]
|
||||||
|
if (enable_resource_whitelist_generation) {
|
||||||
|
outputs += [ whitelist_file ]
|
||||||
|
}
|
||||||
|
if (sofile != unstripped_sofile) {
|
||||||
|
outputs += [ unstripped_sofile ]
|
||||||
|
}
|
||||||
|
link_output = sofile
|
||||||
|
depend_output = tocfile
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("solink_module") {
|
||||||
|
soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
|
||||||
|
sofile = "{{output_dir}}/$soname"
|
||||||
|
rspfile = sofile + ".rsp"
|
||||||
|
pool = "//build/toolchain:link_pool($default_toolchain)"
|
||||||
|
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
|
||||||
|
} else {
|
||||||
|
unstripped_sofile = sofile
|
||||||
|
}
|
||||||
|
|
||||||
|
command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
|
||||||
|
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
strip_command = "${invoker.strip} --strip-unneeded -o \"$sofile\" \"$unstripped_sofile\""
|
||||||
|
command += " && " + strip_command
|
||||||
|
}
|
||||||
|
rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
|
||||||
|
|
||||||
|
description = "SOLINK_MODULE $sofile"
|
||||||
|
|
||||||
|
# Use this for {{output_extension}} expansions unless a target manually
|
||||||
|
# overrides it (in which case {{output_extension}} will be what the target
|
||||||
|
# specifies).
|
||||||
|
if (defined(invoker.loadable_module_extension)) {
|
||||||
|
default_output_extension = invoker.loadable_module_extension
|
||||||
|
} else {
|
||||||
|
default_output_extension = default_shlib_extension
|
||||||
|
}
|
||||||
|
|
||||||
|
default_output_dir = "{{root_out_dir}}"
|
||||||
|
if (shlib_subdir != ".") {
|
||||||
|
default_output_dir += "/$shlib_subdir"
|
||||||
|
}
|
||||||
|
|
||||||
|
output_prefix = "lib"
|
||||||
|
|
||||||
|
outputs = [
|
||||||
|
sofile,
|
||||||
|
]
|
||||||
|
if (sofile != unstripped_sofile) {
|
||||||
|
outputs += [ unstripped_sofile ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tool("link") {
|
||||||
|
exename = "{{target_output_name}}{{output_extension}}"
|
||||||
|
outfile = "{{output_dir}}/$exename"
|
||||||
|
rspfile = "$outfile.rsp"
|
||||||
|
unstripped_outfile = outfile
|
||||||
|
pool = "//build/toolchain:link_pool($default_toolchain)"
|
||||||
|
|
||||||
|
# Use this for {{output_extension}} expansions unless a target manually
|
||||||
|
# overrides it (in which case {{output_extension}} will be what the target
|
||||||
|
# specifies).
|
||||||
|
default_output_extension = default_executable_extension
|
||||||
|
|
||||||
|
default_output_dir = "{{root_out_dir}}"
|
||||||
|
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
|
||||||
|
}
|
||||||
|
|
||||||
|
command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" -Wl,--start-group @\"$rspfile\" {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
|
||||||
|
if (defined(invoker.strip)) {
|
||||||
|
link_wrapper =
|
||||||
|
rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
|
||||||
|
command = "$python_path \"$link_wrapper\" --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\" --output=\"$outfile\" -- $command"
|
||||||
|
}
|
||||||
|
description = "LINK $outfile"
|
||||||
|
rspfile_content = "{{inputs}}"
|
||||||
|
outputs = [
|
||||||
|
outfile,
|
||||||
|
]
|
||||||
|
if (outfile != unstripped_outfile) {
|
||||||
|
outputs += [ unstripped_outfile ]
|
||||||
|
}
|
||||||
|
if (defined(invoker.link_outputs)) {
|
||||||
|
outputs += invoker.link_outputs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# These two are really entirely generic, but have to be repeated in
|
||||||
|
# each toolchain because GN doesn't allow a template to be used here.
|
||||||
|
# See //build/toolchain/toolchain.gni for details.
|
||||||
|
tool("stamp") {
|
||||||
|
command = stamp_command
|
||||||
|
description = stamp_description
|
||||||
|
}
|
||||||
|
tool("copy") {
|
||||||
|
command = copy_command
|
||||||
|
description = copy_description
|
||||||
|
}
|
||||||
|
|
||||||
|
forward_variables_from(invoker, [ "deps" ])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# This is a shorthand for gcc_toolchain instances based on the Chromium-built
|
||||||
|
# version of Clang. Only the toolchain_cpu and toolchain_os variables need to
|
||||||
|
# be specified by the invoker, and optionally toolprefix if it's a
|
||||||
|
# cross-compile case. Note that for a cross-compile case this toolchain
|
||||||
|
# requires a config to pass the appropriate -target option, or else it will
|
||||||
|
# actually just be doing a native compile. The invoker can optionally override
|
||||||
|
# use_gold too.
|
||||||
|
template("clang_toolchain") {
|
||||||
|
if (defined(invoker.toolprefix)) {
|
||||||
|
toolprefix = invoker.toolprefix
|
||||||
|
} else {
|
||||||
|
toolprefix = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
gcc_toolchain(target_name) {
|
||||||
|
prefix = rebase_path("$clang_base_path/bin", root_build_dir)
|
||||||
|
cc = "$prefix/clang"
|
||||||
|
cxx = "$prefix/clang++"
|
||||||
|
ld = cxx
|
||||||
|
|
||||||
|
readelf = "${toolprefix}readelf"
|
||||||
|
ar = "${toolprefix}ar"
|
||||||
|
nm = "${toolprefix}nm"
|
||||||
|
|
||||||
|
forward_variables_from(invoker, [ "strip" ])
|
||||||
|
|
||||||
|
toolchain_args = {
|
||||||
|
if (defined(invoker.toolchain_args)) {
|
||||||
|
forward_variables_from(invoker.toolchain_args, "*")
|
||||||
|
}
|
||||||
|
is_clang = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
235
samples/GN/icu.gn
Normal file
235
samples/GN/icu.gn
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import("//build/config/linux/pkg_config.gni")
|
||||||
|
import("//build/shim_headers.gni")
|
||||||
|
|
||||||
|
group("icu") {
|
||||||
|
public_deps = [
|
||||||
|
":icui18n",
|
||||||
|
":icuuc",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
config("icu_config") {
|
||||||
|
defines = [
|
||||||
|
"USING_SYSTEM_ICU=1",
|
||||||
|
"ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg_config("system_icui18n") {
|
||||||
|
packages = [ "icu-i18n" ]
|
||||||
|
}
|
||||||
|
|
||||||
|
pkg_config("system_icuuc") {
|
||||||
|
packages = [ "icu-uc" ]
|
||||||
|
}
|
||||||
|
|
||||||
|
source_set("icui18n") {
|
||||||
|
deps = [
|
||||||
|
":icui18n_shim",
|
||||||
|
]
|
||||||
|
public_configs = [
|
||||||
|
":icu_config",
|
||||||
|
":system_icui18n",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
source_set("icuuc") {
|
||||||
|
deps = [
|
||||||
|
":icuuc_shim",
|
||||||
|
]
|
||||||
|
public_configs = [
|
||||||
|
":icu_config",
|
||||||
|
":system_icuuc",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
shim_headers("icui18n_shim") {
|
||||||
|
root_path = "source/i18n"
|
||||||
|
headers = [
|
||||||
|
# This list can easily be updated using the command below:
|
||||||
|
# find third_party/icu/source/i18n/unicode \
|
||||||
|
# -iname '*.h' -printf '"%p",\n' | \
|
||||||
|
# sed -e 's|third_party/icu/i18n/common/||' | sort -u
|
||||||
|
"unicode/alphaindex.h",
|
||||||
|
"unicode/basictz.h",
|
||||||
|
"unicode/calendar.h",
|
||||||
|
"unicode/choicfmt.h",
|
||||||
|
"unicode/coleitr.h",
|
||||||
|
"unicode/coll.h",
|
||||||
|
"unicode/compactdecimalformat.h",
|
||||||
|
"unicode/curramt.h",
|
||||||
|
"unicode/currpinf.h",
|
||||||
|
"unicode/currunit.h",
|
||||||
|
"unicode/datefmt.h",
|
||||||
|
"unicode/dcfmtsym.h",
|
||||||
|
"unicode/decimfmt.h",
|
||||||
|
"unicode/dtfmtsym.h",
|
||||||
|
"unicode/dtitvfmt.h",
|
||||||
|
"unicode/dtitvinf.h",
|
||||||
|
"unicode/dtptngen.h",
|
||||||
|
"unicode/dtrule.h",
|
||||||
|
"unicode/fieldpos.h",
|
||||||
|
"unicode/fmtable.h",
|
||||||
|
"unicode/format.h",
|
||||||
|
"unicode/fpositer.h",
|
||||||
|
"unicode/gender.h",
|
||||||
|
"unicode/gregocal.h",
|
||||||
|
"unicode/locdspnm.h",
|
||||||
|
"unicode/measfmt.h",
|
||||||
|
"unicode/measunit.h",
|
||||||
|
"unicode/measure.h",
|
||||||
|
"unicode/msgfmt.h",
|
||||||
|
"unicode/numfmt.h",
|
||||||
|
"unicode/numsys.h",
|
||||||
|
"unicode/plurfmt.h",
|
||||||
|
"unicode/plurrule.h",
|
||||||
|
"unicode/rbnf.h",
|
||||||
|
"unicode/rbtz.h",
|
||||||
|
"unicode/regex.h",
|
||||||
|
"unicode/region.h",
|
||||||
|
"unicode/reldatefmt.h",
|
||||||
|
"unicode/scientificnumberformatter.h",
|
||||||
|
"unicode/search.h",
|
||||||
|
"unicode/selfmt.h",
|
||||||
|
"unicode/simpletz.h",
|
||||||
|
"unicode/smpdtfmt.h",
|
||||||
|
"unicode/sortkey.h",
|
||||||
|
"unicode/stsearch.h",
|
||||||
|
"unicode/tblcoll.h",
|
||||||
|
"unicode/timezone.h",
|
||||||
|
"unicode/tmunit.h",
|
||||||
|
"unicode/tmutamt.h",
|
||||||
|
"unicode/tmutfmt.h",
|
||||||
|
"unicode/translit.h",
|
||||||
|
"unicode/tzfmt.h",
|
||||||
|
"unicode/tznames.h",
|
||||||
|
"unicode/tzrule.h",
|
||||||
|
"unicode/tztrans.h",
|
||||||
|
"unicode/ucal.h",
|
||||||
|
"unicode/ucol.h",
|
||||||
|
"unicode/ucoleitr.h",
|
||||||
|
"unicode/ucsdet.h",
|
||||||
|
"unicode/ucurr.h",
|
||||||
|
"unicode/udat.h",
|
||||||
|
"unicode/udateintervalformat.h",
|
||||||
|
"unicode/udatpg.h",
|
||||||
|
"unicode/udisplaycontext.h",
|
||||||
|
"unicode/ufieldpositer.h",
|
||||||
|
"unicode/uformattable.h",
|
||||||
|
"unicode/ugender.h",
|
||||||
|
"unicode/uldnames.h",
|
||||||
|
"unicode/ulocdata.h",
|
||||||
|
"unicode/umsg.h",
|
||||||
|
"unicode/unirepl.h",
|
||||||
|
"unicode/unum.h",
|
||||||
|
"unicode/unumsys.h",
|
||||||
|
"unicode/upluralrules.h",
|
||||||
|
"unicode/uregex.h",
|
||||||
|
"unicode/uregion.h",
|
||||||
|
"unicode/usearch.h",
|
||||||
|
"unicode/uspoof.h",
|
||||||
|
"unicode/utmscale.h",
|
||||||
|
"unicode/utrans.h",
|
||||||
|
"unicode/vtzone.h",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
shim_headers("icuuc_shim") {
|
||||||
|
root_path = "source/common"
|
||||||
|
headers = [
|
||||||
|
# This list can easily be updated using the command below:
|
||||||
|
# find third_party/icu/source/common/unicode \
|
||||||
|
# -iname '*.h' -printf '"%p",\n' | \
|
||||||
|
# sed -e 's|third_party/icu/source/common/||' | sort -u
|
||||||
|
"unicode/appendable.h",
|
||||||
|
"unicode/brkiter.h",
|
||||||
|
"unicode/bytestream.h",
|
||||||
|
"unicode/bytestrie.h",
|
||||||
|
"unicode/bytestriebuilder.h",
|
||||||
|
"unicode/caniter.h",
|
||||||
|
"unicode/chariter.h",
|
||||||
|
"unicode/dbbi.h",
|
||||||
|
"unicode/docmain.h",
|
||||||
|
"unicode/dtintrv.h",
|
||||||
|
"unicode/enumset.h",
|
||||||
|
"unicode/errorcode.h",
|
||||||
|
"unicode/filteredbrk.h",
|
||||||
|
"unicode/icudataver.h",
|
||||||
|
"unicode/icuplug.h",
|
||||||
|
"unicode/idna.h",
|
||||||
|
"unicode/listformatter.h",
|
||||||
|
"unicode/localpointer.h",
|
||||||
|
"unicode/locid.h",
|
||||||
|
"unicode/messagepattern.h",
|
||||||
|
"unicode/normalizer2.h",
|
||||||
|
"unicode/normlzr.h",
|
||||||
|
"unicode/parseerr.h",
|
||||||
|
"unicode/parsepos.h",
|
||||||
|
"unicode/platform.h",
|
||||||
|
"unicode/ptypes.h",
|
||||||
|
"unicode/putil.h",
|
||||||
|
"unicode/rbbi.h",
|
||||||
|
"unicode/rep.h",
|
||||||
|
"unicode/resbund.h",
|
||||||
|
"unicode/schriter.h",
|
||||||
|
"unicode/std_string.h",
|
||||||
|
"unicode/strenum.h",
|
||||||
|
"unicode/stringpiece.h",
|
||||||
|
"unicode/stringtriebuilder.h",
|
||||||
|
"unicode/symtable.h",
|
||||||
|
"unicode/ubidi.h",
|
||||||
|
"unicode/ubrk.h",
|
||||||
|
"unicode/ucasemap.h",
|
||||||
|
"unicode/ucat.h",
|
||||||
|
"unicode/uchar.h",
|
||||||
|
"unicode/ucharstrie.h",
|
||||||
|
"unicode/ucharstriebuilder.h",
|
||||||
|
"unicode/uchriter.h",
|
||||||
|
"unicode/uclean.h",
|
||||||
|
"unicode/ucnv.h",
|
||||||
|
"unicode/ucnv_cb.h",
|
||||||
|
"unicode/ucnv_err.h",
|
||||||
|
"unicode/ucnvsel.h",
|
||||||
|
"unicode/uconfig.h",
|
||||||
|
"unicode/udata.h",
|
||||||
|
"unicode/uenum.h",
|
||||||
|
"unicode/uidna.h",
|
||||||
|
"unicode/uiter.h",
|
||||||
|
"unicode/ulistformatter.h",
|
||||||
|
"unicode/uloc.h",
|
||||||
|
"unicode/umachine.h",
|
||||||
|
"unicode/umisc.h",
|
||||||
|
"unicode/unifilt.h",
|
||||||
|
"unicode/unifunct.h",
|
||||||
|
"unicode/unimatch.h",
|
||||||
|
"unicode/uniset.h",
|
||||||
|
"unicode/unistr.h",
|
||||||
|
"unicode/unorm.h",
|
||||||
|
"unicode/unorm2.h",
|
||||||
|
"unicode/uobject.h",
|
||||||
|
"unicode/urename.h",
|
||||||
|
"unicode/urep.h",
|
||||||
|
"unicode/ures.h",
|
||||||
|
"unicode/uscript.h",
|
||||||
|
"unicode/uset.h",
|
||||||
|
"unicode/usetiter.h",
|
||||||
|
"unicode/ushape.h",
|
||||||
|
"unicode/usprep.h",
|
||||||
|
"unicode/ustring.h",
|
||||||
|
"unicode/ustringtrie.h",
|
||||||
|
"unicode/utext.h",
|
||||||
|
"unicode/utf.h",
|
||||||
|
"unicode/utf16.h",
|
||||||
|
"unicode/utf32.h",
|
||||||
|
"unicode/utf8.h",
|
||||||
|
"unicode/utf_old.h",
|
||||||
|
"unicode/utrace.h",
|
||||||
|
"unicode/utypes.h",
|
||||||
|
"unicode/uvernum.h",
|
||||||
|
"unicode/uversion.h",
|
||||||
|
]
|
||||||
|
}
|
||||||
2788
samples/GN/internal_rules.gni
Normal file
2788
samples/GN/internal_rules.gni
Normal file
File diff suppressed because it is too large
Load Diff
1422
samples/GN/ios-rules.gni
Normal file
1422
samples/GN/ios-rules.gni
Normal file
File diff suppressed because it is too large
Load Diff
193
samples/GN/isolate.gni
Normal file
193
samples/GN/isolate.gni
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
# Copyright 2016 the V8 project authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import("//build/config/sanitizers/sanitizers.gni")
|
||||||
|
import("//third_party/icu/config.gni")
|
||||||
|
import("v8.gni")
|
||||||
|
|
||||||
|
declare_args() {
|
||||||
|
# Sets the test isolation mode (noop|prepare|check).
|
||||||
|
v8_test_isolation_mode = "noop"
|
||||||
|
}
|
||||||
|
|
||||||
|
template("v8_isolate_run") {
|
||||||
|
forward_variables_from(invoker,
|
||||||
|
"*",
|
||||||
|
[
|
||||||
|
"deps",
|
||||||
|
"isolate",
|
||||||
|
])
|
||||||
|
|
||||||
|
# Remember target name as within the action scope the target name will be
|
||||||
|
# different.
|
||||||
|
name = target_name
|
||||||
|
|
||||||
|
assert(defined(invoker.deps))
|
||||||
|
assert(defined(invoker.isolate))
|
||||||
|
|
||||||
|
if (name != "" && v8_test_isolation_mode != "noop") {
|
||||||
|
action(name + "_run") {
|
||||||
|
testonly = true
|
||||||
|
|
||||||
|
deps = invoker.deps
|
||||||
|
|
||||||
|
script = "//tools/isolate_driver.py"
|
||||||
|
|
||||||
|
sources = [
|
||||||
|
invoker.isolate,
|
||||||
|
]
|
||||||
|
|
||||||
|
inputs = [
|
||||||
|
# Files that are known to be involved in this step.
|
||||||
|
"//tools/swarming_client/isolate.py",
|
||||||
|
"//tools/swarming_client/run_isolated.py",
|
||||||
|
]
|
||||||
|
|
||||||
|
if (v8_test_isolation_mode == "prepare") {
|
||||||
|
outputs = [
|
||||||
|
"$root_out_dir/$name.isolated.gen.json",
|
||||||
|
]
|
||||||
|
} else if (v8_test_isolation_mode == "check") {
|
||||||
|
outputs = [
|
||||||
|
"$root_out_dir/$name.isolated",
|
||||||
|
"$root_out_dir/$name.isolated.state",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Translate gn to gyp variables.
|
||||||
|
if (is_asan) {
|
||||||
|
asan = "1"
|
||||||
|
} else {
|
||||||
|
asan = "0"
|
||||||
|
}
|
||||||
|
if (is_msan) {
|
||||||
|
msan = "1"
|
||||||
|
} else {
|
||||||
|
msan = "0"
|
||||||
|
}
|
||||||
|
if (is_tsan) {
|
||||||
|
tsan = "1"
|
||||||
|
} else {
|
||||||
|
tsan = "0"
|
||||||
|
}
|
||||||
|
if (is_cfi) {
|
||||||
|
cfi_vptr = "1"
|
||||||
|
} else {
|
||||||
|
cfi_vptr = "0"
|
||||||
|
}
|
||||||
|
if (target_cpu == "x86") {
|
||||||
|
target_arch = "ia32"
|
||||||
|
} else {
|
||||||
|
target_arch = target_cpu
|
||||||
|
}
|
||||||
|
if (is_debug) {
|
||||||
|
configuration_name = "Debug"
|
||||||
|
} else {
|
||||||
|
configuration_name = "Release"
|
||||||
|
}
|
||||||
|
if (is_component_build) {
|
||||||
|
component = "shared_library"
|
||||||
|
} else {
|
||||||
|
component = "static_library"
|
||||||
|
}
|
||||||
|
if (icu_use_data_file) {
|
||||||
|
icu_use_data_file_flag = "1"
|
||||||
|
} else {
|
||||||
|
icu_use_data_file_flag = "0"
|
||||||
|
}
|
||||||
|
if (v8_enable_inspector) {
|
||||||
|
enable_inspector = "1"
|
||||||
|
} else {
|
||||||
|
enable_inspector = "0"
|
||||||
|
}
|
||||||
|
if (v8_use_external_startup_data) {
|
||||||
|
use_external_startup_data = "1"
|
||||||
|
} else {
|
||||||
|
use_external_startup_data = "0"
|
||||||
|
}
|
||||||
|
if (v8_use_snapshot) {
|
||||||
|
use_snapshot = "true"
|
||||||
|
} else {
|
||||||
|
use_snapshot = "false"
|
||||||
|
}
|
||||||
|
if (v8_has_valgrind) {
|
||||||
|
has_valgrind = "1"
|
||||||
|
} else {
|
||||||
|
has_valgrind = "0"
|
||||||
|
}
|
||||||
|
if (v8_gcmole) {
|
||||||
|
gcmole = "1"
|
||||||
|
} else {
|
||||||
|
gcmole = "0"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Note, all paths will be rebased in isolate_driver.py to be relative to
|
||||||
|
# the isolate file.
|
||||||
|
args = [
|
||||||
|
v8_test_isolation_mode,
|
||||||
|
"--isolated",
|
||||||
|
rebase_path("$root_out_dir/$name.isolated", root_build_dir),
|
||||||
|
"--isolate",
|
||||||
|
rebase_path(invoker.isolate, root_build_dir),
|
||||||
|
|
||||||
|
# Path variables are used to replace file paths when loading a .isolate
|
||||||
|
# file
|
||||||
|
"--path-variable",
|
||||||
|
"DEPTH",
|
||||||
|
rebase_path("//", root_build_dir),
|
||||||
|
"--path-variable",
|
||||||
|
"PRODUCT_DIR",
|
||||||
|
rebase_path(root_out_dir, root_build_dir),
|
||||||
|
|
||||||
|
# TODO(machenbach): Set variables for remaining features.
|
||||||
|
"--config-variable",
|
||||||
|
"CONFIGURATION_NAME=$configuration_name",
|
||||||
|
"--config-variable",
|
||||||
|
"OS=$target_os",
|
||||||
|
"--config-variable",
|
||||||
|
"asan=$asan",
|
||||||
|
"--config-variable",
|
||||||
|
"cfi_vptr=$cfi_vptr",
|
||||||
|
"--config-variable",
|
||||||
|
"gcmole=$gcmole",
|
||||||
|
"--config-variable",
|
||||||
|
"has_valgrind=$has_valgrind",
|
||||||
|
"--config-variable",
|
||||||
|
"icu_use_data_file_flag=$icu_use_data_file_flag",
|
||||||
|
"--config-variable",
|
||||||
|
"is_gn=1",
|
||||||
|
"--config-variable",
|
||||||
|
"msan=$msan",
|
||||||
|
"--config-variable",
|
||||||
|
"tsan=$tsan",
|
||||||
|
"--config-variable",
|
||||||
|
"coverage=0",
|
||||||
|
"--config-variable",
|
||||||
|
"sanitizer_coverage=0",
|
||||||
|
"--config-variable",
|
||||||
|
"component=$component",
|
||||||
|
"--config-variable",
|
||||||
|
"target_arch=$target_arch",
|
||||||
|
"--config-variable",
|
||||||
|
"v8_enable_inspector=$enable_inspector",
|
||||||
|
"--config-variable",
|
||||||
|
"v8_use_external_startup_data=$use_external_startup_data",
|
||||||
|
"--config-variable",
|
||||||
|
"v8_use_snapshot=$use_snapshot",
|
||||||
|
]
|
||||||
|
|
||||||
|
if (is_win) {
|
||||||
|
args += [
|
||||||
|
"--config-variable",
|
||||||
|
"msvs_version=2015",
|
||||||
|
]
|
||||||
|
} else {
|
||||||
|
args += [
|
||||||
|
"--config-variable",
|
||||||
|
"msvs_version=0",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
samples/Genie/Class.gs
Normal file
12
samples/Genie/Class.gs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
init
|
||||||
|
new Demo( "Demonstration class" ).run()
|
||||||
|
|
||||||
|
class Demo
|
||||||
|
_message:string = ""
|
||||||
|
|
||||||
|
construct ( message:string = "Optional argument - no message passed in constructor" )
|
||||||
|
_message = message
|
||||||
|
|
||||||
|
def run()
|
||||||
|
print( _message )
|
||||||
|
|
||||||
2
samples/Genie/Hello.gs
Normal file
2
samples/Genie/Hello.gs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
init
|
||||||
|
print( "Hello, World!" )
|
||||||
48
samples/HTML+Django/nunjucks.njk
Normal file
48
samples/HTML+Django/nunjucks.njk
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
{% from "forms.html" import label as description %}
|
||||||
|
|
||||||
|
|
||||||
|
{% macro field(name, value='', type='text') %}
|
||||||
|
<div class="field">
|
||||||
|
<input type="{{ type }}" name="{{ name }}"
|
||||||
|
value="{{ value | escape }}" />
|
||||||
|
</div>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
{% extends "head.html" %}
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
{% if horse %}
|
||||||
|
Chuck Norris once kicked a horse in the chin. Its descendants are known today as Giraffes.
|
||||||
|
{% elif optimus %}
|
||||||
|
Chuck Norris once urinated in a semi truck's gas tank as a joke....that truck is now known as Optimus Prime.
|
||||||
|
{% else %}
|
||||||
|
Chuck Norris threw a grenade and killed 50 people, then the grenade exploded.
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% block left %}
|
||||||
|
This is the left side!
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block right %}
|
||||||
|
This is the right side!
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{{ description('Username') }}
|
||||||
|
{{ field('user') }}
|
||||||
|
{{ field('pass', type='password') }}
|
||||||
|
|
||||||
|
<h1>Posts</h1>
|
||||||
|
<ul>
|
||||||
|
{% for item in items %}
|
||||||
|
<li>{{ item.title }}</li>
|
||||||
|
{% else %}
|
||||||
|
<li>This would display if the 'item' collection were empty</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
{# Don't escape foo #}
|
||||||
|
{{ foo | safe }}
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
6
samples/JSON5/filenames/.babelrc
Normal file
6
samples/JSON5/filenames/.babelrc
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"presets": [
|
||||||
|
"es2015",
|
||||||
|
"es2016"
|
||||||
|
]
|
||||||
|
}
|
||||||
923
samples/JavaScript/ccalc-lex.js
Normal file
923
samples/JavaScript/ccalc-lex.js
Normal file
@@ -0,0 +1,923 @@
|
|||||||
|
/* generated by jison-lex 0.3.4-159 */
|
||||||
|
var ccalcLex = (function () {
|
||||||
|
// See also:
|
||||||
|
// http://stackoverflow.com/questions/1382107/whats-a-good-way-to-extend-error-in-javascript/#35881508
|
||||||
|
// but we keep the prototype.constructor and prototype.name assignment lines too for compatibility
|
||||||
|
// with userland code which might access the derived class in a 'classic' way.
|
||||||
|
function JisonLexerError(msg, hash) {
|
||||||
|
Object.defineProperty(this, 'name', {
|
||||||
|
enumerable: false,
|
||||||
|
writable: false,
|
||||||
|
value: 'JisonLexerError'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (msg == null) msg = '???';
|
||||||
|
|
||||||
|
Object.defineProperty(this, 'message', {
|
||||||
|
enumerable: false,
|
||||||
|
writable: true,
|
||||||
|
value: msg
|
||||||
|
});
|
||||||
|
|
||||||
|
this.hash = hash;
|
||||||
|
|
||||||
|
var stacktrace;
|
||||||
|
if (hash && hash.exception instanceof Error) {
|
||||||
|
var ex2 = hash.exception;
|
||||||
|
this.message = ex2.message || msg;
|
||||||
|
stacktrace = ex2.stack;
|
||||||
|
}
|
||||||
|
if (!stacktrace) {
|
||||||
|
if (Error.hasOwnProperty('captureStackTrace')) { // V8
|
||||||
|
Error.captureStackTrace(this, this.constructor);
|
||||||
|
} else {
|
||||||
|
stacktrace = (new Error(msg)).stack;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (stacktrace) {
|
||||||
|
Object.defineProperty(this, 'stack', {
|
||||||
|
enumerable: false,
|
||||||
|
writable: false,
|
||||||
|
value: stacktrace
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof Object.setPrototypeOf === 'function') {
|
||||||
|
Object.setPrototypeOf(JisonLexerError.prototype, Error.prototype);
|
||||||
|
} else {
|
||||||
|
JisonLexerError.prototype = Object.create(Error.prototype);
|
||||||
|
}
|
||||||
|
JisonLexerError.prototype.constructor = JisonLexerError;
|
||||||
|
JisonLexerError.prototype.name = 'JisonLexerError';
|
||||||
|
|
||||||
|
|
||||||
|
var lexer = {
|
||||||
|
EOF: 1,
|
||||||
|
ERROR: 2,
|
||||||
|
|
||||||
|
// JisonLexerError: JisonLexerError, // <-- injected by the code generator
|
||||||
|
|
||||||
|
// options: {}, // <-- injected by the code generator
|
||||||
|
|
||||||
|
// yy: ..., // <-- injected by setInput()
|
||||||
|
|
||||||
|
__currentRuleSet__: null, // <-- internal rule set cache for the current lexer state
|
||||||
|
|
||||||
|
__error_infos: [], // INTERNAL USE ONLY: the set of lexErrorInfo objects created since the last cleanup
|
||||||
|
|
||||||
|
__decompressed: false, // INTERNAL USE ONLY: mark whether the lexer instance has been 'unfolded' completely and is now ready for use
|
||||||
|
|
||||||
|
done: false, // INTERNAL USE ONLY
|
||||||
|
_backtrack: false, // INTERNAL USE ONLY
|
||||||
|
_input: '', // INTERNAL USE ONLY
|
||||||
|
_more: false, // INTERNAL USE ONLY
|
||||||
|
_signaled_error_token: false, // INTERNAL USE ONLY
|
||||||
|
|
||||||
|
conditionStack: [], // INTERNAL USE ONLY; managed via `pushState()`, `popState()`, `topState()` and `stateStackSize()`
|
||||||
|
|
||||||
|
match: '', // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks input which has been matched so far for the lexer token under construction. `match` is identical to `yytext` except that this one still contains the matched input string after `lexer.performAction()` has been invoked, where userland code MAY have changed/replaced the `yytext` value entirely!
|
||||||
|
matched: '', // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks entire input which has been matched so far
|
||||||
|
matches: false, // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks RE match result for last (successful) match attempt
|
||||||
|
yytext: '', // ADVANCED USE ONLY: tracks input which has been matched so far for the lexer token under construction; this value is transferred to the parser as the 'token value' when the parser consumes the lexer token produced through a call to the `lex()` API.
|
||||||
|
offset: 0, // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks the 'cursor position' in the input string, i.e. the number of characters matched so far
|
||||||
|
yyleng: 0, // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: length of matched input for the token under construction (`yytext`)
|
||||||
|
yylineno: 0, // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: 'line number' at which the token under construction is located
|
||||||
|
yylloc: null, // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks location info (lines + columns) for the token under construction
|
||||||
|
|
||||||
|
// INTERNAL USE: construct a suitable error info hash object instance for `parseError`.
|
||||||
|
constructLexErrorInfo: function lexer_constructLexErrorInfo(msg, recoverable) {
|
||||||
|
var pei = {
|
||||||
|
errStr: msg,
|
||||||
|
recoverable: !!recoverable,
|
||||||
|
text: this.match, // This one MAY be empty; userland code should use the `upcomingInput` API to obtain more text which follows the 'lexer cursor position'...
|
||||||
|
token: null,
|
||||||
|
line: this.yylineno,
|
||||||
|
loc: this.yylloc,
|
||||||
|
yy: this.yy,
|
||||||
|
lexer: this,
|
||||||
|
|
||||||
|
// and make sure the error info doesn't stay due to potential
|
||||||
|
// ref cycle via userland code manipulations.
|
||||||
|
// These would otherwise all be memory leak opportunities!
|
||||||
|
//
|
||||||
|
// Note that only array and object references are nuked as those
|
||||||
|
// constitute the set of elements which can produce a cyclic ref.
|
||||||
|
// The rest of the members is kept intact as they are harmless.
|
||||||
|
destroy: function destructLexErrorInfo() {
|
||||||
|
// remove cyclic references added to error info:
|
||||||
|
// info.yy = null;
|
||||||
|
// info.lexer = null;
|
||||||
|
// ...
|
||||||
|
var rec = !!this.recoverable;
|
||||||
|
for (var key in this) {
|
||||||
|
if (this.hasOwnProperty(key) && typeof key === 'object') {
|
||||||
|
this[key] = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.recoverable = rec;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
// track this instance so we can `destroy()` it once we deem it superfluous and ready for garbage collection!
|
||||||
|
this.__error_infos.push(pei);
|
||||||
|
return pei;
|
||||||
|
},
|
||||||
|
|
||||||
|
parseError: function lexer_parseError(str, hash) {
|
||||||
|
if (this.yy.parser && typeof this.yy.parser.parseError === 'function') {
|
||||||
|
return this.yy.parser.parseError(str, hash) || this.ERROR;
|
||||||
|
} else if (typeof this.yy.parseError === 'function') {
|
||||||
|
return this.yy.parseError.call(this, str, hash) || this.ERROR;
|
||||||
|
} else {
|
||||||
|
throw new this.JisonLexerError(str);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// final cleanup function for when we have completed lexing the input;
|
||||||
|
// make it an API so that external code can use this one once userland
|
||||||
|
// code has decided it's time to destroy any lingering lexer error
|
||||||
|
// hash object instances and the like: this function helps to clean
|
||||||
|
// up these constructs, which *may* carry cyclic references which would
|
||||||
|
// otherwise prevent the instances from being properly and timely
|
||||||
|
// garbage-collected, i.e. this function helps prevent memory leaks!
|
||||||
|
cleanupAfterLex: function lexer_cleanupAfterLex(do_not_nuke_errorinfos) {
|
||||||
|
var rv;
|
||||||
|
|
||||||
|
// prevent lingering circular references from causing memory leaks:
|
||||||
|
this.setInput('', {});
|
||||||
|
|
||||||
|
// nuke the error hash info instances created during this run.
|
||||||
|
// Userland code must COPY any data/references
|
||||||
|
// in the error hash instance(s) it is more permanently interested in.
|
||||||
|
if (!do_not_nuke_errorinfos) {
|
||||||
|
for (var i = this.__error_infos.length - 1; i >= 0; i--) {
|
||||||
|
var el = this.__error_infos[i];
|
||||||
|
if (el && typeof el.destroy === 'function') {
|
||||||
|
el.destroy();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.__error_infos.length = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// clear the lexer token context; intended for internal use only
|
||||||
|
clear: function lexer_clear() {
|
||||||
|
this.yytext = '';
|
||||||
|
this.yyleng = 0;
|
||||||
|
this.match = '';
|
||||||
|
this.matches = false;
|
||||||
|
this._more = false;
|
||||||
|
this._backtrack = false;
|
||||||
|
},
|
||||||
|
|
||||||
|
// resets the lexer, sets new input
|
||||||
|
setInput: function lexer_setInput(input, yy) {
|
||||||
|
this.yy = yy || this.yy || {};
|
||||||
|
|
||||||
|
// also check if we've fully initialized the lexer instance,
|
||||||
|
// including expansion work to be done to go from a loaded
|
||||||
|
// lexer to a usable lexer:
|
||||||
|
if (!this.__decompressed) {
|
||||||
|
// step 1: decompress the regex list:
|
||||||
|
var rules = this.rules;
|
||||||
|
for (var i = 0, len = rules.length; i < len; i++) {
|
||||||
|
var rule_re = rules[i];
|
||||||
|
|
||||||
|
// compression: is the RE an xref to another RE slot in the rules[] table?
|
||||||
|
if (typeof rule_re === 'number') {
|
||||||
|
rules[i] = rules[rule_re];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// step 2: unfold the conditions[] set to make these ready for use:
|
||||||
|
var conditions = this.conditions;
|
||||||
|
for (var k in conditions) {
|
||||||
|
var spec = conditions[k];
|
||||||
|
|
||||||
|
var rule_ids = spec.rules;
|
||||||
|
|
||||||
|
var len = rule_ids.length;
|
||||||
|
var rule_regexes = new Array(len + 1); // slot 0 is unused; we use a 1-based index approach here to keep the hottest code in `lexer_next()` fast and simple!
|
||||||
|
var rule_new_ids = new Array(len + 1);
|
||||||
|
|
||||||
|
if (this.rules_prefix1) {
|
||||||
|
var rule_prefixes = new Array(65536);
|
||||||
|
var first_catch_all_index = 0;
|
||||||
|
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var idx = rule_ids[i];
|
||||||
|
var rule_re = rules[idx];
|
||||||
|
rule_regexes[i + 1] = rule_re;
|
||||||
|
rule_new_ids[i + 1] = idx;
|
||||||
|
|
||||||
|
var prefix = this.rules_prefix1[idx];
|
||||||
|
// compression: is the PREFIX-STRING an xref to another PREFIX-STRING slot in the rules_prefix1[] table?
|
||||||
|
if (typeof prefix === 'number') {
|
||||||
|
prefix = this.rules_prefix1[prefix];
|
||||||
|
}
|
||||||
|
// init the prefix lookup table: first come, first serve...
|
||||||
|
if (!prefix) {
|
||||||
|
if (!first_catch_all_index) {
|
||||||
|
first_catch_all_index = i + 1;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (var j = 0, pfxlen = prefix.length; j < pfxlen; j++) {
|
||||||
|
var pfxch = prefix.charCodeAt(j);
|
||||||
|
// first come, first serve:
|
||||||
|
if (!rule_prefixes[pfxch]) {
|
||||||
|
rule_prefixes[pfxch] = i + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if no catch-all prefix has been encountered yet, it means all
|
||||||
|
// rules have limited prefix sets and it MAY be that particular
|
||||||
|
// input characters won't be recognized by any rule in this
|
||||||
|
// condition state.
|
||||||
|
//
|
||||||
|
// To speed up their discovery at run-time while keeping the
|
||||||
|
// remainder of the lexer kernel code very simple (and fast),
|
||||||
|
// we point these to an 'illegal' rule set index *beyond*
|
||||||
|
// the end of the rule set.
|
||||||
|
if (!first_catch_all_index) {
|
||||||
|
first_catch_all_index = len + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var i = 0; i < 65536; i++) {
|
||||||
|
if (!rule_prefixes[i]) {
|
||||||
|
rule_prefixes[i] = first_catch_all_index;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
spec.__dispatch_lut = rule_prefixes;
|
||||||
|
} else {
|
||||||
|
for (var i = 0; i < len; i++) {
|
||||||
|
var idx = rule_ids[i];
|
||||||
|
var rule_re = rules[idx];
|
||||||
|
rule_regexes[i + 1] = rule_re;
|
||||||
|
rule_new_ids[i + 1] = idx;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
spec.rules = rule_new_ids;
|
||||||
|
spec.__rule_regexes = rule_regexes;
|
||||||
|
spec.__rule_count = len;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.__decompressed = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
this._input = input || '';
|
||||||
|
this.clear();
|
||||||
|
this._signaled_error_token = false;
|
||||||
|
this.done = false;
|
||||||
|
this.yylineno = 0;
|
||||||
|
this.matched = '';
|
||||||
|
this.conditionStack = ['INITIAL'];
|
||||||
|
this.__currentRuleSet__ = null;
|
||||||
|
this.yylloc = {
|
||||||
|
first_line: 1,
|
||||||
|
first_column: 0,
|
||||||
|
last_line: 1,
|
||||||
|
last_column: 0
|
||||||
|
};
|
||||||
|
if (this.options.ranges) {
|
||||||
|
this.yylloc.range = [0, 0];
|
||||||
|
}
|
||||||
|
this.offset = 0;
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// consumes and returns one char from the input
|
||||||
|
input: function lexer_input() {
|
||||||
|
if (!this._input) {
|
||||||
|
this.done = true;
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
var ch = this._input[0];
|
||||||
|
this.yytext += ch;
|
||||||
|
this.yyleng++;
|
||||||
|
this.offset++;
|
||||||
|
this.match += ch;
|
||||||
|
this.matched += ch;
|
||||||
|
// Count the linenumber up when we hit the LF (or a stand-alone CR).
|
||||||
|
// On CRLF, the linenumber is incremented when you fetch the CR or the CRLF combo
|
||||||
|
// and we advance immediately past the LF as well, returning both together as if
|
||||||
|
// it was all a single 'character' only.
|
||||||
|
var slice_len = 1;
|
||||||
|
var lines = false;
|
||||||
|
if (ch === '\n') {
|
||||||
|
lines = true;
|
||||||
|
} else if (ch === '\r') {
|
||||||
|
lines = true;
|
||||||
|
var ch2 = this._input[1];
|
||||||
|
if (ch2 === '\n') {
|
||||||
|
slice_len++;
|
||||||
|
ch += ch2;
|
||||||
|
this.yytext += ch2;
|
||||||
|
this.yyleng++;
|
||||||
|
this.offset++;
|
||||||
|
this.match += ch2;
|
||||||
|
this.matched += ch2;
|
||||||
|
if (this.options.ranges) {
|
||||||
|
this.yylloc.range[1]++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (lines) {
|
||||||
|
this.yylineno++;
|
||||||
|
this.yylloc.last_line++;
|
||||||
|
} else {
|
||||||
|
this.yylloc.last_column++;
|
||||||
|
}
|
||||||
|
if (this.options.ranges) {
|
||||||
|
this.yylloc.range[1]++;
|
||||||
|
}
|
||||||
|
|
||||||
|
this._input = this._input.slice(slice_len);
|
||||||
|
return ch;
|
||||||
|
},
|
||||||
|
|
||||||
|
// unshifts one char (or a string) into the input
|
||||||
|
unput: function lexer_unput(ch) {
|
||||||
|
var len = ch.length;
|
||||||
|
var lines = ch.split(/(?:\r\n?|\n)/g);
|
||||||
|
|
||||||
|
this._input = ch + this._input;
|
||||||
|
this.yytext = this.yytext.substr(0, this.yytext.length - len);
|
||||||
|
//this.yyleng -= len;
|
||||||
|
this.offset -= len;
|
||||||
|
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
|
||||||
|
this.match = this.match.substr(0, this.match.length - len);
|
||||||
|
this.matched = this.matched.substr(0, this.matched.length - len);
|
||||||
|
|
||||||
|
if (lines.length - 1) {
|
||||||
|
this.yylineno -= lines.length - 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.yylloc.last_line = this.yylineno + 1;
|
||||||
|
this.yylloc.last_column = (lines ?
|
||||||
|
(lines.length === oldLines.length ? this.yylloc.first_column : 0)
|
||||||
|
+ oldLines[oldLines.length - lines.length].length - lines[0].length :
|
||||||
|
this.yylloc.first_column - len);
|
||||||
|
|
||||||
|
if (this.options.ranges) {
|
||||||
|
this.yylloc.range[1] = this.yylloc.range[0] + this.yyleng - len;
|
||||||
|
}
|
||||||
|
this.yyleng = this.yytext.length;
|
||||||
|
this.done = false;
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// When called from action, caches matched text and appends it on next action
|
||||||
|
more: function lexer_more() {
|
||||||
|
this._more = true;
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
|
||||||
|
reject: function lexer_reject() {
|
||||||
|
if (this.options.backtrack_lexer) {
|
||||||
|
this._backtrack = true;
|
||||||
|
} else {
|
||||||
|
// when the parseError() call returns, we MUST ensure that the error is registered.
|
||||||
|
// We accomplish this by signaling an 'error' token to be produced for the current
|
||||||
|
// .lex() run.
|
||||||
|
var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), false);
|
||||||
|
this._signaled_error_token = (this.parseError(p.errStr, p) || this.ERROR);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// retain first n characters of the match
|
||||||
|
less: function lexer_less(n) {
|
||||||
|
return this.unput(this.match.slice(n));
|
||||||
|
},
|
||||||
|
|
||||||
|
// return (part of the) already matched input, i.e. for error messages.
|
||||||
|
// Limit the returned string length to `maxSize` (default: 20).
|
||||||
|
// Limit the returned string to the `maxLines` number of lines of input (default: 1).
|
||||||
|
// Negative limit values equal *unlimited*.
|
||||||
|
pastInput: function lexer_pastInput(maxSize, maxLines) {
|
||||||
|
var past = this.matched.substring(0, this.matched.length - this.match.length);
|
||||||
|
if (maxSize < 0)
|
||||||
|
maxSize = past.length;
|
||||||
|
else if (!maxSize)
|
||||||
|
maxSize = 20;
|
||||||
|
if (maxLines < 0)
|
||||||
|
maxLines = past.length; // can't ever have more input lines than this!
|
||||||
|
else if (!maxLines)
|
||||||
|
maxLines = 1;
|
||||||
|
// `substr` anticipation: treat \r\n as a single character and take a little
|
||||||
|
// more than necessary so that we can still properly check against maxSize
|
||||||
|
// after we've transformed and limited the newLines in here:
|
||||||
|
past = past.substr(-maxSize * 2 - 2);
|
||||||
|
// now that we have a significantly reduced string to process, transform the newlines
|
||||||
|
// and chop them, then limit them:
|
||||||
|
var a = past.replace(/\r\n|\r/g, '\n').split('\n');
|
||||||
|
a = a.slice(-maxLines);
|
||||||
|
past = a.join('\n');
|
||||||
|
// When, after limiting to maxLines, we still have too much to return,
|
||||||
|
// do add an ellipsis prefix...
|
||||||
|
if (past.length > maxSize) {
|
||||||
|
past = '...' + past.substr(-maxSize);
|
||||||
|
}
|
||||||
|
return past;
|
||||||
|
},
|
||||||
|
|
||||||
|
// return (part of the) upcoming input, i.e. for error messages.
|
||||||
|
// Limit the returned string length to `maxSize` (default: 20).
|
||||||
|
// Limit the returned string to the `maxLines` number of lines of input (default: 1).
|
||||||
|
// Negative limit values equal *unlimited*.
|
||||||
|
upcomingInput: function lexer_upcomingInput(maxSize, maxLines) {
|
||||||
|
var next = this.match;
|
||||||
|
if (maxSize < 0)
|
||||||
|
maxSize = next.length + this._input.length;
|
||||||
|
else if (!maxSize)
|
||||||
|
maxSize = 20;
|
||||||
|
if (maxLines < 0)
|
||||||
|
maxLines = maxSize; // can't ever have more input lines than this!
|
||||||
|
else if (!maxLines)
|
||||||
|
maxLines = 1;
|
||||||
|
// `substring` anticipation: treat \r\n as a single character and take a little
|
||||||
|
// more than necessary so that we can still properly check against maxSize
|
||||||
|
// after we've transformed and limited the newLines in here:
|
||||||
|
if (next.length < maxSize * 2 + 2) {
|
||||||
|
next += this._input.substring(0, maxSize * 2 + 2); // substring is faster on Chrome/V8
|
||||||
|
}
|
||||||
|
// now that we have a significantly reduced string to process, transform the newlines
|
||||||
|
// and chop them, then limit them:
|
||||||
|
var a = next.replace(/\r\n|\r/g, '\n').split('\n');
|
||||||
|
a = a.slice(0, maxLines);
|
||||||
|
next = a.join('\n');
|
||||||
|
// When, after limiting to maxLines, we still have too much to return,
|
||||||
|
// do add an ellipsis postfix...
|
||||||
|
if (next.length > maxSize) {
|
||||||
|
next = next.substring(0, maxSize) + '...';
|
||||||
|
}
|
||||||
|
return next;
|
||||||
|
},
|
||||||
|
|
||||||
|
// return a string which displays the character position where the lexing error occurred, i.e. for error messages
|
||||||
|
showPosition: function lexer_showPosition(maxPrefix, maxPostfix) {
|
||||||
|
var pre = this.pastInput(maxPrefix).replace(/\s/g, ' ');
|
||||||
|
var c = new Array(pre.length + 1).join('-');
|
||||||
|
return pre + this.upcomingInput(maxPostfix).replace(/\s/g, ' ') + '\n' + c + '^';
|
||||||
|
},
|
||||||
|
|
||||||
|
// helper function, used to produce a human readable description as a string, given
|
||||||
|
// the input `yylloc` location object.
|
||||||
|
// Set `display_range_too` to TRUE to include the string character index position(s)
|
||||||
|
// in the description if the `yylloc.range` is available.
|
||||||
|
describeYYLLOC: function lexer_describe_yylloc(yylloc, display_range_too) {
|
||||||
|
var l1 = yylloc.first_line;
|
||||||
|
var l2 = yylloc.last_line;
|
||||||
|
var o1 = yylloc.first_column;
|
||||||
|
var o2 = yylloc.last_column - 1;
|
||||||
|
var dl = l2 - l1;
|
||||||
|
var d_o = (dl === 0 ? o2 - o1 : 1000);
|
||||||
|
var rv;
|
||||||
|
if (dl === 0) {
|
||||||
|
rv = 'line ' + l1 + ', ';
|
||||||
|
if (d_o === 0) {
|
||||||
|
rv += 'column ' + o1;
|
||||||
|
} else {
|
||||||
|
rv += 'columns ' + o1 + ' .. ' + o2;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
rv = 'lines ' + l1 + '(column ' + o1 + ') .. ' + l2 + '(column ' + o2 + ')';
|
||||||
|
}
|
||||||
|
if (yylloc.range && display_range_too) {
|
||||||
|
var r1 = yylloc.range[0];
|
||||||
|
var r2 = yylloc.range[1] - 1;
|
||||||
|
if (r2 === r1) {
|
||||||
|
rv += ' {String Offset: ' + r1 + '}';
|
||||||
|
} else {
|
||||||
|
rv += ' {String Offset range: ' + r1 + ' .. ' + r2 + '}';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rv;
|
||||||
|
// return JSON.stringify(yylloc);
|
||||||
|
},
|
||||||
|
|
||||||
|
// test the lexed token: return FALSE when not a match, otherwise return token.
|
||||||
|
//
|
||||||
|
// `match` is supposed to be an array coming out of a regex match, i.e. `match[0]`
|
||||||
|
// contains the actually matched text string.
|
||||||
|
//
|
||||||
|
// Also move the input cursor forward and update the match collectors:
|
||||||
|
// - yytext
|
||||||
|
// - yyleng
|
||||||
|
// - match
|
||||||
|
// - matches
|
||||||
|
// - yylloc
|
||||||
|
// - offset
|
||||||
|
test_match: function lexer_test_match(match, indexed_rule) {
|
||||||
|
var token,
|
||||||
|
lines,
|
||||||
|
backup,
|
||||||
|
match_str;
|
||||||
|
|
||||||
|
if (this.options.backtrack_lexer) {
|
||||||
|
// save context
|
||||||
|
backup = {
|
||||||
|
yylineno: this.yylineno,
|
||||||
|
yylloc: {
|
||||||
|
first_line: this.yylloc.first_line,
|
||||||
|
last_line: this.last_line,
|
||||||
|
first_column: this.yylloc.first_column,
|
||||||
|
last_column: this.yylloc.last_column
|
||||||
|
},
|
||||||
|
yytext: this.yytext,
|
||||||
|
match: this.match,
|
||||||
|
matches: this.matches,
|
||||||
|
matched: this.matched,
|
||||||
|
yyleng: this.yyleng,
|
||||||
|
offset: this.offset,
|
||||||
|
_more: this._more,
|
||||||
|
_input: this._input,
|
||||||
|
yy: this.yy,
|
||||||
|
conditionStack: this.conditionStack.slice(0),
|
||||||
|
done: this.done
|
||||||
|
};
|
||||||
|
if (this.options.ranges) {
|
||||||
|
backup.yylloc.range = this.yylloc.range.slice(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
match_str = match[0];
|
||||||
|
lines = match_str.match(/(?:\r\n?|\n).*/g);
|
||||||
|
if (lines) {
|
||||||
|
this.yylineno += lines.length;
|
||||||
|
}
|
||||||
|
this.yylloc = {
|
||||||
|
first_line: this.yylloc.last_line,
|
||||||
|
last_line: this.yylineno + 1,
|
||||||
|
first_column: this.yylloc.last_column,
|
||||||
|
last_column: lines ?
|
||||||
|
lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
|
||||||
|
this.yylloc.last_column + match_str.length
|
||||||
|
};
|
||||||
|
this.yytext += match_str;
|
||||||
|
this.match += match_str;
|
||||||
|
this.matches = match;
|
||||||
|
this.yyleng = this.yytext.length;
|
||||||
|
if (this.options.ranges) {
|
||||||
|
this.yylloc.range = [this.offset, this.offset + this.yyleng];
|
||||||
|
}
|
||||||
|
// previous lex rules MAY have invoked the `more()` API rather than producing a token:
|
||||||
|
// those rules will already have moved this `offset` forward matching their match lengths,
|
||||||
|
// hence we must only add our own match length now:
|
||||||
|
this.offset += match_str.length;
|
||||||
|
this._more = false;
|
||||||
|
this._backtrack = false;
|
||||||
|
this._input = this._input.slice(match_str.length);
|
||||||
|
this.matched += match_str;
|
||||||
|
|
||||||
|
// calling this method:
|
||||||
|
//
|
||||||
|
// function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) {...}
|
||||||
|
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1] /* = YY_START */);
|
||||||
|
// otherwise, when the action codes are all simple return token statements:
|
||||||
|
//token = this.simpleCaseActionClusters[indexed_rule];
|
||||||
|
|
||||||
|
if (this.done && this._input) {
|
||||||
|
this.done = false;
|
||||||
|
}
|
||||||
|
if (token) {
|
||||||
|
return token;
|
||||||
|
} else if (this._backtrack) {
|
||||||
|
// recover context
|
||||||
|
for (var k in backup) {
|
||||||
|
this[k] = backup[k];
|
||||||
|
}
|
||||||
|
this.__currentRuleSet__ = null;
|
||||||
|
return false; // rule action called reject() implying the next rule should be tested instead.
|
||||||
|
} else if (this._signaled_error_token) {
|
||||||
|
// produce one 'error' token as .parseError() in reject() did not guarantee a failure signal by throwing an exception!
|
||||||
|
token = this._signaled_error_token;
|
||||||
|
this._signaled_error_token = false;
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
|
||||||
|
// return next match in input
|
||||||
|
next: function lexer_next() {
|
||||||
|
if (this.done) {
|
||||||
|
this.clear();
|
||||||
|
return this.EOF;
|
||||||
|
}
|
||||||
|
if (!this._input) {
|
||||||
|
this.done = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
var token,
|
||||||
|
match,
|
||||||
|
tempMatch,
|
||||||
|
index;
|
||||||
|
if (!this._more) {
|
||||||
|
this.clear();
|
||||||
|
}
|
||||||
|
var spec = this.__currentRuleSet__;
|
||||||
|
if (!spec) {
|
||||||
|
// Update the ruleset cache as we apparently encountered a state change or just started lexing.
|
||||||
|
// The cache is set up for fast lookup -- we assume a lexer will switch states much less often than it will
|
||||||
|
// invoke the `lex()` token-producing API and related APIs, hence caching the set for direct access helps
|
||||||
|
// speed up those activities a tiny bit.
|
||||||
|
spec = this.__currentRuleSet__ = this._currentRules();
|
||||||
|
}
|
||||||
|
|
||||||
|
var rule_ids = spec.rules;
|
||||||
|
// var dispatch = spec.__dispatch_lut;
|
||||||
|
var regexes = spec.__rule_regexes;
|
||||||
|
var len = spec.__rule_count;
|
||||||
|
|
||||||
|
// var c0 = this._input[0];
|
||||||
|
|
||||||
|
// Note: the arrays are 1-based, while `len` itself is a valid index,
|
||||||
|
// hence the non-standard less-or-equal check in the next loop condition!
|
||||||
|
//
|
||||||
|
// `dispatch` is a lookup table which lists the *first* rule which matches the 1-char *prefix* of the rule-to-match.
|
||||||
|
// By using that array as a jumpstart, we can cut down on the otherwise O(n*m) behaviour of this lexer, down to
|
||||||
|
// O(n) ideally, where:
|
||||||
|
//
|
||||||
|
// - N is the number of input particles -- which is not precisely characters
|
||||||
|
// as we progress on a per-regex-match basis rather than on a per-character basis
|
||||||
|
//
|
||||||
|
// - M is the number of rules (regexes) to test in the active condition state.
|
||||||
|
//
|
||||||
|
for (var i = 1 /* (dispatch[c0] || 1) */ ; i <= len; i++) {
|
||||||
|
tempMatch = this._input.match(regexes[i]);
|
||||||
|
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
|
||||||
|
match = tempMatch;
|
||||||
|
index = i;
|
||||||
|
if (this.options.backtrack_lexer) {
|
||||||
|
token = this.test_match(tempMatch, rule_ids[i]);
|
||||||
|
if (token !== false) {
|
||||||
|
return token;
|
||||||
|
} else if (this._backtrack) {
|
||||||
|
match = undefined;
|
||||||
|
continue; // rule action called reject() implying a rule MISmatch.
|
||||||
|
} else {
|
||||||
|
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} else if (!this.options.flex) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (match) {
|
||||||
|
token = this.test_match(match, rule_ids[index]);
|
||||||
|
if (token !== false) {
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (this._input === '') {
|
||||||
|
this.done = true;
|
||||||
|
return this.EOF;
|
||||||
|
} else {
|
||||||
|
var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), this.options.lexer_errors_are_recoverable);
|
||||||
|
token = (this.parseError(p.errStr, p) || this.ERROR);
|
||||||
|
if (token === this.ERROR) {
|
||||||
|
// we can try to recover from a lexer error that parseError() did not 'recover' for us, by moving forward at least one character at a time:
|
||||||
|
if (!this.match.length) {
|
||||||
|
this.input();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// return next match that has a token
|
||||||
|
lex: function lexer_lex() {
|
||||||
|
var r;
|
||||||
|
// allow the PRE/POST handlers set/modify the return token for maximum flexibility of the generated lexer:
|
||||||
|
if (typeof this.options.pre_lex === 'function') {
|
||||||
|
r = this.options.pre_lex.call(this);
|
||||||
|
}
|
||||||
|
while (!r) {
|
||||||
|
r = this.next();
|
||||||
|
}
|
||||||
|
if (typeof this.options.post_lex === 'function') {
|
||||||
|
// (also account for a userdef function which does not return any value: keep the token as is)
|
||||||
|
r = this.options.post_lex.call(this, r) || r;
|
||||||
|
}
|
||||||
|
return r;
|
||||||
|
},
|
||||||
|
|
||||||
|
// backwards compatible alias for `pushState()`;
|
||||||
|
// the latter is symmetrical with `popState()` and we advise to use
|
||||||
|
// those APIs in any modern lexer code, rather than `begin()`.
|
||||||
|
begin: function lexer_begin(condition) {
|
||||||
|
return this.pushState(condition);
|
||||||
|
},
|
||||||
|
|
||||||
|
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
|
||||||
|
pushState: function lexer_pushState(condition) {
|
||||||
|
this.conditionStack.push(condition);
|
||||||
|
this.__currentRuleSet__ = null;
|
||||||
|
return this;
|
||||||
|
},
|
||||||
|
|
||||||
|
// pop the previously active lexer condition state off the condition stack
|
||||||
|
popState: function lexer_popState() {
|
||||||
|
var n = this.conditionStack.length - 1;
|
||||||
|
if (n > 0) {
|
||||||
|
this.__currentRuleSet__ = null;
|
||||||
|
return this.conditionStack.pop();
|
||||||
|
} else {
|
||||||
|
return this.conditionStack[0];
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
|
||||||
|
topState: function lexer_topState(n) {
|
||||||
|
n = this.conditionStack.length - 1 - Math.abs(n || 0);
|
||||||
|
if (n >= 0) {
|
||||||
|
return this.conditionStack[n];
|
||||||
|
} else {
|
||||||
|
return 'INITIAL';
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// (internal) determine the lexer rule set which is active for the currently active lexer condition state
|
||||||
|
_currentRules: function lexer__currentRules() {
|
||||||
|
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
|
||||||
|
return this.conditions[this.conditionStack[this.conditionStack.length - 1]];
|
||||||
|
} else {
|
||||||
|
return this.conditions['INITIAL'];
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// return the number of states currently on the stack
|
||||||
|
stateStackSize: function lexer_stateStackSize() {
|
||||||
|
return this.conditionStack.length;
|
||||||
|
},
|
||||||
|
options: {},
|
||||||
|
JisonLexerError: JisonLexerError,
|
||||||
|
performAction: function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) {
|
||||||
|
|
||||||
|
var YYSTATE = YY_START;
|
||||||
|
switch($avoiding_name_collisions) {
|
||||||
|
case 0 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: [ \t\r\n]+ */
|
||||||
|
|
||||||
|
/* eat up whitespace */
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 1 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: {DIGIT}+ */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
yylval.value = atof(yy_.yytext);
|
||||||
|
return VALUE;
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 2 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: {DIGIT}+\.{DIGIT}* */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
yylval.value = atof(yy_.yytext);
|
||||||
|
return VALUE;
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 3 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: {DIGIT}+[eE]["+""-"]?{DIGIT}* */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
yylval.value = atof(yy_.yytext);
|
||||||
|
return VALUE;
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 4 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: {DIGIT}+\.{DIGIT}*[eE]["+""-"]?{DIGIT}* */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
yylval.value = atof(yy_.yytext);
|
||||||
|
return VALUE;
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 5 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: {ID} */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
yylval.string = malloc(strlen(yy_.yytext)+1);
|
||||||
|
strcpy(yylval.string, yy_.yytext);
|
||||||
|
return IDENTIFIER;
|
||||||
|
|
||||||
|
break;
|
||||||
|
case 6 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: \+ */
|
||||||
|
BeginToken(yy_.yytext); return ADD;
|
||||||
|
break;
|
||||||
|
case 7 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: - */
|
||||||
|
BeginToken(yy_.yytext); return SUB;
|
||||||
|
break;
|
||||||
|
case 8 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: \* */
|
||||||
|
BeginToken(yy_.yytext); return MULT;
|
||||||
|
break;
|
||||||
|
case 9 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: \/ */
|
||||||
|
BeginToken(yy_.yytext); return DIV;
|
||||||
|
break;
|
||||||
|
case 10 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: \( */
|
||||||
|
BeginToken(yy_.yytext); return LBRACE;
|
||||||
|
break;
|
||||||
|
case 11 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: \) */
|
||||||
|
BeginToken(yy_.yytext); return RBRACE;
|
||||||
|
break;
|
||||||
|
case 12 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: ; */
|
||||||
|
BeginToken(yy_.yytext); return SEMICOLON;
|
||||||
|
break;
|
||||||
|
case 13 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: = */
|
||||||
|
BeginToken(yy_.yytext); return ASSIGN;
|
||||||
|
break;
|
||||||
|
case 14 :
|
||||||
|
/*! Conditions:: INITIAL */
|
||||||
|
/*! Rule:: . */
|
||||||
|
|
||||||
|
BeginToken(yy_.yytext);
|
||||||
|
return yy_.yytext[0];
|
||||||
|
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
return this.simpleCaseActionClusters[$avoiding_name_collisions];
|
||||||
|
}
|
||||||
|
},
|
||||||
|
simpleCaseActionClusters: {
|
||||||
|
|
||||||
|
},
|
||||||
|
rules: [
|
||||||
|
/^(?:[ \t\r\n]+)/,
|
||||||
|
/^(?:(\d)+)/,
|
||||||
|
/^(?:(\d)+\.(\d)*)/,
|
||||||
|
/^(?:(\d)+[Ee]["+]?(\d)*)/,
|
||||||
|
/^(?:(\d)+\.(\d)*[Ee]["+]?(\d)*)/,
|
||||||
|
/^(?:([^\W\d]\w*))/,
|
||||||
|
/^(?:\+)/,
|
||||||
|
/^(?:-)/,
|
||||||
|
/^(?:\*)/,
|
||||||
|
/^(?:\/)/,
|
||||||
|
/^(?:\()/,
|
||||||
|
/^(?:\))/,
|
||||||
|
/^(?:;)/,
|
||||||
|
/^(?:=)/,
|
||||||
|
/^(?:.)/
|
||||||
|
],
|
||||||
|
conditions: {
|
||||||
|
"INITIAL": {
|
||||||
|
rules: [
|
||||||
|
0,
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3,
|
||||||
|
4,
|
||||||
|
5,
|
||||||
|
6,
|
||||||
|
7,
|
||||||
|
8,
|
||||||
|
9,
|
||||||
|
10,
|
||||||
|
11,
|
||||||
|
12,
|
||||||
|
13,
|
||||||
|
14
|
||||||
|
],
|
||||||
|
inclusive: true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/*--------------------------------------------------------------------
|
||||||
|
* lex.l
|
||||||
|
*------------------------------------------------------------------*/;
|
||||||
|
return lexer;
|
||||||
|
})();
|
||||||
2145
samples/JavaScript/ccalc-parse.js
Normal file
2145
samples/JavaScript/ccalc-parse.js
Normal file
File diff suppressed because it is too large
Load Diff
31
samples/JavaScript/proto.js
Normal file
31
samples/JavaScript/proto.js
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
/**
|
||||||
|
* @fileoverview
|
||||||
|
* @enhanceable
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
// GENERATED CODE -- DO NOT EDIT!
|
||||||
|
|
||||||
|
goog.provide('proto.google.protobuf.Timestamp');
|
||||||
|
|
||||||
|
goog.require('jspb.Message');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generated by JsPbCodeGenerator.
|
||||||
|
* @param {Array=} opt_data Optional initial data array, typically from a
|
||||||
|
* server response, or constructed directly in Javascript. The array is used
|
||||||
|
* in place and becomes part of the constructed object. It is not cloned.
|
||||||
|
* If no data is provided, the constructed object will be empty, but still
|
||||||
|
* valid.
|
||||||
|
* @extends {jspb.Message}
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
proto.google.protobuf.Timestamp = function(opt_data) {
|
||||||
|
jspb.Message.initialize(this, opt_data, 0, -1, null, null);
|
||||||
|
};
|
||||||
|
goog.inherits(proto.google.protobuf.Timestamp, jspb.Message);
|
||||||
|
if (goog.DEBUG && !COMPILED) {
|
||||||
|
proto.google.protobuf.Timestamp.displayName = 'proto.google.protobuf.Timestamp';
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Remainder elided
|
||||||
93
samples/PHP/ThriftGenerated.php
Normal file
93
samples/PHP/ThriftGenerated.php
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
<?php
|
||||||
|
namespace github\com;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Autogenerated by Thrift Compiler (0.9.3)
|
||||||
|
*
|
||||||
|
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||||
|
* @generated
|
||||||
|
*/
|
||||||
|
use Thrift\Base\TBase;
|
||||||
|
use Thrift\Type\TType;
|
||||||
|
use Thrift\Type\TMessageType;
|
||||||
|
use Thrift\Exception\TException;
|
||||||
|
use Thrift\Exception\TProtocolException;
|
||||||
|
use Thrift\Protocol\TProtocol;
|
||||||
|
use Thrift\Protocol\TBinaryProtocolAccelerated;
|
||||||
|
use Thrift\Exception\TApplicationException;
|
||||||
|
|
||||||
|
|
||||||
|
class PullRequest {
|
||||||
|
static $_TSPEC;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @var string
|
||||||
|
*/
|
||||||
|
public $title = null;
|
||||||
|
|
||||||
|
public function __construct($vals=null) {
|
||||||
|
if (!isset(self::$_TSPEC)) {
|
||||||
|
self::$_TSPEC = array(
|
||||||
|
1 => array(
|
||||||
|
'var' => 'title',
|
||||||
|
'type' => TType::STRING,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (is_array($vals)) {
|
||||||
|
if (isset($vals['title'])) {
|
||||||
|
$this->title = $vals['title'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public function getName() {
|
||||||
|
return 'PullRequest';
|
||||||
|
}
|
||||||
|
|
||||||
|
public function read($input)
|
||||||
|
{
|
||||||
|
$xfer = 0;
|
||||||
|
$fname = null;
|
||||||
|
$ftype = 0;
|
||||||
|
$fid = 0;
|
||||||
|
$xfer += $input->readStructBegin($fname);
|
||||||
|
while (true)
|
||||||
|
{
|
||||||
|
$xfer += $input->readFieldBegin($fname, $ftype, $fid);
|
||||||
|
if ($ftype == TType::STOP) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
switch ($fid)
|
||||||
|
{
|
||||||
|
case 1:
|
||||||
|
if ($ftype == TType::STRING) {
|
||||||
|
$xfer += $input->readString($this->title);
|
||||||
|
} else {
|
||||||
|
$xfer += $input->skip($ftype);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
$xfer += $input->skip($ftype);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
$xfer += $input->readFieldEnd();
|
||||||
|
}
|
||||||
|
$xfer += $input->readStructEnd();
|
||||||
|
return $xfer;
|
||||||
|
}
|
||||||
|
|
||||||
|
public function write($output) {
|
||||||
|
$xfer = 0;
|
||||||
|
$xfer += $output->writeStructBegin('PullRequest');
|
||||||
|
if ($this->title !== null) {
|
||||||
|
$xfer += $output->writeFieldBegin('title', TType::STRING, 1);
|
||||||
|
$xfer += $output->writeString($this->title);
|
||||||
|
$xfer += $output->writeFieldEnd();
|
||||||
|
}
|
||||||
|
$xfer += $output->writeFieldStop();
|
||||||
|
$xfer += $output->writeStructEnd();
|
||||||
|
return $xfer;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
37
samples/PHP/filenames/.php_cs
Normal file
37
samples/PHP/filenames/.php_cs
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
<?php
|
||||||
|
|
||||||
|
$header = <<<'EOF'
|
||||||
|
This file is part of PHP CS Fixer.
|
||||||
|
|
||||||
|
(c) Fabien Potencier <fabien@symfony.com>
|
||||||
|
Dariusz Rumiński <dariusz.ruminski@gmail.com>
|
||||||
|
|
||||||
|
This source file is subject to the MIT license that is bundled
|
||||||
|
with this source code in the file LICENSE.
|
||||||
|
EOF;
|
||||||
|
|
||||||
|
return PhpCsFixer\Config::create()
|
||||||
|
->setRiskyAllowed(true)
|
||||||
|
->setRules(array(
|
||||||
|
'@Symfony' => true,
|
||||||
|
'@Symfony:risky' => true,
|
||||||
|
'combine_consecutive_unsets' => true,
|
||||||
|
'header_comment' => array('header' => $header),
|
||||||
|
'array_syntax' => array('syntax' => 'long'),
|
||||||
|
'no_extra_consecutive_blank_lines' => array('break', 'continue', 'extra', 'return', 'throw', 'use', 'parenthesis_brace_block', 'square_brace_block', 'curly_brace_block'),
|
||||||
|
'no_useless_else' => true,
|
||||||
|
'no_useless_return' => true,
|
||||||
|
'ordered_class_elements' => true,
|
||||||
|
'ordered_imports' => true,
|
||||||
|
'php_unit_strict' => true,
|
||||||
|
'phpdoc_add_missing_param_annotation' => true,
|
||||||
|
'psr4' => true,
|
||||||
|
'strict_comparison' => true,
|
||||||
|
'strict_param' => true,
|
||||||
|
))
|
||||||
|
->setFinder(
|
||||||
|
PhpCsFixer\Finder::create()
|
||||||
|
->exclude('tests/Fixtures')
|
||||||
|
->in(__DIR__)
|
||||||
|
)
|
||||||
|
;
|
||||||
37  samples/PHP/filenames/.php_cs.dist  Normal file
@@ -0,0 +1,37 @@
<?php

$header = <<<'EOF'
This file is part of PHP CS Fixer.

(c) Fabien Potencier <fabien@symfony.com>
    Dariusz Rumiński <dariusz.ruminski@gmail.com>

This source file is subject to the MIT license that is bundled
with this source code in the file LICENSE.
EOF;

return PhpCsFixer\Config::create()
    ->setRiskyAllowed(true)
    ->setRules(array(
        '@Symfony' => true,
        '@Symfony:risky' => true,
        'combine_consecutive_unsets' => true,
        'header_comment' => array('header' => $header),
        'array_syntax' => array('syntax' => 'long'),
        'no_extra_consecutive_blank_lines' => array('break', 'continue', 'extra', 'return', 'throw', 'use', 'parenthesis_brace_block', 'square_brace_block', 'curly_brace_block'),
        'no_useless_else' => true,
        'no_useless_return' => true,
        'ordered_class_elements' => true,
        'ordered_imports' => true,
        'php_unit_strict' => true,
        'phpdoc_add_missing_param_annotation' => true,
        'psr4' => true,
        'strict_comparison' => true,
        'strict_param' => true,
    ))
    ->setFinder(
        PhpCsFixer\Finder::create()
            ->exclude('tests/Fixtures')
            ->in(__DIR__)
    )
;
9  samples/Python/filenames/.gclient  Normal file
@@ -0,0 +1,9 @@
solutions = [
  {
    "url": "https://chromium.googlesource.com/v8/v8.git",
    "managed": False,
    "name": "v8",
    "deps_file": "DEPS",
    "custom_deps": {},
  },
]
20  samples/Python/py3.py3  Normal file
@@ -0,0 +1,20 @@
import random

guesses = 0

number = random.randint(1, 20)

print("Guess the number between 1 and 20! You have 6 tries.")
while guesses < 6:
    guess = int(input("Is it... "))

    if guess == number:
        print("Hooray! You guessed it right!")
        break
    elif guess < number:
        print("It's bigger...")
    elif guess > number:
        print("It's not so big.")
    guesses += 1
    if guesses == 6:
        print("You've ran out of tries.")
1522  samples/Python/standalone.gypi  Normal file
File diff suppressed because it is too large
1420  samples/Python/toolchain.gypi  Normal file
File diff suppressed because it is too large
72  samples/R/import.Rd  Normal file
@@ -0,0 +1,72 @@
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/hello.R
\name{import}
\alias{import}
\title{Import a module into the current scope}
\usage{
import(module, attach, attach_operators = TRUE)
}
\arguments{
\item{module}{an identifier specifying the full module path}

\item{attach}{if \code{TRUE}, attach the newly loaded module to the object
search path (see \code{Details})}

\item{attach_operators}{if \code{TRUE}, attach operators of module to the
object search path, even if \code{attach} is \code{FALSE}}
}
\value{
the loaded module environment (invisible)
}
\description{
\code{module = import('module')} imports a specified module and makes its
code available via the environment-like object it returns.
}
\details{
Modules are loaded in an isolated environment which is returned, and
optionally attached to the object search path of the current scope (if
argument \code{attach} is \code{TRUE}).
\code{attach} defaults to \code{FALSE}. However, in interactive code it is
often helpful to attach packages by default. Therefore, in interactive code
invoked directly from the terminal only (i.e. not within modules),
\code{attach} defaults to the value of \code{options('import.attach')}, which
can be set to \code{TRUE} or \code{FALSE} depending on the user’s preference.

\code{attach_operators} causes \emph{operators} to be attached by default,
because operators can only be invoked in R if they are found in the search
path. Not attaching them therefore drastically limits a module’s usefulness.

Modules are searched in the module search path \code{options('import.path')}.
This is a vector of paths to consider, from the highest to the lowest
priority. The current directory is \emph{always} considered first. That is,
if a file \code{a.r} exists both in the current directory and in a module
search path, the local file \code{./a.r} will be loaded.

Module names can be fully qualified to refer to nested paths. See
\code{Examples}.
}
\note{
Unlike for packages, attaching happens \emph{locally}: if
\code{import} is executed in the global environment, the effect is the same.
Otherwise, the imported module is inserted as the parent of the current
\code{environment()}. When used (globally) \emph{inside} a module, the newly
imported module is only available inside the module’s search path, not
outside it (nor in other modules which might be loaded).
}
\examples{
# `a.r` is a file in the local directory containing a function `f`.
a = import('a')
a$f()

# b/c.r is a file in path `b`, containing a function `g`.
import('b/c', attach = TRUE)
g() # No module name qualification necessary

}
\seealso{
\code{unload}

\code{reload}

\code{module_name}
}
10  samples/Rascal/Analyze.rsc  Normal file
@@ -0,0 +1,10 @@
module Analyze

import Syntax;

set[Id] unreachable(Machine m) {
  r = { <q1,q2> | (State)`state <Id q1> <Trans* ts>` <- m.states,
                  (Trans)`<Id _>: <Id q2>` <- ts }+;
  qs = [ q.name | /State q := m ];
  return { q | q <- qs, q notin r[qs[0]] };
}
18  samples/Rascal/Compile.rsc  Normal file
@@ -0,0 +1,18 @@
module Compile

import Syntax;

str compile(Machine m) =
  "while (true) {
  '  event = input.next();
  '  switch (current) {
  '    <for (q <- m.states) {>
  '    case \"<q.name>\":
  '      <for (t <- q.out) {>
  '      if (event.equals(\"<t.event>\"))
  '        current = \"<t.to>\";
  '      <}>
  '      break;
  '    <}>
  '  }
  '}";
887  samples/Rascal/Rascal.rsc  Normal file
@@ -0,0 +1,887 @@
|
|||||||
|
@license{
|
||||||
|
Copyright (c) 2009-2015 CWI
|
||||||
|
All rights reserved. This program and the accompanying materials
|
||||||
|
are made available under the terms of the Eclipse Public License v1.0
|
||||||
|
which accompanies this distribution, and is available at
|
||||||
|
http://www.eclipse.org/legal/epl-v10.html
|
||||||
|
}
|
||||||
|
@contributor{Jurgen J. Vinju - Jurgen.Vinju@cwi.nl - CWI}
|
||||||
|
@contributor{Tijs van der Storm - Tijs.van.der.Storm@cwi.nl}
|
||||||
|
@contributor{Paul Klint - Paul.Klint@cwi.nl - CWI}
|
||||||
|
@contributor{Arnold Lankamp - Arnold.Lankamp@cwi.nl}
|
||||||
|
@contributor{Michael Steindorfer - Michael.Steindorfer@cwi.nl - CWI}
|
||||||
|
@doc{The syntax definition of Rascal, excluding concrete syntax fragments}
|
||||||
|
module lang::rascal::\syntax::Rascal
|
||||||
|
|
||||||
|
lexical BooleanLiteral
|
||||||
|
= "true"
|
||||||
|
| "false" ;
|
||||||
|
|
||||||
|
syntax Literal
|
||||||
|
= integer: IntegerLiteral integerLiteral
|
||||||
|
| regExp: RegExpLiteral regExpLiteral
|
||||||
|
| \real: RealLiteral realLiteral
|
||||||
|
| boolean: BooleanLiteral booleanLiteral
|
||||||
|
| string: StringLiteral stringLiteral
|
||||||
|
| dateTime: DateTimeLiteral dateTimeLiteral
|
||||||
|
| location: LocationLiteral locationLiteral
|
||||||
|
| rational: RationalLiteral rationalLiteral
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Expression = concrete: Concrete concrete;
|
||||||
|
syntax Pattern = concrete: Concrete concrete;
|
||||||
|
|
||||||
|
lexical Concrete
|
||||||
|
= typed: "(" LAYOUTLIST l1 Sym symbol LAYOUTLIST l2 ")" LAYOUTLIST l3 "`" ConcretePart* parts "`";
|
||||||
|
|
||||||
|
lexical ConcretePart
|
||||||
|
= @category="MetaSkipped" text : ![`\<\>\\\n]+ !>> ![`\<\>\\\n]
|
||||||
|
| newline: "\n" [\ \t \u00A0 \u1680 \u2000-\u200A \u202F \u205F \u3000]* "\'"
|
||||||
|
| @category="MetaVariable" hole : ConcreteHole hole
|
||||||
|
| @category="MetaSkipped" lt: "\\\<"
|
||||||
|
| @category="MetaSkipped" gt: "\\\>"
|
||||||
|
| @category="MetaSkipped" bq: "\\`"
|
||||||
|
| @category="MetaSkipped" bs: "\\\\"
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax ConcreteHole
|
||||||
|
= \one: "\<" Sym symbol Name name "\>"
|
||||||
|
;
|
||||||
|
|
||||||
|
start syntax Module
|
||||||
|
= \default: Header header Body body ;
|
||||||
|
|
||||||
|
syntax ModuleParameters
|
||||||
|
= \default: "[" {TypeVar ","}+ parameters "]" ;
|
||||||
|
|
||||||
|
lexical DateAndTime
|
||||||
|
= "$" DatePart "T" TimePartNoTZ !>> [+\-] "$"
|
||||||
|
| "$" DatePart "T" TimePartNoTZ TimeZonePart "$";
|
||||||
|
|
||||||
|
syntax Strategy
|
||||||
|
= topDownBreak: "top-down-break"
|
||||||
|
| topDown: "top-down"
|
||||||
|
| bottomUp: "bottom-up"
|
||||||
|
| bottomUpBreak: "bottom-up-break"
|
||||||
|
| outermost: "outermost"
|
||||||
|
| innermost: "innermost" ;
|
||||||
|
|
||||||
|
lexical UnicodeEscape
|
||||||
|
= utf16: "\\" [u] [0-9 A-F a-f] [0-9 A-F a-f] [0-9 A-F a-f] [0-9 A-F a-f]
|
||||||
|
| utf32: "\\" [U] (("0" [0-9 A-F a-f]) | "10") [0-9 A-F a-f] [0-9 A-F a-f] [0-9 A-F a-f] [0-9 A-F a-f] // 24 bits
|
||||||
|
| ascii: "\\" [a] [0-7] [0-9A-Fa-f]
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Variable
|
||||||
|
= initialized: Name name "=" Expression initial
|
||||||
|
| unInitialized: Name name ;
|
||||||
|
|
||||||
|
lexical OctalIntegerLiteral
|
||||||
|
= [0] [0-7]+ !>> [0-9 A-Z _ a-z] ;
|
||||||
|
|
||||||
|
syntax TypeArg
|
||||||
|
= \default: Type type
|
||||||
|
| named: Type type Name name ;
|
||||||
|
|
||||||
|
syntax Renaming
|
||||||
|
= \default: Name from "=\>" Name to ;
|
||||||
|
|
||||||
|
syntax Catch
|
||||||
|
= \default: "catch" ":" Statement body
|
||||||
|
| binding: "catch" Pattern pattern ":" Statement body ;
|
||||||
|
|
||||||
|
lexical PathChars
|
||||||
|
= URLChars [|] ;
|
||||||
|
|
||||||
|
syntax Signature
|
||||||
|
= withThrows: FunctionModifiers modifiers Type type Name name Parameters parameters "throws" {Type ","}+ exceptions
|
||||||
|
| noThrows: FunctionModifiers modifiers Type type Name name Parameters parameters ;
|
||||||
|
|
||||||
|
syntax Sym
|
||||||
|
// named non-terminals
|
||||||
|
= nonterminal: Nonterminal nonterminal !>> "["
|
||||||
|
| parameter: "&" Nonterminal nonterminal
|
||||||
|
| parametrized: Nonterminal nonterminal >> "[" "[" {Sym ","}+ parameters "]"
|
||||||
|
| \start: "start" "[" Nonterminal nonterminal "]"
|
||||||
|
| labeled: Sym symbol NonterminalLabel label
|
||||||
|
// literals
|
||||||
|
| characterClass: Class charClass
|
||||||
|
| literal: StringConstant string
|
||||||
|
| caseInsensitiveLiteral: CaseInsensitiveStringConstant cistring
|
||||||
|
// regular expressions
|
||||||
|
| iter: Sym symbol "+"
|
||||||
|
| iterStar: Sym symbol "*"
|
||||||
|
| iterSep: "{" Sym symbol Sym sep "}" "+"
|
||||||
|
| iterStarSep: "{" Sym symbol Sym sep "}" "*"
|
||||||
|
| optional: Sym symbol "?"
|
||||||
|
| alternative: "(" Sym first "|" {Sym "|"}+ alternatives ")"
|
||||||
|
| sequence: "(" Sym first Sym+ sequence ")"
|
||||||
|
// TODO: MinimalIter: Sym symbol IntegerConstant minimal "+"
|
||||||
|
// TODO: MinimalIterSep: "{" Sym symbol Symbol sep "}" IntegerConstant minimal "+"
|
||||||
|
// TODO | Permutation: "(" Sym first "~" {Sym "~"}+ participants ")"
|
||||||
|
// TODO | Combination: "(" Sym first "#" {Sym "#"}+ elements ")"
|
||||||
|
| empty: "(" ")"
|
||||||
|
// conditionals
|
||||||
|
| column: Sym symbol "@" IntegerLiteral column
|
||||||
|
| endOfLine: Sym symbol "$"
|
||||||
|
| startOfLine: "^" Sym symbol
|
||||||
|
| except: Sym symbol "!" NonterminalLabel label
|
||||||
|
>
|
||||||
|
assoc (
|
||||||
|
left ( follow: Sym symbol "\>\>" Sym match
|
||||||
|
| notFollow: Sym symbol "!\>\>" Sym match
|
||||||
|
)
|
||||||
|
|
|
||||||
|
right ( precede: Sym match "\<\<" Sym symbol
|
||||||
|
| notPrecede: Sym match "!\<\<" Sym symbol
|
||||||
|
)
|
||||||
|
)
|
||||||
|
>
|
||||||
|
left unequal: Sym symbol "\\" Sym match
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical TimePartNoTZ
|
||||||
|
= [0-2] [0-9] [0-5] [0-9] [0-5] [0-9] ([, .] [0-9] ([0-9] [0-9]?)?)?
|
||||||
|
| [0-2] [0-9] ":" [0-5] [0-9] ":" [0-5] [0-9] ([, .] [0-9] ([0-9] [0-9]?)?)?
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Header
|
||||||
|
= parameters: Tags tags "module" QualifiedName name ModuleParameters params Import* imports
|
||||||
|
| \default: Tags tags "module" QualifiedName name Import* imports ;
|
||||||
|
|
||||||
|
lexical Name
|
||||||
|
// Names are surrounded by non-alphabetical characters, i.e. we want longest match.
|
||||||
|
= ([A-Z a-z _] !<< [A-Z _ a-z] [0-9 A-Z _ a-z]* !>> [0-9 A-Z _ a-z]) \ RascalKeywords
|
||||||
|
| [\\] [A-Z _ a-z] [\- 0-9 A-Z _ a-z]* !>> [\- 0-9 A-Z _ a-z]
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax SyntaxDefinition
|
||||||
|
= @Foldable \layout : Visibility vis "layout" Sym defined "=" Prod production ";"
|
||||||
|
| @Foldable \lexical : "lexical" Sym defined "=" Prod production ";"
|
||||||
|
| @Foldable \keyword : "keyword" Sym defined "=" Prod production ";"
|
||||||
|
| @Foldable language: Start start "syntax" Sym defined "=" Prod production ";" ;
|
||||||
|
|
||||||
|
syntax Kind
|
||||||
|
= function: "function"
|
||||||
|
| variable: "variable"
|
||||||
|
| \all: "all"
|
||||||
|
| \anno: "anno"
|
||||||
|
| \data: "data"
|
||||||
|
| view: "view"
|
||||||
|
| \alias: "alias"
|
||||||
|
| \module: "module"
|
||||||
|
| \tag: "tag" ;
|
||||||
|
|
||||||
|
syntax ImportedModule
|
||||||
|
= \default: QualifiedName name
|
||||||
|
| actualsRenaming: QualifiedName name ModuleActuals actuals Renamings renamings
|
||||||
|
| renamings: QualifiedName name Renamings renamings
|
||||||
|
| actuals: QualifiedName name ModuleActuals actuals
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Target
|
||||||
|
= empty:
|
||||||
|
| labeled: Name name ;
|
||||||
|
|
||||||
|
syntax IntegerLiteral
|
||||||
|
= /*prefer()*/ decimalIntegerLiteral: DecimalIntegerLiteral decimal
|
||||||
|
| /*prefer()*/ hexIntegerLiteral: HexIntegerLiteral hex
|
||||||
|
| /*prefer()*/ octalIntegerLiteral: OctalIntegerLiteral octal ;
|
||||||
|
|
||||||
|
syntax FunctionBody
|
||||||
|
= \default: "{" Statement* statements "}" ;
|
||||||
|
|
||||||
|
syntax Expression
|
||||||
|
= nonEmptyBlock : "{" Statement+ statements "}"
|
||||||
|
| bracket \bracket: "(" Expression expression ")"
|
||||||
|
| closure : Type type Parameters parameters "{" Statement+ statements "}"
|
||||||
|
| stepRange : "[" Expression first "," Expression second ".." Expression last "]"
|
||||||
|
| voidClosure : Parameters parameters "{" Statement* statements0 "}"
|
||||||
|
| \visit : Label label Visit visit
|
||||||
|
| reducer : "(" Expression init "|" Expression result "|" {Expression ","}+ generators ")"
|
||||||
|
| reifiedType : "type" "(" Expression symbol "," Expression definitions ")"
|
||||||
|
| callOrTree : Expression!transitiveClosure!transitiveReflexiveClosure!isDefined expression "(" {Expression ","}* arguments KeywordArguments[Expression] keywordArguments ")"
|
||||||
|
| literal : Literal literal
|
||||||
|
| \any : "any" "(" {Expression ","}+ generators ")"
|
||||||
|
| \all : "all" "(" {Expression ","}+ generators ")"
|
||||||
|
| comprehension : Comprehension comprehension
|
||||||
|
| \set : "{" {Expression ","}* elements0 "}"
|
||||||
|
| \list : "[" {Expression ","}* elements0 "]"
|
||||||
|
| reifyType : "#" Type type !>> "[" !selector
|
||||||
|
| range : "[" Expression first ".." Expression last "]"
|
||||||
|
| \tuple : "\<" {Expression ","}+ elements "\>"
|
||||||
|
| \map : "(" {Mapping[Expression] ","}* mappings ")"
|
||||||
|
| \it : [A-Z a-z _] !<< "it" !>> [A-Z a-z _]
|
||||||
|
| qualifiedName : QualifiedName qualifiedName
|
||||||
|
| subscript : Expression expression!transitiveClosure!transitiveReflexiveClosure!isDefined "[" {Expression ","}+ subscripts "]"
|
||||||
|
| slice : Expression expression!transitiveClosure!transitiveReflexiveClosure!isDefined "[" OptionalExpression optFirst ".." OptionalExpression optLast "]"
|
||||||
|
| sliceStep : Expression expression!transitiveClosure!transitiveReflexiveClosure!isDefined "[" OptionalExpression optFirst "," Expression second ".." OptionalExpression optLast "]"
|
||||||
|
| fieldAccess : Expression expression "." Name field
|
||||||
|
| fieldUpdate : Expression expression "[" Name key "=" Expression replacement "]"
|
||||||
|
| fieldProject : Expression expression!transitiveClosure!transitiveReflexiveClosure!isDefined "\<" {Field ","}+ fields "\>"
|
||||||
|
| setAnnotation: Expression expression "[" "@" Name name "=" Expression value "]"
|
||||||
|
| getAnnotation: Expression expression >> "@" "@" Name name
|
||||||
|
| is : Expression expression "is" Name name
|
||||||
|
| has : Expression expression "has" Name name
|
||||||
|
| transitiveClosure: Expression argument "+" !>> "="
|
||||||
|
| transitiveReflexiveClosure: Expression argument "*" !>> "="
|
||||||
|
> isDefined : Expression argument "?"
|
||||||
|
> negation : "!" Expression!match!noMatch argument
|
||||||
|
| negative : "-" Expression argument
|
||||||
|
| non-assoc splice : "*" Expression argument
|
||||||
|
| asType : "[" Type type "]" Expression!match!noMatch argument
|
||||||
|
> left composition: Expression lhs "o" Expression rhs
|
||||||
|
> left ( product: Expression lhs "*" () !>> "*" Expression!noMatch!match rhs
|
||||||
|
| \join : Expression lhs "join" Expression rhs
|
||||||
|
| remainder: Expression lhs "%" Expression rhs
|
||||||
|
| division: Expression lhs "/" Expression rhs
|
||||||
|
)
|
||||||
|
> left intersection: Expression lhs "&" !>> "&" Expression rhs
|
||||||
|
> left ( addition : Expression lhs "+" Expression!noMatch!match rhs
|
||||||
|
| subtraction: Expression!transitiveClosure!transitiveReflexiveClosure lhs "-" Expression rhs
|
||||||
|
| appendAfter: Expression lhs "\<\<" !>> "=" Expression rhs
|
||||||
|
| insertBefore: Expression lhs "\>\>" Expression rhs
|
||||||
|
)
|
||||||
|
> left modulo: Expression lhs "mod" Expression rhs
|
||||||
|
> non-assoc ( notIn: Expression lhs "notin" Expression rhs
|
||||||
|
| \in: Expression lhs "in" Expression rhs
|
||||||
|
)
|
||||||
|
> non-assoc ( greaterThanOrEq: Expression lhs "\>=" Expression rhs
|
||||||
|
| lessThanOrEq : Expression lhs "\<=" Expression rhs
|
||||||
|
| lessThan : Expression lhs "\<" !>> "-" Expression rhs
|
||||||
|
| greaterThan : Expression lhs "\>" Expression rhs
|
||||||
|
)
|
||||||
|
> non-assoc ( equals : Expression lhs "==" Expression rhs
|
||||||
|
| nonEquals : Expression lhs "!=" Expression rhs
|
||||||
|
)
|
||||||
|
> non-assoc ifDefinedOtherwise: Expression lhs "?" Expression rhs
|
||||||
|
> non-assoc ( noMatch: Pattern pattern "!:=" Expression expression
|
||||||
|
| match: Pattern pattern ":=" Expression expression
|
||||||
|
| enumerator: Pattern pattern "\<-" Expression expression
|
||||||
|
)
|
||||||
|
> non-assoc ( implication: Expression lhs "==\>" Expression rhs
|
||||||
|
| equivalence: Expression lhs "\<==\>" Expression rhs
|
||||||
|
)
|
||||||
|
> left and: Expression lhs "&&" Expression rhs
|
||||||
|
> left or: Expression lhs "||" Expression rhs
|
||||||
|
> right ifThenElse: Expression condition "?" Expression thenExp ":" Expression elseExp
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax OptionalExpression
|
||||||
|
= expression: Expression expression
|
||||||
|
| noExpression: ()
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax UserType
|
||||||
|
= name: QualifiedName name
|
||||||
|
| parametric: QualifiedName name >> "[" "[" {Type ","}+ parameters "]" ;
|
||||||
|
|
||||||
|
syntax Import
|
||||||
|
= \extend: "extend" ImportedModule module ";"
|
||||||
|
| \default: "import" ImportedModule module ";"
|
||||||
|
| \external: "import" QualifiedName name "=" LocationLiteral at ";"
|
||||||
|
| \syntax: SyntaxDefinition syntax ;
|
||||||
|
|
||||||
|
syntax Body
|
||||||
|
= toplevels: Toplevel* toplevels ;
|
||||||
|
|
||||||
|
lexical URLChars
|
||||||
|
= ![\t-\n \r \ \< |]* ;
|
||||||
|
|
||||||
|
lexical TimeZonePart
|
||||||
|
= [+ \-] [0-1] [0-9] ":" [0-5] [0-9]
|
||||||
|
| "Z"
|
||||||
|
| [+ \-] [0-1] [0-9]
|
||||||
|
| [+ \-] [0-1] [0-9] [0-5] [0-9]
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax ProtocolPart
|
||||||
|
= nonInterpolated: ProtocolChars protocolChars
|
||||||
|
| interpolated: PreProtocolChars pre Expression expression ProtocolTail tail ;
|
||||||
|
|
||||||
|
syntax StringTemplate
|
||||||
|
= ifThen : "if" "(" {Expression ","}+ conditions ")" "{" Statement* preStats StringMiddle body Statement* postStats "}"
|
||||||
|
| ifThenElse: "if" "(" {Expression ","}+ conditions ")" "{" Statement* preStatsThen StringMiddle thenString Statement* postStatsThen "}" "else" "{" Statement* preStatsElse StringMiddle elseString Statement* postStatsElse "}"
|
||||||
|
| \for : "for" "(" {Expression ","}+ generators ")" "{" Statement* preStats StringMiddle body Statement* postStats "}"
|
||||||
|
| doWhile : "do" "{" Statement* preStats StringMiddle body Statement* postStats "}" "while" "(" Expression condition ")"
|
||||||
|
| \while : "while" "(" Expression condition ")" "{" Statement* preStats StringMiddle body Statement* postStats "}" ;
|
||||||
|
|
||||||
|
lexical PreStringChars
|
||||||
|
= @category="Constant" [\"] StringCharacter* [\<] ;
|
||||||
|
|
||||||
|
lexical CaseInsensitiveStringConstant
|
||||||
|
= @category="Constant" "\'" StringCharacter* chars "\'" ;
|
||||||
|
|
||||||
|
lexical Backslash
|
||||||
|
= [\\] !>> [/ \< \> \\] ;
|
||||||
|
|
||||||
|
syntax Label
|
||||||
|
= \default: Name name ":"
|
||||||
|
| empty: ;
|
||||||
|
|
||||||
|
lexical MidProtocolChars
|
||||||
|
= "\>" URLChars "\<" ;
|
||||||
|
|
||||||
|
lexical NamedBackslash
|
||||||
|
= [\\] !>> [\< \> \\] ;
|
||||||
|
|
||||||
|
syntax Field
|
||||||
|
= index: IntegerLiteral fieldIndex
|
||||||
|
| name: Name fieldName ;
|
||||||
|
|
||||||
|
lexical JustDate
|
||||||
|
= "$" DatePart "$";
|
||||||
|
|
||||||
|
lexical PostPathChars
|
||||||
|
= "\>" URLChars "|" ;
|
||||||
|
|
||||||
|
syntax PathPart
|
||||||
|
= nonInterpolated: PathChars pathChars
|
||||||
|
| interpolated: PrePathChars pre Expression expression PathTail tail ;
|
||||||
|
|
||||||
|
lexical DatePart
|
||||||
|
= [0-9] [0-9] [0-9] [0-9] "-" [0-1] [0-9] "-" [0-3] [0-9]
|
||||||
|
| [0-9] [0-9] [0-9] [0-9] [0-1] [0-9] [0-3] [0-9] ;
|
||||||
|
|
||||||
|
syntax FunctionModifier
|
||||||
|
= java: "java"
|
||||||
|
| \test: "test"
|
||||||
|
| \default: "default";
|
||||||
|
|
||||||
|
syntax Assignment
|
||||||
|
= ifDefined: "?="
|
||||||
|
| division: "/="
|
||||||
|
| product: "*="
|
||||||
|
| intersection: "&="
|
||||||
|
| subtraction: "-="
|
||||||
|
| \default: "="
|
||||||
|
| addition: "+="
|
||||||
|
| \append: "\<\<="
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Assignable
|
||||||
|
= bracket \bracket : "(" Assignable arg ")"
|
||||||
|
| variable : QualifiedName qualifiedName
|
||||||
|
| subscript : Assignable receiver "[" Expression subscript "]"
|
||||||
|
| slice : Assignable receiver "[" OptionalExpression optFirst ".." OptionalExpression optLast "]"
|
||||||
|
| sliceStep : Assignable receiver "[" OptionalExpression optFirst "," Expression second ".." OptionalExpression optLast "]"
|
||||||
|
| fieldAccess : Assignable receiver "." Name field
|
||||||
|
| ifDefinedOrDefault: Assignable receiver "?" Expression defaultExpression
|
||||||
|
| constructor : Name name "(" {Assignable ","}+ arguments ")"
|
||||||
|
| \tuple : "\<" {Assignable ","}+ elements "\>"
|
||||||
|
| annotation : Assignable receiver "@" Name annotation ;
|
||||||
|
|
||||||
|
lexical StringConstant
|
||||||
|
= @category="Constant" "\"" StringCharacter* chars "\"" ;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
syntax Assoc
|
||||||
|
= associative: "assoc"
|
||||||
|
| left: "left"
|
||||||
|
| nonAssociative: "non-assoc"
|
||||||
|
| right: "right" ;
|
||||||
|
|
||||||
|
syntax Replacement
|
||||||
|
= unconditional: Expression replacementExpression
|
||||||
|
| conditional: Expression replacementExpression "when" {Expression ","}+ conditions ;
|
||||||
|
|
||||||
|
syntax DataTarget
|
||||||
|
= empty:
|
||||||
|
| labeled: Name label ":" ;
|
||||||
|
|
||||||
|
lexical StringCharacter
|
||||||
|
= "\\" [\" \' \< \> \\ b f n r t]
|
||||||
|
| UnicodeEscape
|
||||||
|
| ![\" \' \< \> \\]
|
||||||
|
| [\n][\ \t \u00A0 \u1680 \u2000-\u200A \u202F \u205F \u3000]* [\'] // margin
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical JustTime
|
||||||
|
= "$T" TimePartNoTZ !>> [+\-] "$"
|
||||||
|
| "$T" TimePartNoTZ TimeZonePart "$"
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical MidStringChars
|
||||||
|
= @category="Constant" [\>] StringCharacter* [\<] ;
|
||||||
|
|
||||||
|
lexical ProtocolChars
|
||||||
|
= [|] URLChars "://" !>> [\t-\n \r \ \u00A0 \u1680 \u2000-\u200A \u202F \u205F \u3000];
|
||||||
|
|
||||||
|
lexical RegExpModifier
|
||||||
|
= [d i m s]* ;
|
||||||
|
|
||||||
|
syntax CommonKeywordParameters
|
||||||
|
= absent: ()
|
||||||
|
| present: "(" {KeywordFormal ","}+ keywordFormalList ")"
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Parameters
|
||||||
|
= \default: "(" Formals formals KeywordFormals keywordFormals ")"
|
||||||
|
| varArgs: "(" Formals formals "..." KeywordFormals keywordFormals ")" ;
|
||||||
|
|
||||||
|
lexical OptionalComma = \default: ","? ;
|
||||||
|
|
||||||
|
syntax KeywordFormals
|
||||||
|
= \default: OptionalComma optionalComma [,\ (\t\n] << {KeywordFormal ","}+ keywordFormalList
|
||||||
|
| none: ()
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax KeywordFormal
|
||||||
|
= \default: Type type Name name "=" Expression expression
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax KeywordArguments[&T]
|
||||||
|
= \default: OptionalComma optionalComma [,\ (\t\n] << {KeywordArgument[&T] ","}+ keywordArgumentList
|
||||||
|
| none: ()
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax KeywordArgument[&T] = \default: Name name "=" &T expression ;
|
||||||
|
|
||||||
|
lexical RegExp
|
||||||
|
= ![/ \< \> \\]
|
||||||
|
| "\<" Name "\>"
|
||||||
|
| [\\] [/ \< \> \\]
|
||||||
|
| "\<" Name ":" NamedRegExp* "\>"
|
||||||
|
| Backslash
|
||||||
|
// | @category="MetaVariable" [\<] Expression expression [\>] TODO: find out why this production existed
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
layout LAYOUTLIST
|
||||||
|
= LAYOUT* !>> [\u0009-\u000D \u0020 \u0085 \u00A0 \u1680 \u180E \u2000-\u200A \u2028 \u2029 \u202F \u205F \u3000] !>> "//" !>> "/*";
|
||||||
|
|
||||||
|
syntax LocalVariableDeclaration
|
||||||
|
= \default: Declarator declarator
|
||||||
|
| \dynamic: "dynamic" Declarator declarator ;
|
||||||
|
|
||||||
|
lexical RealLiteral
|
||||||
|
= [0-9]+ [D F d f]
|
||||||
|
| [0-9]+ [E e] [+ \-]? [0-9]+ [D F d f]?
|
||||||
|
| [0-9]+ "." !>> "." [0-9]* [D F d f]?
|
||||||
|
| [0-9]+ "." [0-9]* [E e] [+ \-]? [0-9]+ [D F d f]?
|
||||||
|
| [.] !<< "." [0-9]+ [D F d f]?
|
||||||
|
| [.] !<< "." [0-9]+ [E e] [+ \-]? [0-9]+ [D F d f]?
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Range
|
||||||
|
= fromTo: Char start "-" Char end
|
||||||
|
| character: Char character ;
|
||||||
|
|
||||||
|
syntax LocationLiteral
|
||||||
|
= \default: ProtocolPart protocolPart PathPart pathPart ;
|
||||||
|
|
||||||
|
syntax ShellCommand
|
||||||
|
= setOption: "set" QualifiedName name Expression expression
|
||||||
|
| undeclare: "undeclare" QualifiedName name
|
||||||
|
| help: "help"
|
||||||
|
| edit: "edit" QualifiedName name
|
||||||
|
| unimport: "unimport" QualifiedName name
|
||||||
|
| listDeclarations: "declarations"
|
||||||
|
| quit: "quit"
|
||||||
|
| history: "history"
|
||||||
|
| \test: "test"
|
||||||
|
| listModules: "modules"
|
||||||
|
| clear: "clear";
|
||||||
|
|
||||||
|
syntax StringMiddle
|
||||||
|
= mid: MidStringChars mid
|
||||||
|
| template: MidStringChars mid StringTemplate template StringMiddle tail
|
||||||
|
| interpolated: MidStringChars mid Expression expression StringMiddle tail ;
|
||||||
|
|
||||||
|
syntax QualifiedName
|
||||||
|
= \default: {Name "::"}+ names !>> "::" ;
|
||||||
|
|
||||||
|
lexical RationalLiteral
|
||||||
|
= [0-9][0-9]* [r]
|
||||||
|
| [1-9][0-9]* [r] [0-9][0-9]* !>> [0-9 A-Z _ a-z]
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical DecimalIntegerLiteral
|
||||||
|
= "0" !>> [0-9 A-Z _ a-z]
|
||||||
|
| [1-9] [0-9]* !>> [0-9 A-Z _ a-z] ;
|
||||||
|
|
||||||
|
syntax DataTypeSelector
|
||||||
|
= selector: QualifiedName sort "." Name production ;
|
||||||
|
|
||||||
|
syntax StringTail
|
||||||
|
= midInterpolated: MidStringChars mid Expression expression StringTail tail
|
||||||
|
| post: PostStringChars post
|
||||||
|
| midTemplate: MidStringChars mid StringTemplate template StringTail tail ;
|
||||||
|
|
||||||
|
syntax PatternWithAction
|
||||||
|
= replacing: Pattern pattern "=\>" Replacement replacement
|
||||||
|
| arbitrary: Pattern pattern ":" Statement statement ;
|
||||||
|
|
||||||
|
lexical LAYOUT
|
||||||
|
= Comment
|
||||||
|
// all the white space chars defined in Unicode 6.0
|
||||||
|
| [\u0009-\u000D \u0020 \u0085 \u00A0 \u1680 \u180E \u2000-\u200A \u2028 \u2029 \u202F \u205F \u3000]
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Visit
|
||||||
|
= givenStrategy: Strategy strategy "visit" "(" Expression subject ")" "{" Case+ cases "}"
|
||||||
|
| defaultStrategy: "visit" "(" Expression subject ")" "{" Case+ cases "}" ;
|
||||||
|
|
||||||
|
start syntax Commands
|
||||||
|
= \commandlist: EvalCommand+ commands
|
||||||
|
;
|
||||||
|
|
||||||
|
start syntax EvalCommand
|
||||||
|
= declaration: Declaration declaration
|
||||||
|
| statement: Statement!variableDeclaration!functionDeclaration!visit statement
|
||||||
|
| \import: Import imported
|
||||||
|
| output: Output
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical Output
|
||||||
|
= @category="Result" resultOutput: "⇨" ![\n\r]* [\n]
|
||||||
|
| @category="StdOut" stdoutOutput: ^ "≫" ![\n\r]* [\n]
|
||||||
|
| @category="StdErr" stderrOutput: ^ "⚠" ![\n\r]* [\n]
|
||||||
|
;
|
||||||
|
|
||||||
|
start syntax Command
|
||||||
|
= expression: Expression!nonEmptyBlock expression
|
||||||
|
| declaration: Declaration declaration
|
||||||
|
| shell: ":" ShellCommand command
|
||||||
|
| statement: Statement!variableDeclaration!functionDeclaration!visit statement
|
||||||
|
| \import: Import imported ;
|
||||||
|
|
||||||
|
lexical TagString
|
||||||
|
= "\\" !<< "{" ( ![{}] | ("\\" [{}]) | TagString)* contents "\\" !<< "}";
|
||||||
|
|
||||||
|
syntax ProtocolTail
|
||||||
|
= mid: MidProtocolChars mid Expression expression ProtocolTail tail
|
||||||
|
| post: PostProtocolChars post ;
|
||||||
|
|
||||||
|
lexical Nonterminal
|
||||||
|
= ([A-Z] !<< [A-Z] [0-9 A-Z _ a-z]* !>> [0-9 A-Z _ a-z]) \ RascalKeywords;
|
||||||
|
|
||||||
|
syntax PathTail
|
||||||
|
= mid: MidPathChars mid Expression expression PathTail tail
|
||||||
|
| post: PostPathChars post ;
|
||||||
|
|
||||||
|
syntax Visibility
|
||||||
|
= \private: "private"
|
||||||
|
| \default:
|
||||||
|
| \public: "public" ;
|
||||||
|
|
||||||
|
syntax StringLiteral
|
||||||
|
= template: PreStringChars pre StringTemplate template StringTail tail
|
||||||
|
| interpolated: PreStringChars pre Expression expression StringTail tail
|
||||||
|
| nonInterpolated: StringConstant constant ;
|
||||||
|
|
||||||
|
lexical Comment
|
||||||
|
= @category="Comment" "/*" (![*] | [*] !>> [/])* "*/"
|
||||||
|
| @category="Comment" "//" ![\n]* !>> [\ \t\r \u00A0 \u1680 \u2000-\u200A \u202F \u205F \u3000] $ // the restriction helps with parsing speed
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
syntax Renamings
|
||||||
|
= \default: "renaming" {Renaming ","}+ renamings ;
|
||||||
|
|
||||||
|
syntax Tags
|
||||||
|
= \default: Tag* tags ;
|
||||||
|
|
||||||
|
syntax Formals
|
||||||
|
= \default: {Pattern ","}* formals ;
|
||||||
|
|
||||||
|
lexical PostProtocolChars
|
||||||
|
= "\>" URLChars "://" ;
|
||||||
|
|
||||||
|
syntax Start
|
||||||
|
= absent:
|
||||||
|
| present: "start" ;
|
||||||
|
|
||||||
|
syntax Statement
|
||||||
|
= @breakable \assert: "assert" Expression expression ";"
|
||||||
|
| @breakable assertWithMessage: "assert" Expression expression ":" Expression message ";"
|
||||||
|
| @breakable expression: Expression!visit!nonEmptyBlock expression ";"
|
||||||
|
| @breakable \visit: Label label Visit visit
|
||||||
|
| @breakable \while: Label label "while" "(" {Expression ","}+ conditions ")" Statement!variableDeclaration!functionDeclaration body
|
||||||
|
| @breakable doWhile: Label label "do" Statement body "while" "(" Expression condition ")" ";"
|
||||||
|
| @breakable @breakable{generators} \for: Label label "for" "(" {Expression ","}+ generators ")" Statement body
|
||||||
|
| @breakable ifThen: Label label "if" "(" {Expression ","}+ conditions ")" Statement!variableDeclaration!functionDeclaration thenStatement () !>> "else"
|
||||||
|
| @breakable ifThenElse: Label label "if" "(" {Expression ","}+ conditions ")" Statement thenStatement "else" Statement!variableDeclaration!functionDeclaration elseStatement
|
||||||
|
| @breakable \switch: Label label "switch" "(" Expression expression ")" "{" Case+ cases "}"
|
||||||
|
| @breakable \fail: "fail" Target target ";"
|
||||||
|
| @breakable \break: "break" Target target ";"
|
||||||
|
| @breakable \continue: "continue" Target target ";"
|
||||||
|
| @breakable \filter: "filter" ";"
|
||||||
|
| @breakable \solve: "solve" "(" {QualifiedName ","}+ variables Bound bound ")" Statement!variableDeclaration!functionDeclaration body
|
||||||
|
| @breakable non-assoc \try: "try" Statement body Catch+ handlers
|
||||||
|
| @breakable tryFinally: "try" Statement body Catch+ handlers "finally" Statement!variableDeclaration!functionDeclaration finallyBody
|
||||||
|
| nonEmptyBlock: Label label "{" Statement+ statements "}"
|
||||||
|
| emptyStatement: ";"
|
||||||
|
| @breakable globalDirective: "global" Type type {QualifiedName ","}+ names ";"
|
||||||
|
| @breakable assignment: Assignable assignable Assignment operator Statement!functionDeclaration!variableDeclaration statement
|
||||||
|
| non-assoc (
|
||||||
|
@breakable \return : "return" Statement!functionDeclaration!variableDeclaration statement
|
||||||
|
| @breakable \throw : "throw" Statement!functionDeclaration!variableDeclaration statement
|
||||||
|
| @breakable \insert : "insert" DataTarget dataTarget Statement!functionDeclaration!variableDeclaration statement
|
||||||
|
| @breakable \append : "append" DataTarget dataTarget Statement!functionDeclaration!variableDeclaration statement
|
||||||
|
)
|
||||||
|
| @breakable functionDeclaration: FunctionDeclaration functionDeclaration
|
||||||
|
| @breakable variableDeclaration: LocalVariableDeclaration declaration ";"
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
syntax StructuredType
|
||||||
|
= \default: BasicType basicType "[" {TypeArg ","}+ arguments "]" ;
|
||||||
|
|
||||||
|
lexical NonterminalLabel
|
||||||
|
= [a-z] [0-9 A-Z _ a-z]* !>> [0-9 A-Z _ a-z] ;
|
||||||
|
|
||||||
|
syntax FunctionType
|
||||||
|
= typeArguments: Type type "(" {TypeArg ","}* arguments ")" ;
|
||||||
|
|
||||||
|
syntax Case
|
||||||
|
= @Foldable patternWithAction: "case" PatternWithAction patternWithAction
|
||||||
|
| @Foldable \default: "default" ":" Statement statement ;
|
||||||
|
|
||||||
|
syntax Declarator
|
||||||
|
= \default: Type type {Variable ","}+ variables ;
|
||||||
|
|
||||||
|
syntax Bound
|
||||||
|
= \default: ";" Expression expression
|
||||||
|
| empty: ;
|
||||||
|
|
||||||
|
keyword RascalKeywords
|
||||||
|
= "o"
|
||||||
|
| "syntax"
|
||||||
|
| "keyword"
|
||||||
|
| "lexical"
|
||||||
|
| "int"
|
||||||
|
| "break"
|
||||||
|
| "continue"
|
||||||
|
| "rat"
|
||||||
|
| "true"
|
||||||
|
| "bag"
|
||||||
|
| "num"
|
||||||
|
| "node"
|
||||||
|
| "finally"
|
||||||
|
| "private"
|
||||||
|
| "real"
|
||||||
|
| "list"
|
||||||
|
| "fail"
|
||||||
|
| "filter"
|
||||||
|
| "if"
|
||||||
|
| "tag"
|
||||||
|
| BasicType
|
||||||
|
| "extend"
|
||||||
|
| "append"
|
||||||
|
| "rel"
|
||||||
|
| "lrel"
|
||||||
|
| "void"
|
||||||
|
| "non-assoc"
|
||||||
|
| "assoc"
|
||||||
|
| "test"
|
||||||
|
| "anno"
|
||||||
|
| "layout"
|
||||||
|
| "data"
|
||||||
|
| "join"
|
||||||
|
| "it"
|
||||||
|
| "bracket"
|
||||||
|
| "in"
|
||||||
|
| "import"
|
||||||
|
| "false"
|
||||||
|
| "all"
|
||||||
|
| "dynamic"
|
||||||
|
| "solve"
|
||||||
|
| "type"
|
||||||
|
| "try"
|
||||||
|
| "catch"
|
||||||
|
| "notin"
|
||||||
|
| "else"
|
||||||
|
| "insert"
|
||||||
|
| "switch"
|
||||||
|
| "return"
|
||||||
|
| "case"
|
||||||
|
| "while"
|
||||||
|
| "str"
|
||||||
|
| "throws"
|
||||||
|
| "visit"
|
||||||
|
| "tuple"
|
||||||
|
| "for"
|
||||||
|
| "assert"
|
||||||
|
| "loc"
|
||||||
|
| "default"
|
||||||
|
| "map"
|
||||||
|
| "alias"
|
||||||
|
| "any"
|
||||||
|
| "module"
|
||||||
|
| "mod"
|
||||||
|
| "bool"
|
||||||
|
| "public"
|
||||||
|
| "one"
|
||||||
|
| "throw"
|
||||||
|
| "set"
|
||||||
|
| "start"
|
||||||
|
| "datetime"
|
||||||
|
| "value"
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Type
|
||||||
|
= bracket \bracket: "(" Type type ")"
|
||||||
|
| user: UserType user
|
||||||
|
| function: FunctionType function
|
||||||
|
| structured: StructuredType structured
|
||||||
|
| basic: BasicType basic
|
||||||
|
| selector: DataTypeSelector selector
|
||||||
|
| variable: TypeVar typeVar
|
||||||
|
| symbol: Sym!nonterminal!labeled!parametrized!parameter symbol
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Declaration
|
||||||
|
= variable : Tags tags Visibility visibility Type type {Variable ","}+ variables ";"
|
||||||
|
| annotation : Tags tags Visibility visibility "anno" Type annoType Type onType "@" Name name ";"
|
||||||
|
| \alias : Tags tags Visibility visibility "alias" UserType user "=" Type base ";"
|
||||||
|
| \tag : Tags tags Visibility visibility "tag" Kind kind Name name "on" {Type ","}+ types ";"
|
||||||
|
| dataAbstract: Tags tags Visibility visibility "data" UserType user CommonKeywordParameters commonKeywordParameters ";"
|
||||||
|
| @Foldable \data : Tags tags Visibility visibility "data" UserType user CommonKeywordParameters commonKeywordParameters"=" {Variant "|"}+ variants ";"
|
||||||
|
| function : FunctionDeclaration functionDeclaration
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Class
|
||||||
|
= simpleCharclass: "[" Range* ranges "]"
|
||||||
|
| complement: "!" Class charClass
|
||||||
|
> left difference: Class lhs "-" Class rhs
|
||||||
|
> left intersection: Class lhs "&&" Class rhs
|
||||||
|
> left union: Class lhs "||" Class rhs
|
||||||
|
| bracket \bracket: "(" Class charclass ")" ;
|
||||||
|
|
||||||
|
lexical RegExpLiteral
|
||||||
|
= "/" RegExp* "/" RegExpModifier ;
|
||||||
|
|
||||||
|
syntax FunctionModifiers
|
||||||
|
= \modifierlist: FunctionModifier* modifiers ;
|
||||||
|
|
||||||
|
syntax Comprehension
|
||||||
|
= @breakable{results,generators} \set: "{" {Expression ","}+ results "|" {Expression ","}+ generators "}"
|
||||||
|
| @breakable{from,to,generators} \map: "(" Expression from ":" Expression to "|" {Expression ","}+ generators ")"
|
||||||
|
| @breakable{results,generators} \list: "[" {Expression ","}+ results "|" {Expression ","}+ generators "]" ;
|
||||||
|
|
||||||
|
syntax Variant
|
||||||
|
= nAryConstructor: Name name "(" {TypeArg ","}* arguments KeywordFormals keywordArguments ")" ;
|
||||||
|
|
||||||
|
syntax FunctionDeclaration
|
||||||
|
= abstract: Tags tags Visibility visibility Signature signature ";"
|
||||||
|
| @Foldable @breakable{expression} expression: Tags tags Visibility visibility Signature signature "=" Expression expression ";"
|
||||||
|
| @Foldable @breakable{expression,conditions} conditional: Tags tags Visibility visibility Signature signature "=" Expression expression "when" {Expression ","}+ conditions ";"
|
||||||
|
| @Foldable \default: Tags tags Visibility visibility Signature signature FunctionBody body ;
|
||||||
|
|
||||||
|
lexical PreProtocolChars
|
||||||
|
= "|" URLChars "\<" ;
|
||||||
|
|
||||||
|
lexical NamedRegExp
|
||||||
|
= "\<" Name "\>"
|
||||||
|
| [\\] [/ \< \> \\]
|
||||||
|
| NamedBackslash
|
||||||
|
| ![/ \< \> \\] ;
|
||||||
|
|
||||||
|
syntax ProdModifier
|
||||||
|
= associativity: Assoc associativity
|
||||||
|
| \bracket: "bracket"
|
||||||
|
| \tag: Tag tag;
|
||||||
|
|
||||||
|
syntax Toplevel
|
||||||
|
= givenVisibility: Declaration declaration ;
|
||||||
|
|
||||||
|
lexical PostStringChars
|
||||||
|
= @category="Constant" [\>] StringCharacter* [\"] ;
|
||||||
|
|
||||||
|
lexical HexIntegerLiteral
|
||||||
|
= [0] [X x] [0-9 A-F a-f]+ !>> [0-9 A-Z _ a-z] ;
|
||||||
|
|
||||||
|
syntax TypeVar
|
||||||
|
= free: "&" Name name
|
||||||
|
| bounded: "&" Name name "\<:" Type bound ;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
syntax BasicType
|
||||||
|
= \value: "value"
|
||||||
|
| \loc: "loc"
|
||||||
|
| \node: "node"
|
||||||
|
| \num: "num"
|
||||||
|
| \type: "type"
|
||||||
|
| \bag: "bag"
|
||||||
|
| \int: "int"
|
||||||
|
| rational: "rat"
|
||||||
|
| relation: "rel"
|
||||||
|
| listRelation: "lrel"
|
||||||
|
| \real: "real"
|
||||||
|
| \tuple: "tuple"
|
||||||
|
| string: "str"
|
||||||
|
| \bool: "bool"
|
||||||
|
| \void: "void"
|
||||||
|
| dateTime: "datetime"
|
||||||
|
| \set: "set"
|
||||||
|
| \map: "map"
|
||||||
|
| \list: "list"
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical Char
|
||||||
|
= @category="Constant" "\\" [\ \" \' \- \< \> \[ \\ \] b f n r t]
|
||||||
|
| @category="Constant" ![\ \" \' \- \< \> \[ \\ \]]
|
||||||
|
| @category="Constant" UnicodeEscape
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Prod
|
||||||
|
= reference: ":" Name referenced
|
||||||
|
| labeled: ProdModifier* modifiers Name name ":" Sym* syms
|
||||||
|
| others: "..."
|
||||||
|
| unlabeled: ProdModifier* modifiers Sym* syms
|
||||||
|
| @Foldable associativityGroup: Assoc associativity "(" Prod group ")"
|
||||||
|
// | TODO add bracket rule for easy readability
|
||||||
|
> left \all : Prod lhs "|" Prod rhs
|
||||||
|
> left first : Prod lhs "\>" !>> "\>" Prod rhs
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax DateTimeLiteral
|
||||||
|
= /*prefer()*/ dateLiteral: JustDate date
|
||||||
|
| /*prefer()*/ timeLiteral: JustTime time
|
||||||
|
| /*prefer()*/ dateAndTimeLiteral: DateAndTime dateAndTime ;
|
||||||
|
|
||||||
|
lexical PrePathChars
|
||||||
|
= URLChars "\<" ;
|
||||||
|
|
||||||
|
syntax Mapping[&T]
|
||||||
|
= \default: &T!ifDefinedOtherwise from ":" &T to
|
||||||
|
;
|
||||||
|
|
||||||
|
lexical MidPathChars
|
||||||
|
= "\>" URLChars "\<" ;
|
||||||
|
|
||||||
|
/*
|
||||||
|
Note that Pattern must closely follow the definitions of Expression because eventually
|
||||||
|
these two non-terminals will be fused just before AST generation.
|
||||||
|
*/
|
||||||
|
syntax Pattern
|
||||||
|
= \set : "{" {Pattern ","}* elements0 "}"
|
||||||
|
| \list : "[" {Pattern ","}* elements0 "]"
|
||||||
|
| qualifiedName : QualifiedName qualifiedName
|
||||||
|
| multiVariable : QualifiedName qualifiedName "*"
|
||||||
|
| splice : "*" Pattern argument
|
||||||
|
| splicePlus : "+" Pattern argument
|
||||||
|
| negative : "-" Pattern argument
|
||||||
|
| literal : Literal literal
|
||||||
|
| \tuple : "\<" {Pattern ","}+ elements "\>"
|
||||||
|
| typedVariable : Type type Name name
|
||||||
|
| \map : "(" {Mapping[Pattern] ","}* mappings ")"
|
||||||
|
| reifiedType : "type" "(" Pattern symbol "," Pattern definitions ")"
|
||||||
|
| callOrTree : Pattern expression "(" {Pattern ","}* arguments KeywordArguments[Pattern] keywordArguments ")"
|
||||||
|
> variableBecomes : Name name ":" Pattern pattern
|
||||||
|
| asType : "[" Type type "]" Pattern argument
|
||||||
|
| descendant : "/" Pattern pattern
|
||||||
|
| anti : "!" Pattern pattern
|
||||||
|
| typedVariableBecomes: Type type Name name ":" Pattern pattern
|
||||||
|
;
|
||||||
|
|
||||||
|
syntax Tag
|
||||||
|
= @Folded @category="Comment" \default : "@" Name name TagString contents
|
||||||
|
| @Folded @category="Comment" empty : "@" Name name
|
||||||
|
| @Folded @category="Comment" expression: "@" Name name "=" Expression expression !>> "@";
|
||||||
|
|
||||||
|
syntax ModuleActuals
|
||||||
|
= \default: "[" {Type ","}+ types "]" ;
|
||||||
8  samples/Rascal/Syntax.rsc  Normal file
@@ -0,0 +1,8 @@
module Syntax

extend lang::std::Layout;
extend lang::std::Id;

start syntax Machine = machine: State+ states;
syntax State = @Foldable state: "state" Id name Trans* out;
syntax Trans = trans: Id event ":" Id to;
483  samples/Reason/JSX.re  Normal file
@@ -0,0 +1,483 @@
|
|||||||
|
type component = {displayName: string};
|
||||||
|
|
||||||
|
let module Bar = {
|
||||||
|
let createElement c::c=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Nesting = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Much = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Foo = {
|
||||||
|
let createElement a::a=? b::b=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module One = {
|
||||||
|
let createElement
|
||||||
|
test::test=?
|
||||||
|
foo::foo=?
|
||||||
|
children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
let createElementobvioustypo
|
||||||
|
test::test
|
||||||
|
children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Two = {
|
||||||
|
let createElement foo::foo=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Sibling = {
|
||||||
|
let createElement
|
||||||
|
foo::foo=?
|
||||||
|
(children: list component) => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Test = {
|
||||||
|
let createElement yo::yo=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module So = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Foo2 = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Text = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Exp = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Pun = {
|
||||||
|
let createElement intended::intended=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Namespace = {
|
||||||
|
let module Foo = {
|
||||||
|
let createElement
|
||||||
|
intended::intended=?
|
||||||
|
anotherOptional::x=100
|
||||||
|
children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module LotsOfArguments = {
|
||||||
|
let createElement
|
||||||
|
argument1::argument1=?
|
||||||
|
argument2::argument2=?
|
||||||
|
argument3::argument3=?
|
||||||
|
argument4::argument4=?
|
||||||
|
argument5::argument5=?
|
||||||
|
argument6::argument6=?
|
||||||
|
children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let div argument1::argument1=? children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
|
||||||
|
let module List1 = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module List2 = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module List3 = {
|
||||||
|
let createElement children => {
|
||||||
|
displayName: "test"
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let (/><) a b => a + b;
|
||||||
|
|
||||||
|
let (><) a b => a + b;
|
||||||
|
|
||||||
|
let (/>) a b => a + b;
|
||||||
|
|
||||||
|
let (><\/) a b => a + b;
|
||||||
|
|
||||||
|
let tag1 = 5 />< 6;
|
||||||
|
|
||||||
|
let tag2 = 5 >< 7;
|
||||||
|
|
||||||
|
let tag3 = 5 /> 7;
|
||||||
|
|
||||||
|
let tag4 = 5 ><\/ 7;
|
||||||
|
|
||||||
|
let b = 2;
|
||||||
|
|
||||||
|
let selfClosing = <Foo />;
|
||||||
|
|
||||||
|
let selfClosing2 = <Foo a=1 b=true />;
|
||||||
|
|
||||||
|
let selfClosing3 =
|
||||||
|
<Foo
|
||||||
|
a="really long values that should"
|
||||||
|
b="cause the entire thing to wrap"
|
||||||
|
/>;
|
||||||
|
|
||||||
|
let a = <Foo> <Bar c=(fun a => a + 2) /> </Foo>;
|
||||||
|
|
||||||
|
let a3 = <So> <Much> <Nesting /> </Much> </So>;
|
||||||
|
|
||||||
|
let a4 =
|
||||||
|
<Sibling>
|
||||||
|
<One test=true foo=b />
|
||||||
|
<Two foo=b />
|
||||||
|
</Sibling>;
|
||||||
|
|
||||||
|
let a5 = <Foo> "testing a string here" </Foo>;
|
||||||
|
|
||||||
|
let a6 =
|
||||||
|
<Foo2>
|
||||||
|
<Text> "testing a string here" </Text>
|
||||||
|
<Test yo=1 />
|
||||||
|
<Text> "another string" </Text>
|
||||||
|
<Bar />
|
||||||
|
<Exp> (2 + 4) </Exp>
|
||||||
|
</Foo2>;
|
||||||
|
|
||||||
|
let intended = true;
|
||||||
|
|
||||||
|
let punning = <Pun intended />;
|
||||||
|
|
||||||
|
let namespace = <Namespace.Foo />;
|
||||||
|
|
||||||
|
let c = <Foo />;
|
||||||
|
|
||||||
|
let d = <Foo />;
|
||||||
|
|
||||||
|
let spaceBefore =
|
||||||
|
<So> <Much> <Nesting /> </Much> </So>;
|
||||||
|
|
||||||
|
let spaceBefore2 = <So> <Much /> </So>;
|
||||||
|
|
||||||
|
let siblingNotSpaced =
|
||||||
|
<So> <Much /> <Much /> </So>;
|
||||||
|
|
||||||
|
let jsxInList = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInList2 = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInListA = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInListB = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInListC = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInListD = [<Foo />];
|
||||||
|
|
||||||
|
let jsxInList3 = [<Foo />, <Foo />, <Foo />];
|
||||||
|
|
||||||
|
let jsxInList4 = [<Foo />, <Foo />, <Foo />];
|
||||||
|
|
||||||
|
let jsxInList5 = [<Foo />, <Foo />];
|
||||||
|
|
||||||
|
let jsxInList6 = [<Foo />, <Foo />];
|
||||||
|
|
||||||
|
let jsxInList7 = [<Foo />, <Foo />];
|
||||||
|
|
||||||
|
let jsxInList8 = [<Foo />, <Foo />];
|
||||||
|
|
||||||
|
let testFunc b => b;
|
||||||
|
|
||||||
|
let jsxInFnCall = testFunc <Foo />;
|
||||||
|
|
||||||
|
let lotsOfArguments =
|
||||||
|
<LotsOfArguments
|
||||||
|
argument1=1
|
||||||
|
argument2=2
|
||||||
|
argument3=3
|
||||||
|
argument4=4
|
||||||
|
argument5=5
|
||||||
|
argument6="test">
|
||||||
|
<Namespace.Foo />
|
||||||
|
</LotsOfArguments>;
|
||||||
|
|
||||||
|
let lowerCase = <div argument1=1 />;
|
||||||
|
|
||||||
|
let b = 0;
|
||||||
|
|
||||||
|
let d = 0;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Should pun the first example:
|
||||||
|
*/
|
||||||
|
let a = <Foo a> 5 </Foo>;
|
||||||
|
|
||||||
|
let a = <Foo a=b> 5 </Foo>;
|
||||||
|
|
||||||
|
let a = <Foo a=b b=d> 5 </Foo>;
|
||||||
|
|
||||||
|
let a = <Foo a> 0.55 </Foo>;
|
||||||
|
|
||||||
|
let a = Foo.createElement "" [@JSX];
|
||||||
|
|
||||||
|
let ident = <Foo> a </Foo>;
|
||||||
|
|
||||||
|
let fragment1 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment2 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment3 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment4 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment5 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment6 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment7 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment8 = <> <Foo /> <Foo /> </>;
|
||||||
|
|
||||||
|
let fragment9 = <> 2 2 2 2 </>;
|
||||||
|
|
||||||
|
let fragment10 = <> 2.2 3.2 4.6 1.2 </>;
|
||||||
|
|
||||||
|
let fragment11 = <> "str" </>;
|
||||||
|
|
||||||
|
let fragment12 = <> (6 + 2) (6 + 2) (6 + 2) </>;
|
||||||
|
|
||||||
|
let fragment13 = <> fragment11 fragment11 </>;
|
||||||
|
|
||||||
|
let listOfItems1 = <List1> 1 2 3 4 5 </List1>;
|
||||||
|
|
||||||
|
let listOfItems2 =
|
||||||
|
<List2> 1.0 2.8 3.8 4.0 5.1 </List2>;
|
||||||
|
|
||||||
|
let listOfItems3 =
|
||||||
|
<List3> fragment11 fragment11 </List3>;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Several sequential simple jsx expressions must be separated with a space.
|
||||||
|
*/
|
||||||
|
let thisIsRight a b => ();
|
||||||
|
|
||||||
|
let tagOne children => ();
|
||||||
|
|
||||||
|
let tagTwo children => ();
|
||||||
|
|
||||||
|
/* thisIsWrong <tagOne /><tagTwo />; */
|
||||||
|
thisIsRight <tagOne /> <tagTwo />;
|
||||||
|
|
||||||
|
/* thisIsWrong <tagOne> </tagOne><tagTwo> </tagTwo>; */
thisIsRight <tagOne /> <tagTwo />;

let a children => ();

let b children => ();

let thisIsOkay =
  <List1> <a /> <b /> <a /> <b /> </List1>;

let thisIsAlsoOkay =
  <List1> <a /> <b /> </List1>;

/* Doesn't make any sense, but suppose you defined an
   infix operator to compare jsx */
<a /> < <b />;

<a /> > <b />;

<a /> < <b />;

<a /> > <b />;

let listOfListOfJsx = [<> </>];

let listOfListOfJsx = [<> <Foo /> </>];

let listOfListOfJsx = [
  <> <Foo /> </>,
  <> <Bar /> </>
];

let listOfListOfJsx = [
  <> <Foo /> </>,
  <> <Bar /> </>,
  ...listOfListOfJsx
];

let sameButWithSpaces = [<> </>];

let sameButWithSpaces = [<> <Foo /> </>];

let sameButWithSpaces = [
  <> <Foo /> </>,
  <> <Bar /> </>
];

let sameButWithSpaces = [
  <> <Foo /> </>,
  <> <Bar /> </>,
  ...sameButWithSpaces
];

/*
 * Test named tag right next to an open bracket.
 */
let listOfJsx = [];

let listOfJsx = [<Foo />];

let listOfJsx = [<Foo />, <Bar />];

let listOfJsx = [<Foo />, <Bar />, ...listOfJsx];

let sameButWithSpaces = [];

let sameButWithSpaces = [<Foo />];

let sameButWithSpaces = [<Foo />, <Bar />];

let sameButWithSpaces = [
  <Foo />,
  <Bar />,
  ...sameButWithSpaces
];


/**
 * Test no conflict with polymorphic variant types.
 */
type thisType = [ | `Foo | `Bar];

type t 'a = [< thisType] as 'a;

let asd =
  <One test=true foo=2> "a" "b" </One> [@foo];

let asd2 =
  One.createElementobvioustypo
    test::false
    ["a", "b"]
    [@JSX]
    [@foo];

let span
    test::(test: bool)
    foo::(foo: int)
    children => 1;

let asd =
  <span test=true foo=2> "a" "b" </span> [@foo];

/* "video" call doesn't end with a list, so the expression isn't converted to JSX */
let video test::(test: bool) children => children;

let asd2 = video test::false 10 [@JSX] [@foo];

let div children => 1;

((fun () => div) ()) [] [@JSX];

let myFun () =>
  <>
    <Namespace.Foo
      intended=true
      anotherOptional=200
    />
    <Namespace.Foo
      intended=true
      anotherOptional=200
    />
    <Namespace.Foo
      intended=true anotherOptional=200>
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
    </Namespace.Foo>
  </>;

let myFun () => <> </>;

let myFun () =>
  <>
    <Namespace.Foo
      intended=true
      anotherOptional=200
    />
    <Namespace.Foo
      intended=true
      anotherOptional=200
    />
    <Namespace.Foo
      intended=true anotherOptional=200>
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
      <Foo />
    </Namespace.Foo>
  </>;


/**
 * Children should wrap without forcing attributes to.
 */
<Foo a=10 b=0>
  <Bar />
  <Bar />
  <Bar />
  <Bar />
</Foo>;
/**
 * Failing test cases:
 */
/* let res = <Foo a=10 b=(<Foo a=200 />) > */
/* <Bar /> */
/* </Foo>; */
/* let res = <Foo a=10 b=(<Foo a=200 />) />; */
1326 samples/Reason/Layout.re Normal file (File diff suppressed because it is too large)
344 samples/Reason/Machine.re Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
open Format;
|
||||||
|
|
||||||
|
let module Endo = {
|
||||||
|
type t 'a = 'a => 'a;
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Syntax = {
|
||||||
|
let module Var = {
|
||||||
|
type t = int;
|
||||||
|
};
|
||||||
|
let module Term = {
|
||||||
|
type t =
|
||||||
|
| App t t
|
||||||
|
| Lam t
|
||||||
|
| Var Var.t
|
||||||
|
;
|
||||||
|
};
|
||||||
|
let module Sub = {
|
||||||
|
type t 'a =
|
||||||
|
| Cmp (t 'a) (t 'a)
|
||||||
|
| Dot 'a (t 'a)
|
||||||
|
| Id
|
||||||
|
| Shift
|
||||||
|
;
|
||||||
|
|
||||||
|
let map f sgm => {
|
||||||
|
let rec go = fun
|
||||||
|
| Cmp sgm0 sgm1 => Cmp (go sgm0) (go sgm1)
|
||||||
|
| Dot a sgm => Dot (f a) (go sgm)
|
||||||
|
| Id => Id
|
||||||
|
| Shift => Shift
|
||||||
|
;
|
||||||
|
go sgm;
|
||||||
|
};
|
||||||
|
|
||||||
|
let rec apply sgm e =>
|
||||||
|
switch (sgm, e) {
|
||||||
|
| (sgm, Term.App e0 e1) => Term.App (apply sgm e0) (apply sgm e1)
|
||||||
|
| (sgm, Term.Lam e) => Term.Lam (apply (Dot (Term.Var 0) (Cmp sgm Shift)) e)
|
||||||
|
| (Dot e _, Term.Var 0) => e
|
||||||
|
| (Dot _ sgm, Term.Var i) => apply sgm (Term.Var (i - 1))
|
||||||
|
| (Id, Term.Var i) => Term.Var i
|
||||||
|
| (Shift, Term.Var i) => Term.Var (i + 1)
|
||||||
|
| (Cmp rho sgm, e) => apply sgm (apply rho e)
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Zip = {
|
||||||
|
open Syntax;
|
||||||
|
type t 'a =
|
||||||
|
| App0 (t 'a) 'a
|
||||||
|
| App1 'a (t 'a)
|
||||||
|
| Halt
|
||||||
|
| Lam (t 'a)
|
||||||
|
;
|
||||||
|
|
||||||
|
let map f sgm => {
|
||||||
|
let rec go = fun
|
||||||
|
| App0 zip e1 => App0 (go zip) (f e1)
|
||||||
|
| App1 e0 zip => App1 (f e0) (go zip)
|
||||||
|
| Halt => Halt
|
||||||
|
| Lam zip => Lam (go zip)
|
||||||
|
;
|
||||||
|
go sgm;
|
||||||
|
};
|
||||||
|
|
||||||
|
let rec apply zip acc => switch zip {
|
||||||
|
| App0 zip e1 => apply zip (Term.App acc e1)
|
||||||
|
| App1 e0 zip => apply zip (Term.App e0 acc)
|
||||||
|
| Halt => acc
|
||||||
|
| Lam zip => apply zip (Term.Lam acc)
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Clo = {
|
||||||
|
open Syntax;
|
||||||
|
type t =
|
||||||
|
| Clo Term.t (Sub.t t);
|
||||||
|
let rec from (Clo term sgm) => Sub.apply (Sub.map from sgm) term;
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Pretty = {
|
||||||
|
let module Delim = {
|
||||||
|
type t = string;
|
||||||
|
let pp prev next fmt token => if (prev < next) { fprintf fmt "%s" token };
|
||||||
|
};
|
||||||
|
let module Prec = {
|
||||||
|
type t = int;
|
||||||
|
open Syntax.Term;
|
||||||
|
let calc = fun
|
||||||
|
| App _ _ => 1
|
||||||
|
| Lam _ => 2
|
||||||
|
| Var _ => 0
|
||||||
|
;
|
||||||
|
};
|
||||||
|
let module Name = {
|
||||||
|
type t = string;
|
||||||
|
|
||||||
|
let suffix = {
|
||||||
|
let script = fun
|
||||||
|
| 0 => "₀"
|
||||||
|
| 1 => "₁"
|
||||||
|
| 2 => "₂"
|
||||||
|
| 3 => "₃"
|
||||||
|
| 4 => "₄"
|
||||||
|
| 5 => "₅"
|
||||||
|
| 6 => "₆"
|
||||||
|
| 7 => "₇"
|
||||||
|
| 8 => "₈"
|
||||||
|
| 9 => "₉"
|
||||||
|
| _ => failwith "bad subscript";
|
||||||
|
let rec go acc => fun
|
||||||
|
| 0 => acc
|
||||||
|
| n => go (script (n mod 10) ^ acc) (n / 10);
|
||||||
|
go ""
|
||||||
|
};
|
||||||
|
|
||||||
|
let gen = {
|
||||||
|
let offset = 97;
|
||||||
|
let width = 26;
|
||||||
|
fun () i => {
|
||||||
|
let code = i mod width + offset;
|
||||||
|
let char = Char.chr code;
|
||||||
|
let prime = i / width;
|
||||||
|
let suffix = suffix prime;
|
||||||
|
let name = Char.escaped char ^ suffix;
|
||||||
|
Some name;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Env = {
|
||||||
|
type t = {
|
||||||
|
used: list Name.t,
|
||||||
|
rest: Stream.t Name.t,
|
||||||
|
};
|
||||||
|
let mk () => {
|
||||||
|
let used = [];
|
||||||
|
let rest = Stream.from @@ Name.gen ();
|
||||||
|
{ used, rest };
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
type printer 'a = Env.t => Prec.t => formatter => 'a => unit;
|
||||||
|
|
||||||
|
let module Term = {
|
||||||
|
open Syntax.Term;
|
||||||
|
let rec pp ({ Env.used: used, rest } as env) prev fmt e => {
|
||||||
|
let next = Prec.calc e;
|
||||||
|
switch e {
|
||||||
|
| App e0 e1 =>
|
||||||
|
fprintf fmt "@[%a%a@ %a%a@]"
|
||||||
|
(Delim.pp prev next) "("
|
||||||
|
(pp env 1) e0
|
||||||
|
(pp env 0) e1
|
||||||
|
(Delim.pp prev next) ")"
|
||||||
|
| Lam e =>
|
||||||
|
let name = Stream.next rest;
|
||||||
|
let env = { ...env, Env.used: [name, ...used] };
|
||||||
|
fprintf fmt "%aλ%a.%a%a"
|
||||||
|
(Delim.pp prev next) "("
|
||||||
|
(pp_print_string) name
|
||||||
|
(pp env next) e
|
||||||
|
(Delim.pp prev next) ")"
|
||||||
|
| Var index =>
|
||||||
|
fprintf fmt "%s" @@ try (List.nth used index) {
|
||||||
|
| _ => "#" ^ string_of_int index
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Sub = {
|
||||||
|
open Syntax.Sub;
|
||||||
|
let rec pp pp_elem env prev fmt => fun
|
||||||
|
| Cmp sgm1 sgm0 =>
|
||||||
|
fprintf fmt "@[%a;@ %a@]"
|
||||||
|
(pp pp_elem env prev) sgm1
|
||||||
|
(pp pp_elem env prev) sgm0
|
||||||
|
| Dot e sgm =>
|
||||||
|
fprintf fmt "@[%a@ ·@ %a@]"
|
||||||
|
(pp_elem env prev) e
|
||||||
|
(pp pp_elem env prev) sgm
|
||||||
|
| Id =>
|
||||||
|
fprintf fmt "ι"
|
||||||
|
| Shift =>
|
||||||
|
fprintf fmt "↑"
|
||||||
|
;
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Clo = {
|
||||||
|
let rec pp env prev fmt (Clo.Clo e sgm) => {
|
||||||
|
let next = Prec.calc e;
|
||||||
|
fprintf fmt "@[%a%a%a[%a]@]"
|
||||||
|
(Delim.pp prev next) "("
|
||||||
|
(Term.pp env next) e
|
||||||
|
(Delim.pp prev next) ")"
|
||||||
|
(Sub.pp pp env next) sgm
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Zip = {
|
||||||
|
open Zip;
|
||||||
|
let rec pp pp_elem env prev fmt => fun
|
||||||
|
| App0 zip elem =>
|
||||||
|
fprintf fmt "inl@[<v -1>⟨@,%a@,%a⟩@]"
|
||||||
|
(pp pp_elem env prev) zip
|
||||||
|
(pp_elem env prev) elem
|
||||||
|
| App1 elem zip =>
|
||||||
|
fprintf fmt "inr@[<v -1>⟨@,%a@,%a⟩@]"
|
||||||
|
(pp_elem env prev) elem
|
||||||
|
(pp pp_elem env prev) zip
|
||||||
|
| Halt =>
|
||||||
|
fprintf fmt "halt"
|
||||||
|
| Lam zip =>
|
||||||
|
fprintf fmt "lam@[<v -1>⟨@,%a⟩@]"
|
||||||
|
(pp pp_elem env prev) zip
|
||||||
|
;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Machine = {
|
||||||
|
type t = {
|
||||||
|
clo: Clo.t,
|
||||||
|
ctx: Zip.t Clo.t,
|
||||||
|
};
|
||||||
|
|
||||||
|
let into e => {
|
||||||
|
open Clo;
|
||||||
|
open Syntax.Sub;
|
||||||
|
let clo = Clo e Id;
|
||||||
|
let ctx = Zip.Halt;
|
||||||
|
{ clo, ctx }
|
||||||
|
};
|
||||||
|
|
||||||
|
let from { clo, ctx } => Zip.apply (Zip.map Clo.from ctx) (Clo.from clo);
|
||||||
|
|
||||||
|
let pp fmt rule state => {
|
||||||
|
fprintf fmt "@[<v>ctx ::@[<v -5>@,%a@]@,clo ::@[<v -5>@,%a@]@,rule ::@[<v -5>@,%a@]@,term ::@[<v -5>@,%a@]@]@."
|
||||||
|
(Pretty.Zip.pp Pretty.Clo.pp (Pretty.Env.mk ()) 2) state.ctx
|
||||||
|
(Pretty.Clo.pp (Pretty.Env.mk ()) 2) state.clo
|
||||||
|
(pp_print_string) rule
|
||||||
|
(Pretty.Term.pp (Pretty.Env.mk ()) 2) (from state)
|
||||||
|
};
|
||||||
|
|
||||||
|
let halted state => {
|
||||||
|
open Clo;
|
||||||
|
open Syntax.Sub;
|
||||||
|
open Syntax.Term;
|
||||||
|
switch state {
|
||||||
|
| { clo: Clo (Var _) Id, _ } => true
|
||||||
|
| _ => false
|
||||||
|
} [@warning "-4"];
|
||||||
|
};
|
||||||
|
|
||||||
|
let step state => {
|
||||||
|
open Clo;
|
||||||
|
open Syntax.Sub;
|
||||||
|
open Syntax.Term;
|
||||||
|
let rule = ref "";
|
||||||
|
let state = switch state {
|
||||||
|
/* left */
|
||||||
|
| { clo: Clo (App e0 e1) sgm, ctx } =>
|
||||||
|
let clo = Clo e0 sgm;
|
||||||
|
let ctx = Zip.App0 ctx (Clo e1 sgm);
|
||||||
|
rule := "LEFT";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* beta */
|
||||||
|
| { clo: Clo (Lam e) sgm, ctx: Zip.App0 ctx c0 } =>
|
||||||
|
let clo = Clo e (Cmp (Dot c0 sgm) Id);
|
||||||
|
rule := "BETA";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* lambda */
|
||||||
|
| { clo: Clo (Lam e) sgm, ctx } =>
|
||||||
|
let clo = Clo e (Cmp (Dot (Clo (Var 0) Id) (Cmp sgm Shift)) Id);
|
||||||
|
let ctx = Zip.Lam ctx;
|
||||||
|
rule := "LAMBDA";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* associate */
|
||||||
|
| { clo: Clo (Var n) (Cmp (Cmp pi rho) sgm), ctx } =>
|
||||||
|
let clo = Clo (Var n) (Cmp pi (Cmp rho sgm));
|
||||||
|
rule := "ASSOCIATE";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* head */
|
||||||
|
| { clo: Clo (Var 0) (Cmp (Dot (Clo e pi) _) sgm), ctx } =>
|
||||||
|
let clo = Clo e (Cmp pi sgm);
|
||||||
|
rule := "HEAD";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* tail */
|
||||||
|
| { clo: Clo (Var n) (Cmp (Dot (Clo _ _) rho) sgm), ctx } =>
|
||||||
|
let clo = Clo (Var (n - 1)) (Cmp rho sgm);
|
||||||
|
rule := "TAIL";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* shift */
|
||||||
|
| { clo: Clo (Var n) (Cmp Shift sgm), ctx } =>
|
||||||
|
let clo = Clo (Var (n + 1)) sgm;
|
||||||
|
rule := "SHIFT";
|
||||||
|
{ clo, ctx };
|
||||||
|
/* id */
|
||||||
|
| { clo: Clo (Var n) (Cmp Id sgm), ctx } =>
|
||||||
|
let clo = Clo (Var n) sgm;
|
||||||
|
rule := "ID";
|
||||||
|
{ clo, ctx };
|
||||||
|
| _ =>
|
||||||
|
pp std_formatter !rule state;
|
||||||
|
failwith "bad state";
|
||||||
|
} [@warning "-4"];
|
||||||
|
pp std_formatter !rule state;
|
||||||
|
state;
|
||||||
|
};
|
||||||
|
|
||||||
|
let norm e => {
|
||||||
|
let count = ref 0;
|
||||||
|
let state = ref (into e);
|
||||||
|
while (not (halted !state)) {
|
||||||
|
fprintf std_formatter "@\n--- step[%d] ---@\n" !count;
|
||||||
|
incr count;
|
||||||
|
state := step !state;
|
||||||
|
};
|
||||||
|
from !state;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Test = {
|
||||||
|
open Syntax.Term;
|
||||||
|
let l e => Lam e;
|
||||||
|
let ( *@ ) e0 e1 => App e0 e1;
|
||||||
|
let ff = l (l (Var 1));
|
||||||
|
let tt = l (l (Var 0));
|
||||||
|
let zero = l (l (Var 1));
|
||||||
|
let succ = l (l (l (Var 0 *@ Var 2)));
|
||||||
|
let one = succ *@ zero;
|
||||||
|
let two = succ *@ one;
|
||||||
|
let three = succ *@ two;
|
||||||
|
let const = l (l (Var 1));
|
||||||
|
let fix = l (l (Var 1 *@ (Var 0 *@ Var 0)) *@ l (Var 1 *@ (Var 0 *@ Var 0)));
|
||||||
|
let add = fix *@ l (l (l (Var 1 *@ Var 0 *@ l (succ *@ Var 3 *@ Var 0 *@ Var 1))));
|
||||||
|
let init = l (l (Var 0) *@ l (l (Var 1)));
|
||||||
|
};
|
||||||
|
|
||||||
|
let module Run = {
|
||||||
|
let go () => Machine.norm Test.init;
|
||||||
|
};
|
||||||
308 samples/Reason/SuperMerlin.re Normal file
@@ -0,0 +1,308 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2015-present, Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
let startedMerlin: ref (option Js.Unsafe.any) = {contents: None};
|
||||||
|
|
||||||
|
let fixedEnv = Js.Unsafe.js_expr "require('../lib/fixedEnv')";
|
||||||
|
|
||||||
|
/* This and the subsequent big js blocks are copied over from Nuclide. More convenient for now. */
|
||||||
|
let findNearestMerlinFile' = Js.Unsafe.js_expr {|
|
||||||
|
function findNearestMerlinFile(beginAtFilePath) {
|
||||||
|
var path = require('path');
|
||||||
|
var fs = require('fs');
|
||||||
|
var fileDir = path.dirname(beginAtFilePath);
|
||||||
|
var currentPath = path.resolve(fileDir);
|
||||||
|
do {
|
||||||
|
var fileToFind = path.join(currentPath, '.merlin');
|
||||||
|
var hasFile = fs.existsSync(fileToFind);
|
||||||
|
if (hasFile) {
|
||||||
|
return path.dirname(currentPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (path.dirname(currentPath) === currentPath) {
|
||||||
|
// Bail
|
||||||
|
return '.';
|
||||||
|
}
|
||||||
|
currentPath = path.dirname(currentPath);
|
||||||
|
} while (true);
|
||||||
|
}
|
||||||
|
|};
|
||||||
|
|
||||||
|
let findNearestMerlinFile beginAtFilePath::path => {
|
||||||
|
let result = Js.Unsafe.fun_call findNearestMerlinFile' [|Js.Unsafe.inject (Js.string path)|];
|
||||||
|
Js.to_string result
|
||||||
|
};
|
||||||
|
|
||||||
|
let createMerlinReaderFnOnce' = Js.Unsafe.js_expr {|
|
||||||
|
function(ocamlMerlinPath, ocamlMerlinFlags, dotMerlinDir, fixedEnv) {
|
||||||
|
var spawn = require('child_process').spawn;
|
||||||
|
// To split while stripping out any leading/trailing space, we match on all
|
||||||
|
// *non*-whitespace.
|
||||||
|
var items = ocamlMerlinFlags === '' ? [] : ocamlMerlinFlags.split(/\s+/);
|
||||||
|
var merlinProcess = spawn(ocamlMerlinPath, items, {cwd: dotMerlinDir});
|
||||||
|
merlinProcess.stderr.on('data', function(d) {
|
||||||
|
console.error('Ocamlmerlin: something wrong happened:');
|
||||||
|
console.error(d.toString());
|
||||||
|
});
|
||||||
|
|
||||||
|
merlinProcess.stdout.on('close', function(d) {
|
||||||
|
console.error('Ocamlmerlin: closed.');
|
||||||
|
});
|
||||||
|
|
||||||
|
var cmdQueue = [];
|
||||||
|
var hasStartedReading = false;
|
||||||
|
|
||||||
|
var readline = require('readline');
|
||||||
|
var reader = readline.createInterface({
|
||||||
|
input: merlinProcess.stdout,
|
||||||
|
terminal: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
return function(cmd, resolve, reject) {
|
||||||
|
cmdQueue.push([resolve, reject]);
|
||||||
|
|
||||||
|
if (!hasStartedReading) {
|
||||||
|
hasStartedReading = true;
|
||||||
|
reader.on('line', function(line) {
|
||||||
|
var response;
|
||||||
|
try {
|
||||||
|
response = JSON.parse(line);
|
||||||
|
} catch (err) {
|
||||||
|
response = null;
|
||||||
|
}
|
||||||
|
var resolveReject = cmdQueue.shift();
|
||||||
|
var resolve = resolveReject[0];
|
||||||
|
var reject = resolveReject[1];
|
||||||
|
|
||||||
|
if (!response || !Array.isArray(response) || response.length !== 2) {
|
||||||
|
reject(new Error('Unexpected ocamlmerlin output format: ' + line));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var status = response[0];
|
||||||
|
var content = response[1];
|
||||||
|
|
||||||
|
var errorResponses = {
|
||||||
|
'failure': true,
|
||||||
|
'error': true,
|
||||||
|
'exception': true,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (errorResponses[status]) {
|
||||||
|
reject(new Error('Ocamlmerlin returned an error: ' + line));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
resolve(content);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
merlinProcess.stdin.write(JSON.stringify(cmd));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|};
|
||||||
|
|
||||||
|
let createMerlinReaderFnOnce
|
||||||
|
pathToMerlin::pathToMerlin
|
||||||
|
merlinFlags::merlinFlags
|
||||||
|
dotMerlinPath::dotMerlinPath =>
|
||||||
|
Js.Unsafe.fun_call
|
||||||
|
createMerlinReaderFnOnce'
|
||||||
|
[|
|
||||||
|
Js.Unsafe.inject (Js.string pathToMerlin),
|
||||||
|
Js.Unsafe.inject (Js.string merlinFlags),
|
||||||
|
Js.Unsafe.inject (Js.string dotMerlinPath),
|
||||||
|
Js.Unsafe.inject fixedEnv
|
||||||
|
|];
|
||||||
|
|
||||||
|
let startMerlinProcess path::path =>
|
||||||
|
switch startedMerlin.contents {
|
||||||
|
| Some readerFn => ()
|
||||||
|
| None =>
|
||||||
|
let atomReasonPathToMerlin = Atom.Config.get "atom-reason.pathToMerlin";
|
||||||
|
let atomReasonMerlinFlags = Atom.Config.get "atom-reason.merlinFlags";
|
||||||
|
let atomReasonMerlinLogFile = Atom.Config.get "atom-reason.merlinLogFile";
|
||||||
|
switch atomReasonMerlinLogFile {
|
||||||
|
| JsonString "" => ()
|
||||||
|
| JsonString s => Atom.Env.setEnvVar "MERLIN_LOG" s
|
||||||
|
| _ => ()
|
||||||
|
};
|
||||||
|
let readerFn =
|
||||||
|
createMerlinReaderFnOnce
|
||||||
|
pathToMerlin::(Atom.JsonValue.unsafeExtractString atomReasonPathToMerlin)
|
||||||
|
merlinFlags::(Atom.JsonValue.unsafeExtractString atomReasonMerlinFlags)
|
||||||
|
dotMerlinPath::(findNearestMerlinFile beginAtFilePath::path);
|
||||||
|
startedMerlin.contents = Some readerFn
|
||||||
|
};
|
||||||
|
|
||||||
|
let readOneLine cmd::cmd resolve reject =>
|
||||||
|
switch startedMerlin.contents {
|
||||||
|
| None => raise Not_found
|
||||||
|
| Some readerFn =>
|
||||||
|
Js.Unsafe.fun_call
|
||||||
|
readerFn
|
||||||
|
[|
|
||||||
|
Js.Unsafe.inject cmd,
|
||||||
|
Js.Unsafe.inject (Js.wrap_callback resolve),
|
||||||
|
Js.Unsafe.inject (Js.wrap_callback reject)
|
||||||
|
|]
|
||||||
|
};
|
||||||
|
|
||||||
|
/* contextify is important for avoiding different buffers calling the backing merlin at the same time. */
|
||||||
|
/* https://github.com/the-lambda-church/merlin/blob/d98a08d318ca14d9c702bbd6eeadbb762d325ce7/doc/dev/PROTOCOL.md#contextual-commands */
|
||||||
|
let contextify query::query path::path => Js.Unsafe.obj [|
|
||||||
|
("query", Js.Unsafe.inject query),
|
||||||
|
("context", Js.Unsafe.inject (Js.array [|Js.string "auto", Js.string path|]))
|
||||||
|
|];
|
||||||
|
|
||||||
|
let prepareCommand text::text path::path query::query resolve reject => {
|
||||||
|
startMerlinProcess path;
|
||||||
|
/* These two commands should be run before every main command. */
|
||||||
|
readOneLine
|
||||||
|
cmd::(
|
||||||
|
contextify
|
||||||
|
/* The protocol command tells Merlin which API version we want to use. (2 for us) */
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "protocol"),
|
||||||
|
Js.Unsafe.inject (Js.string "version"),
|
||||||
|
Js.Unsafe.inject (Js.number_of_float 2.)
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
path::path
|
||||||
|
)
|
||||||
|
(
|
||||||
|
fun _ =>
|
||||||
|
readOneLine
|
||||||
|
cmd::(
|
||||||
|
contextify
|
||||||
|
/* The tell command allows us to synchronize our text with Merlin's internal buffer. */
|
||||||
|
query::(
|
||||||
|
Js.array [|Js.string "tell", Js.string "start", Js.string "end", Js.string text|]
|
||||||
|
)
|
||||||
|
path::path
|
||||||
|
)
|
||||||
|
(fun _ => readOneLine cmd::(contextify query::query path::path) resolve reject)
|
||||||
|
reject
|
||||||
|
)
|
||||||
|
reject
|
||||||
|
};
|
||||||
|
|
||||||
|
let positionToJsMerlinPosition (line, col) => Js.Unsafe.obj [|
|
||||||
|
/* lines (rows) are 1-based for merlin, not 0-based, like for Atom */
|
||||||
|
("line", Js.Unsafe.inject (Js.number_of_float (float_of_int (line + 1)))),
|
||||||
|
("col", Js.Unsafe.inject (Js.number_of_float (float_of_int col)))
|
||||||
|
|];
|
||||||
|
|
||||||
|
/* Actual merlin commands we'll use. */
|
||||||
|
let getTypeHint path::path text::text position::position resolve reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "type"),
|
||||||
|
Js.Unsafe.inject (Js.string "enclosing"),
|
||||||
|
Js.Unsafe.inject (Js.string "at"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition position)
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
let getAutoCompleteSuggestions
|
||||||
|
path::path
|
||||||
|
text::text
|
||||||
|
position::position
|
||||||
|
prefix::prefix
|
||||||
|
resolve
|
||||||
|
reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "complete"),
|
||||||
|
Js.Unsafe.inject (Js.string "prefix"),
|
||||||
|
Js.Unsafe.inject (Js.string prefix),
|
||||||
|
Js.Unsafe.inject (Js.string "at"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition position),
|
||||||
|
Js.Unsafe.inject (Js.string "with"),
|
||||||
|
Js.Unsafe.inject (Js.string "doc")
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
let getDiagnostics path::path text::text resolve reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(Js.array [|Js.Unsafe.inject (Js.string "errors")|])
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
let locate path::path text::text extension::extension position::position resolve reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "locate"),
|
||||||
|
Js.Unsafe.inject (Js.string ""),
|
||||||
|
Js.Unsafe.inject (Js.string extension),
|
||||||
|
Js.Unsafe.inject (Js.string "at"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition position)
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
/* reject */
|
||||||
|
let getOccurrences path::path text::text position::position resolve reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "occurrences"),
|
||||||
|
Js.Unsafe.inject (Js.string "ident"),
|
||||||
|
Js.Unsafe.inject (Js.string "at"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition position)
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
let destruct
|
||||||
|
path::path
|
||||||
|
text::text
|
||||||
|
startPosition::startPosition
|
||||||
|
endPosition::endPosition
|
||||||
|
resolve
|
||||||
|
reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(
|
||||||
|
Js.array [|
|
||||||
|
Js.Unsafe.inject (Js.string "case"),
|
||||||
|
Js.Unsafe.inject (Js.string "analysis"),
|
||||||
|
Js.Unsafe.inject (Js.string "from"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition startPosition),
|
||||||
|
Js.Unsafe.inject (Js.string "to"),
|
||||||
|
Js.Unsafe.inject (positionToJsMerlinPosition endPosition)
|
||||||
|
|]
|
||||||
|
)
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
|
|
||||||
|
let getOutline path::path text::text resolve reject =>
|
||||||
|
prepareCommand
|
||||||
|
text::text
|
||||||
|
path::path
|
||||||
|
query::(Js.array [|Js.Unsafe.inject (Js.string "outline")|])
|
||||||
|
resolve
|
||||||
|
reject;
|
||||||
989 samples/Reason/Syntax.re Normal file
@@ -0,0 +1,989 @@
|
|||||||
|
/* Copyright (c) 2015-present, Facebook, Inc. All rights reserved. */
|
||||||
|
[@@@autoFormat let wrap = 80; let shift = 2];
|
||||||
|
|
||||||
|
Modules.run ();
|
||||||
|
|
||||||
|
Polymorphism.run ();
|
||||||
|
|
||||||
|
Variants.run ();
|
||||||
|
|
||||||
|
BasicStructures.run ();
|
||||||
|
|
||||||
|
TestUtils.printSection "General Syntax";
|
||||||
|
|
||||||
|
/* Won't work! */
|
||||||
|
/* let matchingFunc a = match a with */
|
||||||
|
/* `Thingy x => (print_string "matched thingy x"); x */
|
||||||
|
/* | `Other x => (print_string "matched other x"); x;; */
|
||||||
|
/* */
|
||||||
|
let matchingFunc a =>
|
||||||
|
switch a {
|
||||||
|
| `Thingy x =>
|
||||||
|
print_string "matched thingy x";
|
||||||
|
let zz = 10;
|
||||||
|
zz
|
||||||
|
| `Other x =>
|
||||||
|
print_string "matched other x";
|
||||||
|
x
|
||||||
|
};
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedInParens =
|
||||||
|
(int => int) => int => int;
|
||||||
|
|
||||||
|
type allParensCanBeRemoved =
|
||||||
|
int => int => int => int;
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedAndFirstThree =
|
||||||
|
((int => int) => int) => int;
|
||||||
|
|
||||||
|
/* Same thing now but with type constructors instead of each int */
|
||||||
|
type firstTwoShouldBeGroupedInParens =
|
||||||
|
(list int => list int) => list int => list int;
|
||||||
|
|
||||||
|
type allParensCanBeRemoved =
|
||||||
|
list int => list int => list int => list int;
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedAndFirstThree =
|
||||||
|
((list int => list int) => list int) =>
|
||||||
|
list int;
|
||||||
|
|
||||||
|
type myRecordType = {
|
||||||
|
firstTwoShouldBeGroupedInParens:
|
||||||
|
(int => int) => int => int,
|
||||||
|
allParensCanBeRemoved:
|
||||||
|
int => int => int => int,
|
||||||
|
firstTwoShouldBeGroupedAndFirstThree:
|
||||||
|
((int => int) => int) => int
|
||||||
|
};
|
||||||
|
|
||||||
|
type firstNamedArgShouldBeGroupedInParens =
|
||||||
|
first::(int => int) => second::int => int;
|
||||||
|
|
||||||
|
type allParensCanBeRemoved =
|
||||||
|
first::int => second::int => third::int => int;
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedAndFirstThree =
|
||||||
|
first::((int => int) => int) => int;
|
||||||
|
|
||||||
|
/* Same thing now, but with type constructors instead of int */
|
||||||
|
type firstNamedArgShouldBeGroupedInParens =
|
||||||
|
first::(list int => list int) =>
|
||||||
|
second::list int =>
|
||||||
|
list int;
|
||||||
|
|
||||||
|
type allParensCanBeRemoved =
|
||||||
|
first::list int =>
|
||||||
|
second::list int =>
|
||||||
|
third::list int =>
|
||||||
|
list int;
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedAndFirstThree =
|
||||||
|
first::((list int => list int) => list int) =>
|
||||||
|
list int;
|
||||||
|
|
||||||
|
type firstNamedArgShouldBeGroupedInParens =
|
||||||
|
first::(int => int)? =>
|
||||||
|
second::int list? =>
|
||||||
|
int;
|
||||||
|
|
||||||
|
/* The arrow necessitates parens around the next two args. The ? isn't what
|
||||||
|
* makes the parens necessary. */
|
||||||
|
type firstNamedArgShouldBeGroupedInParensAndSecondNamedArg =
|
||||||
|
first::(int => int)? =>
|
||||||
|
second::(int => int)? =>
|
||||||
|
int;
|
||||||
|
|
||||||
|
type allParensCanBeRemoved =
|
||||||
|
first::int? =>
|
||||||
|
second::int? =>
|
||||||
|
third::int? =>
|
||||||
|
int;
|
||||||
|
|
||||||
|
type firstTwoShouldBeGroupedAndFirstThree =
|
||||||
|
first::((int => int) => int) => int;
|
||||||
|
|
||||||
|
type noParens =
|
||||||
|
one::int => int => int => two::int => int;
|
||||||
|
|
||||||
|
type noParensNeeded =
|
||||||
|
one::int => int => int => two::int => int;
|
||||||
|
|
||||||
|
type firstNamedArgNeedsParens =
|
||||||
|
one::(int => int => int) => two::int => int;
|
||||||
|
|
||||||
|
/* Now, let's try type aliasing */
|
||||||
|
/* Unless wrapped in parens, types between arrows may not be aliased, may not
|
||||||
|
* themselves be arrows. */
|
||||||
|
type parensRequiredAroundFirstArg =
|
||||||
|
(list int as 'a) => int as 'a;
|
||||||
|
|
||||||
|
type parensRequiredAroundReturnType =
|
||||||
|
(list int as 'a) => (int as 'a);
|
||||||
|
|
||||||
|
type parensRequiredAroundReturnType =
|
||||||
|
(list int as 'a) => (int as 'a) as 'b;
|
||||||
|
|
||||||
|
type noParensNeededWhenInTuple =
|
||||||
|
(list int as 'a, list int as 'b) as 'entireThing;
|
||||||
|
|
||||||
|
type myTypeDef 'a = list 'a;
|
||||||
|
|
||||||
|
type instatiatedTypeDef = myTypeDef int => int;
|
||||||
|
|
||||||
|
/* Test a type attribute for good measure */
|
||||||
|
/* We should clean up all of the attribute tagging eventually, but for now,
|
||||||
|
* let's make it super ugly to get out of the way of all the formatting/parsing
|
||||||
|
* implementations (fewer conflicts during parsing, fewer edge cases during
|
||||||
|
* printing).
|
||||||
|
*/
|
||||||
|
type something = (
|
||||||
|
int,
|
||||||
|
int [@lookAtThisAttribute]
|
||||||
|
);
|
||||||
|
|
||||||
|
type longWrappingTypeDefinitionExample =
|
||||||
|
M_RK__G.Types.instance
|
||||||
|
(TGRecognizer.tGFields unit unit)
|
||||||
|
(TGRecognizer.tGMethods unit unit);
|
||||||
|
|
||||||
|
type semiLongWrappingTypeDefinitionExample =
|
||||||
|
M_RK__Gesture.Types.instance
|
||||||
|
TGRecognizerFinal.tGFields
|
||||||
|
TGRecognizerFinal.tGMethods;
|
||||||
|
|
||||||
|
type semiLongWrappingTypeWithConstraint =
|
||||||
|
M_RK__Gesture.Types.instance
|
||||||
|
'a
|
||||||
|
TGRecognizerFinal.tGFields
|
||||||
|
TGRecognizerFinal.tGMethods
|
||||||
|
constraint 'a = (unit, unit);
|
||||||
|
|
||||||
|
type onelineConstrain = 'a constraint 'a = int;
|
||||||
|
|
||||||
|
/* This must be in trunk but not in this branch of OCaml */
|
||||||
|
/* type withNestedRecords = MyConstructor {myField: int} */
|
||||||
|
type colors =
|
||||||
|
| Red int
|
||||||
|
| Black int
|
||||||
|
| Green int;
|
||||||
|
|
||||||
|
/* Another approach is to require declared variants to wrap any record */
|
||||||
|
/* type myRecord = MyRecord {name: int}; */
|
||||||
|
/* let myValue = MyRecord {name: int}; */
|
||||||
|
/* This would force importing of the module */
|
||||||
|
/* This would also lend itself naturally to pattern matching - and avoid having
|
||||||
|
to use `.` operator at all since you normally destructure. */
|
||||||
|
type nameBlahType = {nameBlah: int};
|
||||||
|
|
||||||
|
let myRecord = {nameBlah: 20};
|
||||||
|
|
||||||
|
let myRecordName = myRecord.nameBlah;
|
||||||
|
|
||||||
|
let {nameBlah}: nameBlahType = {nameBlah: 20};
|
||||||
|
|
||||||
|
print_int nameBlah;
|
||||||
|
|
||||||
|
let {nameBlah: aliasedToThisVar}: nameBlahType = {
|
||||||
|
nameBlah: 20
|
||||||
|
};
|
||||||
|
|
||||||
|
print_int aliasedToThisVar;
|
||||||
|
|
||||||
|
let desiredFormattingForWrappedLambda:
|
||||||
|
int => int => int => nameBlahType =
|
||||||
|
/*
|
||||||
|
|
||||||
|
fun is
|
||||||
|
pre- /firstarg\
|
||||||
|
fix /-coupled--\
|
||||||
|
|-\ /-to-prefix--\ */
|
||||||
|
fun curriedArg anotherArg lastArg => {
|
||||||
|
nameBlah: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
type longerInt = int;
|
||||||
|
|
||||||
|
let desiredFormattingForWrappedLambdaWrappedArrow:
|
||||||
|
longerInt =>
|
||||||
|
longerInt =>
|
||||||
|
longerInt =>
|
||||||
|
nameBlahType =
|
||||||
|
/*
|
||||||
|
|
||||||
|
fun is
|
||||||
|
pre- /firstarg\
|
||||||
|
fix /-coupled--\
|
||||||
|
|-\ /-to-prefix--\ */
|
||||||
|
fun curriedArg anotherArg lastArg => {
|
||||||
|
nameBlah: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
let desiredFormattingForWrappedLambdaReturnOnNewLine
|
||||||
|
/*
|
||||||
|
|
||||||
|
fun is
|
||||||
|
pre- /firstarg\
|
||||||
|
fix /-coupled--\
|
||||||
|
|-\ /-to-prefix--\ */
|
||||||
|
curriedArg
|
||||||
|
anotherArg
|
||||||
|
lastArg => {
|
||||||
|
nameBlah: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
let is
|
||||||
|
pre-
|
||||||
|
fix /-function binding name---\
|
||||||
|
|-\ / is coupled to prefix \ */
|
||||||
|
let desiredFormattingForWrappedSugar
|
||||||
|
curriedArg
|
||||||
|
anotherArg
|
||||||
|
lastArg => {
|
||||||
|
nameBlah: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
let is
|
||||||
|
pre-
|
||||||
|
fix /-function binding name---\
|
||||||
|
|-\ / is coupled to prefix \ */
|
||||||
|
let desiredFormattingForWrappedSugarReturnOnNewLine
|
||||||
|
curriedArg
|
||||||
|
anotherArg
|
||||||
|
lastArg => {
|
||||||
|
nameBlah: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
let : type t1 t2. t1 * t2 list -> t1 = ...
|
||||||
|
let rec f : 't1 't2. 't1 * 't2 list -> 't1 =
|
||||||
|
fun (type t1) (type t2) -> (... : t1 * t2 list -> t1)
|
||||||
|
*/
|
||||||
|
type point = {x: int, y: int};
|
||||||
|
|
||||||
|
type point3D = {x: int, y: int, z: int};
|
||||||
|
|
||||||
|
let point2D = {x: 20, y: 30};
|
||||||
|
|
||||||
|
let point3D: point3D = {
|
||||||
|
x: 10,
|
||||||
|
y: 11,
|
||||||
|
z: 80 /* Optional Comma */
|
||||||
|
};
|
||||||
|
|
||||||
|
let printPoint (p: point) => {
|
||||||
|
print_int p.x;
|
||||||
|
print_int p.y
|
||||||
|
};
|
||||||
|
|
||||||
|
let addPoints (p1: point, p2: point) => {
|
||||||
|
x: p1.x + p2.x,
|
||||||
|
y: p1.y + p2.y
|
||||||
|
};
|
||||||
|
|
||||||
|
let res1 = printPoint point2D;
|
||||||
|
|
||||||
|
let res2 =
|
||||||
|
printPoint {x: point3D.x, y: point3D.y};
|
||||||
|
|
||||||
|
/*
|
||||||
|
When () were used to indicate sequences, the parser used seq_expr not only
|
||||||
|
for grouping sequences, but also to form standard precedences.
|
||||||
|
/------- sequence_expr ------\
|
||||||
|
let res3 = printPoint (addPoints (point2D, point3D));
|
||||||
|
|
||||||
|
Interestingly, it knew that tuples aren't sequences.
|
||||||
|
|
||||||
|
To move towards semi delimited, semi-terminated, braces-grouped sequences:
|
||||||
|
while allowing any non-sequence expression to be grouped on parens, we make
|
||||||
|
an explicit rule that allows one single non-semi ended expression to be
|
||||||
|
grouped in parens.
|
||||||
|
|
||||||
|
Actually: We will allow an arbitrary number of semi-delimited expressions to
|
||||||
|
be wrapped in parens, but the braces grouped semi delimited (sequence)
|
||||||
|
expressions must *also* be terminated with a semicolon.
|
||||||
|
|
||||||
|
This allows the parser to distinguish between
|
||||||
|
|
||||||
|
let x = {a}; /* Record {a:a} */
|
||||||
|
let x = {a;}; /* Single item sequence returning identifier {a} */
|
||||||
|
*/
|
||||||
|
let res3 =
|
||||||
|
printPoint (
|
||||||
|
addPoints (
|
||||||
|
point2D,
|
||||||
|
{x: point3D.x, y: point3D.y}
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
type person = {age: int, name: string};
|
||||||
|
|
||||||
|
type hiredPerson = {
|
||||||
|
age: string,
|
||||||
|
name: string,
|
||||||
|
dateHired: int
|
||||||
|
};
|
||||||
|
|
||||||
|
let o: person = {name: "bob", age: 10};
|
||||||
|
|
||||||
|
/* Parens needed? Nope! */
|
||||||
|
let o: person = {name: "bob", age: 10};
|
||||||
|
|
||||||
|
let printPerson (p: person) => {
|
||||||
|
let q: person = p;
|
||||||
|
p.name ^ p.name
|
||||||
|
};
|
||||||
|
|
||||||
|
/* let dontParseMeBro x y:int = x = y;*/
|
||||||
|
/* With this unification, anywhere you see `= fun` you can just omit it */
|
||||||
|
let blah a => a; /* Done */
|
||||||
|
|
||||||
|
let blah a => a; /* Done (almost) */
|
||||||
|
|
||||||
|
let blah a b => a; /* Done */
|
||||||
|
|
||||||
|
let blah a b => a; /* Done (almost) */
|
||||||
|
|
||||||
|
/* More than one consecutive pattern must have a single case */
|
||||||
|
type blah = {blahBlah: int};
|
||||||
|
|
||||||
|
let blah a {blahBlah} => a;
|
||||||
|
|
||||||
|
let blah a {blahBlah} => a;
|
||||||
|
|
||||||
|
let module TryToExportTwice = {
|
||||||
|
let myVal = "hello";
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
Unifying top level module syntax with local module syntax is probably a bad
|
||||||
|
idea at the moment because it makes it more difficult to continue to support
|
||||||
|
`let .. in` bindings. We can distinguish local modules for `let..in` that
|
||||||
|
just happen to be defined at the top level (but not exported).
|
||||||
|
|
||||||
|
let MyModule = {let myVal = 20;} in
|
||||||
|
MyModule.x
|
||||||
|
|
||||||
|
Wait, where would this ever be valid, even if we continued to support
|
||||||
|
`let..in`?
|
||||||
|
*/
|
||||||
|
let onlyDoingThisTopLevelLetToBypassTopLevelSequence = {
|
||||||
|
let x = {
|
||||||
|
print_int 1;
|
||||||
|
print_int 20 /* Missing trailing SEMI */
|
||||||
|
};
|
||||||
|
let x = {
|
||||||
|
print_int 1;
|
||||||
|
print_int 20; /* Ensure missing middle SEMI reported well */
|
||||||
|
print_int 20
|
||||||
|
};
|
||||||
|
let x = {
|
||||||
|
print_int 1;
|
||||||
|
print_int 20;
|
||||||
|
10
|
||||||
|
/* Comment in final position */
|
||||||
|
}; /* Missing final SEMI */
|
||||||
|
x + x
|
||||||
|
};
|
||||||
|
|
||||||
|
type hasA = {a: int};
|
||||||
|
|
||||||
|
let a = 10;
|
||||||
|
|
||||||
|
let returnsASequenceExpressionWithASingleIdentifier
|
||||||
|
() => a;
|
||||||
|
|
||||||
|
let thisReturnsA () => a;
|
||||||
|
|
||||||
|
let thisReturnsAAsWell () => a;
|
||||||
|
|
||||||
|
let recordVal: int = (thisReturnsARecord ()).a;
|
||||||
|
|
||||||
|
Printf.printf
|
||||||
|
"\nproof that thisReturnsARecord: %n\n"
|
||||||
|
recordVal;
|
||||||
|
|
||||||
|
Printf.printf
|
||||||
|
"\nproof that thisReturnsA: %n\n"
|
||||||
|
(thisReturnsA ());
|
||||||
|
|
||||||
|
/* Pattern matching */
|
||||||
|
let blah arg =>
|
||||||
|
switch arg {
|
||||||
|
/* Comment before Bar */
|
||||||
|
| /* Comment between bar/pattern */ Red _ => 1
|
||||||
|
/* Comment Before non-first bar */
|
||||||
|
| /* Comment between bar/pattern */ Black _ => 0
|
||||||
|
| Green _ => 0
|
||||||
|
};
|
||||||
|
|
||||||
|
/* Any function that pattern matches a multicase match is interpreted as a
|
||||||
|
* single arg that is then matched on. Instead of the above `blah` example:*/
|
||||||
|
let blah =
|
||||||
|
fun
|
||||||
|
| Red _ => 1
|
||||||
|
| Black _ => 0
|
||||||
|
| Green _ => 1;
|
||||||
|
|
||||||
|
/* `fun a => a` is read as "a function that maps a to a". Then the */
|
||||||
|
/* above example is read: "a function that 'either maps' Red to.. or maps .." */
|
||||||
|
/* The first bar is read as "either maps" */
|
||||||
|
/* Curried form is not supported:
|
||||||
|
let blah x | Red _ => 1 | Black _ => 0;
|
||||||
|
Theres no sugar rule for dropping => fun, only = fun
|
||||||
|
*/
|
||||||
|
/* let blahCurriedX x => fun /* See, nothing says we can drop the => fun */ */
|
||||||
|
/* |(Red x | Black x | Green x) => 1 /* With some effort, we can amend the sugar rule that would */ */
|
||||||
|
/* | Black x => 0 /* Allow us to drop any => fun.. Just need to make pattern matching */ */
|
||||||
|
/* | Green x => 0; /* Support that */ */
|
||||||
|
/* */
|
||||||
|
let blahCurriedX x =>
|
||||||
|
fun
|
||||||
|
| Red x
|
||||||
|
| Black x
|
||||||
|
| Green x =>
|
||||||
|
1 /* With some effort, we can amend the sugar rule that would */
|
||||||
|
| Black x => 0 /* Allow us to drop any => fun.. Just need to make pattern matching */
|
||||||
|
| Green x => 0; /* Support that */
|
||||||
|
|
||||||
|
let sameThingInLocal = {
|
||||||
|
let blahCurriedX x =>
|
||||||
|
fun
|
||||||
|
| Red x
|
||||||
|
| Black x
|
||||||
|
| Green x =>
|
||||||
|
1 /* With some effort, we can amend the sugar rule that would */
|
||||||
|
| Black x => 0 /* Allow us to drop any => fun.. Just need to make pattern matching */
|
||||||
|
| Green x => 0; /* Support that */
|
||||||
|
blahCurriedX
|
||||||
|
};
|
||||||
|
|
||||||
|
/* This should be parsed/printed exactly as the previous */
|
||||||
|
let blahCurriedX x =>
|
||||||
|
fun
|
||||||
|
| Red x
|
||||||
|
| Black x
|
||||||
|
| Green x => 1
|
||||||
|
| Black x => 0
|
||||||
|
| Green x => 0;
|
||||||
|
|
||||||
|
/* Any time there are multiple match cases we require a leading BAR */
|
||||||
|
let v = Red 10;
|
||||||
|
|
||||||
|
let Black x | Red x | Green x = v; /* So this NON-function still parses */
|
||||||
|
|
||||||
|
/* This doesn't parse, however (and it doesn't in OCaml either):
|
||||||
|
let | Black x | Red x | Green x = v;
|
||||||
|
*/
|
||||||
|
print_int x;
|
||||||
|
|
||||||
|
/* Scoping: Let sequences. Familiar syntax for lexical ML style scope and
|
||||||
|
sequences. */
|
||||||
|
let res = {
|
||||||
|
let a = "a starts out as";
|
||||||
|
{
|
||||||
|
print_string a;
|
||||||
|
let a = 20;
|
||||||
|
print_int a
|
||||||
|
};
|
||||||
|
print_string a
|
||||||
|
};
|
||||||
|
|
||||||
|
let res = {
|
||||||
|
let a = "first its a string";
|
||||||
|
let a = 20;
|
||||||
|
print_int a;
|
||||||
|
print_int a;
|
||||||
|
print_int a
|
||||||
|
};
|
||||||
|
|
||||||
|
let res = {
|
||||||
|
let a = "a is always a string";
|
||||||
|
print_string a;
|
||||||
|
let b = 30;
|
||||||
|
print_int b
|
||||||
|
};
|
||||||
|
|
||||||
|
/* let result = LyList.map (fun | [] => true | _ => false) []; */
|
||||||
|
/* OTHERWISE: You cannot tell if a is the first match case falling through or
|
||||||
|
* a curried first arg */
|
||||||
|
/* let blah = fun a | patt => 0 | anotherPatt => 1; */
|
||||||
|
/* let blah a patt => 0 | anotherPatt => 1; */
|
||||||
|
/*simple pattern EQUALGREATER expr */
|
||||||
|
let blah a {blahBlah} => a;
|
||||||
|
|
||||||
|
/* match_case */
|
||||||
|
/* pattern EQUALGREATER expr */
|
||||||
|
let blah =
|
||||||
|
fun
|
||||||
|
| Red _ => 1
|
||||||
|
| Black _ => 0
|
||||||
|
| Green _ => 0;
|
||||||
|
|
||||||
|
/* Won't work! */
|
||||||
|
/* let arrowFunc = fun a b => print_string "returning aplusb from arrow"; a + b;; */
|
||||||
|
let arrowFunc a b => {
|
||||||
|
print_string "returning aplusb from arrow";
|
||||||
|
a + b
|
||||||
|
};
|
||||||
|
|
||||||
|
let add a b => {
|
||||||
|
let extra = {
|
||||||
|
print_string "adding";
|
||||||
|
0
|
||||||
|
};
|
||||||
|
let anotherExtra = 0;
|
||||||
|
extra + a + b + anotherExtra
|
||||||
|
};
|
||||||
|
|
||||||
|
print_string (string_of_int (add 4 34));
|
||||||
|
|
||||||
|
let dummy _ => 10;
|
||||||
|
|
||||||
|
dummy res1;
|
||||||
|
|
||||||
|
dummy res2;
|
||||||
|
|
||||||
|
dummy res3;
|
||||||
|
|
||||||
|
/* Some edge cases */
|
||||||
|
let myFun firstArg (Red x | Black x | Green x) =>
|
||||||
|
firstArg + x;
|
||||||
|
|
||||||
|
let matchesWithWhen a =>
|
||||||
|
switch a {
|
||||||
|
| Red x when 1 > 0 => 10
|
||||||
|
| Red _ => 10
|
||||||
|
| Black x => 10
|
||||||
|
| Green x => 10
|
||||||
|
};
|
||||||
|
|
||||||
|
let matchesWithWhen =
|
||||||
|
fun
|
||||||
|
| Red x when 1 > 0 => 10
|
||||||
|
| Red _ => 10
|
||||||
|
| Black x => 10
|
||||||
|
| Green x => 10;
|
||||||
|
|
||||||
|
let matchesOne (`Red x) => 10;
|
||||||
|
|
||||||
|
/*
|
||||||
|
Typical OCaml would make you *wrap the functions in parens*! This is because it
|
||||||
|
can't tell if a semicolon is a sequence operator. Even if we had records use
|
||||||
|
commas to separate fields,
|
||||||
|
*/
|
||||||
|
type adders = {
|
||||||
|
addTwoNumbers: int => int => int,
|
||||||
|
addThreeNumbers: int => int => int => int,
|
||||||
|
addThreeNumbersTupled: (int, int, int) => int
|
||||||
|
};
|
||||||
|
|
||||||
|
let myRecordWithFunctions = {
|
||||||
|
addTwoNumbers: fun a b => a + b,
|
||||||
|
addThreeNumbers: fun a b c => a + b + c,
|
||||||
|
addThreeNumbersTupled: fun (a, b, c) =>
|
||||||
|
a + b + c
|
||||||
|
};
|
||||||
|
|
||||||
|
let result =
|
||||||
|
myRecordWithFunctions.addThreeNumbers 10 20 30;
|
||||||
|
|
||||||
|
let result =
|
||||||
|
myRecordWithFunctions.addThreeNumbersTupled (
|
||||||
|
10,
|
||||||
|
20,
|
||||||
|
30
|
||||||
|
);
|
||||||
|
|
||||||
|
let lookTuplesRequireParens = (1, 2);
|
||||||
|
|
||||||
|
/* let thisDoesntParse = 1, 2; */
|
||||||
|
let tupleInsideAParenSequence = {
|
||||||
|
print_string "look, a tuple inside a sequence";
|
||||||
|
let x = 10;
|
||||||
|
(x, x)
|
||||||
|
};
|
||||||
|
|
||||||
|
let tupleInsideALetSequence = {
|
||||||
|
print_string "look, a tuple inside a sequence";
|
||||||
|
let x = 10;
|
||||||
|
(x, x)
|
||||||
|
};
|
||||||
|
|
||||||
|
/* We *require* that function return types be wrapped in
|
||||||
|
parenthesis. In this example, there's no ambiguity */
|
||||||
|
let makeIncrementer (delta: int) :(int => int) =>
|
||||||
|
fun a => a + delta;
|
||||||
|
|
||||||
|
/* We could even force that consistency with let bindings - it's allowed
|
||||||
|
currently but not forced.
|
||||||
|
*/
|
||||||
|
let myAnnotatedValBinding: int = 10;
|
||||||
|
|
||||||
|
/* Class functions (constructors) and methods are unified in the same way */
|
||||||
|
class classWithNoArg = {
|
||||||
|
method x = 0;
|
||||||
|
method y = 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
/* This parses but doesn't type check
|
||||||
|
class myClass init => object
|
||||||
|
method x => init
|
||||||
|
method y => init
|
||||||
|
end;
|
||||||
|
*/
|
||||||
|
let myFunc (a: int) (b: int) :(int, int) => (
|
||||||
|
a,
|
||||||
|
b
|
||||||
|
);
|
||||||
|
|
||||||
|
let myFunc (a: int) (b: int) :list int => [1];
|
||||||
|
|
||||||
|
let myFunc (a: int) (b: int) :point => {
|
||||||
|
x: a,
|
||||||
|
y: b
|
||||||
|
};
|
||||||
|
|
||||||
|
let myFunc (a: int, b: int) :point => {
|
||||||
|
x: a,
|
||||||
|
y: b
|
||||||
|
};
|
||||||
|
|
||||||
|
type myThing = (int, int);
|
||||||
|
|
||||||
|
type stillARecord = {name: string, age: int};
|
||||||
|
|
||||||
|
/* Rebase latest OCaml to get the following: And fixup
|
||||||
|
`generalized_constructor_arguments` according to master. */
|
||||||
|
/* type ('a, 'b) myOtherThing = Leaf {first:'a, second: 'b} | Null; */
|
||||||
|
type branch 'a 'b = {first: 'a, second: 'b};
|
||||||
|
|
||||||
|
type myOtherThing 'a 'b =
|
||||||
|
| Leaf (branch 'a 'b)
|
||||||
|
| Null;
|
||||||
|
|
||||||
|
type yourThing = myOtherThing int int;
|
||||||
|
|
||||||
|
/* Conveniently - this parses exactly how you would intend! No *need* to wrap
|
||||||
|
in an extra [], but it doesn't hurt */
|
||||||
|
/* FIXME type lookAtThesePolyVariants = list [`Red] ; */
|
||||||
|
/* FIXME type bracketsGroupMultipleParamsAndPrecedence = list (list (list [`Red])); */
|
||||||
|
/* FIXME type youCanWrapExtraIfYouWant = (list [`Red]); */
|
||||||
|
/* FIXME type hereAreMultiplePolyVariants = list [`Red | `Black]; */
|
||||||
|
/* FIXME type hereAreMultiplePolyVariantsWithOptionalWrapping = list ([`Red | `Black]); */
|
||||||
|
/*
|
||||||
|
/* Proposal: ES6 style lambdas: */
|
||||||
|
|
||||||
|
/* Currying */
|
||||||
|
let lookES6Style = (`Red x) (`Black y) => { };
|
||||||
|
let lookES6Style (`Red x) (`Black y) => { };
|
||||||
|
|
||||||
|
/* Matching the single argument */
|
||||||
|
let lookES6Style = oneArg => match oneArg with
|
||||||
|
| `Red x => x
|
||||||
|
| `Black x => x;
|
||||||
|
|
||||||
|
/* The "trick" to currying that we already have is basically the same - we just
|
||||||
|
* have to reword it a bit:
|
||||||
|
* From:
|
||||||
|
* "Any time you see [let x = fun ...] just replace it with [let x ...]"
|
||||||
|
* To:
|
||||||
|
* "Any time you see [let x = ... => ] just replace it with [let x ... => ]"
|
||||||
|
*/
|
||||||
|
let lookES6Style oneArg => match oneArg with
|
||||||
|
| `Red x => x
|
||||||
|
| `Black x => x;
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
/** Current OCaml Named Arguments. Any aliasing is more than just aliasing!
|
||||||
|
OCaml allows full on pattern matching of named args. */
|
||||||
|
/*
|
||||||
|
A: let named ~a ~b = aa + bb in
|
||||||
|
B: let namedAlias ~a:aa ~b:bb = aa + bb in
|
||||||
|
C: let namedAnnot ~(a:int) ~(b:int) = a + b in
|
||||||
|
D: let namedAliasAnnot ~a:(aa:int) ~b:(bb:int) = aa + bb in
|
||||||
|
E: let optional ?a ?b = 10 in
|
||||||
|
F: let optionalAlias ?a:aa ?b:bb = 10 in
|
||||||
|
G: let optionalAnnot ?(a:int option) ?(b:int option) = 10 in
|
||||||
|
H: let optionalAliasAnnot ?a:(aa:int option) ?b:(bb:int option) = 10 in
|
||||||
|
/*
|
||||||
|
Look! When a default is provided, annotation causes inferred type of argument
|
||||||
|
to not be "option" since it's automatically destructured (because we know it
|
||||||
|
will always be available one way or another.)
|
||||||
|
*/
|
||||||
|
I: let defOptional ?(a=10) ?(b=10) = 10 in
|
||||||
|
J: let defOptionalAlias ?a:(aa=10) ?b:(bb=10) = 10 in
|
||||||
|
K: let defOptionalAnnot ?(a:int=10) ?(b:int=10) = 10 in
|
||||||
|
\ \
|
||||||
|
\label_let_pattern opt_default: no longer needed in SugarML
|
||||||
|
|
||||||
|
L: let defOptionalAliasAnnot ?a:(aa:int=10) ?b:(bb:int=10) = 10 in
|
||||||
|
\ \
|
||||||
|
\let_pattern: still a useful syntactic building block in SugarML
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* In Reason, the syntax for named args uses double colon, since
|
||||||
|
* the syntax for lists uses ES6 style [], freeing up the ::.
|
||||||
|
*/
|
||||||
|
let a = 10;
|
||||||
|
|
||||||
|
let b = 20;
|
||||||
|
|
||||||
|
/*A*/
|
||||||
|
let named a::a b::b => a + b;
|
||||||
|
|
||||||
|
type named = a::int => b::int => int;
|
||||||
|
|
||||||
|
/*B*/
|
||||||
|
let namedAlias a::aa b::bb => aa + bb;
|
||||||
|
|
||||||
|
let namedAlias a::aa b::bb => aa + bb;
|
||||||
|
|
||||||
|
type namedAlias = a::int => b::int => int;
|
||||||
|
|
||||||
|
/*C*/
|
||||||
|
let namedAnnot a::(a: int) b::(b: int) => 20;
|
||||||
|
|
||||||
|
/*D*/
|
||||||
|
let namedAliasAnnot a::(aa: int) b::(bb: int) => 20;
|
||||||
|
|
||||||
|
/*E*/
|
||||||
|
let myOptional a::a=? b::b=? () => 10;
|
||||||
|
|
||||||
|
type named = a::int? => b::int? => unit => int;
|
||||||
|
|
||||||
|
/*F*/
|
||||||
|
let optionalAlias a::aa=? b::bb=? () => 10;
|
||||||
|
|
||||||
|
/*G*/
|
||||||
|
let optionalAnnot a::(a: int)=? b::(b: int)=? () => 10;
|
||||||
|
|
||||||
|
/*H*/
|
||||||
|
let optionalAliasAnnot
|
||||||
|
a::(aa: int)=?
|
||||||
|
b::(bb: int)=?
|
||||||
|
() => 10;
|
||||||
|
|
||||||
|
/*I: */
|
||||||
|
let defOptional a::a=10 b::b=10 () => 10;
|
||||||
|
|
||||||
|
type named = a::int? => b::int? => unit => int;
|
||||||
|
|
||||||
|
/*J*/
|
||||||
|
let defOptionalAlias a::aa=10 b::bb=10 () => 10;
|
||||||
|
|
||||||
|
/*K*/
|
||||||
|
let defOptionalAnnot
|
||||||
|
a::(a: int)=10
|
||||||
|
b::(b: int)=10
|
||||||
|
() => 10;
|
||||||
|
|
||||||
|
/*L*/
|
||||||
|
let defOptionalAliasAnnot
|
||||||
|
a::(aa: int)=10
|
||||||
|
b::(bb: int)=10
|
||||||
|
() => 10;
|
||||||
|
|
||||||
|
/*M: Invoking them - Punned */
|
||||||
|
let resNotAnnotated = named a::a b::b;
|
||||||
|
|
||||||
|
/*N:*/
|
||||||
|
let resAnnotated: int = named a::a b::b;
|
||||||
|
|
||||||
|
/*O: Invoking them */
|
||||||
|
let resNotAnnotated = named a::a b::b;
|
||||||
|
|
||||||
|
/*P: Invoking them */
|
||||||
|
let resAnnotated: int = named a::a b::b;
|
||||||
|
|
||||||
|
/*Q: Here's why "punning" doesn't work! */
|
||||||
|
/* Is b:: punned with a final non-named arg, or is b:: supplied b as one named arg? */
|
||||||
|
let b = 20;
|
||||||
|
|
||||||
|
let resAnnotated = named a::a b::b;
|
||||||
|
|
||||||
|
/*R: Proof that there are no ambiguities with return values being annotated */
|
||||||
|
let resAnnotated: ty = named a::a b;
|
||||||
|
|
||||||
|
/*S: Explicitly passed optionals are a nice way to say "use the default value"*/
|
||||||
|
let explictlyPassed =
|
||||||
|
myOptional a::?None b::?None;
|
||||||
|
|
||||||
|
/*T: Annotating the return value of the entire function call */
|
||||||
|
let explictlyPassedAnnotated: int =
|
||||||
|
myOptional a::?None b::?None;
|
||||||
|
|
||||||
|
/*U: Explicitly passing optional with identifier expression */
|
||||||
|
let a = None;
|
||||||
|
|
||||||
|
let explictlyPassed = myOptional a::?a b::?None;
|
||||||
|
|
||||||
|
let explictlyPassedAnnotated: int =
|
||||||
|
myOptional a::?a b::?None;
|
||||||
|
|
||||||
|
let nestedLet = {
|
||||||
|
let _ = 1;
|
||||||
|
()
|
||||||
|
};
|
||||||
|
|
||||||
|
let nestedLet = {
|
||||||
|
let _ = 1;
|
||||||
|
()
|
||||||
|
};
|
||||||
|
|
||||||
|
let nestedLet = {
|
||||||
|
let _ = 1;
|
||||||
|
()
|
||||||
|
};
|
||||||
|
|
||||||
|
let nestedLet = {
|
||||||
|
let _ = 1;
|
||||||
|
2
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Showing many combinations of type annotations and named arguments.
|
||||||
|
*/
|
||||||
|
type typeWithNestedNamedArgs =
|
||||||
|
outerOne::(
|
||||||
|
innerOne::int => innerTwo::int => int
|
||||||
|
) =>
|
||||||
|
outerTwo::int =>
|
||||||
|
int;
|
||||||
|
|
||||||
|
type typeWithNestedOptionalNamedArgs =
|
||||||
|
outerOne::
|
||||||
|
(innerOne::int => innerTwo::int => int)? =>
|
||||||
|
outerTwo::int? =>
|
||||||
|
int;
|
||||||
|
|
||||||
|
type typeWithNestedOptionalNamedArgs =
|
||||||
|
outerOne::list string? => outerTwo::int? => int;
|
||||||
|
|
||||||
|
let x =
|
||||||
|
callSomeFunction
|
||||||
|
withArg::10 andOtherArg::wrappedArg;
|
||||||
|
|
||||||
|
let res = {
|
||||||
|
(constraintedSequenceItem: string);
|
||||||
|
(dontKnowWheYoudWantToActuallyDoThis: string)
|
||||||
|
};
|
||||||
|
|
||||||
|
let res = {
|
||||||
|
(
|
||||||
|
butTheyWillBePrintedWithAppropriateSpacing: string
|
||||||
|
);
|
||||||
|
(soAsToInstillBestDevelopmentPractices: string)
|
||||||
|
};
|
||||||
|
|
||||||
|
let x = [
|
||||||
|
(eachItemInListCanBeAnnotated: int),
|
||||||
|
(typeConstraints: float),
|
||||||
|
(
|
||||||
|
tupleConstraints: int,
|
||||||
|
andNotFunctionInvocations: int
|
||||||
|
)
|
||||||
|
];
|
||||||
|
|
||||||
|
let x = [
|
||||||
|
(butWeWillPrint: int),
|
||||||
|
(themAsSpaceSeparated: float),
|
||||||
|
(toInfluenceYour: int, developmentHabbits: int)
|
||||||
|
];
|
||||||
|
|
||||||
|
let newRecord = {
|
||||||
|
...(annotatedSpreadRecord: someRec),
|
||||||
|
x: y
|
||||||
|
};
|
||||||
|
|
||||||
|
let newRecord = {
|
||||||
|
...(annotatedSpreadRecord: someRec),
|
||||||
|
blah: 0,
|
||||||
|
foo: 1
|
||||||
|
};
|
||||||
|
|
||||||
|
let newRecord = {
|
||||||
|
...(
|
||||||
|
youCanEvenCallMethodsHereAndAnnotate them: someRec
|
||||||
|
),
|
||||||
|
blah: 0,
|
||||||
|
foo: 1
|
||||||
|
};
|
||||||
|
|
||||||
|
let newRecord = {
|
||||||
|
...(
|
||||||
|
youCanEvenCallMethodsHereAndAnnotate
|
||||||
|
them named::10: someRec
|
||||||
|
),
|
||||||
|
blah: 0,
|
||||||
|
foo: 1
|
||||||
|
};
|
||||||
|
|
||||||
|
let something: thing blah = aTypeAnnotation;
|
||||||
|
|
||||||
|
let something: thing blah = thisIsANamedArg;
|
||||||
|
|
||||||
|
let something: thing blah = aTypeAnnotation;
|
||||||
|
|
||||||
|
let something: blah = thisIsANamedArg thing;
|
||||||
|
|
||||||
|
let something: blah = typeAnnotation thing;
|
||||||
|
|
||||||
|
let newRecord = {
|
||||||
|
...(
|
||||||
|
heresAFunctionWithNamedArgs argOne::i: annotatedResult
|
||||||
|
),
|
||||||
|
soAsToInstill: 0,
|
||||||
|
developmentHabbits: 1
|
||||||
|
};
|
||||||
|
|
||||||
|
[@@@thisIsAThing];
|
||||||
|
|
||||||
|
let x = 10;
|
||||||
|
|
||||||
|
/* Ensure that the parenthesis are preserved here because they are
|
||||||
|
* important:
|
||||||
|
*/
|
||||||
|
let something =
|
||||||
|
fun
|
||||||
|
| None => (
|
||||||
|
fun
|
||||||
|
| [] => "emptyList"
|
||||||
|
| [_, ..._] => "nonEmptyList"
|
||||||
|
)
|
||||||
|
| Some _ => (
|
||||||
|
fun
|
||||||
|
| [] => "emptyList"
|
||||||
|
| [_, ..._] => "nonEmptyList"
|
||||||
|
);
|
||||||
|
|
||||||
|
/* A | B = X; */
|
||||||
|
let A | B = X;
|
||||||
|
|
||||||
|
/* A | (B | C) = X; */
|
||||||
|
let A | (B | C) = X;
|
||||||
|
|
||||||
|
/* (A | B) | (C | D) = X; */
|
||||||
|
let A | B | (C | D) = X;
|
||||||
|
|
||||||
|
/* A | B | (C | D) = X; */
|
||||||
|
let A | B | (C | D) = X;
|
||||||
|
|
||||||
|
/* (A | B) | C = X; */
|
||||||
|
let A | B | C = X;
|
||||||
|
|
||||||
|
/* A | B | C = X; */
|
||||||
|
let A | B | C = X;
|
||||||
|
|
||||||
|
|
||||||
|
/** External function declaration
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
external f : int => int = "foo";
|
||||||
|
|
||||||
|
let x = {contents: 0};
|
||||||
|
|
||||||
|
let unitVal = x.contents = 210;
|
||||||
75 samples/Ruby/filenames/Dangerfile Normal file
@@ -0,0 +1,75 @@
# Sometimes it's a README fix, or something like that - which isn't relevant for
# including in a CHANGELOG for example
# From https://github.com/danger/danger/blob/master/Dangerfile

has_app_changes = !git.modified_files.grep(/lib/).empty?
has_test_changes = !git.modified_files.grep(/spec/).empty?
is_version_bump = git.modified_files.sort == ["CHANGELOG.md", "lib/danger/version.rb"].sort

if has_app_changes && !has_test_changes && !is_version_bump
  warn("Tests were not updated", sticky: false)
end

# Thanks other people!
message(":tada:") if is_version_bump && github.pr_author != "orta"

# Make a note about contributors not in the organization
unless github.api.organization_member?('danger', github.pr_author)
  message "@#{github.pr_author} is not a contributor yet, would you like to join the Danger org?"

  # Pay extra attention if they modify the gemspec
  if git.modified_files.include?("*.gemspec")
    warn "External contributor has edited the Gemspec"
  end
end

# Mainly to encourage writing up some reasoning about the PR, rather than
# just leaving a title
if github.pr_body.length < 5
  fail "Please provide a summary in the Pull Request description"
end

# Let people say that this isn't worth a CHANGELOG entry in the PR if they choose
declared_trivial = (github.pr_title + github.pr_body).include?("#trivial") || !has_app_changes

if !git.modified_files.include?("CHANGELOG.md") && !declared_trivial
  fail("Please include a CHANGELOG entry. \nYou can find it at [CHANGELOG.md](https://github.com/danger/danger/blob/master/CHANGELOG.md).", sticky: false)
end

# Docs are critical, so let's re-run the docs part of the specs and show any issues:
core_plugins_docs = `bundle exec danger plugins lint lib/danger/danger_core/plugins/*.rb --warnings-as-errors`

# If it failed, fail the build, and include markdown with the output error.
unless $?.success?
  # We want to strip ANSI colors for our markdown, and make paths relative
  colourless_error = core_plugins_docs.gsub(/\e\[(\d+)(;\d+)*m/, "")
  markdown("### Core Docs Errors \n\n#{colourless_error}")
  fail("Failing due to documentation issues, see below.", sticky: false)
end

# Oddly enough, it's quite possible to do some testing of Danger, inside Danger
# So, you can ignore these, if you're looking at the Dangerfile to get ideas.
#
# If these are all empty something has gone wrong, better to raise it in a comment
if git.modified_files.empty? && git.added_files.empty? && git.deleted_files.empty?
  fail "This PR has no changes at all, this is likely an issue during development."
end

# This comes from `./danger_plugins/protect_files.rb` which is automatically parsed by Danger
files.protect_files(path: "danger.gemspec", message: ".gemspec modified", fail_build: false)

# Ensure that our core plugins all have 100% documentation
core_plugins = Dir.glob("lib/danger/danger_core/plugins/*.rb")
core_lint_output = `bundle exec yard stats #{core_plugins.join ' '} --list-undoc --tag tags`

if !core_lint_output.include?("100.00%")
  fail "The core plugins are not at 100% doc'd - see below:", sticky: false
  markdown "```\n#{core_lint_output}```"
elsif core_lint_output.include? "warning"
  warn "The core plugins have yard warnings - see below", sticky: false
  markdown "```\n#{core_lint_output}```"
end

junit.parse "junit-results.xml"
junit.headers = [:file, :name]
junit.report
2934 samples/SQL/zipcodes.uk.mysql Normal file
File diff suppressed because it is too large

339 samples/Text/filenames/LICENSE.mysql Normal file
@@ -0,0 +1,339 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991

Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.

We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.

The precise terms and conditions for copying, distribution and
modification follow.

GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.

1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.

You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.

2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

    a) You must cause the modified files to carry prominent notices
    stating that you changed the files and the date of any change.

    b) You must cause any work that you distribute or publish, that in
    whole or in part contains or is derived from the Program or any
    part thereof, to be licensed as a whole at no charge to all third
    parties under the terms of this License.

    c) If the modified program normally reads commands interactively
    when run, you must cause it, when started running for such
    interactive use in the most ordinary way, to print or display an
    announcement including an appropriate copyright notice and a
    notice that there is no warranty (or else, saying that you provide
    a warranty) and that users may redistribute the program under
    these conditions, and telling the user how to view a copy of this
    License. (Exception: if the Program itself is interactive but
    does not normally print such an announcement, your work based on
    the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.

In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:

    a) Accompany it with the complete corresponding machine-readable
    source code, which must be distributed under the terms of Sections
    1 and 2 above on a medium customarily used for software interchange; or,

    b) Accompany it with a written offer, valid for at least three
    years, to give any third party, for a charge no more than your
    cost of physically performing source distribution, a complete
    machine-readable copy of the corresponding source code, to be
    distributed under the terms of Sections 1 and 2 above on a medium
    customarily used for software interchange; or,

    c) Accompany it with the information you received as to the offer
    to distribute corresponding source code. (This alternative is
    allowed only for noncommercial distribution and only if you
    received the program in object code or executable form with such
    an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.

If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.

4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.

5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.

6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.

9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.

10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

NO WARRANTY

11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along
    with this program; if not, write to the Free Software Foundation, Inc.,
    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

    Gnomovision version 69, Copyright (C) year name of author
    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:

    Yoyodyne, Inc., hereby disclaims all copyright interest in the program
    `Gnomovision' (which makes passes at compilers) written by James Hacker.

    <signature of Ty Coon>, 1 April 1989
    Ty Coon, President of Vice

This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
24 samples/Text/filenames/README.mysql Normal file
@@ -0,0 +1,24 @@
README for users interested in using MySQL as a triplestore backend
===================================================================

The KiWi Triple Store used by Apache Marmotta supports different database
backends, including H2, PostgreSQL and MySQL. However, for legal reasons,
we are not allowed to distribute the MySQL connector library together with
the Apache Marmotta source code or binaries, as it is licensed under GPL
license.

Nonetheless, it is possible to use MySQL by downloading and installing the
connector manually:
1. download and unpack the MySQL Connector/J from
   http://dev.mysql.com/downloads/connector/j/
2. copy the mysql-connector-java-5.x.x.jar file to
   a. the library directory of the application server
      (e.g. $TOMCAT_HOME/lib)
   -- OR --
   b. the library directory of the Apache Marmotta Web application
      (e.g. $TOMCAT_HOME/webapps/marmotta/WEB-INF/lib)
3. restart the application server

Apache Marmotta will then automatically be able to use the MySQL connector
to connect to a MySQL database. Please note that Marmotta requires at least
MySQL 5.x, because it makes use of nested queries and foreign keys.

1297 samples/XCompose/filenames/XCompose Normal file
File diff suppressed because it is too large
42 samples/YAML/database.yml.mysql Normal file
@@ -0,0 +1,42 @@
#
# PRODUCTION
#
production:
  adapter: mysql2
  encoding: utf8mb4
  collation: utf8mb4_general_ci
  reconnect: false
  database: gitlabhq_production
  pool: 10
  username: git
  password: "secure password"
  # host: localhost
  # socket: /tmp/mysql.sock

#
# Development specific
#
development:
  adapter: mysql2
  encoding: utf8mb4
  collation: utf8mb4_general_ci
  reconnect: false
  database: gitlabhq_development
  pool: 5
  username: root
  password: "secure password"
  # socket: /tmp/mysql.sock

# Warning: The database defined as "test" will be erased and
# re-generated from your development database when you run "rake".
# Do not set this db to the same as development or production.
test: &test
  adapter: mysql2
  encoding: utf8mb4
  collation: utf8mb4_general_ci
  reconnect: false
  database: gitlabhq_test
  pool: 5
  username: root
  password:
  # socket: /tmp/mysql.sock
@@ -78,10 +78,13 @@ https = "https://#{parts[:host]}/#{parts[:user]}/#{parts[:repo]}"
 repo_new = "vendor/grammars/#{parts[:repo]}"
 repo_old = parse_submodule($replace) if $replace
 
+Dir.chdir(ROOT)
+
 if repo_old
   log "Deregistering: #{repo_old}"
   `git submodule deinit #{repo_old}`
   `git rm -rf #{repo_old}`
+  `script/convert-grammars`
 end
 
 log "Registering new submodule: #{repo_new}"
@@ -90,7 +93,12 @@ exit 1 if $?.exitstatus > 0
 `script/convert-grammars --add #{repo_new}`
 
 log "Confirming license"
-`script/licensed --module "#{repo_new}"`
+if repo_old
+  `script/licensed`
+else
+  `script/licensed --module "#{repo_new}"`
+end
 
 log "Updating grammar documentation in vendor/REAEDME.md"
-`script list-grammars`
+`bundle exec rake samples`
+`script/list-grammars`

@@ -1,5 +1,6 @@
 #!/usr/bin/env ruby
 
+require 'bundler/setup'
 require 'json'
 require 'net/http'
 require 'optparse'
@@ -173,6 +174,7 @@ end
 def load_grammars(tmp_dir, source, all_scopes)
   is_url = source.start_with?("http:", "https:")
   return [] if is_url && !$options[:remote]
+  return [] if !is_url && !File.exist?(source)
 
   p = if !is_url
     if File.directory?(source)

@@ -51,7 +51,7 @@ SUBMODULES.partition { |submodule| SLOW_SUBMODULES.include?(submodule) }.flatten
   submodules.push(submodule)
 end
 
-8.times do
+(ARGV.first || 8).to_i.times do
   Thread.new { run_thread(submodules, results) }
 end
 

@@ -1,19 +1,20 @@
 #!/usr/bin/env ruby
 
+require "bundler/setup"
 require "linguist"
 require "json"
 require "yaml"
 
 class GrammarList
 
   ROOT = File.expand_path "../../", __FILE__
 
   def initialize
     @submodules = load_submodules()
     @sources = load_sources()
     @language_names = load_languages()
   end
 
   # Load .gitmodules
   def load_submodules
     submodules = {}
@@ -29,14 +30,14 @@ class GrammarList
     end
     submodules
   end
 
   # Grab the name of each language, sorted case-insensitively
   def load_languages
     Linguist::Language.all.map(&:name).sort do |a, b|
       a.downcase() <=> b.downcase()
     end
   end
 
   # Load grammars.yml
   def load_sources
     sources = {}
@@ -46,7 +47,7 @@ class GrammarList
     end
     sources
   end
 
   # Shorten a repository URL
   def shorten(url)
     if url =~ /^https?:\/\/(?:www\.)?github\.com\/([^\/]+\/[^\/]+)/i
@@ -57,7 +58,7 @@ class GrammarList
       url.replace(/^https?:\/\/(?:www\.)?/i, "")
     end
   end
 
   # Markdown: Generate grammar list
   def to_markdown
     markdown = ""
@@ -70,7 +71,7 @@ class GrammarList
       when "https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz"
         short_url = "bitbucket:Clams/sublimesystemverilog"
         long_url = "https://bitbucket.org/Clams/sublimesystemverilog"
-      when "http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage"
+      when "https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage"
         short_url = "genshi.edgewall.org/query"
         long_url = "https://genshi.edgewall.org/query"
       when "vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage"
@@ -87,7 +88,7 @@ class GrammarList
 
     markdown
   end
 
   # Update the file displaying the reader-friendly list of grammar repos
   def update_readme
     readme = "#{ROOT}/vendor/README.md"

@@ -1,10 +1,13 @@
 #!/usr/bin/env ruby
 
+require "bundler/setup"
 require "json"
 require "linguist"
 require "set"
 require "yaml"
 
+ROOT = File.expand_path("../../", __FILE__)
+
 def find_includes(json)
   case json
   when Hash
@@ -32,7 +35,7 @@ def transitive_includes(scope, includes)
 end
 
 includes = {}
-Dir["grammars/*.json"].each do |path|
+Dir[File.join(ROOT, "grammars/*.json")].each do |path|
   scope = File.basename(path).sub(/\.json/, '')
   json = JSON.load(File.read(path))
   incs = find_includes(json)
@@ -41,7 +44,7 @@ Dir["grammars/*.json"].each do |path|
   includes[scope] += incs
 end
 
-yaml = YAML.load(File.read("grammars.yml"))
+yaml = YAML.load(File.read(File.join(ROOT, "grammars.yml")))
 language_scopes = Linguist::Language.all.map(&:tm_scope).to_set
 
 # The set of used scopes is the scopes for each language, plus all the scopes
@@ -54,4 +57,4 @@ puts "Unused grammar repos"
 puts unused.map { |repo, scopes| sprintf("%-100s %s", repo, scopes.join(", ")) }.sort.join("\n")
 
 yaml.delete_if { |k| unused.key?(k) }
-File.write("grammars.yml", YAML.dump(yaml))
+File.write(File.join(ROOT, "grammars.yml"), YAML.dump(yaml))

@@ -1,4 +1,6 @@
 #!/usr/bin/env ruby
+
+require 'bundler/setup'
 require 'yaml'
 require 'pry'
 
@@ -11,18 +13,18 @@ header = <<-EOF
 # ace_mode - A String name of the Ace Mode used for highlighting whenever
 # a file is edited. This must match one of the filenames in http://git.io/3XO_Cg.
 # Use "text" if a mode does not exist.
+# codemirror_mode - A String name of the CodeMirror Mode used for highlighting whenever a file is edited.
+# This must match a mode from https://git.io/vi9Fx
 # wrap - Boolean wrap to enable line wrapping (default: false)
 # extensions - An Array of associated extensions (the first one is
 # considered the primary extension, the others should be
 # listed alphabetically)
 # interpreters - An Array of associated interpreters
 # searchable - Boolean flag to enable searching (defaults to true)
-# search_term - Deprecated: Some languages may be indexed under a
-# different alias. Avoid defining new exceptions.
 # language_id - Integer used as a language-name-independent indexed field so that we can rename
 # languages in Linguist without reindexing all the code on GitHub. Must not be
 # changed for existing languages without the explicit permission of GitHub staff.
-# color - CSS hex color to represent the language.
+# color - CSS hex color to represent the language. Only used if type is "programming" or "prose".
 # tm_scope - The TextMate scope that represents this programming
 # language. This should match one of the scopes listed in
 # the grammars.yml file. Use "none" if there is no grammar
@@ -36,21 +38,23 @@ header = <<-EOF
 # Please keep this list alphabetized. Capitalization comes before lowercase.
 
 EOF
+require 'digest'
 
 generated = true if ARGV[0] == "--force"
 update = true if ARGV[0] == "--update"
 
+def generate_language_id(language)
+  Digest::SHA256.hexdigest(language).to_i(16) % (2**30 - 1)
+end
+
 if generated
   puts "You're regenerating all of the language_id attributes for all Linguist "
   puts "languages defined in languages.yml. This is almost certainly NOT what"
   puts "you meant to do!"
 
-  language_index = 0
-
   languages = YAML.load(File.read("lib/linguist/languages.yml"))
   languages.each do |name, vals|
-    vals.merge!('language_id' => language_index)
-    language_index += 1
+    vals.merge!('language_id' => generate_language_id(name))
   end
 
   File.write("lib/linguist/languages.yml", header + YAML.dump(languages))
@@ -58,20 +62,12 @@ elsif update
   puts "Adding new language_id attributes to languages.yml that don't have one set"
   languages = YAML.load(File.read("lib/linguist/languages.yml"))
 
-  # First grab the maximum language_id
-  language_ids = []
-  languages.each { |name, vals| language_ids << vals['language_id'] if vals.has_key?('language_id')}
-  max_language_id = language_ids.max
-  puts "Current maximum language_id is #{max_language_id}"
-
   missing_count = 0
-  language_index = max_language_id
 
   languages.each do |name, vals|
     unless vals.has_key?('language_id')
-      language_index += 1
       missing_count += 1
-      vals.merge!('language_id' => language_index)
+      vals.merge!('language_id' => generate_language_id(name))
     end
   end
 
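The hunks above introduce generate_language_id, which replaces sequential language IDs with an integer derived from a SHA-256 hash of the language name. As a minimal, self-contained sketch of that computation (assuming only Ruby's standard Digest library and an illustrative language name), it can be exercised like this:

require 'digest'

# Sketch of the helper shown in the hunk above: map a language name to a
# deterministic integer below 2**30 - 1, independent of list order.
def generate_language_id(language)
  Digest::SHA256.hexdigest(language).to_i(16) % (2**30 - 1)
end

puts generate_language_id("Ruby")  # always prints the same integer for "Ruby"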
29 test/fixtures/CMake/CMakeLists.txt vendored Normal file
@@ -0,0 +1,29 @@
cmake_minimum_required(VERSION 2.8 FATAL_ERROR)

project("To do list")

enable_testing()

if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR
    "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
    set(warnings "-Wall -Wextra -Werror")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
    set(warnings "/W4 /WX /EHsc")
endif()

set(optimize "-O2")

if (NOT CONFIGURED_ONCE)
    set(CMAKE_CXX_FLAGS "${warnings} ${optimize}"
        CACHE STRING "Flags used by the compiler during all build types." FORCE)
    set(CMAKE_C_FLAGS "${warnings} ${optimize}"
        CACHE STRING "Flags used by the compiler during all build types." FORCE)
endif()


add_executable(toDo main.cpp ToDo.cpp)

add_test(toDoTest toDo)

set(CONFIGURED_ONCE TRUE CACHE INTERNAL
    "A flag showing that CMake has configured at least once.")
200 test/fixtures/CoffeeScript/Cakefile vendored Executable file
@@ -0,0 +1,200 @@
http = require 'http'
https = require 'https'
fs = require 'fs'
path = require 'path'
{spawn, exec} = require 'child_process'
semver = require 'semver'
AdmZip = require('adm-zip')
GitHubApi = require 'github'

github = new GitHubApi(version: '3.0.0')

# ----------------
# Server / Builder
# ----------------

option '-P', '--production', 'run server in production mode'
option null, '--port [PORT]', 'listen on specified port (default 3333)'

LOCAL_BRUNCH = path.join('.', 'node_modules', '.bin', 'brunch')

spawnBrunch = (flags, env) ->
  if fs.existsSync(LOCAL_BRUNCH)
    brunch = spawn LOCAL_BRUNCH, flags, env
  else
    console.error 'Warning, using global brunch. Run `npm install`.'
    brunch = spawn 'brunch', flags, env

  brunch.stdout.on 'data', (data) -> console.log data.toString().trim()
  brunch.stderr.on 'data', (data) -> console.log data.toString().trim()

runBrunchWatch = (options, shouldStartServer) ->
  flags = ['w']
  flags.push '-s' if shouldStartServer

  if options.production?
    flags.push('-P')
    process.env.BRUNCH_ENV = 'production'

  if options.port?
    flags.push '-p'
    flags.push options.port

  spawnBrunch flags, process.env

task 'server', 'start the brunch server in development', (options) ->
  runBrunchWatch(options, true)

task 'watch', 'build the app continuously without a server', (options) ->
  runBrunchWatch(options, false)

task 'build', 'build for production', ->
  process.env.BRUNCH_ENV = 'production'
  spawnBrunch ['b', '-P'], process.env

task 'test', 'run brunch in the test environment', ->
  flags = ['w', '-s']
  process.env.BRUNCH_ENV = 'test'
  spawnBrunch flags, process.env

# -------------
# Tapas Updates
# -------------
updateMessage = 'update Tapas to latest (Cakefile, package.json, portkey.json,
  config.coffee, generators/*)'
task 'tapas:update', updateMessage, (options) ->
  url = 'https://codeload.github.com/mutewinter/tapas-with-ember/zip/master'
  filesToUpdate = [
    'Cakefile'
    'package.json'
    'portkey.json'
    'config.coffee'
    'generators/'
    'testem.json'
    'bower.json'
  ]
  https.get url, (res) ->
    data = []
    dataLen = 0

    res.on('data', (chunk) ->
      data.push(chunk)
      dataLen += chunk.length
    ).on('end', ->
      buf = new Buffer(dataLen)

      pos = 0
      for dataItem in data
        dataItem.copy(buf, pos)
        pos += dataItem.length

      zip = new AdmZip(buf)

      filesToUpdate.forEach (file) ->
        targetFile = "tapas-with-ember-master/#{file}"
        if /\/$/.test(file)
          zip.extractEntryTo(targetFile, file, false, true)
        else
          zip.extractEntryTo(targetFile, '', false, true)
    )

# --------------
# Script Updates
# --------------

EMBER_BASE_URL = 'http://builds.emberjs.com'
GITHUB_API_URL = 'https://api.github.com'
EMBER = {}
EMBER_DATA = {}
['release', 'beta', 'canary'].forEach (build) ->
  EMBER[build] =
    prod: "#{EMBER_BASE_URL}/#{build}/ember.prod.js"
    dev: "#{EMBER_BASE_URL}/#{build}/ember.js"
  EMBER_DATA[build] =
    prod: "#{EMBER_BASE_URL}/#{build}/ember-data.prod.js"
    dev: "#{EMBER_BASE_URL}/#{build}/ember-data.js"

EMBER['tag'] =
  prod: "#{EMBER_BASE_URL}/tags/{{tag}}/ember.prod.js"
  dev: "#{EMBER_BASE_URL}/tags/{{tag}}/ember.js"

EMBER_DATA['tag'] =
  prod: "#{EMBER_BASE_URL}/tags/{{tag}}/ember-data.prod.js"
  dev: "#{EMBER_BASE_URL}/tags/{{tag}}/ember-data.js"

downloadFile = (src, dest) ->
  console.log('Downloading ' + src + ' to ' + dest)
  data = ''
  request = http.get src, (response) ->
    response.on('data', (chunk) ->
      data += chunk
    )
    response.on('end', ->
      fs.writeFileSync(dest, data)
    )

downloadEmberFile = (src, dest) ->
  downloadFile(src, "vendor/ember/#{dest}")

listTags = (user, repo, since, name, command) ->
  github.repos.getTags(user: user, repo: repo, (resp, tags) ->
    for tag in tags
      if semver.valid(tag.name) and !semver.lt(tag.name, since)
        firstTag = tag.name unless firstTag
        console.log " #{tag.name}"
    console.log "Install with cake -t \"#{firstTag}\" #{command}"
  )

installEmberFiles = (project, filename, options) ->
  if 'tag' of options
    # Download a Tag
    tag = options.tag
    tag = "v#{tag}" unless /^v/.test(tag)
    downloadEmberFile(project['tag'].dev.replace(/{{tag}}/, tag),
      "development/#{filename}")
    downloadEmberFile(project['tag'].prod.replace(/{{tag}}/, tag),
      "production/#{filename}")
  else
    # Download a Channel
    channel = options.channel ? 'release'
    downloadEmberFile project[channel].dev, "development/#{filename}"
    downloadEmberFile project[channel].prod, "production/#{filename}"

# Channel
option '-c', '--channel "[CHANNEL_NAME]"',
  'relase, beta, or canary (http://emberjs.com/builds)'

# Tag
option '-t', '--tag "[TAG_NAME]"',
  'a tagged release to install. Run cake ember:list to see known tags'

# -----
# Ember
# -----
task 'ember:install', 'install latest Ember', (options) ->
  installEmberFiles(EMBER, 'ember.js', options)

task 'ember:list', 'list tagged relases of Ember since v1.0.0', (options) ->
  listTags 'emberjs', 'ember.js', 'v1.0.0', 'Ember', 'ember:install'

# ----------
# Ember Data
# ----------
task 'ember-data:install', 'install latest Ember Data', (options) ->
  options.channel or= 'beta'
  installEmberFiles(EMBER_DATA, 'ember-data.js', options)

task 'ember-data:list', 'list tagged relases of Ember Data', (options) ->
  listTags 'emberjs', 'data', 'v0.0.1', 'Ember Data',
    'ember-data:install'

# -----------
# Ember Model
# -----------
EMBER_MODEL =
  dev: 'http://builds.erikbryn.com/ember-model/ember-model-latest.js'
  prod: 'http://builds.erikbryn.com/ember-model/ember-model-latest.prod.js'

task 'ember-model:install', 'install latest Ember Model', (options) ->
  downloadEmberFile EMBER_MODEL.dev, 'development/ember-model.js'
  downloadEmberFile EMBER_MODEL.prod, 'production/ember-model.js'
9 test/fixtures/Data/yarn.lock vendored Normal file
@@ -0,0 +1,9 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
abab@^1.0.0:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.3.tgz#b81de5f7274ec4e756d797cd834f303642724e5d"

abbrev@1, abbrev@1.0.x:
  version "1.0.9"
  resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135"
97 test/fixtures/Dockerfile/Dockerfile vendored Executable file
@@ -0,0 +1,97 @@
FROM ubuntu:14.04

MAINTAINER Wesley Hales <wesleyhales@gmail.com>

# Install.
RUN \
  sed -i 's/# \(.*multiverse$\)/\1/g' /etc/apt/sources.list && \
  apt-get update && \
  apt-get -y upgrade && \
  apt-get install -y build-essential && \
  apt-get install -y software-properties-common && \
  apt-get install -y byobu curl git htop man unzip vim wget && \
  rm -rf /var/lib/apt/lists/*

# Set environment variables.
ENV HOME /root

# Define working directory.
WORKDIR /root

# Install Java.
RUN \
  echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \
  add-apt-repository -y ppa:webupd8team/java && \
  apt-get update && \
  apt-get install -y oracle-java7-installer && \
  rm -rf /var/lib/apt/lists/* \
  echo "done"

# Install Phantom2 build requirements (Won't build on systems < 2GB ram)
RUN \
  sudo apt-get update && apt-get -y install g++ flex bison gperf ruby perl \
  libsqlite3-dev libfontconfig1-dev libicu-dev libfreetype6 libssl-dev libjpeg-dev libqt5webkit5-dev

#####################################build latest phantom
######################################+++++ only do this in dev when needed

#RUN rm -rf phantomjs

#RUN git clone git://github.com/ariya/phantomjs.git

#RUN cd /root/phantomjs/ && ./build.sh --confirm

#RUN ln -s /root/phantomjs/bin/phantomjs /usr/bin/phantomjs
######################################+++++ END only do this in dev when needed

######################################+++++ comment out when building new version of phantomjs
ADD phantomjs /root/phantomjs

RUN ln -s /root/phantomjs /usr/bin/phantomjs
######################################+++++ END comment out when building new version of phantomjs

RUN git clone git://github.com/wesleyhales/speedgun.git

#RUN mkdir /root/speedgun/core/reports

#VOLUME ["/root/speedgun/core/reports"]

RUN cd speedgun/core && phantomjs --ssl-protocol=any --ignore-ssl-errors=yes speedgun.js http://www.google.com performance csv

RUN cd /root && wget https://dl.dropboxusercontent.com/u/12278845/server.tar

RUN cd /root && tar -xvf server.tar

#RUN echo "cd /root/jboss-as-7.1.1.Final-fluxui/ && ./bin/standalone.sh --server-config=standalone-full.xml -b 0.0.0.0" >> /root/.bashrc

# install maven
RUN sudo apt-get update && apt-get install -y maven

ADD src /root/src
ADD pom.xml /root/pom.xml
RUN mvn clean install

#RUN cp -rf /root/target/speedgun.war /root/jboss-as-7.1.1.Final-fluxui/standalone/deployments/

RUN ln -s /root/target/speedgun /root/jboss-as-7.1.1.Final-fluxui/standalone/deployments/speedgun.war

RUN touch /root/jboss-as-7.1.1.Final-fluxui/standalone/deployments/speedgun.war.dodeploy

# Cleanup old JMS queue
RUN rm -rf /root/jboss-as-7.1.1.Final-fluxui/standalone/tmp/ /root/jboss-as-7.1.1.Final-fluxui/standalone/data/*

RUN mkdir /root/jboss-as-7.1.1.Final-fluxui/speedgun
RUN cd /root/jboss-as-7.1.1.Final-fluxui/speedgun && curl -O https://raw.githubusercontent.com/wesleyhales/speedgun/master/core/speedgun.js
RUN cd /root/jboss-as-7.1.1.Final-fluxui/speedgun && curl -O https://raw.githubusercontent.com/wesleyhales/speedgun/master/core/config.json

COPY server-entrypoint.sh /

ENTRYPOINT ["/server-entrypoint.sh"]

RUN apt-get install -y postgresql-client

COPY speedgun.sql /

EXPOSE 3306 8080 8443

#CMD ["postgres"]
5 test/fixtures/Makefile/Makefile vendored Executable file
@@ -0,0 +1,5 @@
SUBDIRS:=components test
.PHONY: ${SUBDIRS} clean
all:${SUBDIRS}
${SUBDIRS}:
	${MAKE} -C $@ all
57 test/fixtures/Maven POM/pom.xml vendored Normal file
@@ -0,0 +1,57 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>awilbur.personal</groupId>
    <artifactId>hudsel</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>hudsel</name>
    <properties>
        <suiteXmlFile>src/test/resources/testng.xml</suiteXmlFile>
        <skipTests>false</skipTests>
    </properties>
    <dependencies>
        <!-- Adding TestNG for unit test support -->
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
            <version>6.8</version>
        </dependency>
        <!-- Adding Selenium dependency -->
        <dependency>
            <groupId>org.seleniumhq.selenium</groupId>
            <artifactId>selenium-server</artifactId>
            <version>2.41.0</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <!-- using the compiler plug-in to specify that this project is to be compiled with JDK 1.6 -->
            <!-- This is needed so that we get the JDK annotation support that was introduced recently -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.6</source>
                    <target>1.6</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.6</version>
                <configuration>
                    <!-- You can specify a specific testng.xml file here <suiteXmlFiles>
                        <suiteXmlFile>src/test/resources/testng-sample.xml</suiteXmlFile> </suiteXmlFiles> -->
                    <!-- Or dynamically with something like '-DsuiteXmlFile=src/test/resources/testng-sample.xml' -->
                    <suiteXmlFiles>
                        <suiteXmlFile>${suiteXmlFile}</suiteXmlFile>
                    </suiteXmlFiles>
                    <!-- Build with '-DskipTests=true' to bypass test execution @ build time Default: false -->
                    <skipTests>${skipTests}</skipTests>
                    <testFailureIgnore>true</testFailureIgnore>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
Some files were not shown because too many files have changed in this diff.