Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.

Compare commits (298 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 7133c3b11a | |
| | d72114083b | |
| | 683925fcd5 | |
| | 1c8bf32d35 | |
| | 28913833f4 | |
| | c6752be546 | |
| | 395e474cad | |
| | 16ea189aa6 | |
| | ac81fc5da9 | |
| | 58191c10b3 | |
| | c2ca23d580 | |
| | b5dfb40c7d | |
| | 7e647fd915 | |
| | 5b9f9bc0e6 | |
| | b1c057fa30 | |
| | ca348dd373 | |
| | b802045c5c | |
| | 3c244a9501 | |
| | 832a7b9b06 | |
| | e7d856345f | |
| | ac559effaa | |
| | 95d0daba80 | |
| | a0ad1523a1 | |
| | 06c049b8c0 | |
| | 24c7380765 | |
| | 73ef1bf156 | |
| | c8b30a62f9 | |
| | 48dfdd2dfe | |
| | 68727f724a | |
| | f46e053633 | |
| | d2e739ba8c | |
| | 37174e1d2c | |
| | cdb5206def | |
| | d636eaf1e3 | |
| | 49f3eb1286 | |
| | 8ab94a8643 | |
| | f72c337c5b | |
| | d22321de07 | |
| | 473e5db51f | |
| | 8b9fc4683a | |
| | 3b4415cc3c | |
| | 2afce1754a | |
| | f232b93214 | |
| | db64f192fa | |
| | ca96ecdc55 | |
| | 2a06d1aa19 | |
| | b2fa2a1f46 | |
| | 6839516b5c | |
| | 7247c80fac | |
| | 01d05d1d4e | |
| | 8db4cc482e | |
| | 074f17ed98 | |
| | 68b553ea55 | |
| | fd5da9cb15 | |
| | d081f687b0 | |
| | 34abe5b983 | |
| | 5765f1faf1 | |
| | 550b67215c | |
| | d7b2826113 | |
| | b4f94c7c25 | |
| | d24677adbb | |
| | 04f981eeac | |
| | b53d84d8ff | |
| | fea8bb21a0 | |
| | 996ed8a8b1 | |
| | daef164163 | |
| | 61a5cab1f2 | |
| | 2fddaaf3d7 | |
| | 741d246581 | |
| | 1b23e81541 | |
| | 362d300cb0 | |
| | 932726863f | |
| | a35a3e98ce | |
| | 7c404e72d2 | |
| | 8736e2305d | |
| | a13664698f | |
| | 6e934067a8 | |
| | 3040642f97 | |
| | dd7e44f957 | |
| | 6b56a243e7 | |
| | 5d08605aef | |
| | bf705cbaf2 | |
| | fe827896e0 | |
| | 317219e479 | |
| | 64e7df7596 | |
| | b3ff84872b | |
| | 5d176a781c | |
| | e0c97f97ba | |
| | 83f358976e | |
| | 9ee6153891 | |
| | 825e640061 | |
| | e5ae213839 | |
| | 74e034c689 | |
| | a55a60a161 | |
| | 9d865ec018 | |
| | 8b01e3dead | |
| | 9e3cc01715 | |
| | 0f204767a9 | |
| | 11e3251efd | |
| | 1f1416a5f7 | |
| | b3786f3825 | |
| | 09c2eee91e | |
| | dc78b14902 | |
| | 500ce0959a | |
| | aa0c9e3572 | |
| | e6de75d48a | |
| | a5ad0a34f8 | |
| | 6e609cc4e3 | |
| | 27727a927f | |
| | 5ff580df0a | |
| | b34acac722 | |
| | 37840856ed | |
| | acfad4371f | |
| | ae42cc0307 | |
| | d06860df37 | |
| | 7d5d162f6b | |
| | d39a75b68b | |
| | d72f7311f6 | |
| | 3f81b7c179 | |
| | 33d1255a5a | |
| | 0ebea6a0ff | |
| | c28da3a4a9 | |
| | 538f479b60 | |
| | 88cc73fa49 | |
| | d6d368a65d | |
| | c5be9cc3e9 | |
| | c462c2bd31 | |
| | 3426165621 | |
| | 98b99e38bb | |
| | d8e3bec499 | |
| | 7c759d4d29 | |
| | 41d438b47e | |
| | 41911d6921 | |
| | dca18d77cb | |
| | 040af5dad2 | |
| | 01bb6c37ab | |
| | c624d68628 | |
| | 4867c49bd9 | |
| | a354eddf4b | |
| | 9b78c533a5 | |
| | 090ea576b9 | |
| | 6a2d33a4b3 | |
| | b54a9c7412 | |
| | 2c62da7834 | |
| | 0145a0adb2 | |
| | 473282d64c | |
| | c2c068e9db | |
| | 13d1f662d1 | |
| | bdd57f58a0 | |
| | b1bcabd6e6 | |
| | e128c3fa82 | |
| | efac9fe750 | |
| | 2b8545a8fa | |
| | b275b5d728 | |
| | 1f46cfafa7 | |
| | b1dcdf3418 | |
| | 4bfd65deb8 | |
| | 213cf322f5 | |
| | 61102812a0 | |
| | 580cfce7fb | |
| | f1383d7a45 | |
| | e4ce5bfe39 | |
| | 6ed64f25a2 | |
| | 114a331106 | |
| | 9aa24a216a | |
| | 13702451ab | |
| | f0242f6f97 | |
| | 9775820398 | |
| | 7daf26bcd0 | |
| | 231f705098 | |
| | 893ab8fd8d | |
| | 5afdd2c533 | |
| | e4f5c0066a | |
| | a167f852dd | |
| | b428bce126 | |
| | e62d0e19a5 | |
| | 9b8bf9068f | |
| | 6e05edc350 | |
| | dd8eaf2893 | |
| | ecc750f445 | |
| | cf5268a7d4 | |
| | e24efad5ff | |
| | 58a34cdb7d | |
| | b1c6b330e9 | |
| | 7c3e265033 | |
| | 13695a716c | |
| | c9e43804d6 | |
| | 1535e3553e | |
| | 0ac05bbbeb | |
| | d3f979d640 | |
| | 0e9ded45dc | |
| | a2ca886510 | |
| | 25a1af3775 | |
| | 0d8e0a2970 | |
| | c0fff6c8a8 | |
| | e6b4428614 | |
| | 4e6e69833d | |
| | 1d9faff4c6 | |
| | 7025cbe760 | |
| | e922b7c2ca | |
| | 96518d2d0f | |
| | 1241b20ba1 | |
| | f03f5c1628 | |
| | cb550a3662 | |
| | d1f90d61c5 | |
| | 16e65fe189 | |
| | 62a0faa729 | |
| | fbb3ab2292 | |
| | b3b75e5ef8 | |
| | 8b36210db5 | |
| | a74f3b3e46 | |
| | e214a52de5 | |
| | 0624a9395c | |
| | b2e7f7ffa6 | |
| | b312b39a10 | |
| | 80e2d112b2 | |
| | 519b169df0 | |
| | 5c2cfbc334 | |
| | 7d91e4959a | |
| | 0c5aa2a7eb | |
| | 0d7a264981 | |
| | 52ff2d2e74 | |
| | 8a7ceaa845 | |
| | fd9ce2d1cf | |
| | 2c2b37bec3 | |
| | c777f2d388 | |
| | eca10056a8 | |
| | c7bab11ebe | |
| | 6995fc28b6 | |
| | 102f14d0e9 | |
| | aac168402b | |
| | 152d49513f | |
| | d5564c808d | |
| | 82410e07b2 | |
| | 94d90b30b5 | |
| | 06997f0da2 | |
| | 55aafa416d | |
| | 6226a46988 | |
| | 8d216f0c43 | |
| | 7f5bb25542 | |
| | 5fcdf6adc2 | |
| | 6a565a849b | |
| | 66fc67e34c | |
| | 7cf140940e | |
| | 60e90bab23 | |
| | 4f58258186 | |
| | 03e2904ebf | |
| | bea90b256e | |
| | 8eb37ba956 | |
| | 8d20c1fb59 | |
| | 9a1abf0c49 | |
| | 5aae7a4000 | |
| | d9509a1750 | |
| | 978c448fb8 | |
| | 997c0fca10 | |
| | 3ae6e68492 | |
| | 851c93a1f7 | |
| | a5f7355e16 | |
| | 18ffdbaa65 | |
| | c089222bc6 | |
| | 37f9535d27 | |
| | 4650368bc2 | |
| | 88b14ed455 | |
| | 54a2a47bc0 | |
| | ffcc970140 | |
| | 7a811e39e0 | |
| | 11f158cbb3 | |
| | 5d5550c48b | |
| | fd570d906a | |
| | deab0662f9 | |
| | 7238f50a6b | |
| | 499fcd1f3f | |
| | dc0ddc82d6 | |
| | 436fc34cb9 | |
| | f072cd96e3 | |
| | 3441a001c7 | |
| | bc747844ea | |
| | a887f58bcc | |
| | f42afef6e0 | |
| | 18eaf22cb9 | |
| | d94f427e12 | |
| | b94eb42db6 | |
| | d2297f5516 | |
| | ef6f58b828 | |
| | eb0bf16cce | |
| | 5a646384f6 | |
| | 8917f1a91a | |
| | bc8d65e7d3 | |
| | 3180c5d554 | |
| | 03369b8a6c | |
| | 3b2ddb1a18 | |
| | 1e20b12241 | |
| | 81c41df15c | |
| | 8b736189e0 | |
| | 188d2367df | |
| | 5aeac500da | |
| | 5730ab28ab | |
| | 1c56b03a28 | |

.gitmodules (vendored): 50 changes

@@ -85,6 +85,9 @@
|
||||
[submodule "vendor/grammars/language-shellscript"]
|
||||
path = vendor/grammars/language-shellscript
|
||||
url = https://github.com/atom/language-shellscript
|
||||
[submodule "vendor/grammars/language-supercollider"]
|
||||
path = vendor/grammars/language-supercollider
|
||||
url = https://github.com/supercollider/language-supercollider
|
||||
[submodule "vendor/grammars/language-yaml"]
|
||||
path = vendor/grammars/language-yaml
|
||||
url = https://github.com/atom/language-yaml
|
||||
@@ -169,9 +172,6 @@
|
||||
[submodule "vendor/grammars/sublime-idris"]
|
||||
path = vendor/grammars/sublime-idris
|
||||
url = https://github.com/laughedelic/sublime-idris
|
||||
[submodule "vendor/grammars/sublime-better-typescript"]
|
||||
path = vendor/grammars/sublime-better-typescript
|
||||
url = https://github.com/lavrton/sublime-better-typescript
|
||||
[submodule "vendor/grammars/moonscript-tmbundle"]
|
||||
path = vendor/grammars/moonscript-tmbundle
|
||||
url = https://github.com/leafo/moonscript-tmbundle
|
||||
@@ -256,9 +256,6 @@
|
||||
[submodule "vendor/grammars/SublimeXtend"]
|
||||
path = vendor/grammars/SublimeXtend
|
||||
url = https://github.com/staltz/SublimeXtend
|
||||
[submodule "vendor/grammars/Stata.tmbundle"]
|
||||
path = vendor/grammars/Stata.tmbundle
|
||||
url = https://github.com/statatmbundle/Stata.tmbundle
|
||||
[submodule "vendor/grammars/Vala-TMBundle"]
|
||||
path = vendor/grammars/Vala-TMBundle
|
||||
url = https://github.com/technosophos/Vala-TMBundle
|
||||
@@ -397,9 +394,6 @@
|
||||
[submodule "vendor/grammars/processing.tmbundle"]
|
||||
path = vendor/grammars/processing.tmbundle
|
||||
url = https://github.com/textmate/processing.tmbundle
|
||||
[submodule "vendor/grammars/prolog.tmbundle"]
|
||||
path = vendor/grammars/prolog.tmbundle
|
||||
url = https://github.com/textmate/prolog.tmbundle
|
||||
[submodule "vendor/grammars/python-django.tmbundle"]
|
||||
path = vendor/grammars/python-django.tmbundle
|
||||
url = https://github.com/textmate/python-django.tmbundle
|
||||
@@ -655,7 +649,7 @@
|
||||
url = https://github.com/SRI-CSL/SMT.tmbundle.git
|
||||
[submodule "vendor/grammars/language-crystal"]
|
||||
path = vendor/grammars/language-crystal
|
||||
url = https://github.com/k2b6s9j/language-crystal
|
||||
url = https://github.com/atom-crystal/language-crystal
|
||||
[submodule "vendor/grammars/language-xbase"]
|
||||
path = vendor/grammars/language-xbase
|
||||
url = https://github.com/hernad/atom-language-harbour
|
||||
@@ -668,3 +662,39 @@
|
||||
[submodule "vendor/grammars/vue-syntax-highlight"]
|
||||
path = vendor/grammars/vue-syntax-highlight
|
||||
url = https://github.com/vuejs/vue-syntax-highlight
|
||||
[submodule "vendor/grammars/st2-zonefile"]
|
||||
path = vendor/grammars/st2-zonefile
|
||||
url = https://github.com/sixty4k/st2-zonefile
|
||||
[submodule "vendor/grammars/sublimeprolog"]
|
||||
path = vendor/grammars/sublimeprolog
|
||||
url = https://github.com/alnkpa/sublimeprolog
|
||||
[submodule "vendor/grammars/sublime-aspectj"]
|
||||
path = vendor/grammars/sublime-aspectj
|
||||
url = https://github.com/pchaigno/sublime-aspectj
|
||||
[submodule "vendor/grammars/sublime-typescript"]
|
||||
path = vendor/grammars/sublime-typescript
|
||||
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
|
||||
[submodule "vendor/grammars/sublime-pony"]
|
||||
path = vendor/grammars/sublime-pony
|
||||
url = https://github.com/CausalityLtd/sublime-pony
|
||||
[submodule "vendor/grammars/X10"]
|
||||
path = vendor/grammars/X10
|
||||
url = git@github.com:x10-lang/x10-highlighting.git
|
||||
[submodule "vendor/grammars/language-babel"]
|
||||
path = vendor/grammars/language-babel
|
||||
url = https://github.com/gandm/language-babel
|
||||
[submodule "vendor/grammars/UrWeb-Language-Definition"]
|
||||
path = vendor/grammars/UrWeb-Language-Definition
|
||||
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
|
||||
[submodule "vendor/grammars/Stata.tmbundle"]
|
||||
path = vendor/grammars/Stata.tmbundle
|
||||
url = https://github.com/pschumm/Stata.tmbundle
|
||||
[submodule "vendor/grammars/FreeMarker.tmbundle"]
|
||||
path = vendor/grammars/FreeMarker.tmbundle
|
||||
url = https://github.com/freemarker/FreeMarker.tmbundle
|
||||
[submodule "vendor/grammars/language-click"]
|
||||
path = vendor/grammars/language-click
|
||||
url = https://github.com/stenverbois/language-click.git
|
||||
[submodule "vendor/grammars/language-maxscript"]
|
||||
path = vendor/grammars/language-maxscript
|
||||
url = https://github.com/Alhadis/language-maxscript
|
||||
|
||||
@@ -1,6 +1,12 @@
language: ruby
sudo: false
addons:
  apt:
    packages:
    - libicu-dev
    - libicu48
before_install: script/travis/before_install
rvm:
  - 1.9.3
  - 2.0.0
  - 2.1
  - 2.2

@@ -12,7 +12,7 @@ We try only to add new extensions once they have some usage on GitHub. In most c

To add support for a new extension:

0. Add your extension to the language entry in [`languages.yml`][languages].
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

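For readers who have not opened `languages.yml` before, the sketch below shows roughly where a new extension lands in an existing entry; the `Foo` language and its extensions are placeholders, not real entries, and the field layout follows the `languages.yml` header comments quoted later in this diff.

```yaml
# Hypothetical fragment of a languages.yml entry: ".foogen" is the extension
# being added, placed in alphabetical order after the primary extension.
Foo:
  extensions:
  - .foo      # primary extension stays first
  - .foobar
  - .foogen   # newly added extension
```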
README.md: 10 changes

@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md



The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

## Overrides
@@ -33,9 +33,9 @@ $ cat .gitattributes
*.rb linguist-language=Java
```

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.

```
$ cat .gitattributes

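Putting the overrides described above together, a `.gitattributes` along the lines of the following sketch is what the README is pointing at; the paths are purely illustrative, and the last line depends on the `linguist-generated` attribute added to `GIT_ATTR` near the end of this diff.

```
$ cat .gitattributes
*.rb linguist-language=Java
vendor/jquery/* linguist-vendored
docs/* linguist-documentation
db/schema.rb linguist-generated=true
```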

bin/git-linguist (new executable file): 137 changes

@@ -0,0 +1,137 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'optparse'
|
||||
require 'json'
|
||||
require 'tmpdir'
|
||||
require 'zlib'
|
||||
|
||||
class GitLinguist
|
||||
def initialize(path, commit_oid, incremental = true)
|
||||
@repo_path = path
|
||||
@commit_oid = commit_oid
|
||||
@incremental = incremental
|
||||
end
|
||||
|
||||
def linguist
|
||||
if @commit_oid.nil?
|
||||
raise "git-linguist must be called with a specific commit OID to perform language computation"
|
||||
end
|
||||
repo = Linguist::Repository.new(rugged, @commit_oid)
|
||||
|
||||
if @incremental && stats = load_language_stats
|
||||
old_commit_oid, old_stats = stats
|
||||
|
||||
# A cache with NULL oid means that we want to froze
|
||||
# these language stats in place and stop computing
|
||||
# them (for performance reasons)
|
||||
return old_stats if old_commit_oid == NULL_OID
|
||||
repo.load_existing_stats(old_commit_oid, old_stats)
|
||||
end
|
||||
|
||||
result = yield repo
|
||||
|
||||
save_language_stats(@commit_oid, repo.cache)
|
||||
result
|
||||
end
|
||||
|
||||
def load_language_stats
|
||||
version, oid, stats = load_cache
|
||||
if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
|
||||
[oid, stats]
|
||||
end
|
||||
end
|
||||
|
||||
def save_language_stats(oid, stats)
|
||||
cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
|
||||
write_cache(cache)
|
||||
end
|
||||
|
||||
def clear_language_stats
|
||||
File.unlink(cache_file)
|
||||
end
|
||||
|
||||
def disable_language_stats
|
||||
save_language_stats(NULL_OID, {})
|
||||
end
|
||||
|
||||
protected
|
||||
NULL_OID = ("0" * 40).freeze
|
||||
|
||||
LANGUAGE_STATS_CACHE = 'language-stats.cache'
|
||||
LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"
|
||||
|
||||
def rugged
|
||||
@rugged ||= Rugged::Repository.bare(@repo_path)
|
||||
end
|
||||
|
||||
def cache_file
|
||||
File.join(@repo_path, LANGUAGE_STATS_CACHE)
|
||||
end
|
||||
|
||||
def write_cache(object)
|
||||
return unless File.directory? @repo_path
|
||||
|
||||
begin
|
||||
tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
|
||||
File.open(tmp_path, "wb") do |f|
|
||||
marshal = Marshal.dump(object)
|
||||
f.write(Zlib::Deflate.deflate(marshal))
|
||||
end
|
||||
|
||||
File.rename(tmp_path, cache_file)
|
||||
rescue => e
|
||||
(File.unlink(tmp_path) rescue nil)
|
||||
raise e
|
||||
end
|
||||
end
|
||||
|
||||
def load_cache
|
||||
marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
|
||||
Marshal.load(marshal)
|
||||
rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def git_linguist(args)
|
||||
incremental = true
|
||||
commit = nil
|
||||
|
||||
parser = OptionParser.new do |opts|
|
||||
opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
|
||||
opts.on("-f", "--force", "Force a full rescan") { incremental = false }
|
||||
opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
end
|
||||
|
||||
parser.parse!(args)
|
||||
|
||||
git_dir = `git rev-parse --git-dir`.strip
|
||||
raise "git-linguist must be ran in a Git repository" unless $?.success?
|
||||
wrapper = GitLinguist.new(git_dir, commit, incremental)
|
||||
|
||||
case args.pop
|
||||
when "stats"
|
||||
wrapper.linguist do |linguist|
|
||||
puts JSON.dump(linguist.languages)
|
||||
end
|
||||
when "breakdown"
|
||||
wrapper.linguist do |linguist|
|
||||
puts JSON.dump(linguist.breakdown_by_file)
|
||||
end
|
||||
when "dump-cache"
|
||||
puts JSON.dump(wrapper.load_language_stats)
|
||||
when "clear"
|
||||
wrapper.clear_language_stats
|
||||
when "disable"
|
||||
wrapper.disable_language_stats
|
||||
else
|
||||
$stderr.print(parser.help)
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
|
||||
git_linguist(ARGV)
|
||||
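Judging from the option parser and the `stats|breakdown|dump-cache|clear|disable` subcommands above, a typical invocation of the new wrapper would look something like this sketch; the JSON line only illustrates the shape of `JSON.dump(linguist.languages)`, and the byte counts are invented.

```
# Run inside a Git repository; the script requires the rugged gem and a commit OID.
$ git linguist --commit=$(git rev-parse HEAD) stats
{"Ruby":204865,"JavaScript":15323}

# Force a full rescan instead of reusing the language-stats.cache file:
$ git linguist --force --commit=$(git rev-parse HEAD) breakdown
```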
@@ -10,8 +10,8 @@ Gem::Specification.new do |s|
s.homepage = "https://github.com/github/linguist"
s.license = "MIT"

s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
s.executables << 'linguist'
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb'] + ['LICENSE']
s.executables = ['linguist', 'git-linguist']

s.add_dependency 'charlock_holmes', '~> 0.7.3'
s.add_dependency 'escape_utils', '~> 1.1.0'
@@ -24,4 +24,6 @@ Gem::Specification.new do |s|
s.add_development_dependency 'rake'
s.add_development_dependency 'yajl-ruby'
s.add_development_dependency 'color-proximity', '~> 0.2.1'
s.add_development_dependency 'licensee', '6.0.0b1'

end

grammars.yml: 39 changes

@@ -42,6 +42,8 @@ vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
vendor/grammars/Elm.tmLanguage:
|
||||
- source.elm
|
||||
vendor/grammars/FreeMarker.tmbundle:
|
||||
- text.html.ftl
|
||||
vendor/grammars/G-Code/:
|
||||
- source.LS
|
||||
- source.MCPOST
|
||||
@@ -92,7 +94,7 @@ vendor/grammars/Scalate.tmbundle:
|
||||
- text.html.ssp
|
||||
vendor/grammars/Slash.tmbundle:
|
||||
- text.html.slash
|
||||
vendor/grammars/Stata.tmbundle:
|
||||
vendor/grammars/Stata.tmbundle/:
|
||||
- source.mata
|
||||
- source.stata
|
||||
vendor/grammars/Stylus/:
|
||||
@@ -140,10 +142,14 @@ vendor/grammars/TXL/:
|
||||
- source.txl
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/UrWeb-Language-Definition:
|
||||
- source.ur
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
- source.vbnet
|
||||
vendor/grammars/Vala-TMBundle:
|
||||
- source.vala
|
||||
vendor/grammars/X10:
|
||||
- source.x10
|
||||
vendor/grammars/abap.tmbundle:
|
||||
- source.abap
|
||||
vendor/grammars/actionscript3-tmbundle:
|
||||
@@ -176,6 +182,9 @@ vendor/grammars/assembly.tmbundle:
|
||||
- source.x86asm
|
||||
vendor/grammars/atom-fsharp/:
|
||||
- source.fsharp
|
||||
- source.fsharp.fsi
|
||||
- source.fsharp.fsl
|
||||
- source.fsharp.fsx
|
||||
vendor/grammars/atom-language-purescript/:
|
||||
- source.purescript
|
||||
vendor/grammars/atom-salt:
|
||||
@@ -295,8 +304,8 @@ vendor/grammars/io.tmbundle:
|
||||
vendor/grammars/ioke-outdated:
|
||||
- source.ioke
|
||||
vendor/grammars/jade-tmbundle:
|
||||
- source.jade
|
||||
- source.pyjade
|
||||
- text.jade
|
||||
vendor/grammars/jasmin-sublime:
|
||||
- source.jasmin
|
||||
vendor/grammars/java.tmbundle:
|
||||
@@ -312,6 +321,11 @@ vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-babel/:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-click/:
|
||||
- source.click
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
vendor/grammars/language-coffee-script:
|
||||
@@ -331,9 +345,12 @@ vendor/grammars/language-hy:
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
- source.js.regexp.replacement
|
||||
vendor/grammars/language-jsoniq/:
|
||||
- source.jq
|
||||
- source.xq
|
||||
vendor/grammars/language-maxscript:
|
||||
- source.maxscript
|
||||
vendor/grammars/language-ncl:
|
||||
- source.ncl
|
||||
vendor/grammars/language-python:
|
||||
@@ -344,6 +361,8 @@ vendor/grammars/language-python:
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-supercollider:
|
||||
- source.supercollider
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-yaml:
|
||||
@@ -431,8 +450,6 @@ vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
vendor/grammars/prolog.tmbundle:
|
||||
- source.prolog
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
@@ -470,6 +487,8 @@ vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/st2-zonefile:
|
||||
- text.zone_file
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
- source.cm
|
||||
- source.ml
|
||||
@@ -477,10 +496,10 @@ vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-aspectj/:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
- source.ts
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
@@ -504,6 +523,8 @@ vendor/grammars/sublime-nix:
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-pony:
|
||||
- source.pony
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
@@ -519,6 +540,9 @@ vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime-typescript/:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -529,6 +553,9 @@ vendor/grammars/sublime_cobol:
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimeprolog/:
|
||||
- source.prolog
|
||||
- source.prolog.eclipse
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
- source.cuda-c++
|
||||
vendor/grammars/swift.tmbundle:
|
||||
|
||||

lib/linguist/blob.rb (new file): 73 changes

@@ -0,0 +1,73 @@
|
||||
require 'linguist/blob_helper'
|
||||
|
||||
module Linguist
|
||||
# A Blob is a wrapper around the content of a file to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new Blob.
|
||||
#
|
||||
# path - A path String (does not necessarily exists on the file system).
|
||||
# content - Content of the file.
|
||||
#
|
||||
# Returns a Blob.
|
||||
def initialize(path, content)
|
||||
@path = path
|
||||
@content = content
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Blob.new("/path/to/linguist/lib/linguist.rb", "").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@path)
|
||||
end
|
||||
|
||||
# Public: File contents.
|
||||
#
|
||||
# Returns a String.
|
||||
def data
|
||||
@content
|
||||
end
|
||||
|
||||
# Public: Get byte size
|
||||
#
|
||||
# Returns an Integer.
|
||||
def size
|
||||
@content.bytesize
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::Blob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
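A short sketch of how the new in-memory blob can be exercised, using only the methods defined above; the path and content are arbitrary:

```ruby
require 'linguist/blob'

blob = Linguist::Blob.new("app/views/things/index.html.erb", "<h1><%= @thing.name %></h1>\n")

blob.name        # => "index.html.erb"
blob.data        # => the string passed to the constructor
blob.size        # => byte length of that string
blob.extension   # => ".erb"
blob.extensions  # => [".html.erb", ".erb"]
```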
@@ -13,6 +13,7 @@
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/
- ^[Ee]xamples/

## Documentation files ##

@@ -21,4 +22,9 @@
- (^|/)COPYING(\.|$)
- (^|/)INSTALL(\.|$)
- (^|/)LICEN[CS]E(\.|$)
- (^|/)[Ll]icen[cs]e(\.|$)
- (^|/)README(\.|$)
- (^|/)[Rr]eadme(\.|$)

# Samples folders
- ^[Ss]amples/

@@ -1,10 +1,11 @@
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/blob'
|
||||
|
||||
module Linguist
|
||||
# A FileBlob is a wrapper around a File object to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class FileBlob
|
||||
class FileBlob < Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new FileBlob from a path
|
||||
@@ -18,20 +19,6 @@ module Linguist
|
||||
@path = base_path ? path.sub("#{base_path}/", '') : path
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb",
|
||||
# "/path/to/linguist").path
|
||||
# # => "lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: Read file permissions
|
||||
#
|
||||
# Returns a String like '100644'
|
||||
@@ -39,13 +26,6 @@ module Linguist
|
||||
File.stat(@fullpath).mode.to_s(8)
|
||||
end
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Read file contents.
|
||||
#
|
||||
# Returns a String.
|
||||
@@ -59,26 +39,5 @@ module Linguist
|
||||
def size
|
||||
File.size(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
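For contrast with the in-memory `Blob` above, the slimmed-down `FileBlob` keeps its disk-backed behaviour; a rough usage sketch, reusing the paths from the doc comments that this change removes (it assumes the file actually exists on disk):

```ruby
require 'linguist'

blob = Linguist::FileBlob.new("/path/to/linguist/lib/linguist.rb", "/path/to/linguist")

blob.path        # => "lib/linguist.rb"  (base_path stripped in the constructor)
blob.name        # => "linguist.rb"
blob.mode        # => permission string such as "100644"
blob.extensions  # => [".rb"]            (now inherited from Blob)
```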
@@ -71,7 +71,8 @@ module Linguist
|
||||
generated_jni_header? ||
|
||||
vcr_cassette? ||
|
||||
generated_module? ||
|
||||
generated_unity3d_meta?
|
||||
generated_unity3d_meta? ||
|
||||
generated_racc?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
@@ -241,22 +242,26 @@ module Linguist
|
||||
return lines[0].include?("Code generated by")
|
||||
end
|
||||
|
||||
PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob a C++, Java or Python source file generated by the
|
||||
# Protocol Buffer compiler?
|
||||
#
|
||||
# Returns true of false.
|
||||
def generated_protocol_buffer?
|
||||
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless PROTOBUF_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob generated by Apache Thrift compiler?
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_apache_thrift?
|
||||
return false unless ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
|
||||
@@ -355,5 +360,18 @@ module Linguist
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("fileFormatVersion: ")
|
||||
end
|
||||
|
||||
# Internal: Is this a Racc-generated file?
|
||||
#
|
||||
# A Racc-generated file contains:
|
||||
# # This file is automatically generated by Racc x.y.z
|
||||
# on the third line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_racc?
|
||||
return false unless extname == '.rb'
|
||||
return false unless lines.count > 2
|
||||
return lines[2].start_with?("# This file is automatically generated by Racc")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -33,7 +33,7 @@ module Linguist
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# disambiguate "Perl", "Prolog" do |data|
|
||||
# disambiguate ".pm" do |data|
|
||||
# if data.include?("use strict")
|
||||
# Language["Perl"]
|
||||
# elsif /^[^#]+:-/.match(data)
|
||||
@@ -56,6 +56,7 @@ module Linguist
|
||||
|
||||
# Internal: Check if this heuristic matches the candidate languages.
|
||||
def matches?(filename)
|
||||
filename = filename.downcase
|
||||
@extensions.any? { |ext| filename.end_with?(ext) }
|
||||
end
|
||||
|
||||
@@ -65,7 +66,17 @@ module Linguist
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
|
||||
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".bb" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
@@ -75,67 +86,9 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pl" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -149,40 +102,50 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["ECLiPSe"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for", ".f" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fs" do |data|
|
||||
if /^(: |new-device)/.match(data)
|
||||
Language["Forth"]
|
||||
@@ -195,6 +158,47 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".m" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
@@ -213,41 +217,115 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
|
||||
Language["GAS"]
|
||||
else
|
||||
Language["MAXScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
disambiguate ".pl" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm", ".t" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -267,95 +345,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
Language["TypeScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
|
||||
Language["GAS"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r", ".R" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -150,7 +150,8 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.find_by_name(name)
name && @name_index[name.downcase]
return nil if name.to_s.empty?
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
end

# Public: Look up Language by one of its aliases.
@@ -164,7 +165,8 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.find_by_alias(name)
name && @alias_index[name.downcase]
return nil if name.to_s.empty?
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
end

# Public: Look up Languages by filename.
@@ -240,7 +242,8 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.[](name)
name && @index[name.downcase]
return nil if name.to_s.empty?
name && (@index[name.downcase] || @index[name.split(',').first.downcase])
end

# Public: A List of popular languages

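A hedged sketch of what the new empty-name guard and comma fallback mean for callers; the return values assume the stock `languages.yml` definitions:

```ruby
require 'linguist'

Linguist::Language['Ruby']                  # => the Ruby language, as before
Linguist::Language.find_by_alias('csharp')  # => C#
Linguist::Language['']                      # => nil, instead of downcasing an empty name

# A name carrying trailing comma-separated values now falls back to its
# first segment rather than missing the index entirely:
Linguist::Language['Perl, Prolog']          # => Perl
```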
@@ -8,7 +8,8 @@
# Use "text" if a mode does not exist.
# wrap - Boolean wrap to enable line wrapping (default: false)
# extensions - An Array of associated extensions (the first one is
# considered the primary extension)
# considered the primary extension, the others should be
# listed alphabetically)
# interpreters - An Array of associated interpreters
# searchable - Boolean flag to enable searching (defaults to true)
# search_term - Deprecated: Some languages maybe indexed under a
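Taken together, those header comments describe entries shaped roughly like the hypothetical one below; the `Foo Script` name, color, and scopes are invented for illustration only.

```yaml
Foo Script:
  type: programming        # programming, markup, data or prose
  color: "#123456"         # color shown for the language in the stats bar
  aliases:
  - fooscript
  extensions:
  - .foo                   # primary extension first, the rest alphabetical
  - .fooz
  interpreters:
  - foo
  ace_mode: text           # Ace editor mode; "text" if none exists
  tm_scope: source.foo     # TextMate scope used for syntax highlighting
  wrap: false              # line wrapping (default: false)
  searchable: true         # defaults to true
```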
@@ -141,7 +142,7 @@ Agda:
|
||||
|
||||
Alloy:
|
||||
type: programming # 'modeling' would be more appropiate
|
||||
color: "#cc5c24"
|
||||
color: "#64C800"
|
||||
extensions:
|
||||
- .als
|
||||
ace_mode: text
|
||||
@@ -214,7 +215,7 @@ AspectJ:
|
||||
color: "#a957b0"
|
||||
extensions:
|
||||
- .aj
|
||||
tm_scope: none
|
||||
tm_scope: source.aspectj
|
||||
ace_mode: text
|
||||
|
||||
Assembly:
|
||||
@@ -392,6 +393,7 @@ C#:
|
||||
- csharp
|
||||
extensions:
|
||||
- .cs
|
||||
- .cake
|
||||
- .cshtml
|
||||
- .csx
|
||||
|
||||
@@ -539,6 +541,14 @@ Clean:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
Click:
|
||||
type: programming
|
||||
color: "#E4E6F3"
|
||||
extensions:
|
||||
- .click
|
||||
tm_scope: source.click
|
||||
ace_mode: text
|
||||
|
||||
Clojure:
|
||||
type: programming
|
||||
ace_mode: clojure
|
||||
@@ -567,6 +577,7 @@ CoffeeScript:
|
||||
extensions:
|
||||
- .coffee
|
||||
- ._coffee
|
||||
- .cake
|
||||
- .cjsx
|
||||
- .cson
|
||||
- .iced
|
||||
@@ -616,6 +627,7 @@ Common Lisp:
|
||||
- .lsp
|
||||
- .ny
|
||||
- .podsl
|
||||
- .sexp
|
||||
interpreters:
|
||||
- lisp
|
||||
- sbcl
|
||||
@@ -718,7 +730,7 @@ Cython:
|
||||
|
||||
D:
|
||||
type: programming
|
||||
color: "#fcd46d"
|
||||
color: "#ba595e"
|
||||
extensions:
|
||||
- .d
|
||||
- .di
|
||||
@@ -750,6 +762,14 @@ DM:
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
DNS Zone:
|
||||
type: data
|
||||
extensions:
|
||||
- .zone
|
||||
- .arpa
|
||||
tm_scope: text.zone_file
|
||||
ace_mode: text
|
||||
|
||||
DTrace:
|
||||
type: programming
|
||||
aliases:
|
||||
@@ -834,6 +854,14 @@ ECL:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
ECLiPSe:
|
||||
type: programming
|
||||
group: prolog
|
||||
extensions:
|
||||
- .ecl
|
||||
tm_scope: source.prolog.eclipse
|
||||
ace_mode: prolog
|
||||
|
||||
Eagle:
|
||||
type: markup
|
||||
color: "#814C05"
|
||||
@@ -867,6 +895,8 @@ Elixir:
|
||||
ace_mode: elixir
|
||||
filenames:
|
||||
- mix.lock
|
||||
interpreters:
|
||||
- elixir
|
||||
|
||||
Elm:
|
||||
type: programming
|
||||
@@ -994,6 +1024,7 @@ Formatted:
|
||||
type: data
|
||||
extensions:
|
||||
- .for
|
||||
- .eam.fs
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
@@ -1011,6 +1042,16 @@ Forth:
|
||||
- .fs
|
||||
ace_mode: forth
|
||||
|
||||
FreeMarker:
|
||||
type: programming
|
||||
color: "#0050b2"
|
||||
aliases:
|
||||
- ftl
|
||||
extensions:
|
||||
- .ftl
|
||||
tm_scope: text.html.ftl
|
||||
ace_mode: ftl
|
||||
|
||||
Frege:
|
||||
type: programming
|
||||
color: "#00cafe"
|
||||
@@ -1300,11 +1341,22 @@ HTML+Django:
|
||||
- .mustache
|
||||
- .jinja
|
||||
aliases:
|
||||
- django
|
||||
- html+django/jinja
|
||||
- html+jinja
|
||||
- htmldjango
|
||||
ace_mode: django
|
||||
|
||||
HTML+EEX:
|
||||
type: markup
|
||||
tm_scope: text.html.elixir
|
||||
group: HTML
|
||||
aliases:
|
||||
- eex
|
||||
extensions:
|
||||
- .eex
|
||||
ace_mode: text
|
||||
|
||||
HTML+ERB:
|
||||
type: markup
|
||||
tm_scope: text.html.erb
|
||||
@@ -1314,7 +1366,7 @@ HTML+ERB:
|
||||
extensions:
|
||||
- .erb
|
||||
- .erb.deface
|
||||
ace_mode: html_ruby
|
||||
ace_mode: text
|
||||
|
||||
HTML+PHP:
|
||||
type: markup
|
||||
@@ -1350,6 +1402,7 @@ Haml:
|
||||
Handlebars:
|
||||
type: markup
|
||||
color: "#01a9d6"
|
||||
group: HTML
|
||||
aliases:
|
||||
- hbs
|
||||
- htmlbars
|
||||
@@ -1528,7 +1581,9 @@ JSON:
|
||||
searchable: false
|
||||
extensions:
|
||||
- .json
|
||||
- .geojson
|
||||
- .lock
|
||||
- .topojson
|
||||
filenames:
|
||||
- .jshintrc
|
||||
- composer.lock
|
||||
@@ -1556,12 +1611,20 @@ JSONiq:
|
||||
- .jq
|
||||
tm_scope: source.jq
|
||||
|
||||
JSX:
|
||||
type: programming
|
||||
group: JavaScript
|
||||
extensions:
|
||||
- .jsx
|
||||
tm_scope: source.js.jsx
|
||||
ace_mode: javascript
|
||||
|
||||
Jade:
|
||||
group: HTML
|
||||
type: markup
|
||||
extensions:
|
||||
- .jade
|
||||
tm_scope: source.jade
|
||||
tm_scope: text.jade
|
||||
ace_mode: jade
|
||||
|
||||
Jasmin:
|
||||
@@ -1606,10 +1669,10 @@ JavaScript:
|
||||
- .gs
|
||||
- .jake
|
||||
- .jsb
|
||||
- .jscad
|
||||
- .jsfl
|
||||
- .jsm
|
||||
- .jss
|
||||
- .jsx
|
||||
- .njs
|
||||
- .pac
|
||||
- .sjs
|
||||
@@ -1641,6 +1704,18 @@ Julia:
|
||||
color: "#a270ba"
|
||||
ace_mode: julia
|
||||
|
||||
Jupyter Notebook:
|
||||
type: markup
|
||||
ace_mode: json
|
||||
tm_scope: source.json
|
||||
color: "#DA5B0B"
|
||||
extensions:
|
||||
- .ipynb
|
||||
filenames:
|
||||
- Notebook
|
||||
aliases:
|
||||
- IPython Notebook
|
||||
|
||||
KRL:
|
||||
type: programming
|
||||
color: "#28431f"
|
||||
@@ -1653,6 +1728,7 @@ KiCad:
|
||||
type: programming
|
||||
extensions:
|
||||
- .sch
|
||||
- .brd
|
||||
- .kicad_pcb
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
@@ -1702,6 +1778,7 @@ LSL:
|
||||
ace_mode: lsl
|
||||
extensions:
|
||||
- .lsl
|
||||
- .lslp
|
||||
interpreters:
|
||||
- lsl
|
||||
color: '#3d9970'
|
||||
@@ -1710,8 +1787,8 @@ LabVIEW:
|
||||
type: programming
|
||||
extensions:
|
||||
- .lvproj
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
tm_scope: text.xml
|
||||
ace_mode: xml
|
||||
|
||||
Lasso:
|
||||
type: programming
|
||||
@@ -1901,6 +1978,15 @@ M:
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
|
||||
MAXScript:
|
||||
type: programming
|
||||
color: "#00a6a6"
|
||||
extensions:
|
||||
- .ms
|
||||
- .mcr
|
||||
tm_scope: source.maxscript
|
||||
ace_mode: text
|
||||
|
||||
MTML:
|
||||
type: markup
|
||||
color: "#b7e1f4"
|
||||
@@ -1933,6 +2019,7 @@ Makefile:
|
||||
- GNUmakefile
|
||||
- Kbuild
|
||||
- Makefile
|
||||
- Makefile.inc
|
||||
- makefile
|
||||
interpreters:
|
||||
- make
|
||||
@@ -1985,6 +2072,8 @@ Mathematica:
|
||||
Matlab:
|
||||
type: programming
|
||||
color: "#bb92ac"
|
||||
aliases:
|
||||
- octave
|
||||
extensions:
|
||||
- .matlab
|
||||
- .m
|
||||
@@ -2033,6 +2122,14 @@ Mercury:
|
||||
tm_scope: source.mercury
|
||||
ace_mode: prolog
|
||||
|
||||
Metal:
|
||||
type: programming
|
||||
color: "#8f14e9"
|
||||
extensions:
|
||||
- .metal
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
MiniD: # Legacy
|
||||
type: programming
|
||||
searchable: false
|
||||
@@ -2108,7 +2205,7 @@ Myghty:
|
||||
|
||||
NCL:
|
||||
type: programming
|
||||
color: #28431f
|
||||
color: "#28431f"
|
||||
extensions:
|
||||
- .ncl
|
||||
tm_scope: source.ncl
|
||||
@@ -2411,6 +2508,7 @@ PHP:
|
||||
- .php3
|
||||
- .php4
|
||||
- .php5
|
||||
- .phps
|
||||
- .phpt
|
||||
filenames:
|
||||
- Phakefile
|
||||
@@ -2424,8 +2522,10 @@ PLSQL:
|
||||
type: programming
|
||||
ace_mode: sql
|
||||
tm_scope: source.plsql.oracle
|
||||
color: "#dad8d8"
|
||||
extensions:
|
||||
- .pls
|
||||
- .pck
|
||||
- .pkb
|
||||
- .pks
|
||||
- .plb
|
||||
@@ -2542,6 +2642,13 @@ Perl6:
|
||||
tm_scope: source.perl.6
|
||||
ace_mode: perl
|
||||
|
||||
Pickle:
|
||||
type: data
|
||||
extensions:
|
||||
- .pkl
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
PicoLisp:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -2586,6 +2693,13 @@ PogoScript:
|
||||
tm_scope: source.pogoscript
|
||||
ace_mode: text
|
||||
|
||||
Pony:
|
||||
type: programming
|
||||
extensions:
|
||||
- .pony
|
||||
tm_scope: source.pony
|
||||
ace_mode: text
|
||||
|
||||
PostScript:
|
||||
type: markup
|
||||
extensions:
|
||||
@@ -2618,11 +2732,11 @@ Prolog:
|
||||
color: "#74283c"
|
||||
extensions:
|
||||
- .pl
|
||||
- .ecl
|
||||
- .pro
|
||||
- .prolog
|
||||
interpreters:
|
||||
- swipl
|
||||
tm_scope: source.prolog
|
||||
ace_mode: prolog
|
||||
|
||||
Propeller Spin:
|
||||
@@ -2653,7 +2767,7 @@ Public Key:
|
||||
|
||||
Puppet:
|
||||
type: programming
|
||||
color: "#332A77"
|
||||
color: "#302B6D"
|
||||
extensions:
|
||||
- .pp
|
||||
filenames:
|
||||
@@ -2760,7 +2874,7 @@ R:
|
||||
ace_mode: r
|
||||
|
||||
RAML:
|
||||
type: data
|
||||
type: markup
|
||||
ace_mode: yaml
|
||||
tm_scope: source.yaml
|
||||
color: "#77d9fb"
|
||||
@@ -2803,7 +2917,7 @@ RMarkdown:
  ace_mode: markdown
  extensions:
  - .rmd
  tm_scope: none
  tm_scope: source.gfm

Racket:
  type: programming
@@ -2933,6 +3047,7 @@ Ruby:
  - .pryrc
  - Appraisals
  - Berksfile
  - Brewfile
  - Buildfile
  - Deliverfile
  - Fastfile
@@ -3076,7 +3191,7 @@ Sass:
Scala:
  type: programming
  ace_mode: scala
  color: "#7dd3b0"
  color: "#DC322F"
  extensions:
  - .scala
  - .sbt
@@ -3181,6 +3296,7 @@ Slim:
  color: "#ff8f77"
  extensions:
  - .slim
  tm_scope: text.slim
  ace_mode: text

Smali:
@@ -3264,9 +3380,12 @@ SuperCollider:
  type: programming
  color: "#46390b"
  extensions:
  - .scd
  - .sc
  tm_scope: none
  - .scd
  interpreters:
  - sclang
  - scsynth
  tm_scope: source.supercollider
  ace_mode: text

Swift:
@@ -3409,6 +3528,7 @@ TypeScript:
  - ts
  extensions:
  - .ts
  - .tsx
  tm_scope: source.ts
  ace_mode: typescript

@@ -3442,6 +3562,17 @@ UnrealScript:
  tm_scope: source.java
  ace_mode: java

UrWeb:
  type: programming
  aliases:
  - Ur/Web
  - Ur
  extensions:
  - .ur
  - .urs
  tm_scope: source.ur
  ace_mode: text

VCL:
  group: Perl
  type: programming
@@ -3547,6 +3678,16 @@ WebIDL:
  tm_scope: source.webidl
  ace_mode: text

X10:
  type: programming
  aliases:
  - xten
  ace_mode: text
  extensions:
  - .x10
  color: "#4B6BEF"
  tm_scope: source.x10

XC:
  type: programming
  color: "#99DA07"
@@ -3569,6 +3710,7 @@ XML:
  - .ccxml
  - .clixml
  - .cproject
  - .csl
  - .csproj
  - .ct
  - .dita
@@ -3584,6 +3726,7 @@ XML:
  - .iml
  - .ivy
  - .jelly
  - .jsproj
  - .kml
  - .launch
  - .mdpolicy
@@ -3614,6 +3757,7 @@ XML:
  - .tmSnippet
  - .tmTheme
  - .ts
  - .tsx
  - .ui
  - .urdf
  - .vbproj
@@ -3716,7 +3860,9 @@ YAML:
  - .yml
  - .reek
  - .rviz
  - .syntax
  - .yaml
  - .yaml-tmlanguage
  ace_mode: yaml

Yacc:
@@ -3821,8 +3967,13 @@ wisp:
xBase:
  type: programming
  color: "#403a40"
  aliases:
  - advpl
  - clipper
  - foxpro
  extensions:
  - .prg
  - .ch
  - .prw
  tm_scope: source.harbour
  ace_mode: text

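Note: the entries added above (for example Pony and Pickle) become queryable through the public Linguist API once this grammar data ships. A minimal sketch, assuming the gem from this compare range is installed; the lookups themselves are illustrative, not part of the diff:

  require 'linguist'

  # Hypothetical lookups against entries added in this diff.
  Linguist::Language['Pony'].extensions   # => [".pony"]
  Linguist::Language['Pickle'].type       # => :data
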
@@ -4,7 +4,11 @@ require 'rugged'

module Linguist
  class LazyBlob
    GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
    GIT_ATTR = ['linguist-documentation',
                'linguist-language',
                'linguist-vendored',
                'linguist-generated']

    GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
    GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)

@@ -31,14 +35,6 @@ module Linguist
        name, GIT_ATTR, GIT_ATTR_FLAGS)
    end

    def vendored?
      if attr = git_attributes['linguist-vendored']
        return boolean_attribute(attr)
      else
        return super
      end
    end

    def documentation?
      if attr = git_attributes['linguist-documentation']
        boolean_attribute(attr)
@@ -47,6 +43,22 @@ module Linguist
      end
    end

    def generated?
      if attr = git_attributes['linguist-generated']
        boolean_attribute(attr)
      else
        super
      end
    end

    def vendored?
      if attr = git_attributes['linguist-vendored']
        return boolean_attribute(attr)
      else
        super
      end
    end

    def language
      return @language if defined?(@language)

@@ -67,6 +79,10 @@ module Linguist
      @size
    end

    def cleanup!
      @data.clear if @data
    end

    protected

    # Returns true if the attribute is present and not the string "false".
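The hunk above adds 'linguist-generated' to the git attributes that LazyBlob consults, so a repository can force-mark paths as generated. A minimal sketch of how that override could be exercised; the repository path and file name are hypothetical and not part of the diff:

  require 'rugged'
  require 'linguist'

  # Assumes /tmp/demo is a git repository whose .gitattributes contains:
  #   docs/generated/*.md linguist-generated=true
  repo  = Rugged::Repository.new('/tmp/demo')                  # hypothetical path
  entry = repo.head.target.tree.path('docs/generated/api.md')  # hypothetical file
  blob  = Linguist::LazyBlob.new(repo, entry[:oid], 'docs/generated/api.md')

  blob.generated?  # => true via the attribute, without falling back to content heuristics
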
@@ -126,12 +126,13 @@ module Linguist
    end

    protected
    MAX_TREE_SIZE = 100_000

    def compute_stats(old_commit_oid, cache = nil)
      return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE

      old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree

      read_index

      diff = Rugged::Tree.diff(repository, old_tree, current_tree)

      # Clear file map and fetch full diff if any .gitattributes files are changed
@@ -157,8 +158,11 @@ module Linguist

        blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))

        next unless blob.include_in_language_stats?
        file_map[new] = [blob.language.group.name, blob.size]
        if blob.include_in_language_stats?
          file_map[new] = [blob.language.group.name, blob.size]
        end

        blob.cleanup!
      end
    end

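The second hunk replaces the `next unless` guard with an `if` block so that `blob.cleanup!` runs for every delta, including blobs excluded from the language stats. A generic, self-contained illustration of that pattern (not project code):

  # Guard only the optional work so the per-iteration teardown always runs.
  Item = Struct.new(:name, :size, :countable) do
    def cleanup!
      @cleaned = true  # stand-in for releasing cached data
    end
  end

  totals = {}
  [Item.new('app.rb', 120, true), Item.new('vendor.min.js', 9000, false)].each do |item|
    totals[item.name] = item.size if item.countable
    item.cleanup!  # runs even when the item is skipped above
  end

  totals  # => {"app.rb"=>120}
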
@@ -2,7 +2,7 @@ module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
      VimModeline = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
      VimModeline = /vim:\s*set.*\s(?:ft|filetype)=(\w+)\s?.*:/i

      # Public: Detects language based on Vim and Emacs modelines
      #

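The Vim pattern is loosened so the filetype option may appear among other `set` options rather than directly after `set`. A quick check of both expressions (copied verbatim from the hunk above) against a typical modeline:

  old_vim = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
  new_vim = /vim:\s*set.*\s(?:ft|filetype)=(\w+)\s?.*:/i

  line = "# vim: set ts=2 sw=2 ft=ruby noexpandtab:"

  old_vim.match(line)     # => nil ("ft=" must directly follow "set")
  new_vim.match(line)[1]  # => "ruby"
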
@@ -86,13 +86,13 @@ module Linguist
        if s.peek(1) == "\""
          s.getch
        else
          s.skip_until(/[^\\]"/)
          s.skip_until(/(?<!\\)"/)
        end
      elsif s.scan(/'/)
        if s.peek(1) == "'"
          s.getch
        else
          s.skip_until(/[^\\]'/)
          s.skip_until(/(?<!\\)'/)
        end

      # Skip number literals

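Both string-skipping branches now use a negative lookbehind, `(?<!\\)`, so the scanner stops at the first quote that is not preceded by a backslash, instead of requiring a non-backslash character to be consumed together with the quote. A standalone illustration with hypothetical input:

  require 'strscan'

  # Scanner positioned just after an opening double quote.
  s = StringScanner.new('say \"hi\" there" rest')
  s.skip_until(/(?<!\\)"/)   # skips past the escaped quotes to the real closing quote
  s.rest                     # => " rest"
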
@@ -78,6 +78,9 @@
# Haxelib projects often contain a neko bytecode file named run.n
- run.n$

# Bootstrap Datepicker
- bootstrap-datepicker/

## Commonly Bundled JavaScript frameworks ##

# jQuery
@@ -88,6 +91,34 @@
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?\.(js|css)$
- (^|/)jquery\.(ui|effects)\.([^.]*)\.(js|css)$

# jQuery Gantt
- jquery.fn.gantt.js

# jQuery fancyBox
- jquery.fancybox.(js|css)

# Fuel UX
- fuelux.js

# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# Slick
- (^|/)slick\.\w+.js$

# Leaflet plugins
- (^|/)Leaflet\.Coordinates-\d+\.\d+\.\d+\.src\.js$
- leaflet.draw-src.js
- leaflet.draw.css
- Control.FullScreen.css
- Control.FullScreen.js
- leaflet.spin.js
- wicket-leaflet.js

# Sublime Text workspace files
- .sublime-project
- .sublime-workspace

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
@@ -122,7 +153,7 @@
- (^|/)Chart\.js$

# Codemirror
- (^|/)[Cc]ode[Mm]irror/(lib|mode|theme|addon|keymap)
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
- (^|/)shBrush([^.]*)\.js$
@@ -164,6 +195,11 @@

## Obj-C ##

# Xcode

- \.xctemplate/
- \.imageset/

# Carthage
- ^Carthage/

@@ -179,6 +215,10 @@
# Fabric
- Fabric.framework/

# git config files
- gitattributes$
- gitignore$
- gitmodules$

## Groovy ##

@@ -224,21 +264,9 @@
# Html5shiv
- (^|/)html5shiv\.js$

# Samples folders
- ^[Ss]amples/

# LICENSE, README, git config files
- ^COPYING$
- LICENSE$
- License$
- gitattributes$
- gitignore$
- gitmodules$
- ^README$
- ^readme$

# Test fixtures
- ^[Tt]ests?/fixtures/
- ^[Ss]pecs?/fixtures/

# PhoneGap/Cordova
- (^|/)cordova([^.]*)\.js$

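The vendoring entries above are plain regular expressions matched against repository file paths. A quick, hypothetical check of one of the patterns added in this diff:

  pattern = Regexp.new('(^|/)jquery\.fileupload(-\w+)?\.js$')

  pattern.match?('assets/js/jquery.fileupload-ui.js')  # => true
  pattern.match?('app/js/fileupload.js')               # => false
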
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "4.5.11"
  VERSION = "4.7.3"
end

86 samples/C#/build.cake (new file)
@@ -0,0 +1,86 @@
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// ARGUMENTS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var target = Argument<string>("target", "Default");
|
||||
var configuration = Argument<string>("configuration", "Release");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// GLOBAL VARIABLES
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var solutions = GetFiles("./**/*.sln");
|
||||
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// SETUP / TEARDOWN
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Setup(() =>
|
||||
{
|
||||
// Executed BEFORE the first task.
|
||||
Information("Running tasks...");
|
||||
});
|
||||
|
||||
Teardown(() =>
|
||||
{
|
||||
// Executed AFTER the last task.
|
||||
Information("Finished running tasks.");
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TASK DEFINITIONS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
// Clean solution directories.
|
||||
foreach(var path in solutionPaths)
|
||||
{
|
||||
Information("Cleaning {0}", path);
|
||||
CleanDirectories(path + "/**/bin/" + configuration);
|
||||
CleanDirectories(path + "/**/obj/" + configuration);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Restore all NuGet packages.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Restoring {0}...", solution);
|
||||
NuGetRestore(solution);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Build all solutions.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Building {0}", solution);
|
||||
MSBuild(solution, settings =>
|
||||
settings.SetPlatformTarget(PlatformTarget.MSIL)
|
||||
.WithProperty("TreatWarningsAsErrors","true")
|
||||
.WithTarget("Build")
|
||||
.SetConfiguration(configuration));
|
||||
}
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TARGETS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// EXECUTION
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
RunTarget(target);
|
||||
133 samples/Click/sr2.click (new file)
@@ -0,0 +1,133 @@
|
||||
rates :: AvailableRates
|
||||
elementclass sr2 {
|
||||
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
|
||||
|
||||
|
||||
arp :: ARPTable();
|
||||
lt :: LinkTable(IP $sr2_ip);
|
||||
|
||||
|
||||
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
PERIOD 15,
|
||||
GW $gateway);
|
||||
|
||||
|
||||
gw -> SR2SetChecksum -> [0] output;
|
||||
|
||||
set_gw :: SR2SetGateway(SEL gw);
|
||||
|
||||
|
||||
es :: SR2ETTStat(ETHTYPE 0x0641,
|
||||
ETH $wireless_mac,
|
||||
IP $sr2_ip,
|
||||
PERIOD 30000,
|
||||
TAU 300000,
|
||||
ARP arp,
|
||||
PROBES $probes,
|
||||
ETT metric,
|
||||
RT rates);
|
||||
|
||||
|
||||
metric :: SR2ETTMetric(LT lt);
|
||||
|
||||
|
||||
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
ARP arp,
|
||||
LT lt);
|
||||
|
||||
|
||||
querier :: SR2Querier(ETH $wireless_mac,
|
||||
SR forwarder,
|
||||
LT lt,
|
||||
ROUTE_DAMPENING true,
|
||||
TIME_BEFORE_SWITCH 5,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG false);
|
||||
|
||||
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_responder -> SR2SetChecksum -> [0] output;
|
||||
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
|
||||
query_forwarder [1] -> query_responder;
|
||||
|
||||
data_ck :: SR2SetChecksum()
|
||||
|
||||
input [1]
|
||||
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> querier
|
||||
-> data_ck;
|
||||
|
||||
|
||||
host_cl [1] -> [0] set_gw [0] -> querier;
|
||||
|
||||
forwarder[0]
|
||||
-> dt ::DecIPTTL
|
||||
-> data_ck
|
||||
-> [2] output;
|
||||
|
||||
|
||||
dt[1]
|
||||
-> Print(ttl-error)
|
||||
-> ICMPError($sr2_ip, timeexceeded, 0)
|
||||
-> querier;
|
||||
|
||||
|
||||
// queries
|
||||
querier [1] -> [1] query_forwarder;
|
||||
es -> SetTimestamp() -> [1] output;
|
||||
|
||||
|
||||
forwarder[1] //ip packets to me
|
||||
-> SR2StripHeader()
|
||||
-> CheckIPHeader()
|
||||
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> [3] output;
|
||||
|
||||
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
|
||||
|
||||
input [0]
|
||||
-> ncl :: Classifier(
|
||||
12/0643 , //sr2_forwarder
|
||||
12/0644 , //sr2
|
||||
12/0645 , //replies
|
||||
12/0641 , //sr2_es
|
||||
12/062c , //sr2_gw
|
||||
);
|
||||
|
||||
|
||||
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
|
||||
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
|
||||
ncl[2] -> SR2CheckHeader() -> query_responder;
|
||||
ncl[3] -> es;
|
||||
ncl[4] -> SR2CheckHeader() -> gw;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
|
||||
Idle -> [1] s;
|
||||
s[1] -> Discard;
|
||||
s[2] -> Discard;
|
||||
s[3] -> Discard;
|
||||
142 samples/Click/thomer-nat.click (new file)
@@ -0,0 +1,142 @@
|
||||
// This Click configuration implements a firewall and NAT, roughly based on the
|
||||
// mazu-nat.click example.
|
||||
//
|
||||
// This example assumes there is one interface that is IP-aliased. In this
|
||||
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
|
||||
// respectively. There is a local network, 192.168.1.0/24, and an upstream
|
||||
// gateway, 66.58.65.89. Traffic from the local network is NATed.
|
||||
//
|
||||
// Connections can be initiated from the NAT box itself, also.
|
||||
//
|
||||
// For bugs, suggestions, and, corrections, please email me.
|
||||
//
|
||||
// Author: Thomer M. Gil (click@thomer.com)
|
||||
|
||||
AddressInfo(
|
||||
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
|
||||
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
|
||||
gw-addr 66.58.65.89 00:20:6f:14:54:c2
|
||||
);
|
||||
|
||||
|
||||
elementclass SniffGatewayDevice {
|
||||
$device |
|
||||
from :: FromDevice($device)
|
||||
-> t1 :: Tee
|
||||
-> output;
|
||||
input -> q :: Queue(1024)
|
||||
-> t2 :: PullTee
|
||||
-> to :: ToDevice($device);
|
||||
t1[1] -> ToHostSniffers;
|
||||
t2[1] -> ToHostSniffers($device);
|
||||
ScheduleInfo(from .1, to 1);
|
||||
}
|
||||
|
||||
|
||||
device :: SniffGatewayDevice(eth0);
|
||||
arpq_in :: ARPQuerier(eth0-in) -> device;
|
||||
ip_to_extern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> EtherEncap(0x800, eth0-ex, gw-addr)
|
||||
-> device;
|
||||
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
|
||||
-> ToHost;
|
||||
ip_to_intern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> arpq_in;
|
||||
|
||||
|
||||
arp_class :: Classifier(
|
||||
12/0806 20/0001, // [0] ARP requests
|
||||
12/0806 20/0002, // [1] ARP replies to host
|
||||
12/0800); // [2] IP packets
|
||||
|
||||
device -> arp_class;
|
||||
|
||||
// ARP crap
|
||||
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
|
||||
arp_class[1] -> arp_t :: Tee;
|
||||
arp_t[0] -> ToHost;
|
||||
arp_t[1] -> [1]arpq_in;
|
||||
|
||||
|
||||
// IP packets
|
||||
arp_class[2] -> Strip(14)
|
||||
-> CheckIPHeader
|
||||
-> ipclass :: IPClassifier(dst host eth0-ex,
|
||||
dst host eth0-in,
|
||||
src net eth0-in);
|
||||
|
||||
// Define pattern NAT
|
||||
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
|
||||
|
||||
// Rewriting rules for UDP/TCP packets
|
||||
// output[0] rewritten to go into the wild
|
||||
// output[1] rewritten to come back from the wild or no match
|
||||
rw :: IPRewriter(pattern NAT 0 1,
|
||||
pass 1);
|
||||
|
||||
// Rewriting rules for ICMP packets
|
||||
irw :: ICMPPingRewriter(eth0-ex, -);
|
||||
irw[0] -> ip_to_extern;
|
||||
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
|
||||
icmp_me_or_intern[0] -> ip_to_host;
|
||||
icmp_me_or_intern[1] -> ip_to_intern;
|
||||
|
||||
// Rewriting rules for ICMP error packets
|
||||
ierw :: ICMPRewriter(rw irw);
|
||||
ierw[0] -> icmp_me_or_intern;
|
||||
ierw[1] -> icmp_me_or_intern;
|
||||
|
||||
|
||||
// Packets directed at eth0-ex.
|
||||
// Send it through IPRewriter(pass). If there was a mapping, it will be
|
||||
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
|
||||
// eth0-ex.
|
||||
ipclass[0] -> [1]rw;
|
||||
|
||||
// packets that were rewritten, heading into the wild world.
|
||||
rw[0] -> ip_to_extern;
|
||||
|
||||
// packets that come back from the wild or are not part of an established
|
||||
// connection.
|
||||
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
|
||||
dst net eth0-in);
|
||||
|
||||
// not established yet or returning packets for a connection that was
|
||||
// established from this host itself.
|
||||
established_class[0] ->
|
||||
firewall :: IPClassifier(dst tcp port ssh,
|
||||
dst tcp port smtp,
|
||||
dst tcp port domain,
|
||||
dst udp port domain,
|
||||
icmp type echo-reply,
|
||||
proto icmp,
|
||||
port > 4095,
|
||||
-);
|
||||
|
||||
firewall[0] -> ip_to_host; // ssh
|
||||
firewall[1] -> ip_to_host; // smtp
|
||||
firewall[2] -> ip_to_host; // domain (t)
|
||||
firewall[3] -> ip_to_host; // domain (u)
|
||||
firewall[4] -> [0]irw; // icmp reply
|
||||
firewall[5] -> [0]ierw; // other icmp
|
||||
firewall[6] -> ip_to_host; // port > 4095, probably for connection
|
||||
// originating from host itself
|
||||
firewall[7] -> Discard; // don't allow incoming for port <= 4095
|
||||
|
||||
// established connection
|
||||
established_class[1] -> ip_to_intern;
|
||||
|
||||
// To eth0-in. Only accept from inside network.
|
||||
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
|
||||
|
||||
// Packets from eth0-in:net either stay on local network or go to the wild.
|
||||
// Those that go into the wild need to go through the appropriate rewriting
|
||||
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
|
||||
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
|
||||
inter_class[0] -> ip_to_intern;
|
||||
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
|
||||
icmp type echo);
|
||||
ip_udp_class[0] -> [0]rw;
|
||||
ip_udp_class[1] -> [0]irw;
|
||||
17 samples/CoffeeScript/build.cake (new file)
@@ -0,0 +1,17 @@
fs = require 'fs'

{print} = require 'sys'
{spawn} = require 'child_process'

build = (callback) ->
  coffee = spawn 'coffee', ['-c', '-o', '.', '.']
  coffee.stderr.on 'data', (data) ->
    process.stderr.write data.toString()
  coffee.stdout.on 'data', (data) ->
    print data.toString()
  coffee.on 'exit', (code) ->
    callback?() if code is 0

task 'build', 'Build from source', ->
  build()

2 samples/Common Lisp/config.sexp (new file)
@@ -0,0 +1,2 @@
((exe_name hello)
 (link_order (world hello)))
103 samples/Common Lisp/rss.sexp (new file)
@@ -0,0 +1,103 @@
|
||||
|
||||
(:TURTLE
|
||||
|
||||
(:@PREFIX "rdf:" "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>")
|
||||
(:@PREFIX "owl:" "<http://www.w3.org/2002/07/owl#>")
|
||||
(:@PREFIX "dc:" "<http://purl.org/dc/elements/1.1/>")
|
||||
(:@PREFIX "xsd:" "<http://www.w3.org/2001/XMLSchema#>")
|
||||
(:@PREFIX "rdfs:" "<http://www.w3.org/2000/01/rdf-schema#>")
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/channel>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1="<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")
|
||||
(:OBJECTS
|
||||
(:QNAME "rdfs:Class")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "An RSS information channel.")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS
|
||||
(:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:label")
|
||||
(:OBJECTS
|
||||
(:STRING "Channel"))))
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/description>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1#)
|
||||
(:OBJECTS
|
||||
(:QNAME "rdf:Property")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A short text description of the subject.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Description")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:description"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/image>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS image.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Image"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/item>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS item.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Item"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/items>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "Points to a list of rss:item elements that are members of the subject channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Items"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/link>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The URL to which an HTML rendering of the subject will link.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Link")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/name>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The text input field's (variable) name.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Name"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/textinput>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS text input.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Text Input"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/title>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A descriptive title for the channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Title")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:title"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/url>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING
|
||||
"The URL of the image to used in the 'src' attribute of the channel's image tag when rendered as HTML.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "URL")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier")))))
|
||||
13 samples/DNS Zone/sample.arpa (new file)
@@ -0,0 +1,13 @@
|
||||
$ORIGIN 0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3.ip6.arpa.
|
||||
$TTL 60
|
||||
@ IN SOA ns root (
|
||||
2002042901 ; SERIAL
|
||||
7200 ; REFRESH
|
||||
600 ; RETRY
|
||||
36000000 ; EXPIRE
|
||||
120 ; MINIMUM
|
||||
)
|
||||
|
||||
NS ns.example.com.
|
||||
|
||||
c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0 PTR sip01.example.com.
|
||||
12 samples/DNS Zone/sneaky.net.zone (new file)
@@ -0,0 +1,12 @@
$TTL 3d
@ IN SOA root.localhost. root.sneaky.net. (
    2015042907 ; serial
    3d ; refresh
    1h ; retry
    12d ; expire
    2h ; negative response TTL
    )
  IN NS root.localhost.
  IN NS localhost. ; secondary name server is preferably externally maintained

www IN A 3.141.59.26
3608 samples/Formatted/NiAlH_jea.eam.fs (new file; diff suppressed because it is too large)
31 samples/FreeMarker/example.ftl (new file)
@@ -0,0 +1,31 @@
|
||||
<#import "layout.ftl" as layout>
|
||||
|
||||
<#assign results = [
|
||||
{
|
||||
"title": "Example Result",
|
||||
"description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
|
||||
}
|
||||
] />
|
||||
|
||||
<@layout.page title="FreeMarker Example">
|
||||
<#if results?size == 0>
|
||||
There were no results.
|
||||
<#else>
|
||||
<ul>
|
||||
<#list results as result>
|
||||
<li>
|
||||
<strong>${result.title}</strong>
|
||||
<p>${result.description}</p>
|
||||
</li>
|
||||
</#list>
|
||||
</ul>
|
||||
</#if>
|
||||
|
||||
<#-- This is a FreeMarker comment -->
|
||||
<@currentTime />
|
||||
</@layout.page>
|
||||
|
||||
|
||||
<#macro currentTime>
|
||||
${.now?string.full}
|
||||
</#macro>
|
||||
32 samples/FreeMarker/layout.ftl (new file)
@@ -0,0 +1,32 @@
|
||||
<#ftl strip_text=true />
|
||||
|
||||
<#macro page title>
|
||||
<!doctype html>
|
||||
<html lang="${.lang}">
|
||||
<head>
|
||||
<title>${title}</title>
|
||||
<@metaTags />
|
||||
</head>
|
||||
<body>
|
||||
<#nested />
|
||||
<@footer />
|
||||
</body>
|
||||
</html>
|
||||
</#macro>
|
||||
|
||||
|
||||
<#---
|
||||
Default meta tags
|
||||
-->
|
||||
<#macro metaTags>
|
||||
<#compress>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
</#compress>
|
||||
</#macro>
|
||||
|
||||
<#macro footer>
|
||||
<p>This page is using FreeMarker v${.version}</p>
|
||||
</#macro>
|
||||
26 samples/HTML+EEX/index.html.eex (new file)
@@ -0,0 +1,26 @@
|
||||
<h1>Listing Books</h1>
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<th>Title</th>
|
||||
<th>Summary</th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
</tr>
|
||||
|
||||
<%= for book <- @books do %>
|
||||
<tr>
|
||||
<%# comment %>
|
||||
<td><%= book.title %></td>
|
||||
<td><%= book.content %></td>
|
||||
<td><%= link "Show", to: book_path(@conn, :show, book) %></td>
|
||||
<td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
|
||||
<td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
|
||||
</tr>
|
||||
<% end %>
|
||||
</table>
|
||||
|
||||
<br />
|
||||
|
||||
<%= link "New book", to: book_path(@conn, :new) %>
|
||||
82 samples/JSON/geo.geojson (new file)
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"type": "FeatureCollection",
|
||||
"features": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "Australia Post - North Ryde BC",
|
||||
"geo": [-33.787792, 151.13288],
|
||||
"streetAddress": "11 Waterloo Road",
|
||||
"addressLocality": "Macquarie Park",
|
||||
"addressRegion": "New South Wales",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "2113"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [151.13288, -33.787792, 0]
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "George Weston Foods Limited",
|
||||
"geo": [-37.8263884, 144.9105381],
|
||||
"streetAddress": "Level 3, 187 Todd Road",
|
||||
"addressLocality": "Port Melbourne",
|
||||
"addressRegion": "Victoria",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "3207"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[144.9097088901841, -37.82622654171794, 0],
|
||||
[144.9099724266943, -37.82679388891783, 0],
|
||||
[144.9110127325916, -37.82651526396403, 0],
|
||||
[144.9112227645738, -37.82655667152123, 0],
|
||||
[144.9113739439796, -37.82618552508767, 0],
|
||||
[144.9112740633105, -37.82615750100924, 0],
|
||||
[144.9111355846674, -37.82584493693527, 0],
|
||||
[144.9097088901841, -37.82622654171794, 0]
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "George Weston Foods Limited",
|
||||
"geo": [-37.05202791502396, 144.2085614999388],
|
||||
"streetAddress": "67 Richards Road",
|
||||
"addressLocality": "Castlemaine",
|
||||
"addressRegion": "Victoria",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "3450"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[144.2052428913937, -37.04906391287216, 0],
|
||||
[144.205540392692, -37.05049727485623, 0],
|
||||
[144.2059800881858, -37.05066835966983, 0],
|
||||
[144.206490656024, -37.05279538900776, 0],
|
||||
[144.2064525845008, -37.05366195881602, 0],
|
||||
[144.2084322301922, -37.0538920493147, 0],
|
||||
[144.2084811895712, -37.05266519735124, 0],
|
||||
[144.2079784002005, -37.05041270555773, 0],
|
||||
[144.2074017905817, -37.04817406993293, 0],
|
||||
[144.2061363939852, -37.04834972871226, 0],
|
||||
[144.2052428913937, -37.04906391287216, 0]
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
1 samples/JSON/switzerland.topojson (new file; diff suppressed because one or more lines are too long)
23 samples/JSX/sample.jsx (new file)
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
const React = require('react')
|
||||
|
||||
module.exports = React.createClass({
|
||||
render: function() {
|
||||
let {feeds, log} = this.props;
|
||||
|
||||
log.info(feeds);
|
||||
return <div className="feed-list">
|
||||
<h3>News Feed's</h3>
|
||||
<ul>
|
||||
{feeds.map(function(feed) {
|
||||
return <li key={feed.name} className={feed.fetched ? 'loaded' : 'loading'}>
|
||||
{feed.data && feed.data.length > 0 ?
|
||||
<span>{feed.name} <span className='light'>({feed.data.length})</span></span>
|
||||
: 'feed.name' }
|
||||
</li>
|
||||
})}
|
||||
</ul>
|
||||
</div>;
|
||||
}
|
||||
});
|
||||
19 samples/JavaScript/logo.jscad (new file)
@@ -0,0 +1,19 @@
// title : OpenJSCAD.org Logo
// author : Rene K. Mueller
// license : MIT License
// revision : 0.003
// tags : Logo,Intersection,Sphere,Cube
// file : logo.jscad

function main() {
  return union(
    difference(
      cube({size: 3, center: true}),
      sphere({r:2, center: true})
    ),
    intersection(
      sphere({r: 1.3, center: true}),
      cube({size: 2.1, center: true})
    )
  ).translate([0,0,1.5]).scale(10);
}
210 samples/Jupyter Notebook/JupyterNotebook.ipynb (new file; diff suppressed because one or more lines are too long)
14069 samples/KiCad/tc14badge.brd (new file; diff suppressed because it is too large)
74 samples/LSL/LSL.lslp (new file)
@@ -0,0 +1,74 @@
|
||||
/*
|
||||
Testing syntax highlighting
|
||||
for the Linden Scripting Language
|
||||
*/
|
||||
|
||||
integer someIntNormal = 3672;
|
||||
integer someIntHex = 0x00000000;
|
||||
integer someIntMath = PI_BY_TWO;
|
||||
|
||||
integer event = 5673;// 'event' is invalid.illegal
|
||||
|
||||
key someKeyTexture = TEXTURE_DEFAULT;
|
||||
string someStringSpecial = EOF;
|
||||
|
||||
some_user_defined_function_without_return_type(string inputAsString)
|
||||
{
|
||||
llSay(PUBLIC_CHANNEL, inputAsString);
|
||||
}
|
||||
|
||||
string user_defined_function_returning_a_string(key inputAsKey)
|
||||
{
|
||||
return (string)inputAsKey;
|
||||
}
|
||||
|
||||
default
|
||||
{
|
||||
state_entry()
|
||||
{
|
||||
key someKey = NULL_KEY;
|
||||
someKey = llGetOwner();
|
||||
|
||||
string someString = user_defined_function_returning_a_string(someKey);
|
||||
|
||||
some_user_defined_function_without_return_type(someString);
|
||||
}
|
||||
|
||||
touch_start(integer num_detected)
|
||||
{
|
||||
list agentsInRegion = llGetAgentList(AGENT_LIST_REGION, []);
|
||||
integer numOfAgents = llGetListLength(agentsInRegion);
|
||||
|
||||
integer index; // defaults to 0
|
||||
for (; index <= numOfAgents - 1; index++) // for each agent in region
|
||||
{
|
||||
llRegionSayTo(llList2Key(agentsInRegion, index), PUBLIC_CHANNEL, "Hello, Avatar!");
|
||||
}
|
||||
}
|
||||
|
||||
touch_end(integer num_detected)
|
||||
{
|
||||
someIntNormal = 3672;
|
||||
someIntHex = 0x00000000;
|
||||
someIntMath = PI_BY_TWO;
|
||||
|
||||
event = 5673;// 'event' is invalid.illegal
|
||||
|
||||
someKeyTexture = TEXTURE_DEFAULT;
|
||||
someStringSpecial = EOF;
|
||||
|
||||
llSetInventoryPermMask("some item", MASK_NEXT, PERM_ALL);// 'llSetInventoryPermMask' is reserved.godmode
|
||||
|
||||
llWhisper(PUBLIC_CHANNEL, "Leaving \"default\" now...");
|
||||
state other;
|
||||
}
|
||||
}
|
||||
|
||||
state other
|
||||
{
|
||||
state_entry()
|
||||
{
|
||||
llWhisper(PUBLIC_CHANNEL, "Entered \"state other\", returning to \"default\" again...");
|
||||
state default;
|
||||
}
|
||||
}
|
||||
29 samples/MAXScript/macro-1.mcr (new file)
@@ -0,0 +1,29 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_84E24969_C175_4389_B9A6_3B2699B66785_htm
|
||||
|
||||
macroscript MoveToSurface
|
||||
category: "HowTo"
|
||||
(
|
||||
fn g_filter o = superclassof o == Geometryclass
|
||||
fn find_intersection z_node node_to_z = (
|
||||
local testRay = ray node_to_z.pos [0,0,-1]
|
||||
local nodeMaxZ = z_node.max.z
|
||||
testRay.pos.z = nodeMaxZ + 0.0001 * abs nodeMaxZ
|
||||
intersectRay z_node testRay
|
||||
)
|
||||
|
||||
on isEnabled return selection.count > 0
|
||||
|
||||
on Execute do (
|
||||
target_mesh = pickObject message:"Pick Target Surface:" filter:g_filter
|
||||
|
||||
if isValidNode target_mesh then (
|
||||
undo "MoveToSurface" on (
|
||||
for i in selection do (
|
||||
int_point = find_intersection target_mesh i
|
||||
if int_point != undefined then i.pos = int_point.pos
|
||||
)--end i loop
|
||||
)--end undo
|
||||
)--end if
|
||||
)--end execute
|
||||
)--end script
|
||||
53 samples/MAXScript/macro-2.mcr (new file)
@@ -0,0 +1,53 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_0876DF46_FAA3_4131_838D_5739A67FF2C1_htm
|
||||
|
||||
macroscript FreeSpline category:"HowTo" tooltip:"FreeSpline" (
|
||||
local old_pos
|
||||
local new_spline
|
||||
local second_knot_set
|
||||
|
||||
fn get_mouse_pos pen_pos old_pen_pos = (
|
||||
if old_pos == undefined then old_pos = old_pen_pos
|
||||
if distance pen_pos old_pos > 10 then
|
||||
(
|
||||
if second_knot_set then
|
||||
addKnot new_spline 1 #smooth #curve pen_pos
|
||||
else
|
||||
(
|
||||
setKnotPoint new_spline 1 2 pen_pos
|
||||
second_knot_set = true
|
||||
)
|
||||
old_pos = pen_pos
|
||||
updateShape new_spline
|
||||
)-- end if
|
||||
)-- end fn
|
||||
|
||||
fn draw_new_line old_pen_pos = (
|
||||
pickPoint mouseMoveCallback:#(get_mouse_pos,old_pen_pos)
|
||||
)
|
||||
|
||||
undo"Free Spline"on(
|
||||
new_spline = splineShape ()
|
||||
old_pen_pos = pickPoint ()
|
||||
|
||||
if old_pen_pos == #RightClick then
|
||||
delete new_spline
|
||||
else
|
||||
(
|
||||
select new_spline
|
||||
new_spline.pos = old_pen_pos
|
||||
addNewSpline new_spline
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
second_knot_set = false
|
||||
draw_new_line old_pen_pos
|
||||
q = querybox "Close Spline?" title:"Free Spline"
|
||||
if q then
|
||||
(
|
||||
close new_spline 1
|
||||
updateshape new_spline
|
||||
)
|
||||
select new_spline
|
||||
)--end else
|
||||
)--end undo
|
||||
)--end script
|
||||
64 samples/MAXScript/svg-renderer.ms (new file)
@@ -0,0 +1,64 @@
|
||||
-- Taken from a 3-part tutorial from Autodesk's MAXScript reference
|
||||
-- Source: http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_6B5EDC11_A154_4AA7_A972_A11AC36949E9_htm
|
||||
|
||||
fn ColourToHex col = (
|
||||
local theComponents = #(bit.intAsHex col.r, bit.intAsHex col.g, bit.intAsHex col.b)
|
||||
local theValue = "#"
|
||||
for i in theComponents do
|
||||
theValue += (if i.count == 1 then "0" else "") + i
|
||||
theValue
|
||||
)
|
||||
|
||||
local st = timestamp()
|
||||
local theFileName = (getDir #userscripts + "\\PolygonRendering3.svg")
|
||||
local theSVGfile = createFile theFileName
|
||||
format "<svg xmlns=\"http://www.w3.org/2000/svg\"\n" to:theSVGfile
|
||||
format "\t\txmlns:xlink=\"http://www.w3.org/1999/xlink\">\n" to:theSVGfile
|
||||
|
||||
local theViewTM = viewport.getTM()
|
||||
theViewTM.row4 = [0,0,0]
|
||||
|
||||
local theViewTM2 = viewport.getTM()
|
||||
local theViewSize = getViewSize()
|
||||
local theViewScale = getViewSize()
|
||||
theViewScale.x /= 1024.0
|
||||
theViewScale.y /= 1024.0
|
||||
|
||||
local theStrokeThickness = 3
|
||||
|
||||
gw.setTransform (matrix3 1)
|
||||
for o in Geometry where not o.isHiddenInVpt and classof o != TargetObject do (
|
||||
local theStrokeColour = white
|
||||
local theFillColour = o.wirecolor
|
||||
|
||||
local theMesh = snapshotAsMesh o
|
||||
for f = 1 to theMesh.numfaces do (
|
||||
local theNormal = normalize (getFaceNormal theMesh f)
|
||||
|
||||
if (theNormal*theViewTM).z > 0 do
|
||||
(
|
||||
local theFace = getFace theMesh f
|
||||
local v1 = gw.transPoint (getVert theMesh theFace.x)
|
||||
local v2 = gw.transPoint (getVert theMesh theFace.y)
|
||||
local v3 = gw.transPoint (getVert theMesh theFace.z)
|
||||
|
||||
v1.x /= theViewScale.x
|
||||
v1.y /= theViewScale.y
|
||||
v2.x /= theViewScale.x
|
||||
v2.y /= theViewScale.y
|
||||
v3.x /= theViewScale.x
|
||||
v3.y /= theViewScale.y
|
||||
|
||||
format "\t<polygon points='%,% %,% %,%' \n" v1.x v1.y v2.x v2.y v3.x v3.y to:theSVGfile
|
||||
format "\tstyle='stroke:%; fill:%; stroke-width:%'/>\n" (ColourToHex theStrokeColour) (ColourToHex theFillColour) theStrokeThickness to:theSVGfile
|
||||
)--end if normal positive
|
||||
)--end f loop
|
||||
)--end o loop
|
||||
|
||||
format "</svg>\n" to:theSVGfile
|
||||
close theSVGfile
|
||||
local theSVGMap = VectorMap vectorFile:theFileName alphasource:0
|
||||
local theBitmap = bitmap theViewSize.x theViewSize.y
|
||||
renderMap theSVGMap into:theBitmap filter:true
|
||||
display theBitmap
|
||||
format "Render Time: % sec.\n" ((timestamp()-st)/1000.0)
|
||||
22 samples/MAXScript/volume-calc.ms (new file)
@@ -0,0 +1,22 @@
|
||||
fn CalculateVolumeAndCentreOfMass obj =
|
||||
(
|
||||
local Volume= 0.0
|
||||
local Centre= [0.0, 0.0, 0.0]
|
||||
local theMesh = snapshotasmesh obj
|
||||
local numFaces = theMesh.numfaces
|
||||
for i = 1 to numFaces do
|
||||
(
|
||||
local Face= getFace theMesh i
|
||||
local vert2 = getVert theMesh Face.z
|
||||
local vert1 = getVert theMesh Face.y
|
||||
local vert0 = getVert theMesh Face.x
|
||||
local dV = Dot (Cross (vert1 - vert0) (vert2 - vert0)) vert0
|
||||
Volume+= dV
|
||||
Centre+= (vert0 + vert1 + vert2) * dV
|
||||
)
|
||||
delete theMesh
|
||||
Volume /= 6
|
||||
Centre /= 24
|
||||
Centre /= Volume
|
||||
#(Volume,Centre)
|
||||
)
|
||||
31 samples/Makefile/filenames/Makefile.inc (new file)
@@ -0,0 +1,31 @@
|
||||
# $OpenBSD: Makefile.inc,v 1.2 2003/11/14 20:09:20 drahn Exp $
|
||||
# $NetBSD: Makefile.inc,v 1.1 1996/09/30 16:34:59 ws Exp $
|
||||
|
||||
.if !defined(__stand_makefile_inc)
|
||||
__stand_makefile_inc=1
|
||||
|
||||
KERN_AS= library
|
||||
|
||||
S=$(.CURDIR)/../../../$(R)
|
||||
|
||||
.if !make(libdep) && !make(sadep) && !make(salibdir) && !make(kernlibdir) && !make(obj) && !defined(NOMACHINE)
|
||||
.BEGIN:
|
||||
@([ -h machine ] || ln -s $(S)/arch/$(MACHINE)/include machine)
|
||||
.endif
|
||||
|
||||
#
|
||||
EXTRACFLAGS= -msoft-float
|
||||
REAL_VIRT?= -v
|
||||
ENTRY?= _start
|
||||
|
||||
INCLUDES+= -I. -I$(.OBJDIR) -I$(.CURDIR)/.. -I$(S)/arch -I$(S)
|
||||
INCLUDES+= -I$(S)/lib/libsa
|
||||
DEFS+= -DSTANDALONE
|
||||
CFLAGS+= $(INCLUDES) $(DEFS) $(EXTRACFLAGS)
|
||||
CFLAGS+= -fno-stack-protector
|
||||
LDFLAGS?= -X -N -Ttext $(RELOC) -e $(ENTRY)
|
||||
|
||||
cleandir:
|
||||
rm -rf lib machine
|
||||
|
||||
.endif
|
||||
99 samples/Metal/ITMVisualisationEngine.metal (new file)
@@ -0,0 +1,99 @@
|
||||
// Copyright 2014 Isis Innovation Limited and the authors of InfiniTAM
|
||||
|
||||
#include <metal_stdlib>
|
||||
|
||||
#include "../../DeviceAgnostic/ITMSceneReconstructionEngine.h"
|
||||
#include "../../DeviceAgnostic/ITMVisualisationEngine.h"
|
||||
#include "ITMVisualisationEngine_Metal.h"
|
||||
|
||||
using namespace metal;
|
||||
|
||||
kernel void genericRaycastVH_device(DEVICEPTR(Vector4f) *pointsRay [[ buffer(0) ]],
|
||||
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(1) ]],
|
||||
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(2) ]],
|
||||
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(3) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
int locId = x + y * params->imgSize.x;
|
||||
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
|
||||
|
||||
castRay<ITMVoxel, ITMVoxelIndex>(pointsRay[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
|
||||
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
|
||||
}
|
||||
|
||||
kernel void genericRaycastVGMissingPoints_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
|
||||
const CONSTPTR(int) *fwdProjMissingPoints [[ buffer(1) ]],
|
||||
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(2) ]],
|
||||
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(3) ]],
|
||||
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(4) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(5) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int pointId = threadIdx.x + blockIdx.x * blockDim.x;
|
||||
|
||||
if (pointId >= params->imgSize.z) return;
|
||||
|
||||
int locId = fwdProjMissingPoints[pointId];
|
||||
int y = locId / params->imgSize.x, x = locId - y * params->imgSize.x;
|
||||
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
|
||||
|
||||
castRay<ITMVoxel, ITMVoxelIndex>(forwardProjection[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
|
||||
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
|
||||
}
|
||||
|
||||
kernel void renderICP_device(const CONSTPTR(Vector4f) *pointsRay [[ buffer(0) ]],
|
||||
DEVICEPTR(Vector4f) *pointsMap [[ buffer(1) ]],
|
||||
DEVICEPTR(Vector4f) *normalsMap [[ buffer(2) ]],
|
||||
DEVICEPTR(Vector4u) *outRendering [[ buffer(3) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
processPixelICP<false>(outRendering, pointsMap, normalsMap, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
|
||||
}
|
||||
|
||||
kernel void renderForward_device(DEVICEPTR(Vector4u) *outRendering [[ buffer(0) ]],
|
||||
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
processPixelForwardRender<false>(outRendering, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
|
||||
}
|
||||
|
||||
kernel void forwardProject_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
|
||||
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = (threadIdx.x + blockIdx.x * blockDim.x), y = (threadIdx.y + blockIdx.y * blockDim.y);
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
int locId = x + y * params->imgSize.x;
|
||||
Vector4f pixel = pointsRay[locId];
|
||||
|
||||
int locId_new = forwardProjectPixel(pixel * params->voxelSizes.x, params->M, params->projParams, params->imgSize.xy);
|
||||
if (locId_new >= 0) forwardProjection[locId_new] = pixel;
|
||||
}
|
||||
16 samples/Objective-C/Siesta.h (new file)
@@ -0,0 +1,16 @@
//
// Siesta.h
// Siesta
//
// Created by Paul on 2015/6/14.
// Copyright © 2015 Bust Out Solutions. MIT license.
//

#import <UIKit/UIKit.h>

//! Project version number for Siesta.
FOUNDATION_EXPORT double SiestaVersionNumber;

//! Project version string for Siesta.
FOUNDATION_EXPORT const unsigned char SiestaVersionString[];
31 samples/PHP/mail.phps (new file)
@@ -0,0 +1,31 @@
|
||||
<?php
|
||||
/**
|
||||
* This example shows sending a message using PHP's mail() function.
|
||||
*/
|
||||
|
||||
require '../PHPMailerAutoload.php';
|
||||
|
||||
//Create a new PHPMailer instance
|
||||
$mail = new PHPMailer;
|
||||
//Set who the message is to be sent from
|
||||
$mail->setFrom('from@example.com', 'First Last');
|
||||
//Set an alternative reply-to address
|
||||
$mail->addReplyTo('replyto@example.com', 'First Last');
|
||||
//Set who the message is to be sent to
|
||||
$mail->addAddress('whoto@example.com', 'John Doe');
|
||||
//Set the subject line
|
||||
$mail->Subject = 'PHPMailer mail() test';
|
||||
//Read an HTML message body from an external file, convert referenced images to embedded,
|
||||
//convert HTML into a basic plain-text alternative body
|
||||
$mail->msgHTML(file_get_contents('contents.html'), dirname(__FILE__));
|
||||
//Replace the plain text body with one created manually
|
||||
$mail->AltBody = 'This is a plain-text message body';
|
||||
//Attach an image file
|
||||
$mail->addAttachment('images/phpmailer_mini.png');
|
||||
|
||||
//send the message, check for errors
|
||||
if (!$mail->send()) {
|
||||
echo "Mailer Error: " . $mail->ErrorInfo;
|
||||
} else {
|
||||
echo "Message sent!";
|
||||
}
|
||||
90 samples/PLSQL/plsqlguide.pck (new file)
@@ -0,0 +1,90 @@
|
||||
create or replace package plsqlguide is
|
||||
|
||||
-- Author : Jared Petersen
|
||||
-- Created : 9/22/2015 12:26:22 AM
|
||||
-- Purpose : Basic PLSQL template/guide
|
||||
|
||||
/* Procedures */
|
||||
procedure p_main;
|
||||
|
||||
end plsqlguide;
|
||||
/
|
||||
create or replace package body plsqlguide is
|
||||
|
||||
/* Main entry point (homepage) */
|
||||
procedure p_main
|
||||
is
|
||||
begin
|
||||
|
||||
htp.prn('
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
|
||||
<title>PL/SQL Sample Application</title>
|
||||
|
||||
<!-- Bootstrap -->
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
</head>
|
||||
<body>
|
||||
<!-- Static navbar -->
|
||||
<nav class="navbar navbar-default navbar-static-top">
|
||||
<div class="container">
|
||||
<div class="navbar-header">
|
||||
<a class="navbar-brand" href="#">PL/SQL Sample Application</a>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container">
|
||||
<table class="table table-bordered">
|
||||
<tr>
|
||||
<th>#</th>
|
||||
<th>Name</th>
|
||||
<th>Description</th>
|
||||
<th>Quantity</th>
|
||||
<th>Price</th>
|
||||
</tr>
|
||||
');
|
||||
|
||||
-- Fill out the parts table
|
||||
for row in (select * from parts) loop
|
||||
htp.prn('
|
||||
<tr>
|
||||
<td>'||row.pid||'</td>
|
||||
<td>'||row.name||'</td>
|
||||
<td>'||row.description||'</td>
|
||||
<td>'||row.quantity||'</td>
|
||||
<td>'||row.price||'</td>
|
||||
</tr>
|
||||
');
|
||||
end loop;
|
||||
|
||||
htp.prn('
|
||||
</table>
|
||||
</div> <!-- /container -->
|
||||
|
||||
<!-- jQuery (necessary for Bootstrap''s JavaScript plugins) -->
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
|
||||
<!-- Include all compiled plugins (below), or include individual files as needed -->
|
||||
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
');
|
||||
|
||||
end p_main;
|
||||
|
||||
begin
|
||||
-- Initialization
|
||||
null;
|
||||
end plsqlguide;
|
||||
/
|
||||
24 samples/Pickle/data.pkl (new file)
@@ -0,0 +1,24 @@
|
||||
(dp0
|
||||
S'a'
|
||||
p1
|
||||
(lp2
|
||||
I1
|
||||
aF2.0
|
||||
aI3
|
||||
ac__builtin__
|
||||
complex
|
||||
p3
|
||||
(F4.0
|
||||
F6.0
|
||||
tp4
|
||||
Rp5
|
||||
asS'c'
|
||||
p6
|
||||
NsS'b'
|
||||
p7
|
||||
(S'string'
|
||||
p8
|
||||
VUnicode string
|
||||
p9
|
||||
tp10
|
||||
s.
|
||||
60 samples/Pickle/neural-network-ce-l2reg-784-10-30.pkl (new file; diff suppressed because one or more lines are too long)
36 samples/Pickle/random.pkl (new file)
@@ -0,0 +1,36 @@
|
||||
cnumpy.core.multiarray
|
||||
_reconstruct
|
||||
p0
|
||||
(cnumpy
|
||||
ndarray
|
||||
p1
|
||||
(I0
|
||||
tp2
|
||||
S'b'
|
||||
p3
|
||||
tp4
|
||||
Rp5
|
||||
(I1
|
||||
(I100
|
||||
tp6
|
||||
cnumpy
|
||||
dtype
|
||||
p7
|
||||
(S'f8'
|
||||
p8
|
||||
I0
|
||||
I1
|
||||
tp9
|
||||
Rp10
|
||||
(I3
|
||||
S'<'
|
||||
p11
|
||||
NNNI-1
|
||||
I-1
|
||||
I0
|
||||
tp12
|
||||
bI00
|
||||
S'\x1cc~\xc3\xa7r\xed?\xe5${\xec\xd6\xcd\xed?\x809-\x02%\xa9\xa2?F\x0f\x1d\xe8\xef\xa3\xdb?\xfe\xd1\x0c\xb7\x83\x13\xef?\xe0<o\xa1\xa9^\xdf?CE\x96\x88/o\xe2?<\xd8\xa1\x96\xa2T\xce?\x152\x8e\xe5\xa8\x7f\xe8?\xe4\xb7\x9a\xe0$\x0f\xdc?\x90\xe4\xe2\xd4=\xce\xc3?Ix\xe3P\xc4C\xe1?\x16\xd17\xc1Y\xfc\xed?5\xd7\xae@4\xfa\xe8?\x0f\x87\x8d>\xfcO\xe5?Y\x97\xcb"\xa7%\xe7?\x9b\x8d\x16\xda\x97\xe1\xeb?T\x14\xbd\xfe|\xf4\xd0?\x18\xdfH\xc56A\xba?\x90\xc5\xfb\xc63:\xe5?\xbf%\xad\xe5.\x86\xe9?\xc6\x0c\xa9\x8c\xd7\xd5\xe9?\xf8\xafc:\x84g\xd7?\xf8\x98\x879\x9a\x16\xee?\xba\xdf\x88\x8az\x06\xe2?~g-\xeb\xc8\xed\xee?\x08A\xcc\x8c\xe7>\xef?\xceD\xc4ar\n\xdc?\x92w\xbb\xa34\xb1\xd9?\x88\xb9\xc0{u\xa3\xdc?d\x1a\xad\xe8\xf3\x14\xdd?\x9c\x95\x13\x96o?\xe5?\x9cT[\xb8r\xa9\xe5?0\xf1\x01+(\x0f\xdf?W\xbdjqD&\xed?c\xcf1-W\xe6\xe1?\xce\xbc\xe1{zW\xd9?"d\xcf\xd7\x13\x93\xde?\xf2P\xf6\xc3\xd6\x87\xd5?\xc2\x0e\x92q\x89\xda\xd5?\xc0:B\x1bb\x00\x9e?Y\xafHmr\x80\xe3?\x1co\xa7\xba\xa5/\xe4?\xa2\xbc \x9c\xddB\xd0?\xd2L\x935\x17\'\xee?|\x8cM\xeb\x97=\xe8?\x0f0xN*V\xea?\x81p\xe3,!\xf2\xee?\xf5w\xed\x10\x9eu\xe0?\xc5\x16\\LR\xb5\xe1?\xbeh\x04\xa4g\xe5\xd6?\xea\xc0\xb9\xf0\xb2\xd8\xd9?\xac\x9c\xeep\x1a\xa9\xd8?@W9hp\x16\xb1?\xc4\xedS\xd6V\xa1\xed?\x93,!\xdc\xa1\x8b\xe9?\x80)\xb1\xa6[T\xc9?\xac\xbc\x8a\xd21\xdd\xc5?\x80\x9c.g\xf1\xf2\xc6?\tLu\xc3\xf7U\xe9?n\'\x9f?\xbe\xf9\xe9?\xa3\xe7K\x1c\xb3\xa9\xea?X\x98\x1a\xcb\xa0\xcd\xd3? \xb6O\x9c\x1bQ\xc2?"\x89[\xad1\x8e\xea?\xdd\x8f\xa0P\xc7\x0e\xe2?c\xa4j\xa3\r\xac\xef?\xba\xb6\x0f\x8emo\xef?\xe0\xed\xa0\xc5R9\xab?U\xf1\xcd\xcf\xbf\xcb\xea?\x89*#\x06\xb0|\xe8?d\xa3\xad\xcd\xe0]\xcc?\xb5\xe78\xa7w\x13\xe3?\xce\x99\x98\xefS%\xd7?\xb1\xf8\xd8\x8eI\x13\xef?\x91`]\x93\xd4 \xec?\xc0\rPz\xee\xbd\xe7?7\x92\xd4\x0fP\x8f\xe1?L\x0f\xaf\xa9\xc3\x19\xdd?\\}\x15X\x870\xc7? ~ t\xcat\xb1?@?\xec\x97u\x05\xe9?F\x8d:\xac4D\xdb?qY\xe1Qk|\xe2? \xaf\xeaj\xa5\x04\xab?J[\x1al;\x00\xd5?\x00^{n\xc2\xf1S?\xb0\x82dN\xda\xb5\xc7?\xe0 \x07\xe1?R\x92?\xc4\r\x08+\x99J\xe1?I|&U\x19\xc4\xe1?|*\xf9\xebq\x7f\xed?\xbc*\x93\x89k\xab\xe9?oiL\x90;\xe0\xef?\x96\xcd\x9b\xff\x18g\xdc?pt\xb4\xa5\x9c\xa2\xbc?Nu]w*\xb7\xd2?\x88k\xac\xd0\xfd\xbf\xd5?Q\x02$b\xfeH\xea?5\xf6\t\xb6K\x1a\xee?'
|
||||
p13
|
||||
tp14
|
||||
b.
|
||||
10 samples/Pickle/save.pkl (new file)
@@ -0,0 +1,10 @@
(dp0
S'lion'
p1
S'yellow'
p2
sS'kitty'
p3
S'red'
p4
s.
30
samples/Pony/circle.pony
Normal file
@@ -0,0 +1,30 @@
use "collections"

class Circle
  var _radius: F32

  new create(radius': F32) =>
    _radius = radius'

  fun ref get_radius(): F32 =>
    _radius

  fun ref get_area(): F32 =>
    F32.pi() * _radius.pow(2)

  fun ref get_circumference(): F32 =>
    2 * _radius * F32.pi()

actor Main
  new create(env: Env) =>

    for i in Range[F32](1.0, 101.0) do
      let c = Circle(i)

      var str =
        "Radius: " + c.get_radius().string() + "\n" +
        "Circumference: " + c.get_circumference().string() + "\n" +
        "Area: " + c.get_area().string() + "\n"

      env.out.print(str)
    end
32
samples/Pony/counter.pony
Normal file
@@ -0,0 +1,32 @@
use "collections"

actor Counter
  var _count: U32

  new create() =>
    _count = 0

  be increment() =>
    _count = _count + 1

  be get_and_reset(main: Main) =>
    main.display(_count)
    _count = 0

actor Main
  var _env: Env

  new create(env: Env) =>
    _env = env

    var count: U32 = try env.args(1).u32() else 10 end
    var counter = Counter

    for i in Range[U32](0, count) do
      counter.increment()
    end

    counter.get_and_reset(this)

  be display(result: U32) =>
    _env.out.print(result.string())
261
samples/Pony/gups-opt.pony
Normal file
@@ -0,0 +1,261 @@
|
||||
use "options"
|
||||
use "time"
|
||||
use "collections"
|
||||
|
||||
class Config
|
||||
var logtable: U64 = 20
|
||||
var iterate: U64 = 10000
|
||||
var logchunk: U64 = 10
|
||||
var logactors: U64 = 2
|
||||
|
||||
fun ref apply(env: Env): Bool =>
|
||||
var options = Options(env)
|
||||
|
||||
options
|
||||
.add("logtable", "l", I64Argument)
|
||||
.add("iterate", "i", I64Argument)
|
||||
.add("chunk", "c", I64Argument)
|
||||
.add("actors", "a", I64Argument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("table", var arg: I64) => logtable = arg.u64()
|
||||
| ("iterate", var arg: I64) => iterate = arg.u64()
|
||||
| ("chunk", var arg: I64) => logchunk = arg.u64()
|
||||
| ("actors", var arg: I64) => logactors = arg.u64()
|
||||
| let err: ParseError =>
|
||||
err.report(env.out)
|
||||
env.out.print(
|
||||
"""
|
||||
gups_opt [OPTIONS]
|
||||
--table N log2 of the total table size. Defaults to 20.
|
||||
--iterate N number of iterations. Defaults to 10000.
|
||||
--chunk N log2 of the chunk size. Defaults to 10.
|
||||
--actors N log2 of the actor count. Defaults to 2.
|
||||
"""
|
||||
)
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
env.out.print(
|
||||
"logtable: " + logtable.string() +
|
||||
"\niterate: " + iterate.string() +
|
||||
"\nlogchunk: " + logchunk.string() +
|
||||
"\nlogactors: " + logactors.string()
|
||||
)
|
||||
true
|
||||
|
||||
actor Main
|
||||
let _env: Env
|
||||
let _config: Config = Config
|
||||
|
||||
var _updates: U64 = 0
|
||||
var _confirm: U64 = 0
|
||||
let _start: U64
|
||||
var _actors: Array[Updater] val
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
if _config(env) then
|
||||
let actor_count = 1 << _config.logactors
|
||||
let loglocal = _config.logtable - _config.logactors
|
||||
let chunk_size = 1 << _config.logchunk
|
||||
let chunk_iterate = chunk_size * _config.iterate
|
||||
|
||||
_updates = chunk_iterate * actor_count
|
||||
_confirm = actor_count
|
||||
|
||||
var updaters = recover Array[Updater](actor_count) end
|
||||
|
||||
for i in Range[U64](0, actor_count) do
|
||||
updaters.push(Updater(this, actor_count, i, loglocal, chunk_size,
|
||||
chunk_iterate * i))
|
||||
end
|
||||
|
||||
_actors = consume updaters
|
||||
_start = Time.nanos()
|
||||
|
||||
for a in _actors.values() do
|
||||
a.start(_actors, _config.iterate)
|
||||
end
|
||||
else
|
||||
_start = 0
|
||||
_actors = recover Array[Updater] end
|
||||
end
|
||||
|
||||
be done() =>
|
||||
if (_confirm = _confirm - 1) == 1 then
|
||||
for a in _actors.values() do
|
||||
a.done()
|
||||
end
|
||||
end
|
||||
|
||||
be confirm() =>
|
||||
_confirm = _confirm + 1
|
||||
|
||||
if _confirm == _actors.size() then
|
||||
let elapsed = (Time.nanos() - _start).f64()
|
||||
let gups = _updates.f64() / elapsed
|
||||
|
||||
_env.out.print(
|
||||
"Time: " + (elapsed / 1e9).string() +
|
||||
"\nGUPS: " + gups.string()
|
||||
)
|
||||
end
|
||||
|
||||
actor Updater
|
||||
let _main: Main
|
||||
let _index: U64
|
||||
let _updaters: U64
|
||||
let _chunk: U64
|
||||
let _mask: U64
|
||||
let _loglocal: U64
|
||||
|
||||
let _output: Array[Array[U64] iso]
|
||||
let _reuse: List[Array[U64] iso] = List[Array[U64] iso]
|
||||
var _others: (Array[Updater] val | None) = None
|
||||
var _table: Array[U64]
|
||||
var _rand: U64
|
||||
|
||||
new create(main:Main, updaters: U64, index: U64, loglocal: U64, chunk: U64,
|
||||
seed: U64)
|
||||
=>
|
||||
_main = main
|
||||
_index = index
|
||||
_updaters = updaters
|
||||
_chunk = chunk
|
||||
_mask = updaters - 1
|
||||
_loglocal = loglocal
|
||||
|
||||
_rand = PolyRand.seed(seed)
|
||||
_output = _output.create(updaters)
|
||||
|
||||
let size = 1 << loglocal
|
||||
_table = Array[U64].undefined(size)
|
||||
|
||||
var offset = index * size
|
||||
|
||||
try
|
||||
for i in Range[U64](0, size) do
|
||||
_table(i) = i + offset
|
||||
end
|
||||
end
|
||||
|
||||
be start(others: Array[Updater] val, iterate: U64) =>
|
||||
_others = others
|
||||
iteration(iterate)
|
||||
|
||||
be apply(iterate: U64) =>
|
||||
iteration(iterate)
|
||||
|
||||
fun ref iteration(iterate: U64) =>
|
||||
let chk = _chunk
|
||||
|
||||
for i in Range(0, _updaters) do
|
||||
_output.push(
|
||||
try
|
||||
_reuse.pop()
|
||||
else
|
||||
recover Array[U64](chk) end
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
for i in Range(0, _chunk) do
|
||||
var datum = _rand = PolyRand(_rand)
|
||||
var updater = (datum >> _loglocal) and _mask
|
||||
|
||||
try
|
||||
if updater == _index then
|
||||
_table(i) = _table(i) xor datum
|
||||
else
|
||||
_output(updater).push(datum)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
try
|
||||
let to = _others as Array[Updater] val
|
||||
|
||||
repeat
|
||||
let data = _output.pop()
|
||||
|
||||
if data.size() > 0 then
|
||||
to(_output.size()).receive(consume data)
|
||||
else
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
until _output.size() == 0 end
|
||||
end
|
||||
|
||||
if iterate > 1 then
|
||||
apply(iterate - 1)
|
||||
else
|
||||
_main.done()
|
||||
end
|
||||
|
||||
be receive(data: Array[U64] iso) =>
|
||||
try
|
||||
for i in Range(0, data.size()) do
|
||||
let datum = data(i)
|
||||
var j = (datum >> _loglocal) and _mask
|
||||
_table(j) = _table(j) xor datum
|
||||
end
|
||||
|
||||
data.clear()
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
|
||||
be done() =>
|
||||
_main.confirm()
|
||||
|
||||
primitive PolyRand
|
||||
fun apply(prev: U64): U64 =>
|
||||
(prev << 1) xor if prev.i64() < 0 then _poly() else 0 end
|
||||
|
||||
fun seed(from: U64): U64 =>
|
||||
var n = from % _period()
|
||||
|
||||
if n == 0 then
|
||||
return 1
|
||||
end
|
||||
|
||||
var m2 = Array[U64].undefined(64)
|
||||
var temp = U64(1)
|
||||
|
||||
try
|
||||
for i in Range(0, 64) do
|
||||
m2(i) = temp
|
||||
temp = this(temp)
|
||||
temp = this(temp)
|
||||
end
|
||||
end
|
||||
|
||||
var i: U64 = 64 - n.clz()
|
||||
var r = U64(2)
|
||||
|
||||
try
|
||||
while i > 0 do
|
||||
temp = 0
|
||||
|
||||
for j in Range(0, 64) do
|
||||
if ((r >> j) and 1) != 0 then
|
||||
temp = temp xor m2(j)
|
||||
end
|
||||
end
|
||||
|
||||
r = temp
|
||||
i = i - 1
|
||||
|
||||
if ((n >> i) and 1) != 0 then
|
||||
r = this(r)
|
||||
end
|
||||
end
|
||||
end
|
||||
r
|
||||
|
||||
fun _poly(): U64 => 7
|
||||
|
||||
fun _period(): U64 => 1317624576693539401
|
||||
3
samples/Pony/hello-world.pony
Normal file
@@ -0,0 +1,3 @@
actor Main
  new create(env: Env) =>
    env.out.print("Hello, world.")
188
samples/Pony/mandelbrot.pony
Normal file
@@ -0,0 +1,188 @@
|
||||
use "files"
|
||||
use "options"
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
new mandelbrot(main: Main, x: U64, y: U64, width: U64, iterations: U64,
|
||||
limit: F32, real: Array[F32] val, imaginary: Array[F32] val)
|
||||
=>
|
||||
var view: Array[U8] iso =
|
||||
recover
|
||||
Array[U8]((y - x) * (width >> 3))
|
||||
end
|
||||
|
||||
let group_r = Array[F32].undefined(8)
|
||||
let group_i = Array[F32].undefined(8)
|
||||
|
||||
var row = x
|
||||
|
||||
try
|
||||
while row < y do
|
||||
let prefetch_i = imaginary(row)
|
||||
|
||||
var col: U64 = 0
|
||||
|
||||
while col < width do
|
||||
var j: U64 = 0
|
||||
|
||||
while j < 8 do
|
||||
group_r.update(j, real(col + j))
|
||||
group_i.update(j, prefetch_i)
|
||||
j = j + 1
|
||||
end
|
||||
|
||||
var bitmap: U8 = 0xFF
|
||||
var n = iterations
|
||||
|
||||
repeat
|
||||
var mask: U8 = 0x80
|
||||
var k: U64 = 0
|
||||
|
||||
while k < 8 do
|
||||
let r = group_r(k)
|
||||
let i = group_i(k)
|
||||
|
||||
group_r.update(k, ((r * r) - (i * i)) + real(col + k))
|
||||
group_i.update(k, (2.0 * r * i) + prefetch_i)
|
||||
|
||||
if ((r * r) + (i * i)) > limit then
|
||||
bitmap = bitmap and not mask
|
||||
end
|
||||
|
||||
mask = mask >> 1
|
||||
k = k + 1
|
||||
end
|
||||
until (bitmap == 0) or ((n = n - 1) == 1) end
|
||||
|
||||
view.push(bitmap)
|
||||
|
||||
col = col + 8
|
||||
end
|
||||
row = row + 1
|
||||
end
|
||||
|
||||
main.draw(x * (width >> 3), consume view)
|
||||
end
|
||||
|
||||
actor Main
|
||||
var iterations: U64 = 50
|
||||
var limit: F32 = 4.0
|
||||
var chunks: U64 = 16
|
||||
var width: U64 = 16000
|
||||
var actors: U64 = 0
|
||||
var header: U64 = 0
|
||||
var real: Array[F32] val = recover Array[F32] end
|
||||
var imaginary: Array[F32] val = recover Array[F32] end
|
||||
var outfile: (File | None) = None
|
||||
|
||||
new create(env: Env) =>
|
||||
try
|
||||
arguments(env)
|
||||
|
||||
let length = width
|
||||
let recip_width = 2.0 / width.f32()
|
||||
|
||||
var r = recover Array[F32](length) end
|
||||
var i = recover Array[F32](length) end
|
||||
|
||||
for j in Range(0, width) do
|
||||
r.push((recip_width * j.f32()) - 1.5)
|
||||
i.push((recip_width * j.f32()) - 1.0)
|
||||
end
|
||||
|
||||
real = consume r
|
||||
imaginary = consume i
|
||||
|
||||
spawn_actors()
|
||||
create_outfile()
|
||||
end
|
||||
|
||||
be draw(offset: U64, pixels: Array[U8] val) =>
|
||||
match outfile
|
||||
| var out: File =>
|
||||
out.seek_start(header + offset)
|
||||
out.write(pixels)
|
||||
if (actors = actors - 1) == 1 then
|
||||
out.dispose()
|
||||
end
|
||||
end
|
||||
|
||||
fun ref create_outfile() =>
|
||||
match outfile
|
||||
| var f: File =>
|
||||
f.print("P4\n " + width.string() + " " + width.string() + "\n")
|
||||
header = f.size()
|
||||
f.set_length((width * (width >> 3)) + header)
|
||||
end
|
||||
|
||||
fun ref spawn_actors() =>
|
||||
actors = ((width + (chunks - 1)) / chunks)
|
||||
|
||||
var rest = width % chunks
|
||||
|
||||
if rest == 0 then rest = chunks end
|
||||
|
||||
var x: U64 = 0
|
||||
var y: U64 = 0
|
||||
|
||||
for i in Range(0, actors - 1) do
|
||||
x = i * chunks
|
||||
y = x + chunks
|
||||
Worker.mandelbrot(this, x, y, width, iterations, limit, real, imaginary)
|
||||
end
|
||||
|
||||
Worker.mandelbrot(this, y, y + rest, width, iterations, limit, real,
|
||||
imaginary)
|
||||
|
||||
fun ref arguments(env: Env) ? =>
|
||||
let options = Options(env)
|
||||
|
||||
options
|
||||
.add("iterations", "i", I64Argument)
|
||||
.add("limit", "l", F64Argument)
|
||||
.add("chunks", "c", I64Argument)
|
||||
.add("width", "w", I64Argument)
|
||||
.add("output", "o", StringArgument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("iterations", var arg: I64) => iterations = arg.u64()
|
||||
| ("limit", var arg: F64) => limit = arg.f32()
|
||||
| ("chunks", var arg: I64) => chunks = arg.u64()
|
||||
| ("width", var arg: I64) => width = arg.u64()
|
||||
| ("output", var arg: String) =>
|
||||
outfile = try File(FilePath(env.root, arg)) end
|
||||
| let err: ParseError => err.report(env.out) ; usage(env) ; error
|
||||
end
|
||||
end
|
||||
|
||||
fun tag usage(env: Env) =>
|
||||
env.out.print(
|
||||
"""
|
||||
mandelbrot [OPTIONS]
|
||||
|
||||
The binary output can be converted to a BMP with the following command
|
||||
(ImageMagick Tools required):
|
||||
|
||||
convert <output> JPEG:<output>.jpg
|
||||
|
||||
Available options:
|
||||
|
||||
--iterations, -i Maximum amount of iterations to be done for each pixel.
|
||||
Defaults to 50.
|
||||
|
||||
--limit, -l Square of the limit that pixels need to exceed in order
|
||||
to escape from the Mandelbrot set.
|
||||
Defaults to 4.0.
|
||||
|
||||
--chunks, -c Maximum line count of chunks the image should be
|
||||
divided into for divide & conquer processing.
|
||||
Defaults to 16.
|
||||
|
||||
--width, -w Lateral length of the resulting mandelbrot image.
|
||||
Defaults to 16000.
|
||||
|
||||
--output, -o File to write the output to.
|
||||
|
||||
"""
|
||||
)
|
||||
130
samples/Pony/mixed.pony
Normal file
@@ -0,0 +1,130 @@
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
var a: U64 = 86028157
|
||||
var b: U64 = 329545133
|
||||
|
||||
var result = factorize(a*b)
|
||||
|
||||
var correct =
|
||||
try
|
||||
(result.size() == 2) and
|
||||
(result(0) == 86028157) and
|
||||
(result(1) == 329545133)
|
||||
else
|
||||
false
|
||||
end
|
||||
|
||||
fun ref factorize(bigint: U64) : Array[U64] =>
|
||||
var factors = Array[U64](2)
|
||||
|
||||
if bigint <= 3 then
|
||||
factors.push(bigint)
|
||||
else
|
||||
var d: U64 = 2
|
||||
var i: U64 = 0
|
||||
var n = bigint
|
||||
|
||||
while d < n do
|
||||
if (n % d) == 0 then
|
||||
i = i + 1
|
||||
factors.push(d)
|
||||
n = n / d
|
||||
else
|
||||
d = if d == 2 then 3 else (d + 2) end
|
||||
end
|
||||
end
|
||||
|
||||
factors.push(d)
|
||||
end
|
||||
|
||||
factors
|
||||
|
||||
actor Ring
|
||||
var _env: Env
|
||||
var _size: U32
|
||||
var _pass: U32
|
||||
var _repetitions: U32
|
||||
var _next: Ring
|
||||
|
||||
new create(env: Env, size: U32, pass: U32, repetitions: U32) =>
|
||||
_env = env
|
||||
_size = size
|
||||
_pass = pass
|
||||
_repetitions = repetitions
|
||||
_next = spawn_ring(_env, _size, _pass)
|
||||
run()
|
||||
|
||||
new neighbor(env: Env, next: Ring) =>
|
||||
_env = env
|
||||
_next = next
|
||||
_size = 0
|
||||
_pass = 0
|
||||
_repetitions = 0
|
||||
|
||||
be apply(i: U32) =>
|
||||
if i > 0 then
|
||||
_next(i - 1)
|
||||
else
|
||||
run()
|
||||
end
|
||||
|
||||
fun ref run() =>
|
||||
if _repetitions > 0 then
|
||||
_repetitions = _repetitions - 1
|
||||
_next(_pass * _size)
|
||||
Worker(_env)
|
||||
end
|
||||
|
||||
fun tag spawn_ring(env: Env, size: U32, pass': U32) : Ring =>
|
||||
var next: Ring = this
|
||||
|
||||
for i in Range[U32](0, size) do
|
||||
next = Ring.neighbor(env, next)
|
||||
end
|
||||
|
||||
next
|
||||
|
||||
actor Main
|
||||
var _size: U32 = 50
|
||||
var _count: U32 = 20
|
||||
var _pass: U32 = 10000
|
||||
var _repetitions: U32 = 5
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
try
|
||||
arguments()
|
||||
start_benchmark()
|
||||
else
|
||||
usage()
|
||||
end
|
||||
|
||||
fun ref arguments() ? =>
|
||||
_count = _env.args(1).u32()
|
||||
_size = _env.args(2).u32()
|
||||
_pass = _env.args(3).u32()
|
||||
_repetitions = _env.args(4).u32()
|
||||
|
||||
fun ref start_benchmark() =>
|
||||
for i in Range[U32](0, _count) do
|
||||
Ring(_env, _size, _pass, _repetitions)
|
||||
end
|
||||
|
||||
fun ref usage() =>
|
||||
_env.out.print(
|
||||
"""
|
||||
mixed OPTIONS
|
||||
N number of actors in each ring"
|
||||
N number of rings"
|
||||
N number of messages to pass around each ring"
|
||||
N number of times to repeat"
|
||||
"""
|
||||
)
|
||||
1
samples/Puppet/hiera_include.pp
Normal file
@@ -0,0 +1 @@
hiera_include('classes')
9
samples/Ruby/filenames/Brewfile
Normal file
@@ -0,0 +1,9 @@
tap 'caskroom/cask'
tap 'telemachus/brew', 'https://telemachus@bitbucket.org/telemachus/brew.git'
brew 'emacs', args: ['with-cocoa', 'with-gnutls']
brew 'redis', restart_service: true
brew 'mongodb'
brew 'sphinx'
brew 'imagemagick'
brew 'mysql'
cask 'google-chrome'
267
samples/Ruby/racc.rb
Normal file
@@ -0,0 +1,267 @@
|
||||
#
|
||||
# DO NOT MODIFY!!!!
|
||||
# This file is automatically generated by Racc 1.4.7
|
||||
# from Racc grammer file "".
|
||||
#
|
||||
|
||||
require 'racc/parser.rb'
|
||||
module RJSON
|
||||
class Parser < Racc::Parser
|
||||
|
||||
|
||||
require 'rjson/handler'
|
||||
|
||||
attr_reader :handler
|
||||
|
||||
def initialize tokenizer, handler = Handler.new
|
||||
@tokenizer = tokenizer
|
||||
@handler = handler
|
||||
super()
|
||||
end
|
||||
|
||||
def next_token
|
||||
@tokenizer.next_token
|
||||
end
|
||||
|
||||
def parse
|
||||
do_parse
|
||||
handler
|
||||
end
|
||||
##### State transition tables begin ###
|
||||
|
||||
racc_action_table = [
|
||||
9, 33, 9, 11, 13, 16, 19, 22, 9, 7,
|
||||
23, 1, 9, 11, 13, 16, 19, 29, 30, 7,
|
||||
21, 1, 9, 11, 13, 16, 19, 31, nil, 7,
|
||||
21, 1, 23, 7, nil, 1 ]
|
||||
|
||||
racc_action_check = [
|
||||
6, 27, 33, 33, 33, 33, 33, 3, 31, 33,
|
||||
6, 33, 29, 29, 29, 29, 29, 12, 22, 29,
|
||||
12, 29, 2, 2, 2, 2, 2, 25, nil, 2,
|
||||
2, 2, 25, 0, nil, 0 ]
|
||||
|
||||
racc_action_pointer = [
|
||||
24, nil, 20, 7, nil, nil, -2, nil, nil, nil,
|
||||
nil, nil, 10, nil, nil, nil, nil, nil, nil, nil,
|
||||
nil, nil, 18, nil, nil, 20, nil, -7, nil, 10,
|
||||
nil, 6, nil, 0, nil, nil, nil ]
|
||||
|
||||
racc_action_default = [
|
||||
-27, -12, -21, -27, -1, -2, -27, -10, -15, -26,
|
||||
-8, -22, -27, -23, -17, -16, -24, -20, -18, -25,
|
||||
-19, -11, -27, -13, -3, -27, -6, -27, -9, -21,
|
||||
37, -27, -4, -21, -14, -5, -7 ]
|
||||
|
||||
racc_goto_table = [
|
||||
8, 26, 24, 27, 10, 3, 25, 5, 4, 12,
|
||||
nil, nil, nil, nil, 28, nil, nil, nil, nil, nil,
|
||||
nil, 32, nil, nil, nil, nil, 35, 34, 27, nil,
|
||||
nil, 36 ]
|
||||
|
||||
racc_goto_check = [
|
||||
9, 7, 5, 8, 11, 1, 6, 3, 2, 12,
|
||||
nil, nil, nil, nil, 11, nil, nil, nil, nil, nil,
|
||||
nil, 5, nil, nil, nil, nil, 7, 9, 8, nil,
|
||||
nil, 9 ]
|
||||
|
||||
racc_goto_pointer = [
|
||||
nil, 5, 8, 7, nil, -4, 0, -5, -3, -2,
|
||||
nil, 2, 7, nil, nil ]
|
||||
|
||||
racc_goto_default = [
|
||||
nil, nil, 14, 18, 6, nil, nil, nil, 20, nil,
|
||||
2, nil, nil, 15, 17 ]
|
||||
|
||||
racc_reduce_table = [
|
||||
0, 0, :racc_error,
|
||||
1, 14, :_reduce_none,
|
||||
1, 14, :_reduce_none,
|
||||
2, 15, :_reduce_none,
|
||||
3, 15, :_reduce_none,
|
||||
3, 19, :_reduce_none,
|
||||
1, 19, :_reduce_none,
|
||||
3, 20, :_reduce_none,
|
||||
2, 16, :_reduce_none,
|
||||
3, 16, :_reduce_none,
|
||||
1, 23, :_reduce_10,
|
||||
1, 24, :_reduce_11,
|
||||
1, 17, :_reduce_12,
|
||||
1, 18, :_reduce_13,
|
||||
3, 25, :_reduce_none,
|
||||
1, 25, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 26, :_reduce_none,
|
||||
1, 26, :_reduce_20,
|
||||
0, 27, :_reduce_none,
|
||||
1, 27, :_reduce_22,
|
||||
1, 27, :_reduce_23,
|
||||
1, 27, :_reduce_24,
|
||||
1, 27, :_reduce_25,
|
||||
1, 21, :_reduce_26 ]
|
||||
|
||||
racc_reduce_n = 27
|
||||
|
||||
racc_shift_n = 37
|
||||
|
||||
racc_token_table = {
|
||||
false => 0,
|
||||
:error => 1,
|
||||
:STRING => 2,
|
||||
:NUMBER => 3,
|
||||
:TRUE => 4,
|
||||
:FALSE => 5,
|
||||
:NULL => 6,
|
||||
"," => 7,
|
||||
":" => 8,
|
||||
"[" => 9,
|
||||
"]" => 10,
|
||||
"{" => 11,
|
||||
"}" => 12 }
|
||||
|
||||
racc_nt_base = 13
|
||||
|
||||
racc_use_result_var = true
|
||||
|
||||
Racc_arg = [
|
||||
racc_action_table,
|
||||
racc_action_check,
|
||||
racc_action_default,
|
||||
racc_action_pointer,
|
||||
racc_goto_table,
|
||||
racc_goto_check,
|
||||
racc_goto_default,
|
||||
racc_goto_pointer,
|
||||
racc_nt_base,
|
||||
racc_reduce_table,
|
||||
racc_token_table,
|
||||
racc_shift_n,
|
||||
racc_reduce_n,
|
||||
racc_use_result_var ]
|
||||
|
||||
Racc_token_to_s_table = [
|
||||
"$end",
|
||||
"error",
|
||||
"STRING",
|
||||
"NUMBER",
|
||||
"TRUE",
|
||||
"FALSE",
|
||||
"NULL",
|
||||
"\",\"",
|
||||
"\":\"",
|
||||
"\"[\"",
|
||||
"\"]\"",
|
||||
"\"{\"",
|
||||
"\"}\"",
|
||||
"$start",
|
||||
"document",
|
||||
"object",
|
||||
"array",
|
||||
"start_object",
|
||||
"end_object",
|
||||
"pairs",
|
||||
"pair",
|
||||
"string",
|
||||
"value",
|
||||
"start_array",
|
||||
"end_array",
|
||||
"values",
|
||||
"scalar",
|
||||
"literal" ]
|
||||
|
||||
Racc_debug_parser = false
|
||||
|
||||
##### State transition tables end #####
|
||||
|
||||
# reduce 0 omitted
|
||||
|
||||
# reduce 1 omitted
|
||||
|
||||
# reduce 2 omitted
|
||||
|
||||
# reduce 3 omitted
|
||||
|
||||
# reduce 4 omitted
|
||||
|
||||
# reduce 5 omitted
|
||||
|
||||
# reduce 6 omitted
|
||||
|
||||
# reduce 7 omitted
|
||||
|
||||
# reduce 8 omitted
|
||||
|
||||
# reduce 9 omitted
|
||||
|
||||
def _reduce_10(val, _values, result)
|
||||
@handler.start_array
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_11(val, _values, result)
|
||||
@handler.end_array
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_12(val, _values, result)
|
||||
@handler.start_object
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_13(val, _values, result)
|
||||
@handler.end_object
|
||||
result
|
||||
end
|
||||
|
||||
# reduce 14 omitted
|
||||
|
||||
# reduce 15 omitted
|
||||
|
||||
# reduce 16 omitted
|
||||
|
||||
# reduce 17 omitted
|
||||
|
||||
# reduce 18 omitted
|
||||
|
||||
# reduce 19 omitted
|
||||
|
||||
def _reduce_20(val, _values, result)
|
||||
@handler.scalar val[0]
|
||||
result
|
||||
end
|
||||
|
||||
# reduce 21 omitted
|
||||
|
||||
def _reduce_22(val, _values, result)
|
||||
n = val[0]; result = n.count('.') > 0 ? n.to_f : n.to_i
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_23(val, _values, result)
|
||||
result = true
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_24(val, _values, result)
|
||||
result = false
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_25(val, _values, result)
|
||||
result = nil
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_26(val, _values, result)
|
||||
@handler.scalar val[0].gsub(/^"|"$/, '')
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_none(val, _values, result)
|
||||
val[0]
|
||||
end
|
||||
|
||||
end # class Parser
|
||||
end # module RJSON
|
||||
2324
samples/Rust/hashmap.rs
Normal file
File diff suppressed because it is too large
12
samples/Rust/main.rs
Normal file
@@ -0,0 +1,12 @@
extern crate foo;
extern crate bar;

use foo::{self, quix};
use bar::car::*;
use bar;

fn main() {
    println!("Hello {}", "World");

    panic!("Goodbye")
}
27
samples/TypeScript/tsxAttributeResolution9.tsx
Normal file
@@ -0,0 +1,27 @@
//@jsx: preserve
//@module: amd

//@filename: react.d.ts
declare module JSX {
    interface Element { }
    interface IntrinsicElements {
    }
    interface ElementAttributesProperty {
        props;
    }
}

interface Props {
    foo: string;
}

//@filename: file.tsx
export class MyComponent {
    render() {
    }

    props: { foo: string; }
}

<MyComponent foo="bar" />; // ok
<MyComponent foo={0} />; // should be an error
79
samples/UrWeb/iso8601.ur
Normal file
@@ -0,0 +1,79 @@
|
||||
open Parse.String
|
||||
|
||||
val digit = satisfy isdigit
|
||||
|
||||
val decimal_of_len n =
|
||||
ds <- count n digit;
|
||||
return (List.foldl (fn d acc => 10*acc + ((ord d)-(ord #"0"))) 0 ds)
|
||||
|
||||
val date =
|
||||
y <- decimal_of_len 4;
|
||||
char' #"-";
|
||||
m <- decimal_of_len 2;
|
||||
char' #"-";
|
||||
d <- decimal_of_len 2;
|
||||
if m > 0 && m <= 12 then
|
||||
return {Year=y, Month=(Datetime.intToMonth (m-1)), Day=d}
|
||||
else
|
||||
fail
|
||||
|
||||
(* We parse fractions of a second, but ignore them since Datetime
|
||||
doesn't permit representing them. *)
|
||||
val time =
|
||||
h <- decimal_of_len 2;
|
||||
char' #":";
|
||||
m <- decimal_of_len 2;
|
||||
s <- maybe (char' #":";
|
||||
s <- decimal_of_len 2;
|
||||
maybe' (char' #"."; skipWhile isdigit);
|
||||
return s);
|
||||
return {Hour=h, Minute=m, Second=Option.get 0 s}
|
||||
|
||||
val timezone_offset =
|
||||
let val zulu = char' #"Z"; return 0
|
||||
val digits = decimal_of_len 2
|
||||
val sign = or (char' #"+"; return 1)
|
||||
(char' #"-"; return (-1))
|
||||
in
|
||||
zulu `or` (s <- sign;
|
||||
h <- digits;
|
||||
m <- (maybe' (char' #":"); or digits (return 0));
|
||||
return (s*(h*60+m)))
|
||||
end
|
||||
|
||||
val datetime_with_tz =
|
||||
d <- date; char' #"T"; t <- time;
|
||||
tz <- timezone_offset;
|
||||
return (d ++ t ++ {TZOffsetMinutes=tz})
|
||||
|
||||
val datetime =
|
||||
d <- datetime_with_tz;
|
||||
return (d -- #TZOffsetMinutes)
|
||||
|
||||
fun process v =
|
||||
case parse (d <- datetime_with_tz; eof; return d) v of
|
||||
Some r =>
|
||||
let
|
||||
val {Year=year,Month=month,Day=day,
|
||||
Hour=hour,Minute=minute,Second=second} =
|
||||
Datetime.addMinutes (r.TZOffsetMinutes) (r -- #TZOffsetMinutes)
|
||||
fun pad x =
|
||||
if x < 10 then "0" `strcat` show x else show x
|
||||
in
|
||||
<xml>{[pad hour]}:{[pad minute]}:{[pad second]} {[month]} {[day]}, {[year]}</xml>
|
||||
end
|
||||
| None => <xml>none</xml>
|
||||
|
||||
fun main () : transaction page =
|
||||
input <- source "2012-01-01T01:10:42Z";
|
||||
return <xml>
|
||||
<body>
|
||||
<label>
|
||||
Enter an
|
||||
<a href="https://en.wikipedia.org/wiki/ISO_8601">ISO 8601</a>
|
||||
datetime here:
|
||||
<ctextbox source={input} />
|
||||
</label>
|
||||
<ul><dyn signal={v <- signal input; return (process v)} /></ul>
|
||||
</body>
|
||||
</xml>
|
||||
85
samples/UrWeb/parse.urs
Normal file
@@ -0,0 +1,85 @@
|
||||
functor Make(Stream : sig type t end) : sig
|
||||
con t :: Type -> Type
|
||||
|
||||
val mreturn : a ::: Type -> a -> t a
|
||||
val mbind : a ::: Type -> b ::: Type ->
|
||||
(t a) -> (a -> t b) -> (t b)
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> Stream.t -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
end
|
||||
|
||||
structure String : sig
|
||||
con t :: Type -> Type
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> string -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
|
||||
val eof : t unit
|
||||
(* We provide alternative versions of some of these predicates
|
||||
* that return t unit as a monadic syntactical convenience. *)
|
||||
val string : string -> t string
|
||||
val string' : string -> t unit
|
||||
val stringCI : string -> t string
|
||||
val stringCI' : string -> t unit
|
||||
val char : char -> t char
|
||||
val char' : char -> t unit
|
||||
val take : int -> t (string*int)
|
||||
val drop : int -> t unit
|
||||
val satisfy : (char -> bool) -> t char
|
||||
val skip : (char -> bool) -> t unit
|
||||
val skipWhile : (char -> bool) -> t unit
|
||||
val takeWhile : (char -> bool) -> t (string*int)
|
||||
val takeWhile' : (char -> bool) -> t string (* conses *)
|
||||
(* Well, "till" is the correct form; but "til" is in common enough
|
||||
* usage that I'll prefer it for terseness. *)
|
||||
val takeTil : (char -> bool) -> t (string*int)
|
||||
val takeTil' : (char -> bool) -> t string (* conses *)
|
||||
val takeRest : t string
|
||||
|
||||
(** Convenience functions *)
|
||||
val skipSpace : t unit
|
||||
val endOfLine : t unit
|
||||
val unsigned_int_of_radix : int -> t int
|
||||
(*
|
||||
* val signed_int_of_radix : int -> t int
|
||||
* val double : t float
|
||||
*)
|
||||
end
|
||||
|
||||
structure Blob : sig
|
||||
con t :: Type -> Type
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> blob -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
end
|
||||
72
samples/X10/ArraySum.x10
Normal file
@@ -0,0 +1,72 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;

/**
 * A simple illustration of loop parallelization within a single place.
 */
public class ArraySum {

    var sum:Long;
    val data:Rail[Long];

    public def this(n:Long) {
        // Create a Rail with n elements (0..(n-1)), all initialized to 1.
        data = new Rail[Long](n, 1);
        sum = 0;
    }

    def sum(a:Rail[Long], start:Long, last:Long) {
        var mySum: Long = 0;
        for (i in start..(last-1)) {
            mySum += a(i);
        }
        return mySum;
    }

    def sum(numThreads:Long) {
        val mySize = data.size/numThreads;
        finish for (p in 0..(numThreads-1)) async {
            val mySum = sum(data, p*mySize, (p+1)*mySize);
            // Multiple activities will simultaneously update
            // this location -- so use an atomic operation.
            atomic sum += mySum;
        }
    }

    public static def main(args:Rail[String]) {
        var size:Long = 5*1000*1000;
        if (args.size >=1)
            size = Long.parse(args(0));

        Console.OUT.println("Initializing.");
        val a = new ArraySum(size);
        val P = [1,2,4];

        //warmup loop
        Console.OUT.println("Warming up.");
        for (numThreads in P)
            a.sum(numThreads);

        for (numThreads in P) {
            Console.OUT.println("Starting with " + numThreads + " threads.");
            a.sum=0;
            var time: long = - System.nanoTime();
            a.sum(numThreads);
            time += System.nanoTime();
            Console.OUT.println("For p=" + numThreads
                + " result: " + a.sum
                + ((size==a.sum)? " ok" : " bad")
                + " (time=" + (time/(1000*1000)) + " ms)");
        }
    }
}
50
samples/X10/Cancellation.x10
Normal file
@@ -0,0 +1,50 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.xrx.Runtime;

/**
 * Demonstrate how to instantiate the X10 runtime as an executor service
 * submit jobs to the runtime, wait jobs to complete and cancel all jobs
 *
 * Compile with: x10c -O -EXECUTOR_MODE=true Cancellation.x10
 * Run with: X10_CANCELLABLE=true X10_NPLACES=4 x10 -DX10RT_IMPL=JavaSockets Cancellation
 */
class Cancellation {
    static def job(id:Long, iterations:Long) = ()=>{
        at (Place.places().next(here)) async {
            for (i in 1..iterations) {
                finish for (p in Place.places()) {
                    at (p) async Console.OUT.println(here+" says hello (job " + id + ", iteration " + i + ")");
                }
                Console.ERR.println();
                System.sleep(200);
            }
        }
    };

    public static def main(args:Rail[String]):void {
        val w1 = Runtime.submit(job(1, 5));
        w1.await(); Console.ERR.println("Job 1 completed\n");
        val w2 = Runtime.submit(job(2, 1000));
        System.threadSleep(1000);
        val c1 = Runtime.cancelAll();
        try { w2.await(); } catch (e:Exception) { Console.ERR.println("Job 2 aborted with exception " + e +"\n"); }
        c1.await(); // waiting for cancellation to be processed
        System.threadSleep(1000);
        Runtime.submit(job(3, 1000));
        Runtime.submit(job(4, 1000));
        System.threadSleep(1000);
        val c2 = Runtime.cancelAll();
        c2.await();
        Console.ERR.println("Goodbye\n");
    }
}
52
samples/X10/Fibonacci.x10
Normal file
@@ -0,0 +1,52 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;

/**
 * This is a small program to illustrate the use of
 * <code>async</code> and <code>finish</code> in a
 * prototypical recursive divide-and-conquer algorithm.
 * It is obviously not intended to show a efficient way to
 * compute Fibonacci numbers in X10.<p>
 *
 * The heart of the example is the <code>run</code> method,
 * which directly embodies the recursive definition of
 * <pre>
 *    fib(n) = fib(n-1)+fib(n-2);
 * </pre>
 * by using an <code>async</code> to compute <code>fib(n-1)</code> while
 * the current activity computes <code>fib(n-2)</code>. A <code>finish</code>
 * is used to ensure that both computations are complete before
 * their results are added together to compute <code>fib(n)</code>
 */
public class Fibonacci {

    public static def fib(n:long) {
        if (n<=2) return 1;

        val f1:long;
        val f2:long;
        finish {
            async { f1 = fib(n-1); }
            f2 = fib(n-2);
        }
        return f1 + f2;
    }

    public static def main(args:Rail[String]) {
        val n = (args.size > 0) ? Long.parse(args(0)) : 10;
        Console.OUT.println("Computing fib("+n+")");
        val f = fib(n);
        Console.OUT.println("fib("+n+") = "+f);
    }
}
86
samples/X10/HeatTransfer_v0.x10
Normal file
@@ -0,0 +1,86 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.*;
|
||||
import x10.compiler.Foreach;
|
||||
import x10.compiler.Inline;
|
||||
|
||||
|
||||
/**
|
||||
* This is a sample program illustrating how to use
|
||||
* X10's array classes. It also illustrates the use
|
||||
* of foreach to acheive intra-place parallelism.
|
||||
*
|
||||
* The program solves a set of 2D partial differential
|
||||
* equations by iteratively applying a 5-point stencil
|
||||
* operation until convergence is reached.
|
||||
*/
|
||||
public class HeatTransfer_v0 {
|
||||
static val EPSILON = 1.0e-5;
|
||||
|
||||
val N:Long;
|
||||
val A:Array_2[Double]{self!=null};
|
||||
val Tmp:Array_2[Double]{self!=null};
|
||||
|
||||
public def this(size:Long) {
|
||||
N = size;
|
||||
A = new Array_2[Double](N+2, N+2); // zero-initialized N+2 * N+2 array of doubles
|
||||
for (j in 1..N) A(0, j) = 1; // set one border row to 1
|
||||
Tmp = new Array_2[Double](A);
|
||||
}
|
||||
|
||||
final @Inline def stencil(x:Long, y:Long):Double {
|
||||
return (A(x-1,y) + A(x+1,y) + A(x,y-1) + A(x,y+1)) / 4;
|
||||
}
|
||||
|
||||
def run() {
|
||||
val is = new DenseIterationSpace_2(1,1,N,N);
|
||||
var delta:Double;
|
||||
do {
|
||||
// Compute new values, storing in tmp
|
||||
delta = Foreach.blockReduce(is,
|
||||
(i:Long, j:Long)=>{
|
||||
Tmp(i,j) = stencil(i,j);
|
||||
// Reduce max element-wise delta (A now holds previous values)
|
||||
return Math.abs(Tmp(i,j) - A(i,j));
|
||||
},
|
||||
(a:Double, b:Double)=>Math.max(a,b), 0.0
|
||||
);
|
||||
|
||||
// swap backing data of A and Tmp
|
||||
Array.swap(A, Tmp);
|
||||
} while (delta > EPSILON);
|
||||
}
|
||||
|
||||
def prettyPrintResult() {
|
||||
for (i in 1..N) {
|
||||
for (j in 1..N) {
|
||||
Console.OUT.printf("%1.4f ",A(i,j));
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Long.parse(args(0)) : 8;
|
||||
Console.OUT.println("HeatTransfer example with N="+n+" and epsilon="+EPSILON);
|
||||
Console.OUT.println("Initializing data structures");
|
||||
val ht = new HeatTransfer_v0(n);
|
||||
Console.OUT.println("Beginning computation...");
|
||||
val start = System.nanoTime();
|
||||
ht.run();
|
||||
val stop = System.nanoTime();
|
||||
Console.OUT.printf("...completed in %1.3f seconds.\n", ((stop-start) as double)/1e9);
|
||||
if (n <= 10) {
|
||||
ht.prettyPrintResult();
|
||||
}
|
||||
}
|
||||
}
|
||||
114
samples/X10/HeatTransfer_v1.x10
Normal file
@@ -0,0 +1,114 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.*;
|
||||
import x10.compiler.Foreach;
|
||||
import x10.util.Team;
|
||||
|
||||
/**
|
||||
* This is a sample program illustrating how to use
|
||||
* X10's distributed array classes. It also illustrates the use
|
||||
* of foreach to achieve intra-place parallelism and the mixture
|
||||
* of APGAS finish/async/at with Team collective operations.
|
||||
*
|
||||
* This version of the program uses a vanilla DistArray without
|
||||
* ghost regions. As a result, the stencil function does
|
||||
* inefficient fine-grained neighbor communication to get individual values.
|
||||
* Compare this to HeatTransfer_v2 which utilizes ghost regions and
|
||||
* bulk ghost-region exchange functions.
|
||||
*
|
||||
* The program solves a set of 2D partial differential
|
||||
* equations by iteratively applying a 5-point stencil
|
||||
* operation until convergence is reached.
|
||||
*/
|
||||
public class HeatTransfer_v1 {
|
||||
static val EPSILON = 1.0e-5;
|
||||
|
||||
val N:Long;
|
||||
val A:DistArray_BlockBlock_2[Double]{self!=null};
|
||||
val Tmp:DistArray_BlockBlock_2[Double]{self!=null};
|
||||
|
||||
public def this(size:Long) {
|
||||
N = size;
|
||||
val init = (i:Long, j:Long)=>i==0 ? 1.0 : 0.0;
|
||||
A = new DistArray_BlockBlock_2[Double](N+2, N+2, init);
|
||||
Tmp = new DistArray_BlockBlock_2[Double](N+2, N+2, init);
|
||||
}
|
||||
|
||||
final def stencil(x:Long, y:Long):Double {
|
||||
val cls = (dx:Long, dy:Long)=>{
|
||||
val p = A.place(x+dx, y+dy);
|
||||
p == here ? A(x+dx,y+dy) : at (p) A(x+dx,y+dy)
|
||||
};
|
||||
val tmp = cls(-1,0) + cls(1,0) + cls(0,-1) + cls(0,1);
|
||||
return tmp / 4;
|
||||
}
|
||||
|
||||
def run() {
|
||||
val myTeam = new Team(A.placeGroup());
|
||||
finish for (p in A.placeGroup()) at (p) async {
|
||||
// Compute the subset of the local indices on which
|
||||
// we want to apply the stencil (the interior points of the N+2 x N+2 grid)
|
||||
val li = A.localIndices();
|
||||
val interior = new DenseIterationSpace_2(li.min(0) == 0 ? 1 : li.min(0),
|
||||
li.min(1) == 0 ? 1 : li.min(1),
|
||||
li.max(0) == N+1 ? N : li.max(0),
|
||||
li.max(1) == N+1 ? N : li.max(1));
|
||||
var delta:Double;
|
||||
do {
|
||||
// Compute new values, storing in tmp
|
||||
val myDelta = Foreach.blockReduce(interior,
|
||||
(i:Long, j:Long)=>{
|
||||
Tmp(i,j) = stencil(i,j);
|
||||
// Reduce max element-wise delta (A now holds previous values)
|
||||
return Math.abs(Tmp(i,j) - A(i,j));
|
||||
},
|
||||
(a:Double, b:Double)=>Math.max(a,b), 0.0
|
||||
);
|
||||
|
||||
myTeam.barrier();
|
||||
|
||||
// Unlike Array, DistArray doesn't provide an optimized swap.
|
||||
// So, until it does, we have to copy the data elements.
|
||||
Foreach.block(interior, (i:Long, j:Long)=>{
|
||||
A(i,j) = Tmp(i,j);
|
||||
});
|
||||
|
||||
delta = myTeam.allreduce(myDelta, Team.MAX);
|
||||
} while (delta > EPSILON);
|
||||
}
|
||||
}
|
||||
|
||||
def prettyPrintResult() {
|
||||
for (i in 1..N) {
|
||||
for (j in 1..N) {
|
||||
val x = at (A.place(i,j)) A(i,j);
|
||||
Console.OUT.printf("%1.4f ", x);
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Long.parse(args(0)) : 8;
|
||||
Console.OUT.println("HeatTransfer example with N="+n+" and epsilon="+EPSILON);
|
||||
Console.OUT.println("Initializing data structures");
|
||||
val ht = new HeatTransfer_v1(n);
|
||||
Console.OUT.println("Beginning computation...");
|
||||
val start = System.nanoTime();
|
||||
ht.run();
|
||||
val stop = System.nanoTime();
|
||||
Console.OUT.printf("...completed in %1.3f seconds.\n", ((stop-start) as double)/1e9);
|
||||
if (n <= 10) {
|
||||
ht.prettyPrintResult();
|
||||
}
|
||||
}
|
||||
}
|
||||
44
samples/X10/HelloWholeWorld.x10
Normal file
@@ -0,0 +1,44 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;

/**
 * The classic hello world program, with a twist - prints a message
 * from the command line at every Place.
 * The messages from each Place may appear in any order, but the
 * finish ensures that the last message printed will be "Goodbye"
 * <pre>
 * Typical output:
 * [dgrove@linchen samples]$ ./HelloWholeWorld 'best wishes'
 * Place(1) says hello and best wishes
 * Place(2) says hello and best wishes
 * Place(3) says hello and best wishes
 * Place(0) says hello and best wishes
 * Goodbye
 * [dgrove@linchen samples]$
 * </pre>
 */
class HelloWholeWorld {
    public static def main(args:Rail[String]):void {
        if (args.size < 1) {
            Console.OUT.println("Usage: HelloWholeWorld message");
            return;
        }

        finish for (p in Place.places()) {
            at (p) async Console.OUT.println(here+" says hello and "+args(0));
        }
        Console.OUT.println("Goodbye");
    }
}
23
samples/X10/HelloWorld.x10
Normal file
@@ -0,0 +1,23 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;

/**
 * The classic hello world program, shows how to output to the console.
 */
class HelloWorld {
    public static def main(Rail[String]) {
        Console.OUT.println("Hello World!" );
    }
}
45
samples/X10/Histogram.x10
Normal file
@@ -0,0 +1,45 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

public class Histogram {
    public static def compute(data:Rail[Int], numBins:Int) {
        val bins = new Rail[Int](numBins);
        finish for (i in data.range) async {
            val b = data(i) % numBins;
            atomic bins(b)++;
        }
        return bins;
    }

    public static def run(N:Int, S:Int):Boolean {
        val a = new Rail[Int](N, (i:long)=> i as int);
        val b = compute(a, S);
        val v = b(0);
        var ok:Boolean = true;
        for (x in b.range) ok &= (b(x)==v);
        return ok;
    }

    public static def main(args:Rail[String]) {
        if (args.size != 2L) {
            Console.OUT.println("Usage: Histogram SizeOfArray NumberOfBins");
            return;
        }
        val N = Int.parse(args(0));
        val S = Int.parse(args(1));
        val ok = run(N,S);
        if (ok) {
            Console.OUT.println("Test ok.");
        } else {
            Console.OUT.println("Test failed.");
        }
    }
}
55
samples/X10/Integrate.x10
Normal file
@@ -0,0 +1,55 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

/**
 * This is a slightly more realistic example of the
 * basic computational pattern of using async/finish
 * to express recursive divide-and-conquer algorithms.
 * The program does integration via Guassian Quadrature.
 * <p>
 * It also can serve as an example of using a closure.
 */
public class Integrate {
    static val epsilon = 1.0e-9;

    val fun:(double)=>double;

    public def this(f:(double)=>double) { fun = f; }

    public def computeArea(left:double, right:double) {
        return recEval(left, fun(left), right, fun(right), 0);
    }

    private def recEval(l:double, fl:double, r:double, fr:double, a:double) {
        val h = (r - l) / 2;
        val hh = h / 2;
        val c = l + h;
        val fc = fun(c);
        val al = (fl + fc) * hh;
        val ar = (fr + fc) * hh;
        val alr = al + ar;
        if (Math.abs(alr - a) < epsilon) return alr;
        val expr1:double;
        val expr2:double;
        finish {
            async { expr1 = recEval(c, fc, r, fr, ar); };
            expr2 = recEval(l, fl, c, fc, al);
        }
        return expr1 + expr2;
    }

    public static def main(args:Rail[String]) {
        val obj = new Integrate((x:double)=>(x*x + 1.0) * x);
        val xMax = args.size > 0 ? Long.parse(args(0)) : 10;
        val area = obj.computeArea(0, xMax);
        Console.OUT.println("The area of (x*x +1) * x from 0 to "+xMax+" is "+area);
    }
}
151
samples/X10/KMeans.x10
Normal file
@@ -0,0 +1,151 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* A KMeans object o can compute K means of a given set of
|
||||
* points of dimension o.myDim.
|
||||
* <p>
|
||||
* This class implements a sequential program, that is readily parallelizable.
|
||||
*
|
||||
* For a scalable, high-performance version of this benchmark see
|
||||
* KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
|
||||
*/
|
||||
public class KMeans(myDim:Long) {
|
||||
|
||||
static val DIM=2;
|
||||
static val K=4;
|
||||
static val POINTS=2000;
|
||||
static val ITERATIONS=50;
|
||||
static val EPS=0.01F;
|
||||
|
||||
static type ValVector(k:Long) = Rail[Float]{self.size==k};
|
||||
static type ValVector = ValVector(DIM);
|
||||
|
||||
static type Vector(k:Long) = Rail[Float]{self.size==k};
|
||||
static type Vector = Vector(DIM);
|
||||
|
||||
static type SumVector(d:Long) = V{self.dim==d};
|
||||
static type SumVector = SumVector(DIM);
|
||||
|
||||
/**
|
||||
* V represents the sum of 'count' number of vectors of dimension 'dim'.
|
||||
*/
|
||||
static class V(dim:Long) implements (Long)=>Float {
|
||||
var vec: Vector(dim);
|
||||
var count:Int;
|
||||
def this(dim:Long, init:(Long)=>Float): SumVector(dim) {
|
||||
property(dim);
|
||||
vec = new Rail[Float](this.dim, init);
|
||||
count = 0n;
|
||||
}
|
||||
public operator this(i:Long) = vec(i);
|
||||
def makeZero() {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) =0.0F;
|
||||
count=0n;
|
||||
}
|
||||
def addIn(a:ValVector(dim)) {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) += a(i);
|
||||
count++;
|
||||
}
|
||||
def div(f:Int) {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) /= f;
|
||||
}
|
||||
def dist(a:ValVector(dim)):Float {
|
||||
var dist:Float=0.0F;
|
||||
for (i in 0..(dim-1)) {
|
||||
val tmp = vec(i)-a(i);
|
||||
dist += tmp*tmp;
|
||||
}
|
||||
return dist;
|
||||
}
|
||||
def dist(a:SumVector(dim)):Float {
|
||||
var dist:Float=0.0F;
|
||||
for (i in 0..(dim-1)) {
|
||||
val tmp = vec(i)-a(i);
|
||||
dist += tmp*tmp;
|
||||
}
|
||||
return dist;
|
||||
}
|
||||
def print() {
|
||||
Console.OUT.println();
|
||||
for (i in 0..(dim-1)) {
|
||||
Console.OUT.print((i>0? " " : "") + vec(i));
|
||||
}
|
||||
}
|
||||
def normalize() { div(count);}
|
||||
def count() = count;
|
||||
}
|
||||
|
||||
|
||||
def this(myDim:Long):KMeans{self.myDim==myDim} {
|
||||
property(myDim);
|
||||
}
|
||||
static type KMeansData(myK:Long, myDim:Long)= Rail[SumVector(myDim)]{self.size==myK};
|
||||
|
||||
/**
|
||||
* Compute myK means for the given set of points of dimension myDim.
|
||||
*/
|
||||
def computeMeans(myK:Long, points:Rail[ValVector(myDim)]):KMeansData(myK, myDim) {
|
||||
var redCluster : KMeansData(myK, myDim) =
|
||||
new Rail[SumVector(myDim)](myK, (i:long)=> new V(myDim, (j:long)=>points(i)(j)));
|
||||
var blackCluster: KMeansData(myK, myDim) =
|
||||
new Rail[SumVector(myDim)](myK, (i:long)=> new V(myDim, (j:long)=>0.0F));
|
||||
for (i in 1..ITERATIONS) {
|
||||
val tmp = redCluster;
|
||||
redCluster = blackCluster;
|
||||
blackCluster=tmp;
|
||||
for (p in 0..(POINTS-1)) {
|
||||
var closest:Long = -1;
|
||||
var closestDist:Float = Float.MAX_VALUE;
|
||||
val point = points(p);
|
||||
for (k in 0..(myK-1)) { // compute closest mean in cluster.
|
||||
val dist = blackCluster(k).dist(point);
|
||||
if (dist < closestDist) {
|
||||
closestDist = dist;
|
||||
closest = k;
|
||||
}
|
||||
}
|
||||
redCluster(closest).addIn(point);
|
||||
}
|
||||
for (k in 0..(myK-1))
|
||||
redCluster(k).normalize();
|
||||
|
||||
var b:Boolean = true;
|
||||
for (k in 0..(myK-1)) {
|
||||
if (redCluster(k).dist(blackCluster(k)) > EPS) {
|
||||
b=false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (b)
|
||||
break;
|
||||
for (k in 0..(myK-1))
|
||||
blackCluster(k).makeZero();
|
||||
}
|
||||
return redCluster;
|
||||
}
|
||||
|
||||
public static def main (Rail[String]) {
|
||||
val rnd = new Random(0);
|
||||
val points = new Rail[ValVector](POINTS,
|
||||
(long)=>new Rail[Float](DIM, (long)=>rnd.nextFloat()));
|
||||
val result = new KMeans(DIM).computeMeans(K, points);
|
||||
for (k in 0..(K-1)) result(k).print();
|
||||
}
|
||||
}
|
||||
|
||||
// vim: shiftwidth=4:tabstop=4:expandtab
|
||||
147
samples/X10/KMeansDist.x10
Normal file
@@ -0,0 +1,147 @@
|
||||
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.array.*;
import x10.io.Console;
import x10.util.Random;

/**
 * A low performance formulation of distributed KMeans using fine-grained asyncs.
 *
 * For a highly optimized and scalable version of this benchmark see
 * KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
 */
public class KMeansDist {

    static val DIM=2;
    static val CLUSTERS=4;
    static val POINTS=2000;
    static val ITERATIONS=50;

    public static def main (Rail[String]) {
        val world = Place.places();
        val local_curr_clusters =
            PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
        val local_new_clusters =
            PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
        val local_cluster_counts =
            PlaceLocalHandle.make[Rail[Int]](world, ()=> new Rail[Int](CLUSTERS));

        val rnd = PlaceLocalHandle.make[Random](world, () => new Random(0));
        val points = new DistArray_Block_2[Float](POINTS, DIM, world, (Long,Long)=>rnd().nextFloat());

        val central_clusters = new Array_2[Float](CLUSTERS, DIM, (i:Long, j:Long) => {
            at (points.place(i,j)) points(i,j)
        });

        val old_central_clusters = new Array_2[Float](CLUSTERS, DIM);

        val central_cluster_counts = new Rail[Int](CLUSTERS);

        for (iter in 1..ITERATIONS) {

            Console.OUT.println("Iteration: "+iter);

            finish {
                // reset state
                for (d in world) at (d) async {
                    for ([i,j] in central_clusters.indices()) {
                        local_curr_clusters()(i, j) = central_clusters(i, j);
                        local_new_clusters()(i, j) = 0f;
                    }

                    local_cluster_counts().clear();
                }
            }

            finish {
                // compute new clusters and counters
                for (p in 0..(POINTS-1)) {
                    at (points.place(p,0)) async {
                        var closest:Long = -1;
                        var closest_dist:Float = Float.MAX_VALUE;
                        for (k in 0..(CLUSTERS-1)) {
                            var dist : Float = 0;
                            for (d in 0..(DIM-1)) {
                                val tmp = points(p,d) - local_curr_clusters()(k, d);
                                dist += tmp * tmp;
                            }
                            if (dist < closest_dist) {
                                closest_dist = dist;
                                closest = k;
                            }
                        }
                        atomic {
                            for (d in 0..(DIM-1)) {
                                local_new_clusters()(closest,d) += points(p,d);
                            }
                            local_cluster_counts()(closest)++;
                        }
                    }
                }
            }

            for ([i,j] in old_central_clusters.indices()) {
                old_central_clusters(i, j) = central_clusters(i, j);
                central_clusters(i, j) = 0f;
            }

            central_cluster_counts.clear();

            finish {
                val central_clusters_gr = GlobalRef(central_clusters);
                val central_cluster_counts_gr = GlobalRef(central_cluster_counts);
                val there = here;
                for (d in world) at (d) async {
                    // access PlaceLocalHandles 'here'; the data will then be captured by at and transferred to 'there' for accumulation
                    val tmp_new_clusters = local_new_clusters();
                    val tmp_cluster_counts = local_cluster_counts();
                    at (there) atomic {
                        for ([i,j] in tmp_new_clusters.indices()) {
                            central_clusters_gr()(i,j) += tmp_new_clusters(i,j);
                        }
                        for (j in 0..(CLUSTERS-1)) {
                            central_cluster_counts_gr()(j) += tmp_cluster_counts(j);
                        }
                    }
                }
            }

            for (k in 0..(CLUSTERS-1)) {
                for (d in 0..(DIM-1)) {
                    central_clusters(k, d) /= central_cluster_counts(k);
                }
            }

            // TEST FOR CONVERGENCE
            var b:Boolean = true;
            for ([i,j] in old_central_clusters.indices()) {
                if (Math.abs(old_central_clusters(i, j)-central_clusters(i, j))>0.0001) {
                    b = false;
                    break;
                }
            }
            if (b) break;

        }

        for (d in 0..(DIM-1)) {
            for (k in 0..(CLUSTERS-1)) {
                if (k>0)
                    Console.OUT.print(" ");
                Console.OUT.print(central_clusters(k,d));
            }
            Console.OUT.println();
        }
    }
}

// vim: shiftwidth=4:tabstop=4:expandtab
samples/X10/KMeansDistPlh.x10 (new file, 144 lines)
@@ -0,0 +1,144 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2015.
 */

import x10.array.Array;
import x10.array.Array_2;
import x10.compiler.Foreach;
import x10.util.Random;

/**
 * A better formulation of distributed KMeans using coarse-grained asyncs to
 * implement an allreduce pattern for cluster centers and counts.
 *
 * For a highly optimized and scalable version of this benchmark see
 * KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
 */
public class KMeansDistPlh {

    static val DIM=2;
    static val CLUSTERS=4;

    static class ClusterState {
        val clusters = new Array_2[Float](CLUSTERS, DIM);
        val clusterCounts = new Rail[Int](CLUSTERS);
    }

    public static def main(args:Rail[String]) {
        val numPoints = args.size > 0 ? Long.parse(args(0)) : 2000;
        val iterations = args.size > 1 ? Long.parse(args(1)) : 50;
        val world = Place.places();

        val clusterStatePlh = PlaceLocalHandle.make[ClusterState](world, () => new ClusterState());
        val currentClustersPlh = PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
        val pointsPlh = PlaceLocalHandle.make[Array_2[Float]](world, () => {
            val rand = new Random(here.id);
            return new Array_2[Float](numPoints/world.size(), DIM, (Long,Long)=>rand.nextFloat());
        });

        val centralCurrentClusters = new Array_2[Float](CLUSTERS, DIM);
        val centralNewClusters = new Array_2[Float](CLUSTERS, DIM);
        val centralClusterCounts = new Rail[Int](CLUSTERS);

        // arbitrarily initialize central clusters to first few points
        for ([i,j] in centralCurrentClusters.indices()) {
            centralCurrentClusters(i,j) = pointsPlh()(i,j);
        }

        for (iter in 1..iterations) {
            Console.OUT.println("Iteration: "+iter);

            finish {
                for (place in world) async {
                    val placeClusters = at(place) {
                        val currentClusters = currentClustersPlh();
                        Array.copy(centralCurrentClusters, currentClusters);

                        val clusterState = clusterStatePlh();
                        val newClusters = clusterState.clusters;
                        newClusters.clear();
                        val clusterCounts = clusterState.clusterCounts;
                        clusterCounts.clear();

                        // compute new clusters and counters
                        val points = pointsPlh();

                        for (p in 0..(points.numElems_1-1)) {
                            var closest:Long = -1;
                            var closestDist:Float = Float.MAX_VALUE;
                            for (k in 0..(CLUSTERS-1)) {
                                var dist : Float = 0;
                                for (d in 0..(DIM-1)) {
                                    val tmp = points(p,d) - currentClusters(k, d);
                                    dist += tmp * tmp;
                                }
                                if (dist < closestDist) {
                                    closestDist = dist;
                                    closest = k;
                                }
                            }

                            atomic {
                                for (d in 0..(DIM-1)) {
                                    newClusters(closest,d) += points(p,d);
                                }
                                clusterCounts(closest)++;
                            }
                        }
                        clusterState
                    };

                    // combine place clusters to central
                    atomic {
                        for ([i,j] in centralNewClusters.indices()) {
                            centralNewClusters(i,j) += placeClusters.clusters(i,j);
                        }
                        for (j in 0..(CLUSTERS-1)) {
                            centralClusterCounts(j) += placeClusters.clusterCounts(j);
                        }
                    }
                }
            }

            for (k in 0..(CLUSTERS-1)) {
                for (d in 0..(DIM-1)) {
                    centralNewClusters(k, d) /= centralClusterCounts(k);
                }
            }

            // TEST FOR CONVERGENCE
            var b:Boolean = true;
            for ([i,j] in centralCurrentClusters.indices()) {
                if (Math.abs(centralCurrentClusters(i, j)-centralNewClusters(i, j)) > 0.0001) {
                    b = false;
                    break;
                }
            }

            Array.copy(centralNewClusters, centralCurrentClusters);

            if (b) break;

            centralNewClusters.clear();
            centralClusterCounts.clear();
        }

        for (d in 0..(DIM-1)) {
            for (k in 0..(CLUSTERS-1)) {
                if (k > 0)
                    Console.OUT.print(" ");
                Console.OUT.print(centralCurrentClusters(k,d));
            }
            Console.OUT.println();
        }
    }
}

// vim: shiftwidth=4:tabstop=4:expandtab
samples/X10/KMeansSPMD.x10 (new file, 192 lines)
@@ -0,0 +1,192 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;
import x10.io.File;
import x10.io.Marshal;
import x10.io.IOException;
import x10.util.OptionsParser;
import x10.util.Option;
import x10.util.Team;

/**
 * An SPMD formulation of KMeans.
 *
 * For a highly optimized and scalable version of this benchmark see
 * KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
 */
public class KMeansSPMD {

    public static def printClusters (clusters:Rail[Float], dims:long) {
        for (d in 0..(dims-1)) {
            for (k in 0..(clusters.size/dims-1)) {
                if (k>0)
                    Console.OUT.print(" ");
                Console.OUT.print(clusters(k*dims+d).toString());
            }
            Console.OUT.println();
        }
    }

    public static def main (args:Rail[String]) {here == Place.FIRST_PLACE} {

        val opts = new OptionsParser(args, [
            Option("q","quiet","just print time taken"),
            Option("v","verbose","print out each iteration"),
            Option("h","help","this information")
        ], [
            Option("p","points","location of data file"),
            Option("i","iterations","quit after this many iterations"),
            Option("c","clusters","number of clusters to find"),
            Option("d","dim","number of dimensions"),
            Option("s","slices","factor by which to oversubscribe computational resources"),
            Option("n","num","quantity of points")
        ]);
        if (opts.filteredArgs().size!=0L) {
            Console.ERR.println("Unexpected arguments: "+opts.filteredArgs());
            Console.ERR.println("Use -h or --help.");
            System.setExitCode(1n);
            return;
        }
        if (opts("-h")) {
            Console.OUT.println(opts.usage(""));
            return;
        }

        val fname = opts("-p", "points.dat");
        val num_clusters=opts("-c",4);
        val num_slices=opts("-s",1);
        val num_global_points=opts("-n", 2000);
        val iterations=opts("-i",50);
        val dim=opts("-d", 4);
        val verbose = opts("-v");
        val quiet = opts("-q");

        if (!quiet)
            Console.OUT.println("points: "+num_global_points+" clusters: "+num_clusters+" dim: "+dim);

        // file is dimension-major
        val file = new File(fname);
        val fr = file.openRead();
        val init_points = (long) => Float.fromIntBits(Marshal.INT.read(fr).reverseBytes());
        val num_file_points = (file.size() / dim / 4) as Int;
        val file_points = new Rail[Float](num_file_points*dim, init_points);

        val team = Team.WORLD;

        val num_slice_points = num_global_points / num_slices / Place.numPlaces();

        finish {
            for (h in Place.places()) at(h) async {
                var compute_time:Long = 0;
                var comm_time:Long = 0;
                var barrier_time:Long = 0;

                val host_clusters = new Rail[Float](num_clusters*dim, (i:long)=>file_points(i));
                val host_cluster_counts = new Rail[Int](num_clusters);

                for (slice in 0..(num_slices-1)) {
                    // carve out local portion of points (point-major)
                    val offset = (slice*Place.numPlaces() + here.id) * num_slice_points;
                    if (verbose)
                        Console.OUT.println(h.toString()+" gets "+offset+" len "+num_slice_points);
                    val init = (i:long) => {
                        val p=i%num_slice_points;
                        val d=i/num_slice_points;
                        return file_points(offset+p+d*num_file_points);
                    };

                    // these are pretty big so allocate up front
                    val host_points = new Rail[Float](num_slice_points*dim, init);
                    val host_nearest = new Rail[Float](num_slice_points);

                    val start_time = System.currentTimeMillis();

                    barrier_time -= System.nanoTime();
                    team.barrier();
                    barrier_time += System.nanoTime();

                    main_loop: for (iter in 0..(iterations-1)) {

                        //if (offset==0) Console.OUT.println("Iteration: "+iter);

                        val old_clusters = new Rail[Float](host_clusters.size);
                        Rail.copy(host_clusters, 0L, old_clusters, 0L, host_clusters.size);

                        host_clusters.clear();
                        host_cluster_counts.clear();

                        compute_time -= System.nanoTime();
                        for (p in 0..(num_slice_points-1)) {
                            var closest:Long = -1;
                            var closest_dist:Float = Float.MAX_VALUE;
                            for (k in 0..(num_clusters-1)) {
                                var dist : Float = 0;
                                for (d in 0..(dim-1)) {
                                    val tmp = host_points(p+d*num_slice_points) - old_clusters(k*dim+d);
                                    dist += tmp * tmp;
                                }
                                if (dist < closest_dist) {
                                    closest_dist = dist;
                                    closest = k;
                                }
                            }
                            for (d in 0..(dim-1)) {
                                host_clusters(closest*dim+d) += host_points(p+d*num_slice_points);
                            }
                            host_cluster_counts(closest)++;
                        }
                        compute_time += System.nanoTime();

                        comm_time -= System.nanoTime();
                        team.allreduce(host_clusters, 0L, host_clusters, 0L, host_clusters.size, Team.ADD);
                        team.allreduce(host_cluster_counts, 0L, host_cluster_counts, 0L, host_cluster_counts.size, Team.ADD);
                        comm_time += System.nanoTime();

                        for (k in 0..(num_clusters-1)) {
                            for (d in 0..(dim-1)) host_clusters(k*dim+d) /= host_cluster_counts(k);
                        }

                        if (offset==0 && verbose) {
                            Console.OUT.println("Iteration: "+iter);
                            printClusters(host_clusters,dim);
                        }

                        // TEST FOR CONVERGENCE
                        for (j in 0..(num_clusters*dim-1)) {
                            if (true/*||Math.abs(clusters_old(j)-host_clusters(j))>0.0001*/) continue main_loop;
                        }

                        break;

                    } // main_loop

                } // slice

                Console.OUT.printf("%d: computation %.3f s communication %.3f s (barrier %.3f s)\n",
                    here.id, compute_time/1E9, comm_time/1E9, barrier_time/1E9);

                team.barrier();

                if (here.id == 0) {
                    Console.OUT.println("\nFinal results:");
                    printClusters(host_clusters,dim);
                }

            } // async

        } // finish

    }

}

// vim: shiftwidth=4:tabstop=4:expandtab
samples/X10/MontyPi.x10 (new file, 42 lines)
@@ -0,0 +1,42 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.array.DistArray_Unique;
import x10.io.Console;
import x10.util.Random;

/**
 * Calculation of an approximation to pi by using a Monte Carlo simulation
 * (throwing darts into the unit square and determining the fraction that land
 * in the unit circle).
 */
public class MontyPi {
    public static def main(args:Rail[String]) {
        if (args.size != 1L) {
            Console.OUT.println("Usage: MontyPi <number of points>");
            return;
        }
        val N = Long.parse(args(0));
        val initializer = () => {
            val r = new Random();
            var result:Long = 0;
            for(c in 1..N) {
                val x = r.nextDouble();
                val y = r.nextDouble();
                if (x*x + y*y <= 1.0) result++;
            }
            result
        };
        val result = new DistArray_Unique[Long](Place.places(), initializer);
        val pi = (4.0*result.reduce((x:Long,y:Long) => x+y, 0) as Double)/(N*Place.numPlaces());
        Console.OUT.println("The value of pi is " + pi);
    }
}
samples/X10/NQueensDist.x10 (new file, 123 lines)
@@ -0,0 +1,123 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 * (C) Copyright Australian National University 2011.
 */

import x10.array.DistArray_Unique;

/**
 * A distributed version of NQueens. Runs over NUM_PLACES.
 * Identical to NQueensPar, except that work is distributed
 * over multiple places rather than shared between threads.
 */
public class NQueensDist {
    public static val EXPECTED_SOLUTIONS =
        [0, 1, 0, 0, 2, 10, 4, 40, 92, 352, 724, 2680, 14200, 73712, 365596, 2279184, 14772512];

    val N:Long;
    val P:Long;
    val results:DistArray_Unique[Long];
    val R:LongRange;

    def this(N:Long, P:Long) {
        this.N=N;
        this.P=P;
        this.results = new DistArray_Unique[Long]();
        this.R = 0..(N-1);
    }
    def start() {
        new Board().distSearch();
    }
    def run():Long {
        finish start();
        val result = results.reduce(((x:Long,y:Long) => x+y),0);
        return result;
    }

    class Board {
        val q: Rail[Long];
        /** The number of low-rank positions that are fixed in this board for the purposes of search. */
        var fixed:Long;
        def this() {
            q = new Rail[Long](N);
            fixed = 0;
        }

        /**
         * @return true if it is safe to put a queen in file <code>j</code>
         * on the next rank after the last fixed position.
         */
        def safe(j:Long) {
            for (k in 0..(fixed-1)) {
                if (j == q(k) || Math.abs(fixed-k) == Math.abs(j-q(k)))
                    return false;
            }
            return true;
        }

        /** Search all positions for the current board. */
        def search() {
            for (k in R) searchOne(k);
        }

        /**
         * Modify the current board by adding a new queen
         * in file <code>k</code> on rank <code>fixed</code>,
         * and search for all safe positions with this prefix.
         */
        def searchOne(k:Long) {
            if (safe(k)) {
                if (fixed==(N-1)) {
                    // all ranks safely filled
                    atomic NQueensDist.this.results(here.id)++;
                } else {
                    q(fixed++) = k;
                    search();
                    fixed--;
                }
            }
        }

        /**
         * Search this board, dividing the work between all places
         * using a block distribution of the current free rank.
         */
        def distSearch() {
            val work = R.split(Place.numPlaces());
            finish for (p in Place.places()) {
                val myPiece = work(p.id);
                at (p) async {
                    // implicit copy of 'this' made across the at divide
                    for (k in myPiece) {
                        searchOne(k);
                    }
                }
            }
        }
    }

    public static def main(args:Rail[String]) {
        val n = args.size > 0 ? Long.parse(args(0)) : 8;
        Console.OUT.println("N=" + n);
        //warmup
        //finish new NQueensPar(12, 1).start();
        val P = Place.numPlaces();
        val nq = new NQueensDist(n,P);
        var start:Long = -System.nanoTime();
        val answer = nq.run();
        val result = answer==EXPECTED_SOLUTIONS(n);
        start += System.nanoTime();
        start /= 1000000;
        Console.OUT.println("NQueensDist " + nq.N + "(P=" + P +
            ") has " + answer + " solutions" +
            (result? " (ok)." : " (wrong).") +
            "time=" + start + "ms");
    }
}
samples/X10/NQueensPar.x10 (new file, 117 lines)
@@ -0,0 +1,117 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 * (C) Copyright Australian National University 2011.
 */

/**
 * Compute the number of solutions to the N queens problem.
 */
public class NQueensPar {
    public static val EXPECTED_SOLUTIONS =
        [0, 1, 0, 0, 2, 10, 4, 40, 92, 352, 724, 2680, 14200, 73712, 365596, 2279184, 14772512];

    val N:Int;
    val P:Int;
    var nSolutions:Int = 0n;
    val R:IntRange;

    def this(N:Int, P:Int) {
        this.N=N;
        this.P=P;
        this.R = 0n..(N-1n);
    }

    def start() {
        new Board().parSearch();
    }

    class Board {
        val q: Rail[Int];
        /** The number of low-rank positions that are fixed in this board for the purposes of search. */
        var fixed:Int;
        def this() {
            q = new Rail[Int](N);
            fixed = 0n;
        }

        def this(b:Board) {
            this.q = new Rail[Int](b.q);
            this.fixed = b.fixed;
        }

        /**
         * @return true if it is safe to put a queen in file <code>j</code>
         * on the next rank after the last fixed position.
         */
        def safe(j:Int) {
            for (k in 0n..(fixed-1n)) {
                if (j == q(k) || Math.abs(fixed-k) == Math.abs(j-q(k)))
                    return false;
            }
            return true;
        }

        /** Search all positions for the current board. */
        def search() {
            for (k in R) searchOne(k);
        }

        /**
         * Modify the current board by adding a new queen
         * in file <code>k</code> on rank <code>fixed</code>,
         * and search for all safe positions with this prefix.
         */
        def searchOne(k:Int) {
            if (safe(k)) {
                if (fixed==(N-1n)) {
                    // all ranks safely filled
                    atomic NQueensPar.this.nSolutions++;
                } else {
                    q(fixed++) = k;
                    search();
                    fixed--;
                }
            }
        }

        /**
         * Search this board, dividing the work between threads
         * using a block distribution of the current free rank.
         */
        def parSearch() {
            for (work in R.split(P)) async {
                val board = new Board(this);
                for (w in work) {
                    board.searchOne(w);
                }
            }
        }
    }

    public static def main(args:Rail[String]) {
        val n = args.size > 0 ? Int.parse(args(0)) : 8n;
        Console.OUT.println("N=" + n);
        //warmup
        //finish new NQueensPar(12, 1).start();
        val ps = [1n,2n,4n];
        for (numTasks in ps) {
            Console.OUT.println("starting " + numTasks + " tasks");
            val nq = new NQueensPar(n,numTasks);
            var start:Long = -System.nanoTime();
            finish nq.start();
            val result = (nq.nSolutions as Long)==EXPECTED_SOLUTIONS(nq.N);
            start += System.nanoTime();
            start /= 1000000;
            Console.OUT.println("NQueensPar " + nq.N + "(P=" + numTasks +
                ") has " + nq.nSolutions + " solutions" +
                (result? " (ok)." : " (wrong).") + "time=" + start + "ms");
        }
    }
}
samples/X10/QSort.x10 (new file, 73 lines)
@@ -0,0 +1,73 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

/**
 * Straightforward quicksort implementation using
 * naive partition-in-the-middle and not bothering with
 * well-known optimizations such as using insertion sort
 * once the partitions get small. This is only intended
 * as a simple example of an array-based program that
 * combines a recursive divide and conquer algorithm
 * with async and finish, not as a highly efficient
 * sorting procedure.
 */
public class QSort {

    private static def partition(data:Rail[int], left:long, right:long) {
        var i:long = left;
        var j:long = right;
        var tmp:int;
        var pivot:long = data((left + right) / 2);

        while (i <= j) {
            while (data(i) < pivot) i++;
            while (data(j) > pivot) j--;
            if (i <= j) {
                tmp = data(i);
                data(i) = data(j);
                data(j) = tmp;
                i++;
                j--;
            }
        }

        return i;
    }

    public static def qsort(data:Rail[int], left:long, right:long) {
        val index:long = partition(data, left, right);
        finish {
            if (left < index - 1)
                async qsort(data, left, index - 1);

            if (index < right)
                qsort(data, index, right);
        }
    }

    public static def main(args:Rail[String]) {
        val N = args.size>0 ? Long.parse(args(0)) : 100;
        val r = new x10.util.Random();
        val data = new Rail[int](N, (long)=>r.nextInt(9999n));
        qsort(data, 0, N-1);
        for (i in 0..(N-1)) {
            Console.OUT.print(data(i));
            if (i%10 == 9) {
                Console.OUT.println();
            } else {
                Console.OUT.print(", ");
            }
        }
        Console.OUT.println();
    }
}
samples/X10/StructSpheres.x10 (new file, 123 lines)
@@ -0,0 +1,123 @@
/*
 * This file is part of the X10 project (http://x10-lang.org).
 *
 * This file is licensed to You under the Eclipse Public License (EPL);
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.opensource.org/licenses/eclipse-1.0.php
 *
 * (C) Copyright IBM Corporation 2006-2014.
 */

import x10.io.Console;
import x10.util.Random;

/**
 * This class represents a real-world problem in graphics engines --
 * determining which objects in a large sprawling world are close enough to the
 * camera to be considered for rendering.
 *
 * It illustrates the usage of X10 structs to define new primitive types.
 * In Native X10, structs are allocated within their containing object/stack frame
 * and thus using structs instead of classes for Vector3 and WorldObject greatly
 * improves the memory efficiency of the computation.
 *
 * @Author Dave Cunningham
 * @Author Vijay Saraswat
 */
class StructSpheres {
    static type Real = Float;

    static struct Vector3(x:Real, y:Real, z:Real) {
        public def getX () = x;
        public def getY () = y;
        public def getZ () = z;

        public def add (other:Vector3)
            = Vector3(this.x+other.x, this.y+other.y, this.z+other.z);

        public def neg () = Vector3(-this.x, -this.y, -this.z);

        public def sub (other:Vector3) = add(other.neg());

        public def length () = Math.sqrtf(length2());

        public def length2 () = x*x + y*y + z*z;
    }

    static struct WorldObject {

        def this (x:Real, y:Real, z:Real, r:Real) {
            pos = Vector3(x,y,z);
            renderingDistance = r;
        }

        public def intersects (home:Vector3)
            = home.sub(pos).length2() < renderingDistance*renderingDistance;

        protected val pos:Vector3;
        protected val renderingDistance:Real;
    }

    public static def compute():boolean {

        val reps = 7500;

        // The following correspond to a modern out-door computer game:
        val num_objects = 50000;
        val world_size = 6000;
        val obj_max_size = 400;

        val ran = new Random(0);

        // the array can go on the heap
        // but the elements ought to be /*inlined*/ in the array
        val spheres =
            new Rail[WorldObject](num_objects, (i:long) => {
                val x = (ran.nextDouble()*world_size) as Real;
                val y = (ran.nextDouble()*world_size) as Real;
                val z = (ran.nextDouble()*world_size) as Real;
                val r = (ran.nextDouble()*obj_max_size) as Real;
                return WorldObject(x,y,z,r);
            });

        val time_start = System.nanoTime();

        var counter : Long = 0;

        // HOT LOOP BEGINS
        for (c in 1..reps) {

            val x = (ran.nextDouble()*world_size) as Real;
            val y = (ran.nextDouble()*world_size) as Real;
            val z = (ran.nextDouble()*world_size) as Real;

            val pos = Vector3(x,y,z);

            for (i in spheres.range()) {
                if (spheres(i).intersects(pos)) {
                    counter++;
                }
            }
        }
        // HOT LOOP ENDS

        val time_taken = System.nanoTime() - time_start;
        Console.OUT.println("Total time: "+time_taken/1E9);

        val expected = 109702;
        val ok = counter == expected;
        if (!ok) {
            Console.ERR.println("number of intersections: "+counter
                +" (expected "+expected+")");
        }
        return ok;
    }

    public static def main (Rail[String]) {
        compute();
    }

}
samples/XML/JSBrowser.jsproj (new file, 96 lines)
@@ -0,0 +1,96 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|AnyCPU">
      <Configuration>Debug</Configuration>
      <Platform>AnyCPU</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|ARM">
      <Configuration>Debug</Configuration>
      <Platform>ARM</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x86">
      <Configuration>Debug</Configuration>
      <Platform>x86</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|AnyCPU">
      <Configuration>Release</Configuration>
      <Platform>AnyCPU</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|ARM">
      <Configuration>Release</Configuration>
      <Platform>ARM</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x86">
      <Configuration>Release</Configuration>
      <Platform>x86</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <ProjectGuid>42fc11d8-64c6-4967-a15a-dfd787f68766</ProjectGuid>
  </PropertyGroup>
  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '' or '$(VisualStudioVersion)' &lt; '14.0'">
    <VisualStudioVersion>14.0</VisualStudioVersion>
  </PropertyGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).Default.props" />
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).props" />
  <PropertyGroup>
    <EnableDotNetNativeCompatibleProfile>true</EnableDotNetNativeCompatibleProfile>
    <TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
    <TargetPlatformVersion>10.0.10240.0</TargetPlatformVersion>
    <TargetPlatformMinVersion>10.0.10240.0</TargetPlatformMinVersion>
    <MinimumVisualStudioVersion>$(VersionNumberMajor).$(VersionNumberMinor)</MinimumVisualStudioVersion>
    <DefaultLanguage>en-US</DefaultLanguage>
  </PropertyGroup>
  <ItemGroup>
    <AppxManifest Include="package.appxmanifest">
      <SubType>Designer</SubType>
    </AppxManifest>
    <Content Include="css\browser.css" />
    <Content Include="default.html" />
    <Content Include="images\icons.png" />
    <Content Include="images\logo_150x150.png" />
    <Content Include="images\logo_310x150.png" />
    <Content Include="images\logo_310x310.png" />
    <Content Include="images\logo_44x44.png" />
    <Content Include="images\logo_71x71.png" />
    <Content Include="images\logo_badge.png" />
    <Content Include="images\logo_bg.png" />
    <Content Include="images\logo_splash.png" />
    <Content Include="images\logo_store.png" />
    <Content Include="js\components\address-bar.js" />
    <Content Include="js\browser.js" />
    <Content Include="js\components\favorites.js" />
    <Content Include="js\components\navigation.js" />
    <Content Include="js\components\settings.js" />
    <Content Include="js\components\title-bar.js" />
    <Content Include="js\components\webview.js" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\NativeListener\NativeListener.vcxproj" />
  </ItemGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).targets" />
  <!-- To modify your build process, add your task inside one of the targets below then uncomment
       that target and the DisableFastUpToDateCheck PropertyGroup.
       Other similar extension points exist, see Microsoft.Common.targets.
  <Target Name="BeforeBuild">
  </Target>
  <Target Name="AfterBuild">
  </Target>
  <PropertyGroup>
    <DisableFastUpToDateCheck>true</DisableFastUpToDateCheck>
  </PropertyGroup>
  -->
</Project>
samples/XML/sample.csl (new file, 1053 lines)
(file diff suppressed because it is too large)
samples/XML/water.tsx (new file, 213 lines)
@@ -0,0 +1,213 @@
|
||||
<?xml version="1.0" ?>
|
||||
<tileset name="Sunny Beach">
|
||||
<tile id="0">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABYklEQVR42tVVq47DMBDsp/hHDxQcOFBwwMAgwMDEIKAgoCAgUqBhqH8l50126lHaq9TrQ7pKq8TOemd2dtfd7f7rz3TTbNpibpyNT/N7gUOxSHbKs+mLlW+vAwaQJ1B5ulQN+88kYkIJ3GhwkVwyFRABvKbEkKvfI0TOGYkha9nDe4AiqfqIHZUoznbTfURwYA1agn+rvAL2VRpuX8ym1Rw9QUpIiB/WLQilRZXfgXuVziro51hr69HxtPZUAlk3ui9ErZYNvvLeEplAU7MAx00tj3mVEhkMuWYdVeZGQTrtCZTDqz8IsAotleqCCDcUZ7aQUduSsanKjnVDe44yx9Qogdvj1k21m08KDiltqllAdnwX+xgvRzNqvLuaUQ4EDSCgB8oQtb4GxIBDfvxeOE9ApO7mUjltWl5HvQfiMy+kXhXBjGP8DhsF2rTYa6/kQKPFV7Sf3vinFLQX3Np0f43zA259mZw6IuSNAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="1">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAB/0lEQVR42tVVLYvDQBBduTJy5drIyMrY/oOTFScrKioqKioiwhFxcBWFVhQaU0JEOCIKPXFQGVmbvzI3M/vRlIM7ev2AKwwNk7Bv3nvzEiH+7S/dQG/dg2gVQZRpeBiuro+g8yPI+R7E8A3EQEC0jkDPNdf9gLcIvGtBr/Efr4k9lUoVxGUMYaJAZaaofzvgvAG9ahhUl1gfLYisADEpQE6ksWFrFIgWEcixhHAVXq+IXlhQKsuae3gtZoVnT/47QGKvZwEPxCqMBPcvUoQ9rh0wAr42BpjkTw+gp+j985IVoIOZ5VhAv+p7sF5p+kES+Gd+HYSkZYnnFjQ5GGDLmu/htRhtEBBruGRgqi6QnEkPqKxFdN/1SLEzaxjYSe2qwl5ltp2BP1szGJbCfSALeJCRjSKmgOQncAKL85j3g/qcELLFPuOG8eqdlq0zQN7xnodpQZVmmKBq2ALHjBgFCECH0y7wXmwjz5R6POQ8hLiKvQo/x622rEsbvcrETybvIJ4w+9PibMlUYsAJWE0VD+Ei6Qa5OBU8QG6TQPJnDQTpnv0PUwti2bLvC8OMpBdDYVJCqbhJHHdmT8TASK9Ty8hGjpmujN+8Ayj1N4+vfiHRCyjB5Ru88Oazv1l48hR7DLwIOYb3+xiMlp4d+U5yx3Xs4/ewjxLtQfcl89dzvgD75hkn04cPugAAAABJRU5ErkJggg==
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="2">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACjElEQVR42r1VIY/iYBTkZ6ysrURWYpFIJBKLRJJTVZsUQcKKTajZEAQhCBIqmrSCpAiSIhCfQFQgKk5UnHk3876W3Iq7bLjdJXkp/dow8+bNPFqtBz+d2EgSjaQs5pKfJtL6ro93rqR7EuluS+lFpSTpWMx5ItlxjPpCIs5bLm5UiLcrZVmI+BcB+ATgvlQ/F5LEI1UiP40/VxEnzMR5QYGAd6jEOxvx3ox0VoVk6L4s50ogO0CJSyCb9UC/a0GV/W74GBlnZkG1Xu316TmR7nQv3bQSY5bo1FcgksgxBnPhfaPERJZhX58X1+DjJJwtwLYNMEg8o0hmZ6Q9y2WwrmR5E1WABmSHSTqSqlwoENUgGZ5xLMYEksMbfC/HuP4OHBlxVgCd1qA/EgvMogJryB5VMkD14IN+Kiq3zh6ABNZRANiOw1dTFte5ktlH9fnZqvYeuJG6KYA5a1zD3JJKC3Ffclu4J3j/INJbGZFfSy0CslOmoqiV4YhYJEm/6LUmV1AZPLMmQ6YpsYKz42ndNa+hBW6/Gn3G+I0ZQ4BT8rIIABjc5eYZKz+OLQGSIvDRArM226ESfRdb7bZRIPzDfFuj8WunpQzOkL1EBAsrP+Uk0F1eyk4/YBQkRflz3RETu7Rudmnx7J95VxPG1hNubMGfZkgAZq8RDCG9bDR2/PEstbNVJVKbAhvPOVTx9ayJ5sfTQAIYjwupvXUhvRjOBwEXY1A5jTUTu+G8lYiqMcT96L6gtOv/2ZAuuu2gYRdjGR5FFWBH9+xj7kZ3QKCxJClrzD3eWXzeRuTy8a9wP/xAiRtD6UzZPRfOJagN94X/Cb24VBC6mPJmBxKwbv/Wf0Wu3PK2UKdT/kd/5zeaE2gm63UKTAAAAABJRU5ErkJggg==
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="3">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABvklEQVR42tVVr2/CQBTmb0XWVlZW1lZWoicWMkECgqQIklY0acVEBeIEomICe3vfd33H0Y1kY0AykpfCcbzv17tjNvuvr7ZLbbmL7fJlbodjYZ8GbA65rQS42ie2f88Izs91IpU+jki5jW3f57YVkPUqssYU4oC838Qkge/bJpVK+LwbsCqFOnMoaDvKgWEtF2LZ6Ehul69zvmeJKzcDAxRqoRSK2yZjEUwInD4Wjgyc6TKCYX9Irto5IiD5K2DU8i2y5SaixVQjzQCkmQMsdOY0LEhuEMI6oNPYWvn99akWxuWYJxV0yDfywLBX1VWBM8NQuGEUAn4gJ7GhB/pxvXNxBYq/5oYnWJOEgKAR1kFovYp9HFCI5krCRZCQXDiY6kSvJGSvgTNN4Mh3thGodg2MEIW9aKBOADiMiarVAVEK9XQJZCbuXp/60TZjcj6x2ZFJSQJk1AmAaEzqhq7r6UCVI6kL639y7n3jiW3lNmJsdGokyH114qPi3IxD2/7lgro4FTt32zkw54qS4PrmrLStEw7mXe9+r7o5Dyczrd1waUQPvZL1uOpwVXsQyPycPO1PCTkPxwUHFUf51j6f/7okyrolgPYAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="4">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACUklEQVR42tWVr2/qUBTHJ5FIZC2yEomdRCKRtVUvVUsz8dLMMUHSiSXMEDJBGgRJESRFkBRB0oknEE8gJvgXzjufU+4eZsl7+5Vsyclt77m73+/5nu8pFxff9a991xZ/5EvzuinJYiBfBty6aUk060k4uZR4NZDhNpDw8VIg1LnvfB4R79aT1nXLgCAQzfoSKInurCvZLpJ40RfOuPi4in+2hACgcdUQb+SJf+NJ8NCV6TqU6aYOiBj4VVP8e19Qiva8GXj/K5FqG8vhkEq+CMSf1JdWu0TSdWCgibYA2WlB9NizM5BwZFjJ/5ciABPZvAbNl6GUm0jKdWQEqB5iUyWRK4lcc7G2BJB42TfAYhtJtgqMAHewWqhqrwJXu1gKBTs+p8Jz9RQreCCNHw3ZPyUmZ61AnYNENO9Ze9gnkuXAgMcajhSEjMht21b+hyLOKh5KNhtYlUhZKnueAeAC5C/1mYsgFDzUcpODGN4ghyJjzac6kuYZVSOcdF+IEJxJ533DQQ2K+6uAVlWoqYpVKPv90AjQb1b2Dkr0qH6AHOfoP7nOqG3nbX9XFzFVBVAjcW3AtHddI8UUQfDVVlDtcBOo+YZy+J1KpqydGpBAapTIF6HKWVdDDpKstJB9fMQ7gOcj/M9m5EKMZ6tWx4UHlFFVCq2MtplSJz84tx+fx0ac1tIiqseI7xrHUo3HVBRKyPmhPKkCCT5MvBeniWG/Ui8dj+OP+yBR6UvVSsiZ06ZCVwdsiun7p32SM519KnUfJ6SutsnJtPHX/ShhxPN+v/WeP+Pal6x1OIpVAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="5">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACTElEQVR42tVUr2vrUBSenKyMjI28MjI2MrIyMjYyaoSJEepSUWhEoc+UMlFKRaAVhUwUOvFgExMVT0Q80X/hvO876X2rGlvf9mCBQ27Opff7cb7bq6vv+qw3iRx/T6X9NUaV8t+Al7tUmk0q6zqRfBsr+H6Xy9Mj6mf+dUSennNZLmI5HEoFIoE9QNNZCCfG0jyk4g5dabap9j8N+PBSyB6Ay1UCkEwP9ye+vgmY1311Ib0PlRh7XuXBkUyOx+nlRFQJAPW9y4REWqhvtok6oT26AWem6K0xGhLjKOYPiWT3fYnrUMzAFTMz7ydCZeUukbYtNWDnituXUrrwjcUZOB34JlYXSJTquTcHmRC/MZVRN9h3bh1dvzljzpEHqTINVaFlVVvFtDutIx2JP/K0T0cIzL1s1ZfiRIzkvYGna+7x7dw5r0SyRaTAnB9LlbdjDRn3qJyq13WqB3ezLnBQLMkkgNWR3obmGePC3OMfgSSLUIH5e6veTEzXm0VKwoyMBvjVAYDzetFCrglkU22tVdvPg3YoJMahcQVQHBbOfAmrDsgC85wSmTBDT4JFoH3m4s3Us5h6hufc2jlGoXmA9UusUygMoCw8qck3nUNqOwhmGBOLoAQP4M67w5itIs2BHQfJdA4UmvJ+BeWrUMuH8ilGQmBabYvf3OP74uvIwLBI4PrmWtxRZ2ECNTy4d9v7m3AqdW96SL+nZLn3aX9I51fJqqKt/O4BlD0lcKov+0umMmt1+Ygc4BZwrcT+xeqPPgT0ht399kbexcB/AEhbiVW/ps4pAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="6">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACOUlEQVR42tWVrW/bUBTF+2cEhhoaGpoGBgYamhoaWgOTFeaCSS6IlJEoKoiiAksNiJSASimIlIACgwKDAoOB0bv7u3bc7qPSlq2VFunpfeTl3XPPOffm4uJ//RyOiWy2kSyvA0lXI3m3wNVjJjsNvLuLpXxIpX7Km/19rCN5OyCbdSRlmclBg9wWoVRVrgzoehU1IBRQ/7Iv3Dvs/yGQU6ZkVz3mRjuDIAwYKcvUALAHhK2Vkbqeng8kXQWWLZmS8WGf2rBgCuDrl2kDBmaOqczXoXgTz34z34YGJF4MxR33xZ25vw+EbBnLm1A2q9AoJqPdNrZA0A24cOJLfDPsmGFNUL6f30UdEOfKsfPeh56tX3e10rdp9TR6j+gbdoHLh8wyZk/WnQnrvJ2nnSFhgzOAZLqOZ0ML3hs3IHofe89AkmIky0XQZMm4j1sNWxBKP1lyD0Bkyx66mfmtmVPvDj65kq4Dkw3qgyvPZIgmA3EnrjHnjB0DkRaBTPW9n+r6pdtNd505q5SBuso7OQBzW0RK/chkiq4HBpLAxpiOZDGy80ylcC8d8Re+ycGd113falpVmc0Aa8AkBgKtYYWHYAEgyENAHiY77kAzhuUeg+D+Z1/+qO5/1LuCGWWFMx6FKXQ1/6xPzSg22qGb7L2ZZ0DPLsfvqmLbdDuCYTACnMrPzlUKPABgfPBXfeBXvb8z3P7ZnFYxOhOUeqc/vGlLPpUrAKwnFDSstPPJu/0pYb76aWpGxYTnvvMN/STd514e0SYAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="7">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABjklEQVR42tVVsWrDMBDNp+i7+gnZO3UqoUMoHQL1YKgHgbWYoKEUD4Z4CHhUN636FVV30knnkBYSnEADRwKS7r179+6yWv3Xj+itFzpEPXkhjb8vsArRsRicf1d7jNsBJyAhCyh+18Yfvq3f9wcksN6++vXzZjkiQhkvGhPBQHKZwAMwKAHg7ThlApvdRyYBal0PXJsisYwgAJAVOAFv9ddMhYfHp0i4t5cRoQcROJDYJYmHklxUBoMDcxUyAZVUg+jCm9H9TgQO8WIVQcXLFGU+6TFJjORUJEZndI7gTXyLisFvzcgoNjXY46ObmQpJyEgGKoMqITlXAXssbfYCEQeysztkWMoPcXTniLCxUqz3w5SrbEczr7JK4InsOaWQhGZTI2P8PW49YwkPPx0SIZkpCZxRxbxVdDd7gaq+yIzwQKVJgErfUmsakwnM5j9Uimr1xRMIPrgFxpHk206lTckrBE6uJ3A0dWcXXEhjUkQXf+TNqJl/tMG47UpWbLT4ipb2jn9KKnmhjvvi2jw/5gK11tLQbjMAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="8">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACOElEQVR42tWVr2/bQBzF+ycMBoYaFgaGBhoGGoYaGgZN0cCkDERKQaWGBBhEVUGkBVRywCQHRDqDAIOAAwUHCgxG3t73e7nVqbQf6tpKi/TV2U7k97n3np2Li//1Y6oxim2K2zyBe5jj3YTtcYqSwuW3DPVhouJ6vss447cDKe5T1PUUhiJf1yNYO6cDPN6kCiHfm/2Yk+n6asJhp7I7e5yr7TJeTK5NCTY5OTLF7SrRYx268mLhXm4x2AHxIzDcNxSb6KgYAZrHGw8jzlQTFRN32nDl1oMI5F8Ld+8MZPoU7W8d4gOQNdCby42cu1EAOW870/C6wDlGEwr6PDZD0F8Lb2p0c4NoZtBbcve5Q39NgIoOfAdGRyCxQEpX1HoKyU2dm/syEuBnIZ/FJk4IlF6vfFznwktzPqsavY1TF3pbH8Xw4DDY0w1GktTQOGSHcvMA4SPIFK5dzOBEHSD4WyvO7FuOdBctgHB8bdD5WODyzjKOBjFdESBxIqEzqayMaESoEXxX9HEMIkfvkoy4IVDaj+o3T4kKSw/ufSTdWakwceU7ITCJA4+tuhTTjf7OR5XUDYaMagwfWXhCilMsZ9b/sYwCsPDinSuun0qEJ+NDXrKcjT/fcF0RhhAiPhCQB66O1wmnu/6XF5Q6sD71hHGEmDrXJaJlrQ5FVwaXa6sdESBxYyAQhH21F5KWVRzJT/34UqLzudCOhJjaxX2zV3IoabR4KqxEIjHJvNufUpSfIGT3s5cL/wB3sgL2s65DmgAAAABJRU5ErkJggg==
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="9">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACYUlEQVR42tVVoW7jUBDsJwQGmhoaGpoWBhoGhgYGGp2igpN6IFICItUkqgKiKiBSDCo5IJIDLNkg4IECgwKDAwYlc7P77F7InapeW+kirTZ5D8zszOzL1dX/+vGTAulhguppjof1EF8HXDYIn4HBocGoBIpyyoqQnSas6POIOHEBZ8tKDIYkMMwBY+aoqjman3dIH8co8og10f5hwL3ZHv3vKYSA+2jg5TX8Y43BETp1/TwnkalWdpzgYTPUrsX79088y9BfZOgtUvT53VkX7Clc9rEB/F2FQSW9Zg5uYWiFAKaHTokI93GoROT+zcDuhpOynFWhwP2bVBXwdgbukndLA39TIUgaS4CZcI8V1anQ1HdWjfNUiYgtxtyiYDb2u5Hm5c/AW2On/JFp9QgsKjgLS8TjxN6KJGiFTO2faoxqZoF1zUyM2cMz1JKM4DK5EJEtETL7pD0rrVW/pWaoZNrLcjfGqhC3Zy0xISBEglOD4NzYPLAPSGAiRI7cEG5G+AINp5ARFaQyrqz2jpgok18o8gq2ugBeWvCOlJdQ6q2oYNQKb0VVaIe3rXB9qGlLDYdBlTUVYjKtKKAqUAEhpPko/7IlFqxQzwX81RrJA7PQ+9aGUraCBBU8b7TL9O6hQiRqPNEe/sbLvX0n2s14+xZwEifOrBKU37nJtAuozQTDSFBnYc/EmgEBAyoxJnjITbn0/p/W0dnZnMhGdDb1l5Zcp4ooEPB19NZU5MTpSUI8/7iXMGkVaTOh2yLg3bbwzGcGAtrhU4XPe5LbkLrxZWDtoyX1ZX9K8iIqCZl+9n7gXyTF4JLCnrXHAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="10">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACQ0lEQVR42tVVoW7jQBDtp5gaGhqaGgYGGpoaBgaGBvbASSmpTgVRFWCpAZFiYMkGkRxgsKDAIGBBgMGRd29mbQdVV7W9ShdpNJvx23nvza6Tu7v/9ePtGvg7A/+hgfezwvcR5yR9MvAem1uw5u0ZFPXviIWIJAHJ/S1zaTX7PxqETx3CbXcT85VCpKmOmTnYk+jcI6itEsskgrxDVPRILeAXFl7RwWfNO3xSiCcE28Et3QmR/2AQ5hbhwSJi+CQLBhEhI9gxKCI4OIz7zn0U827ikO7iVyA+gSQ9oi0bPRp1HnM9z4G0BeZ8PiuJI2Zecy01iTOQFMzsMROMAcKSU5O7QzFvE7e9kkqzhHnBjSmbROKYEbGWGEeado5oxojqHlnvBCWCN66HiJH9ijs5Q7HgmWU6E7Fp13jeJgqoygWaekkQxbQOKOpnV2D5mwStcxoPbkVQwvqsc8SZdfWA05Je/XWD5rxEc1pifiGWz+NymBS/R+w9CRHgschwPGQwZq1CmtNKszQTkv76S9eCk6bW3uNln6IqFlo37YrrTNcV93WvfJ6nsJd7HPeZZtknXG8exbix69aa1UHtXHSclggU0pecQs+rYZ0qYVUvpmci3hgXx1EU492XUYjEmebBXSeTYePjIdVj00kNAhVXLyYRIk5CXdfLj7+OQizxvEvpxI3WkbmpjCK0vr85bSjE2s3X/SAJyeSaxOPY9UyHezIeUfUZx38VcnJOx8tVlSJgNd2Tb/tTknO2l41eVHmVP9rnD/NFCxuaQAv3AAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="11">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACIUlEQVR42tWVrW/jQBDFCwsPFi41NDQ0NQwMLCy9vyLgwIGCgIAAEwMD62RgEGCpAZEcUCkFAQYFhgYHDI5M35vZ9VWV7qT22koXabRrOTvvN1/ri4v/9efqk7gStr4Tt+3kc4VzWHGSqO7NdoO4XS9893HCEKRIVMDKXuLDaGuOZwCp4dk17wzi8k7cpjPnFcT2AwziG8vCbChH1CITbW+l+deMuHU3O3Zbi46iIdIYMGkvktSDLM4iiyP2u1GS4yRxC0C81/P16XUg4YBGRDGARACIGfU37FedgXgYdwvQAJsbrFvdGUDu/QQYZOXPwj51KriG4HcY6p3UiKqBHSaNNuW+GXSvZQnCKJOu7AH6IDzBS+sRLV/ty1g+y4jWqnhRT3Y2ROJq0DU6jupMo4aDGF1PxwTkBLD2armHYZ9QnA1a2JkwMZqhrS9t/mx8w2iFiFSoMoG49SBwkJwndUxxXZkNwnpAhSp8+TYWuVrjA/UAfx+32kdBaoozyqKfnc6j6FeFK3y6V92cLbf5fWdoiV/TjBHmPEE3pwBJykGyBj2AkqTsdFiKTLAfUvYI+iHZ2/+ZEb2kcO5L1clVdZLL2x9vH0cVwZgpTAGA/WRiAYSrLwXf8XnxIBIdrEnf7UKK0YgZZvx6FLn5KXJ9NlvuRbJ20rtg+Yj9AyDux4+7kimU4dJZEgAgX38hOxDNDqIZ+LSPEtO8uIdRePf2iJ8AkT7BKeWMTHEAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="12">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAhklEQVR42mNgGKpgztI5/5Ex3S2u7mxGwTR3CC6Lae4QYi1Gx1GZSWBMscVAJhjDDCTBYjAmOUTQfUysQ3BZTHTUEApqXA4hZDHBNEJqHKM7hFiLCTqEUBzj8impaYVg4kQ3gNggJuQQknMFqXFLyCF0LweoXiCRUQ7QBhBRDtAHYCkHyAIATRZdO8VgYzoAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="13">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACNElEQVR42tWVrY/bQBTEAwMNDU0NAw8eDQwMDDQNDIwKqqisBZbOINKZRJGBZRlY2gORHHCSD0TaggJD/xuvM2+zl6hqq7b3ITXSUxwr8fx2ZnYzGv2vr7DoJCqsxFsr44+VvJtwd1xJVS4k3HYSl71MT4OEpZWg7GSct28Hss/n0h6WUtULud/OpHtcyb5I5KYeJM6tRBmAil6CXSejzLweCFdrHpxwd1zLfjfXMXUipklk0gxyexwkSNtnJ4IMTtCN9AXRtF4UK+WKTbPUoQv7Yi79t42DgTM3DwN6YCRCH2JcTzCEGH8yzo2/AaEwRe7SqdznMxWvigVE6UQi9rRWAMJ5Z+LMukLu8I44psWgUGHq3Bh/MYCpnCu/etFOCtqntXs4P0PcC7eHlbMe96+d6fvPMkEZ2QWKx4jjtmQvepmgE4RSkPQMghld7xq/Wgox78u4jO+ymQrynjqSXeLQCOCG/bpR17SIEIwfBxUn1PQJbhwAB8gwx075ULn5EYRiPlPfdooQhMJ0oKM7APFOMI7rmPg70ywkAgRd4RkR1xBmNE2vMYWA/O254dtOi7nnKeQb72PwTrQA9DF5N3hfhTm564ZGA1ei2v55GZ8fmDsBLacCLfUeY2uPS9eJs0s+Pl4H2JLRoddOBLv2Zdvx0hPXfsJpR+DMTyFYXHyPW/XVDiQt5HnVVe12iIfguA64iAj5ZkcyxTSC7LxldbWJQhHy3f6U9Mw4bVDWta78X5/zHZ1ZvLB/eOxTAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="14">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACSUlEQVR42tVVLW/jQBTsT1lquNAw1NDw4MHQwEKjKjqWgkgpqFSTgoCAAEs1iBSDSDaw5IACg4IFAYamrzNvN70rqO7UL+kijXbzIc+8efNeLi7+15fZdmLWwHIv5raWbyOOyl7MfSfR2p963zmJmgFn/3VClBDk0T2wASqnIuKNk0kxyKQEDqPERwBiPs/qvBZzU/tqaTmqNUUvNge2TuJdIIYAklsK2Pn7BHg/8bLWClmtuQ3kN506YAunhIkT+XEUmVQgoxCe7SjJIz7vAXxPEe5p8e9CTAESWG1Lpw9NylER48HxEp9fd2IhgrYnu1HSSvSkC2kQM21F3+93M+n7hXRNJg/FVLrj/G0h7K/ZgByVm2tUf1VLvPb9jYEIVdvQc62+8raTSFuB36QgnopIehCtugaxe1pJXV2CPJP6cCk9RPB8TRzSfE63ksEB2s/TIlQWbYgA7W8zauhiEvMOy9NB5Cdsn55EhtMdsFIHujZ7caI/i3ici6Mz7R+OmPy3iJc7e7/1ghRM/wHEaAExgRusPml81dqSxtvv3AJYyUM59WLKmZ4URDfebgWJuWQw0xo8JF5FBEERgmhJvAntKLwLtJ9tmZ98MAeQ9/1csa9mWvkr6/8aRgrIw/ghE+aXzwZFkJzLh6N4DiNd0Gy0fgJsOyi5Vt1kHxtHdYFCrvY+F2H+mRcVUPgpoAiK4fhlCCPFfN5CKoMja58PToi2IHd6TxlEWM8tyF3wdX9CIaRRWE5cSpyS80L6vn9DuBGdhSCY733OM8S85MLHyAKwAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="15">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACUElEQVR42tWVr4/icBDF90/BViIrschKZGVtJbKyFsmKS8CsQJANgoSKJosgAdGkFYgKxFcgKlZUnHn33rTcj+Tukg27myzJZL5pE+Yzb958+/DwVX9eXsNb1xg8HjH4dsSnFfZPDYYbh+HWwc8ay17uMMgIsyk/DkSFgiswqYDgDPhFi2FGiJXDaNvAXzsD8/aNwbxb4Rhd0dABowIYXxg8T3iWAqN9izFhgopAVMdnttHcq0h9TlGeEqTfgbhhMEeXXwoEVa9C37kvFTSSvIGnsQjiqTSIN4GosOJ5E+Eli7DLYhwPU4QESF8JcaICWWtgAc+SXmMY8zwh2Chv4T/WnU96rwxuMP8bTVkkLBijuc4tu8vc4rifoq5SO6ccx5wR0Qth3SmhEajokBBDFpIqgtKzUa+Ktyw7RRQrgix/25qySPGyjyEAhXMzBiHy2ELvd9sIDZ9JifI0tWft6wLhpRvLWN5Q508svGZRjkHFBSejevtuU7xVr0T+FxD9uTpVluyCUtcah0DcefYT4gbcNHPLET0SUZGw6sx5U8WUkFkPrSljAMvS4t+jqLqOZUABlFViZ1OHEILZyRvb2IAnGoU2hF2PtRFLZ/m2mjYCSq8NeZMZ3WVm81dBU4dKuHpm8r/kEfQ+pAeStjNjzDxip/KFyX5g8XO3mt4998IfW0GgI9UwqJsqBJm6zpC2CUV/UfX3wbtdSCr2vA5RmwqJjUkQzi2QoAdQcZoy4Fg+7Eo2BWhS2wyubKS7Ad3FFBT4vI+SjNhcF2bU8I6OfwC3fgnHe4r96AAAAABJRU5ErkJggg==
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="16">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACP0lEQVR42tWVoW/qUBjF96dgkZWVtUjkZGVt5WQlFgnuzSAQhCCagGgCgqQIklYgKhAVExUTFc+cd853C1u2ZAvLtuTd5MulN4Tf+c53brm7+19XUSbY7mIs5yHCCvg1cH0eIyc43z+gOo0QPQMRgGHJOv6gkG0Wo6rGKA4J1mmEup7QgQTRGUgk4AQMKGDI5+F3OnLpNCe4Pk/MdlVxTKwEe6g7AQfg/ug++2WL/qL6uhBB1a06VcfFcWRVVSMbQfv8ByFBSUsgwTH3YN8a3NvU6O9r+KcW3qFBf3ODEIFVy1WE7SbCeuPmne/oRDlC8zQxmED3ZWc7HQiyBoNdi8FjbXuQNvAXFLFpzAmJ6K0KfGj1ljABzF6mfE0BF3B1Gl+t1zi8FUFnZ3vUsDiKsHQjCCQga01AwGd918voypwiHgurVx2PsVyErkvVwe2y30TQfgF1LkHrNL6OI6C9yoBcGAg4I3DBzumGHJAQ6z4lWHURsGDNJSTHu3v9Ou0G4q6zmkIb5sGc6pxomsm1W2/OLgnzj13nOptV1n1vRtiqctC34Hej6NJe12PbJUwiwr/AiHdeYi5OKHw2Z8LtBpzgrJ86cJ8C5EBPJRG3hFEgzV9BiznjmAJ05/WsAOrFo249ChDMV+gYOC9zoTPLVy54H4bvsxXDBStU0jVrBa92ZwJqBAPZrquXujKbP0v9rUv2D59eHLDuBZ0WNmsl3Cr9ZvDb5TP5dq3Slztu3XbJ/rU/pX7WBWyaW331d/4B+NMC1rjos6YAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="17">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAB/klEQVR42tVVLU/DUBTlp9RWPln57CRykv/BD0AiEBMTFTUVFRUVT1Q02QRJJ0geAlGBmEBM1l7Oue9t3QKEAIOEJSdru6Tn4577dnHxXz+J85JUwGIlSd7LnxGn3VZSEKfNIEnpAxyuW6DxvyeEhGkL8gLk9SBmsxPjtkign7AXc04hhsTlIFkLQgjIHkd1m+ZeTMM0hjAKkjOFLt7/NJH5g4i9H5U4c0AH1FsxAMnoPtuMYiCAiSQQlCCdpPaTIKLxXxOSIdr5KDJ7BNaj2HonlgLg3BZbsTlEFENAOUGToAAKuVkF8iKKUUG9pvIx8RMI4Mg2ELARmTvRa5JbprAOCdh21BRMHZxrN5jCnpwduAPZMvaCz3hdHYkpjrbGPOzk6kU0UkZOzF9CAiTht30OYmbdKJe4T10YhfYAwnQzulBQdc0e7AUcp1BFkfl7QjhTvExFIHoWTIFnhEUnMvxmMIqT+FlEjmDpA+Gyn0axOHLu4tpGAR+OYob4OW8S0Wm6DiIOZYTzzEVxFMAk3DA5u169Xc0ypvKVMnI0PHA0mQpkOVaxjNFzLdEXjiAtYtylP0UXV/Kn54I6Zho8fBb+UDyeCdp8kt/2U+v3rtsQ+flOQo6CxC4Qq3tGftufJlD1it87kqvhICCph+n4/axcZ/83LOKOL0LpvvueV8nivBN16X1tAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="18">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACQklEQVR42tVUr4vjQBTePyV25MjI2pGRKyPXVlZWHeXEQc1CKwoXU46IslQELiLQiEAiFhKxYkTFiBURJ2Lffe9NuqUcy+4t3YMrPObHy/u+731v6M3N//pruzkdyik9pDH1zyv6Z8TuuKQaxHU1I/u0EHI5NzPE/POEHIopWbukFiQ/sztybgUHsM+nIoLz7eMcMZP1asTSqV1Id+64Ets5PBnfLX1eHFnSwy6WvQRc+TBxVBHdOqJJM1DUEMgWEkIGATT8EBJxplu8CGJxYdmTeSQK85506Ujl9v1C+ONJNwjAbUd094QVAnTTy8yHX98pRm76TBRb5DmOOBORacaakZwjQt4AQ2cQUrnXhXBSui2JogLgCFNACM6mHASIhbAb8dETRQBmoVHnv2FCgxy7x2Fy1FcDTYCjcwjYW4kgqc9CAnQcpC0F25ZUaiksejgA68cuWH004Jz1XhyfIWbCOQCHEM3fh52/03uHPe53jvTWkt7ZC3KFu2DTes4LIYkXcRKiuBBgGvNUGfYF9hCnKggEuMlAkjovmDtMrIRKfK0QrWtPhHMADMHf+Hh1FPLRHlGMznChKIcwiFTrlnTqxYkLbDG7hRmHuDNJL/fsgNr4ZgST8fbt+x+jFMAiKb7H+rWWlUHFVjgjXbPN6XmMvAppMUb2l8R/CGHyk31fDhckMiKMQW/8/kUsr7mvudofkgCyIzxffiNrPw71rZb17EAt8Wl/yULCD/b0wLbjO3nrcV1dCLvBpPcHsf2jOL8BAQzkc/Yiwa4AAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="19">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACN0lEQVR42tVUr4vjQBjdPyU2MjKyNjKyMjI2srKyticWeuJga1ZUlFIRaEShEYVUFFJREVExomLEidjv3vsmuestt+zt0j24wGPIJOT9+iYPD//rVZ/GsiszWS0SsdeZ/DNic5lKBeJqP5LmPFFyvT+MgPHnCdltM2maqdQg2eSpGDNDAiPZFJmK4PP6OAZGut6NuHdKd+Yy09gJR8a9KYRNukSmslomP58xlQ8Tk5Ru6ZSO6+NEoWQQ0H5/cmKYzGmiZHz/Vlx0FIkakcGplXcRE6t1Krsi1YjprkLcJOo7J9ltMq19UnFZK5LsgatIDAFhaSXctxLkRoKjfV0IFe+6PtUBpnwDAT0x4+3dVTfJWDtzwwgByQWEcJucRTE8AFhTiBlAzABCvEUt3rz6JeRlbMZMf4ud5CrkMNJ1k2e6zwr4LgXzwynIh3R+AlHZSrgwEkFAWFjxSyNe3oi/aMR7hoCt+YOQ7lzfTnvfO/cMErAQxjpINkbUKd3ZziW6jvJWBrlVQSHWaIv7A+IvDERYR/7N4fWp7zq9Pd9OzFhiOIpLAC4jkMRwPARx0rq9AdwmhUhAp18r8ee1I1XX2FvXfz+M/fmmY/aZwbXGzMlmtMdWBUWIm+CQMWKNfN1IcLAu6vydxC8vuoyvzmGMiOOz67hH8Ixuv8DtEqRriJh3PRduvdsPiY5CdMl+6dBHjwHIgsdaSSmA8BaV4tN+yXTlE+x32bllzG8N192F4Agp6eMO+LjjH9KwCMHNaVe4AAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="20">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACS0lEQVR42tWVrY/bQBDF708oDDQNLDQMDTQMDDQNDAyqogOVUlApB066kFNlUBAQKQaRHBDJBpEcELDggMGBBQELSqbvza7dkqq9T6mRVjMZW57fvHlxrq7+10/0tRSe/qqWflbLuzXu3ZTC8yEDAJpH97VCtPmbNY73jQyOTuLcaiNO3/tUSO9zIdGX0tcygiDPzeuBDCuR4aNIckLMnQyPIh83jfRvPET/3kjvNgDwAErjxuj1Zzeuq5nYx6XEBo2tkzEghoAYPYjElZX4e6NAcdZIcna6EsLye3Tt16QgVGT3BEXKairlYSrFbiL1aSbNw0JSNJ2Jbz42TtUYACIBHFcywHrGFsqssaY96ifXrUnXAoBo/RePsBmbc+oS0xNgu0kBsBRznisUr4/ggxQw6QUwZ5FJgwigMSJ9kmQEcgoTXRedObsY8l+Nj74ZH25Oc6H0rLUQxX4ixsxVCR5jFmrGFBMPDk5ssxBx33RqrQUFqFD/tvZeWQU16JnMeH9QldVvHmkBOGkbGzQrA5CCBV9Ye6eqWLuUbZ5qznsmlVeCKxv9CMbdI9/hHJDvCAZ1APfnVRznOjVXwmmZl/upfrfNUtVwAOBqFKxVC/c1UENVRG3qAHAOigBoAFXio/13M1JuNSNhQiPNK++VzqyoucsdIP2qOAAPvZJcvAoJlHnRz7Ez5TrtDMlIheowMUF4j9G1ecVYf7UXkkp98A9ujanmPczUK9vcK8UV8PqbvZKpRvueoCGpQFvj9O/2p6QK5JMg9/zZjX8CTEsEAZI+lIoAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="21">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABfElEQVR42tVVoW7DQAw9WFhYGBoYGBg2RQVTpIJWChgoKCgYGBgYGNh33+7Zfo6jVZuqtJtW6ZSrk/g9Pz9fUvqvv+a4yt3zWlZb9r8H/JQyloC/bHK90z3jdwPuATYmXfuUh7cq91glPrxXEmtGXYjdvOL2vM7Vg17xHyBC6lH3iEENEMEziIPMoooJHsGkSiMEHwCUpHgPRBGnQlcpcglY9pYM++GjFhB6AUT6181U/W56nx6R575TREBLIoJLT01WVigKWGIkrLZJgU36ejt5wL1R7uEqhJHfMGbAsYdIgORkLjFUaNXENpCcE9wnrz7mxfN471CUi36CkrO5FnkL2/akgN5bEjCJmbCxieBe5I/KnVZfvMF8P7reWzJOQKiAxGKlVMKNauZ0HwRvXDd+RoDyM4bKOH4wJ/ZSrfUYvujCO7i/6ByIhoqOjj3mNOAqbTRv3O4kLJV2Z03uUzLqFMzGDa0oz971W+BngynDNjR/8VGSM2HB2f8JRvClN9IGbE8AAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="22">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACO0lEQVR42tVVLW/CUBRFIpGVWGRlZW1lJRJZ26CqlmZiaeaKICmChBlCEM2CIAGxpAiSIkiKmKhAIBD8hbt77uN1H8m+P5ItuenbbXPPueect9Vq//XHvG6SOTBV8fnPgJu9JqHaU5vMsUnNiwaZQ5N0/9eAraFFxrUh1bo0yGYC1tQi9HFGT79H78eAHR6GzVqDFvljl8y+2hbbA6h+UVdKcK9x2SDjypBvG6wMyHwZuLyPqNiEdDgktJh7VGwjqbKMKkLugBXotSictylIXQHEu3q3TgYrohX6lCIARt3OPMqWngzO1wHlq4CiZYdOx4TPvpCL+XdUtg1psgvkDGLIBYgAGKogIwjrm4oUPCRb+gKAc7ELacEEAFzuQCqmDMD8Ln+izGTLxFiBZOWR0+c8MFBn6kghGyBgpzbVujW5LahnYcXg27QjWyZzfm4COWNDkOiwzABEH4QWc7+yI7vzheDplJC2zLuxyR1blKxZvZlbWYOs2DO7sg+EnNR5JAIwbIihZRkrAls1FL3ojuXnrUFOKwHgOG0zMV/6IAF75MyqhWwJVIKqL9V91QqRd5+wxLE88bEiw33YwGS0EgDRNmk1vIEt/cM+lsCisjMp1IfDqAfK87zdAcqI9J7YhuunCcp3sO9sleSGS7behN+7jtWtWAWSBwWmVBG7diqYCduhNy02yMbo5/4gAQRbCyADx1O3IoENAaotyr+z8btEztdVhytfg0BU5eTP/inB59NxJEHFVf7qnAcPbLZlMU2ZQwAAAABJRU5ErkJggg==
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="23">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACPUlEQVR42tVVLY/iUBSdn4KtrKzEVlYikVgkshKLZMUmjBmBaAiChIomVJC0okkRiCdGPIF4YkTFmrP33DclzSa7yczOTDKT3DwohPN1z5uHh+/6115SnMo5dtsp4nOHLwO2zytUAlydFzDXJeIGGBsgbByC3HwekVMxhzErtHWK42EGa9fiQIqkBiYXIM47RLlDdJGz+SBHCNQrrQTYPq/Vdk7bpDp0JCoc5r+8E5xIIomuHUb79v1ECEq1VEoibbPUMWapEXQvG0+GzlyWGJcdEnEiIYHS6fsgM0riTUQIzNntZzjlMxxzn3dVLhTI3dZKgOSGzpBQIvsQZhbx1qkr0cEikAlzOTMh8q8doZ0nASOA2itbfhQCPbC5ru7WVwNnnFsrkc5tMC46hGdRf/Az+tEi3BpMhFh89c6MtkLkscJA8Qq7bOpVcmp/UqGS4B7UqT4noeNhfo+D9pOgkpBo4jO0kvFNwPai+lEI7L0LwV7iYCQ/Wz9/Eul7Pdx2BZKTz6wQdaKa5Pg9RkPgPqaJtCHtPDCryTjYjqnzSxkUFqOn9k7gr1H0mVq70pPEPJlUSZDMMBp9LZ/HrKPYnEj+Y9ZRiESZ9YtI4MK8bRl7e/V8raKlM2r9TGOjU3w+swIs4KFYzZNkWMNA9mF0MP9fx3sryoWq9dV7dUWcSgrpv9isVZQ9UNWFV/6hd3+vmsD9cmpE4gprp9dyLaqz9vOu5L6uWkHeCbXfDV7JdODL/ilx+dxto0Pw9/7Ob7W/DzcYBXyyAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="24">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACK0lEQVR42tVVr2/CQBTmT5hEYpGVSCwSOYmsPVnZTF3mOrGkCBIUIROEIEiKICmCpAiSTiAqJhAT/Rfevu/BwUZGlv2AZCSXu757fe/7cVcqlf/6C0dt8bqeDju9las1rj3UhMProfnAk3DS1nW9W5dGr3E5ICxeva/qqN9VJVr60p8ZiScdCWe3YnpN3XMA/5wxGQ4XRrxHT8xTS9nzebg0umYsgC2dbkNzCebm7ubnQIqNlXwVynYbSzL1wbIj/bkvASQnoLQIZTzzZTiFAjgDjHGOMey8o8/NUVOyZSDbl0i+1ZhjPPElRYMEMrNItgjA2BcLIHxOwJwKsGn2bCXZhMq8BOD8OZQUe+ncSFFESoQkcuSdbZyv8RKala8ogDWLJADAxmxSbCKVlXsZlUFh9R97jCfI57vZYgeYJJhHW1iD9TQOEJyP1wkFxqMdK/qYrQJdEzVlJHIWYpyAkqnRovnaKkMWL8tYnGU8D7SNYLivgNkYgAsHArkkwBpHBT6RjQmcGdtCAcrLAsxjYTZ2NvH5ABY55WtfZz0veP9U3bNWqLwvMVhEOo/hu1ODIMygeVCCcppBS1XgNc3U612czYrC6kj3tnyQ/qufk1fnE9lS3AbapkqBjVPJSc0Y7XRAuP+r63i4FQCUuWaYeS1dQ43PjkzzFc9G/+8+SGxyYP3Ob/V07zstcuAu9kl215UA9NotCcDuD214vT8lHkSedItDym/ET+u8AfUc39TwIyvtAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="25">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACV0lEQVR42tWVr4/iUBDHV1YiK7HISmQtElmJrCUnLlWbZsWlwbHiEhAkrCEXREMQJEVsAoKkiE1YsQKBqFjBvzA3n+k+cifuR/Z2NzmSyXt9fW++P2Zeubr6X39p3pVgHNQxaMqHATdvm0IEEwWeBdK8btjcrb8bcHvSFn/gW7RufAnnobTnbWGdOWvuPWtvrrg1bon3yZPga60W9QB5155k9z2ZrnvSuGlIb9yWRMvTUGcg80/AALjR2W0ODFpGKJl1ZJhH8m3XN9D+vGNxeEilWMVSVSM57FM5PmV/TyS8CyXMw4tK5iRNV5EsFChVpZ28I/Gk3sN6pjHcxeJ/8c0NiHGmWPfleBxKpCWCxOHxN0ToZBI6cJIxYisgHEatA13sE/E+ewJhR/D8PJKO3op0GRlgqeqrk7rwmFqUu0S4PYwXYJI4qwHDYutqtRtrqSnkim1fFmorNcfuQpOwP5nr/CmVZNk1uzdbBVnXZDb3dWkYceL4QgJylT4fHn5wBMXUFQeC25YpQsF0HctIlWNtqQfZM1KFhZI7nDJzCqtxgXfltp4DCiF6wcjoHkb6Azd+WQqS+HqlUFGq7XVzdQ3IqWLPcBapaiWiBEtVzTpjsVK1eq46qeJjZrFRUij/yfo//WhArI/vaKDhxbbRqqdgsSzyntmKGsoCgUjLVe5riwFzpHDi1deRZMRiGRsZymKJdcSlukFTey5feoC9pVp9Pk/f7oMEiFNNw1JbAN2dR3GlTjly7/ZJBpCGcs1V7pTEPjObefdhf0o03/l5Ktk2th55bZ7vlm2RtXeU/5AAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="26">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACSElEQVR42tVVr4vjQBReWVkZGRsZGRlbWVkZOXY4FU4sYcUR1nVFIRWFrikloiwVhVQUuqLQioWuOBGxIuLE/gvfve+ls+yJ+8F1e3CFIdOZl/f9mDcvV1f/6y+89RGOQ9j7HqqnFP8M2L/zwRFOQoSzEP51V+du/WLA0SSCd+vpCG48xGWMbJUgLfsodkbX3D5jPwzYlj2kyz6CcYDOp44Cci1bD5CTwKKPh4PVNc69L57GmvtY185S7GwlqLObCpk43wn4rAczoRMDBZ+uDQqZH58yHfvHFPtdiuZliL8C5lyBOS8juL3eogWeP1pMNwZakBJHEq9NgeNzJkQSbDcWdT3E8ZChWhmJN/hlcTERrXVWcq1701VLOSco96g43yTIZ33UzVATM9773NZAthyg/ppjL8CMpxMkRTfq51yfb8CpBDtlBGMCrepTdSfjCFnZgpIA/89F9VzUzUXlgzyPYvFWriKVbgWsERdevxXqAI/COVELiam8Q3KN/B9u3jlC9SQRL2KEd4GScsqdxXrWUpBWzn66TtQtKq/WtlUnie2sLT4qbV4KtV7JSAyfWh9C5Lf33AHTQnuq/EpedjfCjOU4xD0jZNiMqJJWVyt7AhfFda6jWCZK8Afr/6ThENTdax6PI+WPfCSjqK0DGcYROKR6Y0ia75CIqj5kOKvzMRmPhiRYF60rbS/gvtaNtGV1TBzSYxyF6tTHdUJew1GgyQnqOl8sDed9S2YMYy/6LXjrDXRGWjIBO9edy34LfvpREjLn9P7vqg9kQ79shN4AAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="27">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACMUlEQVR42tVVrW/bQBTvnzBYGDpoaBgaaGhoaHrw0HTQKkuZByK5JKoCLCsgkg0iOSBSAiI5YMCgwKDAYKD07f3e+bwUbNW6dNIsPeWcD/8+73Jz879ezdlQvVNUrCJS+Yz+GXD3NKcDAx/2mtpvCfXPqb0/ah7zcUTqrSK9DqhhkHITU9el7ACvKyUk8HlzMjxaXq8G7JRCXfeUiu2Yx50FhCNtmwyOzKnII1nLsCvvBvYXPk3uJ1SwonQdMVhCejkTF9RqRi/fM0uGlbfnRMDgTj0Qy6qYo7FEQPKPgPU6FHCsvaVn1yufHKmkiiheTIUMnEk2Ib30mTjRczSuoEKunY+xNUz0l8B4sHc3EQAMVMZQzANAlQcUfvUpePBlrRgUD+371JaRCbhCGhZwGVu2jYWUxHS2cb0Cdsrihynd3t2St2DVXz6RqUJKlgGVUMM/KtnWYh1LHBgohM2OhI1AS0Evi+mcaB0J/m7H98UlEad+mk/Ju/8spAyr1Kw2Yrs7FOxoM3U5A7hmUvNVOD543KKsFOrFJZAZdovslLP5fRRuQAqOmDzkCSjdKy5XJPGUG1u+ciibcwPvF0zC7Q5MPZB6Zf1bl+tBynmiZI87tv+oButj2W5SMFYj1m/dYWSjAjmMqP6bAwq2YhAF8j2MYEbsRExSMLxf/VTaHLUU86pn/6j6ZLeVIwGFst+5J47cx/0JnaxSV67DHgTsDrjqEfzWhZz750yajiP4vc/5ASZt46J2q67KAAAAAElFTkSuQmCC
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="30">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAF0lEQVR42mNgGAWjYBSMglEwCkbBSAcACBAAAb475JcAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
<tile id="31">
|
||||
<image format="png">
|
||||
<data encoding="base64">
|
||||
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAF0lEQVR42mNgGAWjYBSMglEwCkbBSAcACBAAAb475JcAAAAASUVORK5CYII=
|
||||
</data>
|
||||
</image>
|
||||
</tile>
|
||||
</tileset>
|
||||
38
samples/YAML/Ansible.YAML-tmLanguage
Normal file
@@ -0,0 +1,38 @@
|
||||
# [PackageDev] target_format: plist, ext: tmLanguage
|
||||
---
|
||||
name: Ansible
|
||||
scopeName: source.ansible
|
||||
fileTypes: []
|
||||
uuid: 787ae642-b4ae-48b1-94e9-f935bec43a8f
|
||||
|
||||
patterns:
|
||||
- name: comment.line.number-sign.ansible
|
||||
match: (?:^ *|\G *)((#).*)
|
||||
captures:
|
||||
'1': {name: comment.line.number-sign.ansible}
|
||||
'2': {name: punctuation.definition.comment.line.ansible}
|
||||
|
||||
- name: storage.type.ansible
|
||||
match: (\{\{ *[^\{\}]+ *\}\})|(\$\{[^\{\}]+\})
|
||||
|
||||
- name: keyword.other.ansible
|
||||
match: \- (name\:|include\:) (.*)|(^(- |\s*)\w+\:)
|
||||
captures:
|
||||
'2': {name: string.quoted.double.ansible}
|
||||
|
||||
- name: variable.complex.ansible
|
||||
contentName: string.other.ansible
|
||||
begin: (\w+)(=)\"?
|
||||
beginCaptures:
|
||||
'1': {name: entity.other.attribute-name.ansible}
|
||||
'2': {name: text}
|
||||
end: \"?\s
|
||||
patterns:
|
||||
- include: $self
|
||||
- name: constant.other.ansible
|
||||
match: .
|
||||
|
||||
- name: string.quoted.double.ansible
|
||||
match: ^(\[[0-9a-zA-Z_-]+(((\:)children)*)\])
|
||||
captures:
|
||||
'2': {name: variable.parameter.ansible}
|
||||
16
samples/YAML/source.r-console.syntax
Normal file
@@ -0,0 +1,16 @@
|
||||
---
|
||||
name: R Console
|
||||
fileTypes: []
|
||||
|
||||
scopeName: source.r-console
|
||||
uuid: F629C7F3-823B-4A4C-8EEE-9971490C5710
|
||||
patterns:
|
||||
- name: source.r.embedded.r-console
|
||||
begin: "^> "
|
||||
beginCaptures:
|
||||
"0":
|
||||
name: punctuation.section.embedded.r-console
|
||||
end: \n|\z
|
||||
patterns:
|
||||
- include: source.r
|
||||
keyEquivalent: ^~R
|
||||
512
samples/xBase/sample.prw
Executable file
@@ -0,0 +1,512 @@
|
||||
|
||||
/**
|
||||
* This is a sample file for Linguist.
|
||||
* It's written in AdvPL, an xBase language.
|
||||
*
|
||||
* Author: Arthur Helfstein Fragoso
|
||||
*
|
||||
* This script handles the integration between a financial institution
|
||||
* and two other companies in the process of creating Installment Bills for
|
||||
* customers.
|
||||
*
|
||||
* The functions are called from the ERP Protheus TOTVS.
|
||||
*
|
||||
**/
|
||||
|
||||
|
||||
|
||||
#Include "TOPCONN.ch"
|
||||
#include "tbiconn.ch"
|
||||
#Include "Protheus.ch"
|
||||
#Include "rwmake.ch"
|
||||
#Include "FileIO.ch"
|
||||
#Include "json.ch"
|
||||
#Include "utils.ch"
|
||||
|
||||
|
||||
////////////////////////
|
||||
// Faturando (Reparcelando)
|
||||
// FA280
|
||||
// FA280_01
|
||||
//
|
||||
|
||||
User Function FA280()
|
||||
|
||||
//Executado uma vez para cada parcela
|
||||
|
||||
If cEmpAnt == '06'
|
||||
|
||||
SE5->(dbSelectArea("SE5"))
|
||||
|
||||
cSet3Filter := "SE5->E5_FATURA == SE1->E1_NUM"
|
||||
|
||||
SE5->(dbSetFilter( {|| &cSet3Filter }, cSet3Filter ))
|
||||
SE5->(dbGoTOP())
|
||||
|
||||
aOrig06Tit := {} // = Todos os Titulos que serão reparcelados
|
||||
nTotal := 0
|
||||
|
||||
While SE5->(!EOF())
|
||||
AADD(aOrig06Tit, {SE5->E5_PREFIXO, SE5->E5_NUMERO, SE5->E5_VALOR})
|
||||
nTotal += SE5->E5_VALOR
|
||||
SE5->(dbSkip())
|
||||
End
|
||||
|
||||
aNovoTitulo:= {;//{"E1_FILIAL" ,SE1ORIG->E1_FILIAL ,Nil},;
|
||||
;//{"E1_PREFIXO" ,SE1->E1_PREFIXO ,Nil},;
|
||||
{"E1_NUM" ,SE1->E1_NUM ,Nil},;
|
||||
{"E1_TIPO" ,SE1->E1_TIPO ,Nil},;
|
||||
{"E1_PARCELA" ,SE1->E1_PARCELA ,Nil},;
|
||||
{"E1_NATUREZ" ,SE1->E1_NATUREZ ,Nil},;
|
||||
{"E1_CLIENTE" ,SE1->E1_CLIENTE ,Nil},;
|
||||
{"E1_LOJA" ,SE1->E1_LOJA ,Nil},;
|
||||
{"E1_NRDOC" ,SE1->E1_NRDOC ,Nil},;
|
||||
;//{"E1_X_COD" ,SE1->E1_NATUREZ ,Nil},;
|
||||
{"E1_EMISSAO" ,SE1->E1_EMISSAO ,Nil},;
|
||||
{"E1_VENCTO" ,SE1->E1_VENCTO ,Nil},;
|
||||
{"E1_VENCREA" ,SE1->E1_VENCREA ,Nil},;
|
||||
;//{"E1_VALOR" ,SE1->E1_VALOR ,Nil},;
|
||||
;//{"E1_SALDO" ,SE1->E1_SALDO ,Nil},;
|
||||
;//{"E1_VLCRUZ" ,SE1->E1_VLCRUZ ,Nil},;
|
||||
{"E1_PORTADO" ,SE1->E1_PORTADO ,Nil},;
|
||||
{"E1_FATURA" ,SE1->E1_FATURA ,Nil},;
|
||||
{"E1_X_DTPAV" ,SE1->E1_X_DTPAV ,Nil},;
|
||||
{"E1_X_DTSAV" ,SE1->E1_X_DTSAV ,Nil},;
|
||||
{"E1_X_DTTAV" ,SE1->E1_X_DTTAV ,Nil},;
|
||||
{"E1_X_DTSPC" ,SE1->E1_X_DTSPC ,Nil},;
|
||||
{"E1_X_DTPRO" ,SE1->E1_X_DTPRO ,Nil},;
|
||||
{"E1_NUMBCO" ,SE1->E1_NUMBCO ,Nil},;
|
||||
{"E1_X_DUDME" ,SE1->E1_X_DUDME ,Nil},;
|
||||
{"E1_X_TIPOP" ,SE1->E1_X_TIPOP ,Nil},;
|
||||
{"E1_X_DTCAN" ,SE1->E1_X_DTCAN ,Nil},;
|
||||
{"E1_X_MOTIV" ,SE1->E1_X_MOTIV ,Nil},;
|
||||
{"E1_X_DESPC" ,SE1->E1_X_DESPC ,Nil},;
|
||||
{"E1_NUMNOTA" ,SE1->E1_NUMNOTA ,Nil},;
|
||||
{"E1_SERIE" ,SE1->E1_SERIE ,Nil},;
|
||||
{"E1_X_DEPRO" ,SE1->E1_X_DEPRO ,Nil},;
|
||||
{"E1_X_TPPAI" ,SE1->E1_X_TPPAI ,Nil},;
|
||||
{"E1_X_CGC" ,SE1->E1_X_CGC ,Nil},;
|
||||
{"E1_XTPEMP" ,SE1->E1_XTPEMP ,Nil},;
|
||||
{"E1_X_CTRIM" ,SE1->E1_X_CTRIM ,Nil}}
|
||||
|
||||
StartJob("U_FA280_01",getenvserver(),.T., SE1->E1_PREFIXO ,SE1->E1_NUM, SE1->E1_TIPO, SE1->E1_VALOR, aOrig06Tit, nTotal, SE1->E1_PARCELA, aNovoTitulo)
|
||||
|
||||
SE5->(dbClearFilter())
|
||||
|
||||
EndIf
|
||||
|
||||
Return nil
|
||||
|
||||
|
||||
User Function FA280_01(cE1PREFIXO, cE1NUM, cE1TIPO, nE1Valor, aOrig06Tit, nTotal, cE1PARCELA, aNovoTitulo)
|
||||
Local nValPar := nil
|
||||
Local aTit05 := {}
|
||||
|
||||
RpcSetType(3) // Nao consome licensa
|
||||
|
||||
//Prepare Environment Empresa "01" Filial '0102'
|
||||
// Muda de empresa
|
||||
While !RpcSetEnv('01', '0102',,,,GetEnvServer(),{})
|
||||
Sleep(400)
|
||||
End
|
||||
|
||||
nFileLog := u_OpenLog("\Logs\FA280_"+dToS(dDataBase)+".log")
|
||||
|
||||
fWrite(nFileLog,"----- FA280 -----"+CRLF)
|
||||
|
||||
fWrite(nFileLog,cE1NUM+CRLF)
|
||||
|
||||
nParcelas := round(nTotal/nE1Valor, 0)
|
||||
|
||||
cUltima := '0'+ chr(64+nParcelas)
|
||||
|
||||
fWrite(nFileLog,"valor das parcelas: "+ cvaltochar(nE1Valor) +CRLF)
|
||||
fWrite(nFileLog,"parcelas: "+ cvaltochar(nParcelas) +CRLF)
|
||||
fWrite(nFileLog,"parcela atual: "+ cE1PARCELA +CRLF)
|
||||
fWrite(nFileLog,"ultima parcela: "+ cUltima +CRLF)
|
||||
|
||||
n0102total := 0
|
||||
n0105total := 0
|
||||
|
||||
//Loop entre todos os Titulos que serão Reparcelados
|
||||
|
||||
For nI := 1 To len(aOrig06Tit)
|
||||
|
||||
fWrite(nFileLog,"E5_NUMERO: "+aOrig06Tit[nI][2] +CRLF)
|
||||
|
||||
cQuery := "select * from SE1010 where E1_PREFIXO = '"+ aOrig06Tit[nI][1] +"' and E1_NUM = '"+ aOrig06Tit[nI][2] +"' and E1_TIPO = 'FAT' and D_E_L_E_T_ <> '*'"
|
||||
|
||||
fWrite(nFileLog,cQuery +CRLF)
|
||||
|
||||
If select("SE1ORIG") > 0
|
||||
SE1ORIG->(DbCloseArea())
|
||||
endif
|
||||
TcQuery cQuery New Alias 'SE1ORIG'
|
||||
dbSelectArea("SE1ORIG")
|
||||
SE1ORIG->(DBGOTOP())
|
||||
|
||||
While SE1ORIG->(!EOF()) //Loop entre as duas filiais: 0102, 0105
|
||||
fWrite(nFileLog,"SE1ORIG loop: "+SE1ORIG->E1_FILIAL +CRLF)
|
||||
cFilAnt := SE1ORIG->E1_FILIAL
|
||||
|
||||
//Faz a baixa
|
||||
if alltrim(SE1ORIG->E1_STATUS) == 'A'
|
||||
fWrite(nFileLog, SE1ORIG->E1_FILIAL+" : Fazendo baixa" +CRLF)
|
||||
|
||||
aBaixa := {{"E1_FILIAL" ,SE1ORIG->E1_FILIAL ,Nil},;
|
||||
{"E1_PREFIXO" ,SE1ORIG->E1_PREFIXO ,Nil},;
|
||||
{"E1_NUM" ,SE1ORIG->E1_NUM ,Nil},;
|
||||
{"E1_TIPO" ,SE1ORIG->E1_TIPO ,Nil},;
|
||||
{"E1_PARCELA" ,SE1ORIG->E1_PARCELA ,Nil},;
|
||||
{"E1_DESCONT" ,SE1ORIG->E1_DESCONT ,Nil},;
|
||||
{"E1_JUROS" ,SE1ORIG->E1_JUROS ,Nil},;
|
||||
{"E1_MULTA" ,SE1ORIG->E1_MULTA ,Nil},;
|
||||
{"E1_VLRREAL" ,SE1ORIG->E1_VLRREAL ,Nil},;
|
||||
{"AUTMOTBX" ,"FAT" ,Nil},;
|
||||
{"AUTDTBAIXA" ,date() ,Nil},;
|
||||
{"AUTDTCREDITO",date() ,Nil},;
|
||||
{"AUTHIST" ,"Bx.Emis.Fat."+cE1NUM,Nil},;
|
||||
{"AUTVALREC" ,SE1ORIG->E1_VALOR ,Nil}}
|
||||
|
||||
|
||||
lMsErroAuto:=.F. //reseta lMsErroAuto
|
||||
MSExecAuto ({|x,y| FINA070(x,y)},aBaixa, 3)
|
||||
|
||||
If lMsErroAuto
|
||||
|
||||
fWrite(nFileLog,SE1ORIG->E1_FILIAL+" : Não foi efetuada a baixa do titulo : "+CRLF+ MSErroString()+ CRLF + tojson(aBaixa) + CRLF)
|
||||
return
|
||||
else
|
||||
|
||||
RECLOCK('SE5',.F.)
|
||||
E5_FATURA := cE1NUM
|
||||
E5_FATPREF:= cE1PREFIXO
|
||||
//E5_LA = S
|
||||
//E5_MOEDA = ''
|
||||
//E5_TXMOEDA = 1
|
||||
MSUNLOCK()
|
||||
RECLOCK('SE1',.F.)
|
||||
E1_FATURA := cE1NUM
|
||||
E1_FATPREF:= cE1PREFIXO
|
||||
E1_TIPOFAT:= cE1TIPO
|
||||
E1_FLAGFAT:= 'S'
|
||||
E1_DTFATUR:= dDataBase
|
||||
MSUNLOCK()
|
||||
|
||||
fWrite(nFileLog,SE1ORIG->E1_FILIAL+" : baixa feita" +CRLF)
|
||||
endif
|
||||
|
||||
endif
|
||||
|
||||
//calcula valor total de cada filial para poder calcular a Fatura
|
||||
|
||||
if SE1ORIG->E1_FILIAL == '0102'
|
||||
n0102total += SE1ORIG->E1_VALOR
|
||||
elseif SE1ORIG->E1_FILIAL == '0105'
|
||||
n0105total += SE1ORIG->E1_VALOR
|
||||
else
|
||||
fWrite(nFileLog,"Programa nao preparado para a filial "+SE1ORIG->E1_FILIAL +CRLF)
|
||||
endif
|
||||
|
||||
SE1ORIG->(dbskip())
|
||||
|
||||
End
|
||||
|
||||
Next nI
|
||||
|
||||
cFilAnt := '0102'
|
||||
|
||||
fWrite(nFileLog,"Total 0102: "+cvaltochar(n0102total) +CRLF)
|
||||
fWrite(nFileLog,"Total 0105: "+cvaltochar(n0105total) +CRLF)
|
||||
|
||||
n0102val := round(nE1Valor * n0102total/nTotal, 2)
|
||||
n0105val := nE1Valor - n0102val
|
||||
|
||||
aFili := {}
|
||||
|
||||
if n0102total > 0
|
||||
AADD(aFili,'0102')
|
||||
endif
|
||||
|
||||
if n0105total > 0
|
||||
AADD(aFili,'0105')
|
||||
endif
|
||||
|
||||
For nI := 1 To len(aFili)
|
||||
|
||||
cQuery := "select COUNT(*) as QUANT, SUM(E1_VALOR) as TOTALINC from SE1010 where E1_NUM = '"+ cE1NUM +"' and E1_FILIAL='"+ aFili[nI] +"' and E1_PREFIXO = '"+ cE1PREFIXO +"' and D_E_L_E_T_ <> '*'"
|
||||
|
||||
If select("PARC") > 0
|
||||
PARC->(DbCloseArea())
|
||||
endif
|
||||
TcQuery cQuery New Alias 'PARC'
|
||||
dbSelectArea("PARC")
|
||||
|
||||
//verificamos se estamos na ultima parcela
|
||||
if PARC->QUANT == nParcelas -1 //QUANT = quantidade de parcelas incluida
|
||||
fWrite(nFileLog,"Ultima Parcela"+CRLF)
|
||||
//o valor desta será o valor que resta
|
||||
nValPar := SE1ORIG->E1_VALOR - PARC->TOTALINC
|
||||
|
||||
if aFili[nI] == '0102'
|
||||
n0102val := n0102total - PARC->TOTALINC
|
||||
elseif aFili[nI] == '0105'
|
||||
n0105val := n0105total - PARC->TOTALINC
|
||||
endif
|
||||
endif
|
||||
|
||||
Next nI
|
||||
|
||||
fWrite(nFileLog,"Total 0102: "+cvaltochar(n0102total) + " -> Parcela de: "+cvaltochar(n0102val) +CRLF)
|
||||
fWrite(nFileLog,"Total 0105: "+cvaltochar(n0105total) + " -> Parcela de: "+cvaltochar(n0105val) +CRLF)
|
||||
|
||||
/////////////////
|
||||
|
||||
For nI := 1 To len(aFili)
|
||||
|
||||
if aFili[nI] == '0102'
|
||||
nValPar := n0102val
|
||||
elseif aFili[nI] == '0105'
|
||||
nValPar := n0105val
|
||||
endif
|
||||
|
||||
aTitulo := ACLONE(aNovoTitulo)
|
||||
|
||||
AADD(aTitulo, {"E1_PREFIXO" ,cE1PREFIXO ,Nil})
|
||||
AADD(aTitulo, {"E1_FILIAL" ,aFili[nI] ,Nil})
|
||||
AADD(aTitulo, {"E1_VALOR" ,nValPar ,Nil})
|
||||
AADD(aTitulo, {"E1_SALDO" ,nValPar ,Nil})
|
||||
AADD(aTitulo, {"E1_VLCRUZ" ,nValPar ,Nil})
|
||||
|
||||
lMsErroAuto := .F.
|
||||
|
||||
if aFili[nI] == '0102'
|
||||
|
||||
MSExecAuto({|x,y| FINA040(x,y)},aTitulo,3) //Inclusao
|
||||
|
||||
If lMsErroAuto
|
||||
fWrite(nFileLog,"Erro " + CRLF)
|
||||
fWrite(nFileLog,"Erro ao incluir titulo: "+CRLF+ MSErroString()+ CRLF + tojson(aTitulo) + CRLF)
|
||||
return
|
||||
else
|
||||
fWrite(nFileLog,"Sucesso "+ CRLF)
|
||||
fWrite(nFileLog,"Titulo incluido: "+ aFili[nI] +" : " + cValToChar(nValPar) +CRLF)
|
||||
endif
|
||||
|
||||
elseif aFili[nI] == '0105'
|
||||
fWrite(nFileLog,"Salvando titulos 05 para o final "+aFili[nI]+CRLF)
|
||||
//StartJob("U_JOBF040",getenvserver(),.T., SE1ORIG->E1_FILIAL, aTitulo)
|
||||
AADD(aTit05, aTitulo)
|
||||
//fWrite(nFileLog,"passou pela thread "+CRLF)
|
||||
else
|
||||
fWrite(nFileLog,"Erro, filial nao tratada "+aFili[nI]+CRLF)
|
||||
endif
|
||||
|
||||
Next nI
|
||||
|
||||
|
||||
Reset Environment
|
||||
|
||||
While !RpcSetEnv('01', '0105',,,,GetEnvServer(),{})
|
||||
Sleep(400)
|
||||
End
|
||||
|
||||
For nI := 1 To len(aTit05)
|
||||
|
||||
lMsErroAuto := .F.
|
||||
|
||||
MSExecAuto({|x,y| FINA040(x,y)},aTit05[nI],3) //Inclusao
|
||||
|
||||
If lMsErroAuto
|
||||
fWrite(nFileLog,"Erro " + CRLF)
|
||||
fWrite(nFileLog,"Erro ao incluir titulo: "+CRLF+ MSErroString()+ CRLF + tojson(aTit05[nI]) + CRLF)
|
||||
return
|
||||
else
|
||||
fWrite(nFileLog,"Sucesso "+ CRLF)
|
||||
fWrite(nFileLog,"Titulo incluido: "+CRLF)
|
||||
endif
|
||||
|
||||
Next nI
|
||||
|
||||
Reset Environment
|
||||
|
||||
fClose(nFileLog)
|
||||
|
||||
Return
|
||||
|
||||
|
||||
|
||||
|
||||
////////////////////////
|
||||
// Cancelamento da Fatura (Cancelamento do Reparcelamento)
|
||||
// F280PCAN
|
||||
// JOBF280C
|
||||
//
|
||||
|
||||
User Function F280PCAN()
|
||||
|
||||
/**
|
||||
* cFatCan - numero da fatura
|
||||
* cPrefCan - prefixo
|
||||
* cTipoCan - tipo
|
||||
**/
|
||||
|
||||
If cEmpAnt == '06'
|
||||
|
||||
StartJob("U_JOBF280C",getenvserver(),.T., cPrefCan, cFatCan, cTipoCan)
|
||||
|
||||
EndIf
|
||||
|
||||
Return .T.
|
||||
|
||||
|
||||
User Function JOBF280C(cPrefCan, cFatCan, cTipoCan)
|
||||
|
||||
RpcSetType(3) // Nao consome licensa
|
||||
|
||||
While !RpcSetEnv('01', '0102',,,,GetEnvServer(),{})
|
||||
Sleep(400)
|
||||
End
|
||||
|
||||
nFileLog := u_OpenLog("\Logs\F280PCAN_"+dToS(dDataBase)+".log")
|
||||
|
||||
fWrite(nFileLog,"----- F280PCAN -----"+CRLF)
|
||||
|
||||
fWrite(nFileLog,"E1_PREFIXO = '"+ cPrefCan +"' and E1_NUM = '"+ cFatCan +"' and E1_TIPO = '"+ cTipoCan +"'"+CRLF)
|
||||
|
||||
cQuery := "select * from SE1010 where E1_PREFIXO = '"+ cPrefCan +"' and E1_NUM = '"+ cFatCan +"' and E1_TIPO = '"+ cTipoCan +"' and D_E_L_E_T_ <> '*'"
|
||||
|
||||
If select("SE1ORIG") > 0
|
||||
SE1ORIG->(DbCloseArea())
|
||||
endif
|
||||
TcQuery cQuery New Alias 'SE1ORIG'
|
||||
dbSelectArea("SE1ORIG")
|
||||
SE1ORIG->(DBGOTOP())
|
||||
|
||||
While SE1ORIG->(!EOF()) //Loop entre todas as parcelas e filiais
|
||||
|
||||
SE1->(dbselectarea("SE1"))
|
||||
SE1->(dbSetOrder(1))
|
||||
|
||||
|
||||
fWrite(nFileLog,"dbseek" + CRLF)
|
||||
if ! SE1->(dbSeek(SE1ORIG->E1_FILIAL+ SE1ORIG->E1_PREFIXO+ SE1ORIG->E1_NUM+ SE1ORIG->E1_PARCELA+ SE1ORIG->E1_TIPO))
|
||||
fWrite(nFileLog,"Erro dbseek" + CRLF)
|
||||
Alert("Erro. Verificar F280PCAN() - dbseek")
|
||||
fWrite(nFileLog,"Erro dbseek("+SE1ORIG->E1_FILIAL+ SE1ORIG->E1_PREFIXO+ SE1ORIG->E1_NUM+ SE1ORIG->E1_PARCELA+ SE1ORIG->E1_TIPO+")" + CRLF)
|
||||
return .F.
|
||||
endif
|
||||
|
||||
cFilAnt := SE1ORIG->E1_FILIAL
|
||||
|
||||
aFatura:= {{"E1_FILIAL" ,SE1ORIG->E1_FILIAL ,Nil},;
|
||||
{"E1_PREFIXO" ,SE1ORIG->E1_PREFIXO ,Nil},;
|
||||
{"E1_NUM" ,SE1ORIG->E1_NUM ,Nil},;
|
||||
{"E1_PARCELA" ,SE1ORIG->E1_PARCELA ,Nil},;
|
||||
{"E1_TIPO" ,SE1ORIG->E1_TIPO ,Nil}}
|
||||
|
||||
lMsErroAuto := .F.
|
||||
|
||||
MSExecAuto({|x,y| FINA040(x,y)},aFatura,5) //Exclusão
|
||||
|
||||
If lMsErroAuto
|
||||
fWrite(nFileLog,"Erro " + CRLF)
|
||||
fWrite(nFileLog,"Erro ao remover o titulo: "+CRLF+ MSErroString()+ CRLF + tojson(aFatura) + CRLF)
|
||||
Alert("Erro ao remover o titulo. Verificar F280PCAN()")
|
||||
return .F.
|
||||
else
|
||||
fWrite(nFileLog,"Sucesso "+ CRLF)
|
||||
fWrite(nFileLog,"Titulo removido" +CRLF)
|
||||
endif
|
||||
|
||||
SE1ORIG->(dbskip())
|
||||
|
||||
end
|
||||
|
||||
|
||||
/////////////////////////////////////////////
|
||||
/////// Cancela as baixas
|
||||
///
|
||||
|
||||
fWrite(nFileLog,"- cancela baixas" + CRLF)
|
||||
|
||||
cQuery := "select * from SE1060 where E1_FATURA = '"+ cFatCan +"' and D_E_L_E_T_ <> '*'"
|
||||
|
||||
If select("SE1ORIG") > 0
|
||||
SE1ORIG->(DbCloseArea())
|
||||
endif
|
||||
TcQuery cQuery New Alias 'SE1ORIG'
|
||||
dbSelectArea("SE1ORIG")
|
||||
SE1ORIG->(DBGOTOP())
|
||||
|
||||
aFili := {"0102", "0105"}
|
||||
|
||||
While SE1ORIG->(!EOF()) //Loop entre todas as parcelas e filiais
|
||||
|
||||
SE1->(dbselectarea("SE1"))
|
||||
SE1->(dbSetOrder(1))
|
||||
|
||||
For nI := 1 To len(aFili)
|
||||
|
||||
cFilAnt := aFili[nI]
|
||||
|
||||
fWrite(nFileLog,"dbseek" + CRLF)
|
||||
if ! SE1->(dbSeek(aFili[nI]+ SE1ORIG->E1_PREFIXO+ SE1ORIG->E1_NUM+ SE1ORIG->E1_PARCELA+ SE1ORIG->E1_TIPO))
|
||||
fWrite(nFileLog,"dbseek nao encontrou titulo para filial "+aFili[nI] + CRLF)
|
||||
fWrite(nFileLog,"dbseek('"+aFili[nI]+ SE1ORIG->E1_PREFIXO+ SE1ORIG->E1_NUM+ SE1ORIG->E1_PARCELA+ SE1ORIG->E1_TIPO+"')" + CRLF)
|
||||
LOOP
|
||||
endif
|
||||
|
||||
nSE5Recno := u_RetSQLOne("select R_E_C_N_O_ from SE5010 where E5_FILIAL = '"+SE1->E1_FILIAL+"' and E5_PREFIXO = '"+SE1->E1_PREFIXO+"' and E5_TIPO = '"+SE1->E1_TIPO+"' and E5_NUMERO = '"+SE1->E1_NUM+"' "+;
|
||||
" and E5_FATURA = '"+SE1->E1_FATURA+"' and E5_FATPREF='"+SE1->E1_FATPREF+"' and D_E_L_E_T_ <> '*' ", "R_E_C_N_O_")
|
||||
|
||||
//Removemos os Flags de Fatura para conseguirmos cancelar a baixa pelo FINA070
|
||||
RECLOCK('SE1',.F.)
|
||||
E1_FATURA := ''
|
||||
E1_FATPREF:= ''
|
||||
E1_TIPOFAT:= ''
|
||||
E1_FLAGFAT:= ''
|
||||
E1_DTFATUR:= StoD('')
|
||||
MSUNLOCK()
|
||||
|
||||
SE5->(DbGoTo(nSE5Recno))
|
||||
RECLOCK('SE5',.F.)
|
||||
E5_MOTBX := 'NOR'
|
||||
//E5_FATURA := ''
|
||||
//E5_FATPREF:= ''
|
||||
MSUNLOCK()
|
||||
|
||||
aBaixa := {{"E1_FILIAL" ,SE1->E1_FILIAL ,Nil},;
|
||||
{"E1_PREFIXO" ,SE1->E1_PREFIXO ,Nil},;
|
||||
{"E1_NUM" ,SE1->E1_NUM ,Nil},;
|
||||
{"E1_TIPO" ,SE1->E1_TIPO ,Nil},;
|
||||
{"E1_PARCELA" ,SE1->E1_PARCELA ,Nil},;
|
||||
{"E1_DESCONT" ,SE1->E1_DESCONT ,Nil},;
|
||||
{"E1_JUROS" ,SE1->E1_JUROS ,Nil},;
|
||||
{"E1_MULTA" ,SE1->E1_MULTA ,Nil},;
|
||||
{"E1_VLRREAL" ,SE1->E1_VLRREAL ,Nil},;
|
||||
{"AUTMOTBX" ,"NOR" ,Nil},;
|
||||
{"AUTDTBAIXA" ,date() ,Nil},;
|
||||
{"AUTDTCREDITO",date() ,Nil},;
|
||||
{"AUTHIST" ,"" ,Nil},;
|
||||
{"AUTVALREC" ,SE1->E1_VALOR ,Nil}}
|
||||
|
||||
|
||||
lMsErroAuto:=.F. //reseta lMsErroAuto
|
||||
MSExecAuto ({|x,y| FINA070(x,y)},aBaixa, 5)
|
||||
|
||||
If lMsErroAuto
|
||||
fWrite(nFileLog,SE1->E1_FILIAL+" : Não foi efetuada o cancelamento de baixa : "+CRLF+ MSErroString()+ CRLF + tojson(aBaixa) + CRLF)
|
||||
return
|
||||
else
|
||||
fWrite(nFileLog,SE1->E1_FILIAL+" : cancelamento de baixa feito" +CRLF)
|
||||
endif
|
||||
|
||||
Next nI
|
||||
|
||||
SE1ORIG->(dbskip())
|
||||
end
|
||||
|
||||
Reset Environment
|
||||
|
||||
Return
|
||||
@@ -5,18 +5,6 @@ set -ex
|
||||
# Fetch all commits/refs needed to run our tests.
|
||||
git fetch origin master:master v2.0.0:v2.0.0 test/attributes:test/attributes test/master:test/master
|
||||
|
||||
sudo apt-get update
|
||||
|
||||
script/vendor-deb libicu48 libicu-dev
|
||||
if ruby -e 'exit RUBY_VERSION >= "2.0" && RUBY_VERSION < "2.1"'; then
|
||||
# Workaround for https://bugs.ruby-lang.org/issues/8074. We can't use this
|
||||
# solution on all versions of Ruby due to
|
||||
# https://github.com/bundler/bundler/pull/3338.
|
||||
bundle config build.charlock_holmes --with-icu-include=$(pwd)/vendor/debs/include --with-icu-lib=$(pwd)/vendor/debs/lib
|
||||
else
|
||||
bundle config build.charlock_holmes --with-icu-dir=$(pwd)/vendor/debs
|
||||
fi
|
||||
|
||||
# Replace SSH links to submodules by HTTPS links.
|
||||
sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules
|
||||
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -ex
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
mkdir -p vendor/apt vendor/debs
|
||||
|
||||
(cd vendor/apt && apt-get --assume-yes download "$@")
|
||||
|
||||
for deb in vendor/apt/*.deb; do
|
||||
ar p $deb data.tar.gz | tar -vzxC vendor/debs --strip-components=2
|
||||
done
|
||||
3
test/fixtures/Data/Modelines/ruby2
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/* vim: set ts=8 sw=4 filetype=ruby tw=0: */
|
||||
|
||||
# Please help how do I into setting vim modlines
|
||||
3
test/fixtures/Data/Modelines/ruby3
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/* vim: set ft=ruby ts=8 sw=4 tw=0: */
|
||||
|
||||
# I am not good at humor
|
||||
@@ -3,14 +3,22 @@ require "minitest/autorun"
|
||||
require "mocha/setup"
|
||||
require "linguist"
|
||||
require 'color-proximity'
|
||||
require "linguist/blob"
|
||||
require 'licensee'
|
||||
|
||||
def fixtures_path
|
||||
File.expand_path("../fixtures", __FILE__)
|
||||
end
|
||||
|
||||
def fixture_blob(name)
|
||||
name = File.join(fixtures_path, name) unless name =~ /^\//
|
||||
Linguist::FileBlob.new(name, fixtures_path)
|
||||
filepath = (name =~ /^\//)? name : File.join(fixtures_path, name)
|
||||
Linguist::FileBlob.new(filepath, fixtures_path)
|
||||
end
|
||||
|
||||
def fixture_blob_memory(name)
|
||||
filepath = (name =~ /^\//)? name : File.join(fixtures_path, name)
|
||||
content = File.read(filepath)
|
||||
Linguist::Blob.new(name, content)
|
||||
end
|
||||
|
||||
def samples_path
|
||||
@@ -18,6 +26,12 @@ def samples_path
|
||||
end
|
||||
|
||||
def sample_blob(name)
|
||||
name = File.join(samples_path, name) unless name =~ /^\//
|
||||
Linguist::FileBlob.new(name, samples_path)
|
||||
filepath = (name =~ /^\//)? name : File.join(samples_path, name)
|
||||
Linguist::FileBlob.new(filepath, samples_path)
|
||||
end
|
||||
|
||||
def sample_blob_memory(name)
|
||||
filepath = (name =~ /^\//)? name : File.join(samples_path, name)
|
||||
content = File.read(filepath)
|
||||
Linguist::Blob.new(name, content)
|
||||
end
|
||||
|
||||
@@ -15,50 +15,47 @@ class TestBlob < Minitest::Test
|
||||
end
|
||||
|
||||
def script_blob(name)
|
||||
blob = sample_blob(name)
|
||||
blob = sample_blob_memory(name)
|
||||
blob.instance_variable_set(:@name, 'script')
|
||||
blob
|
||||
end
|
||||
|
||||
def test_name
|
||||
assert_equal "foo.rb", sample_blob("foo.rb").name
|
||||
assert_equal "foo.rb", sample_blob_memory("Ruby/foo.rb").name
|
||||
end
|
||||
|
||||
def test_mime_type
|
||||
assert_equal "application/postscript", fixture_blob("Binary/octocat.ai").mime_type
|
||||
assert_equal "application/x-ruby", sample_blob("Ruby/grit.rb").mime_type
|
||||
assert_equal "application/x-sh", sample_blob("Shell/script.sh").mime_type
|
||||
assert_equal "application/xml", sample_blob("XML/bar.xml").mime_type
|
||||
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").mime_type
|
||||
assert_equal "text/plain", fixture_blob("Data/README").mime_type
|
||||
assert_equal "application/postscript", fixture_blob_memory("Binary/octocat.ai").mime_type
|
||||
assert_equal "application/x-ruby", sample_blob_memory("Ruby/grit.rb").mime_type
|
||||
assert_equal "application/x-sh", sample_blob_memory("Shell/script.sh").mime_type
|
||||
assert_equal "text/plain", fixture_blob_memory("Data/README").mime_type
|
||||
end
|
||||
|
||||
def test_content_type
|
||||
assert_equal "application/pdf", fixture_blob("Binary/foo.pdf").content_type
|
||||
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").content_type
|
||||
assert_equal "image/png", fixture_blob("Binary/foo.png").content_type
|
||||
assert_equal "text/plain; charset=iso-8859-2", fixture_blob("Data/README").content_type
|
||||
assert_equal "application/pdf", fixture_blob_memory("Binary/foo.pdf").content_type
|
||||
assert_equal "image/png", fixture_blob_memory("Binary/foo.png").content_type
|
||||
assert_equal "text/plain; charset=iso-8859-2", fixture_blob_memory("Data/README").content_type
|
||||
end
|
||||
|
||||
def test_disposition
|
||||
assert_equal "attachment; filename=foo+bar.jar", fixture_blob("Binary/foo bar.jar").disposition
|
||||
assert_equal "attachment; filename=foo.bin", fixture_blob("Binary/foo.bin").disposition
|
||||
assert_equal "attachment; filename=linguist.gem", fixture_blob("Binary/linguist.gem").disposition
|
||||
assert_equal "attachment; filename=octocat.ai", fixture_blob("Binary/octocat.ai").disposition
|
||||
assert_equal "inline", fixture_blob("Data/README").disposition
|
||||
assert_equal "inline", sample_blob("Text/foo.txt").disposition
|
||||
assert_equal "inline", sample_blob("Ruby/grit.rb").disposition
|
||||
assert_equal "inline", fixture_blob("Binary/octocat.png").disposition
|
||||
assert_equal "attachment; filename=foo+bar.jar", fixture_blob_memory("Binary/foo bar.jar").disposition
|
||||
assert_equal "attachment; filename=foo.bin", fixture_blob_memory("Binary/foo.bin").disposition
|
||||
assert_equal "attachment; filename=linguist.gem", fixture_blob_memory("Binary/linguist.gem").disposition
|
||||
assert_equal "attachment; filename=octocat.ai", fixture_blob_memory("Binary/octocat.ai").disposition
|
||||
assert_equal "inline", fixture_blob_memory("Data/README").disposition
|
||||
assert_equal "inline", sample_blob_memory("Text/foo.txt").disposition
|
||||
assert_equal "inline", sample_blob_memory("Ruby/grit.rb").disposition
|
||||
assert_equal "inline", fixture_blob_memory("Binary/octocat.png").disposition
|
||||
end
|
||||
|
||||
def test_data
|
||||
assert_equal "module Foo\nend\n", sample_blob("Ruby/foo.rb").data
|
||||
assert_equal "module Foo\nend\n", sample_blob_memory("Ruby/foo.rb").data
|
||||
end
|
||||
|
||||
def test_lines
|
||||
assert_equal ["module Foo", "end", ""], sample_blob("Ruby/foo.rb").lines
|
||||
assert_equal ["line 1", "line 2", ""], sample_blob("Text/mac.txt").lines
|
||||
assert_equal 475, sample_blob("Emacs Lisp/ess-julia.el").lines.length
|
||||
assert_equal ["module Foo", "end", ""], sample_blob_memory("Ruby/foo.rb").lines
|
||||
assert_equal ["line 1", "line 2", ""], sample_blob_memory("Text/mac.txt").lines
|
||||
assert_equal 475, sample_blob_memory("Emacs Lisp/ess-julia.el").lines.length
|
||||
end
|
||||
|
||||
def test_lines_maintains_original_encoding
|
||||
@@ -66,494 +63,173 @@ class TestBlob < Minitest::Test
|
||||
# earlier versions of the gem made implicit guarantees that the encoding of
|
||||
# each `line` is in the same encoding as the file was originally read (in
|
||||
# practice, UTF-8 or ASCII-8BIT)
|
||||
assert_equal Encoding.default_external, fixture_blob("Data/utf16le").lines.first.encoding
|
||||
assert_equal Encoding.default_external, fixture_blob_memory("Data/utf16le").lines.first.encoding
|
||||
end
|
||||
|
||||
def test_size
|
||||
assert_equal 15, sample_blob("Ruby/foo.rb").size
|
||||
assert_equal 15, sample_blob_memory("Ruby/foo.rb").size
|
||||
end
|
||||
|
||||
def test_loc
|
||||
assert_equal 3, sample_blob("Ruby/foo.rb").loc
|
||||
assert_equal 3, sample_blob_memory("Ruby/foo.rb").loc
|
||||
end
|
||||
|
||||
def test_sloc
|
||||
assert_equal 2, sample_blob("Ruby/foo.rb").sloc
|
||||
assert_equal 3, fixture_blob("Data/utf16le-windows").sloc
|
||||
assert_equal 1, fixture_blob("Data/iso8859-8-i").sloc
|
||||
assert_equal 2, sample_blob_memory("Ruby/foo.rb").sloc
|
||||
assert_equal 3, fixture_blob_memory("Data/utf16le-windows").sloc
|
||||
assert_equal 1, fixture_blob_memory("Data/iso8859-8-i").sloc
|
||||
end
|
||||
|
||||
def test_encoding
|
||||
assert_equal "ISO-8859-2", fixture_blob("Data/README").encoding
|
||||
assert_equal "ISO-8859-2", fixture_blob("Data/README").ruby_encoding
|
||||
assert_equal "UTF-8", sample_blob("Text/foo.txt").encoding
|
||||
assert_equal "UTF-8", sample_blob("Text/foo.txt").ruby_encoding
|
||||
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").encoding
|
||||
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").ruby_encoding
|
||||
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").encoding
|
||||
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").ruby_encoding
|
||||
assert_equal "ISO-2022-KR", sample_blob("Text/ISO-2022-KR.txt").encoding
|
||||
assert_equal "binary", sample_blob("Text/ISO-2022-KR.txt").ruby_encoding
|
||||
assert_nil fixture_blob("Binary/dog.o").encoding
|
||||
assert_equal "ISO-8859-2", fixture_blob_memory("Data/README").encoding
|
||||
assert_equal "ISO-8859-2", fixture_blob_memory("Data/README").ruby_encoding
|
||||
assert_equal "UTF-8", sample_blob_memory("Text/foo.txt").encoding
|
||||
assert_equal "UTF-8", sample_blob_memory("Text/foo.txt").ruby_encoding
|
||||
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le").encoding
|
||||
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le").ruby_encoding
|
||||
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le-windows").encoding
|
||||
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le-windows").ruby_encoding
|
||||
assert_equal "ISO-2022-KR", sample_blob_memory("Text/ISO-2022-KR.txt").encoding
|
||||
assert_equal "binary", sample_blob_memory("Text/ISO-2022-KR.txt").ruby_encoding
|
||||
assert_nil fixture_blob_memory("Binary/dog.o").encoding
|
||||
end
|
||||
|
||||
def test_binary
|
||||
# Large blobs aren't loaded
|
||||
large_blob = sample_blob("git.exe")
|
||||
large_blob.instance_eval do
|
||||
def data; end
|
||||
end
|
||||
assert large_blob.binary?
|
||||
|
||||
assert fixture_blob("Binary/git.deb").binary?
|
||||
assert fixture_blob("Binary/git.exe").binary?
|
||||
assert fixture_blob("Binary/hello.pbc").binary?
|
||||
assert fixture_blob("Binary/linguist.gem").binary?
|
||||
assert fixture_blob("Binary/octocat.ai").binary?
|
||||
assert fixture_blob("Binary/octocat.png").binary?
|
||||
assert fixture_blob("Binary/zip").binary?
|
||||
assert !fixture_blob("Data/README").binary?
|
||||
assert !sample_blob("Ruby/foo.rb").binary?
|
||||
assert !sample_blob("Perl/script.pl").binary?
|
||||
assert fixture_blob_memory("Binary/git.deb").binary?
|
||||
assert fixture_blob_memory("Binary/hello.pbc").binary?
|
||||
assert fixture_blob_memory("Binary/linguist.gem").binary?
|
||||
assert fixture_blob_memory("Binary/octocat.ai").binary?
|
||||
assert fixture_blob_memory("Binary/octocat.png").binary?
|
||||
assert fixture_blob_memory("Binary/zip").binary?
|
||||
assert !fixture_blob_memory("Data/README").binary?
|
||||
assert !sample_blob_memory("Ruby/foo.rb").binary?
|
||||
assert !sample_blob_memory("Perl/script.pl").binary?
|
||||
end
|
||||
|
||||
def test_all_binary
|
||||
Samples.each do |sample|
|
||||
blob = sample_blob(sample[:path])
|
||||
blob = sample_blob_memory(sample[:path])
|
||||
assert ! (blob.likely_binary? || blob.binary?), "#{sample[:path]} is a binary file"
|
||||
end
|
||||
end
|
||||
|
||||
def test_text
|
||||
assert fixture_blob("Data/README").text?
|
||||
assert fixture_blob("Data/md").text?
|
||||
assert sample_blob("Shell/script.sh").text?
|
||||
assert fixture_blob("Data/txt").text?
|
||||
assert fixture_blob_memory("Data/README").text?
|
||||
assert fixture_blob_memory("Data/md").text?
|
||||
assert sample_blob_memory("Shell/script.sh").text?
|
||||
assert fixture_blob_memory("Data/txt").text?
|
||||
end
|
||||
|
||||
def test_image
|
||||
assert fixture_blob("Binary/octocat.gif").image?
|
||||
assert fixture_blob("Binary/octocat.jpeg").image?
|
||||
assert fixture_blob("Binary/octocat.jpg").image?
|
||||
assert fixture_blob("Binary/octocat.png").image?
|
||||
assert !fixture_blob("Binary/octocat.ai").image?
|
||||
assert !fixture_blob("Binary/octocat.psd").image?
|
||||
assert fixture_blob_memory("Binary/octocat.png").image?
|
||||
assert !fixture_blob_memory("Binary/octocat.ai").image?
|
||||
assert !fixture_blob_memory("Binary/octocat.psd").image?
|
||||
end
|
||||
|
||||
def test_solid
|
||||
assert fixture_blob("Binary/cube.stl").solid?
|
||||
assert fixture_blob("Data/cube.stl").solid?
|
||||
assert fixture_blob_memory("Binary/cube.stl").solid?
|
||||
assert fixture_blob_memory("Data/cube.stl").solid?
|
||||
end
|
||||
|
||||
def test_csv
|
||||
assert fixture_blob("Data/cars.csv").csv?
|
||||
assert fixture_blob_memory("Data/cars.csv").csv?
|
||||
end
|
||||
|
||||
def test_pdf
|
||||
assert fixture_blob("Binary/foo.pdf").pdf?
|
||||
assert fixture_blob_memory("Binary/foo.pdf").pdf?
|
||||
end
|
||||
|
||||
def test_viewable
|
||||
assert fixture_blob("Data/README").viewable?
|
||||
assert sample_blob("Ruby/foo.rb").viewable?
|
||||
assert sample_blob("Perl/script.pl").viewable?
|
||||
assert !fixture_blob("Binary/linguist.gem").viewable?
|
||||
assert !fixture_blob("Binary/octocat.ai").viewable?
|
||||
assert !fixture_blob("Binary/octocat.png").viewable?
|
||||
assert fixture_blob_memory("Data/README").viewable?
|
||||
assert sample_blob_memory("Ruby/foo.rb").viewable?
|
||||
assert sample_blob_memory("Perl/script.pl").viewable?
|
||||
assert !fixture_blob_memory("Binary/linguist.gem").viewable?
|
||||
assert !fixture_blob_memory("Binary/octocat.ai").viewable?
|
||||
assert !fixture_blob_memory("Binary/octocat.png").viewable?
|
||||
end
|
||||
|
||||
def test_generated
|
||||
assert !fixture_blob("Data/README").generated?
|
||||
|
||||
# Xcode project files
|
||||
assert !sample_blob("XML/MainMenu.xib").generated?
|
||||
assert fixture_blob("Binary/MainMenu.nib").generated?
|
||||
assert !sample_blob("XML/project.pbxproj").generated?
|
||||
|
||||
# Gemfile.lock is NOT generated
|
||||
assert !sample_blob("Gemfile.lock").generated?
|
||||
assert !fixture_blob_memory("Data/README").generated?
|
||||
|
||||
# Generated .NET Docfiles
|
||||
assert sample_blob("XML/net_docfile.xml").generated?
|
||||
assert sample_blob_memory("XML/net_docfile.xml").generated?
|
||||
|
||||
# Long line
|
||||
assert !sample_blob("JavaScript/uglify.js").generated?
|
||||
assert !sample_blob_memory("JavaScript/uglify.js").generated?
|
||||
|
||||
# Inlined JS, but mostly code
|
||||
assert !sample_blob("JavaScript/json2_backbone.js").generated?
|
||||
assert !sample_blob_memory("JavaScript/json2_backbone.js").generated?
|
||||
|
||||
# Minified JS
|
||||
assert !sample_blob("JavaScript/jquery-1.6.1.js").generated?
|
||||
assert sample_blob("JavaScript/jquery-1.6.1.min.js").generated?
|
||||
assert sample_blob("JavaScript/jquery-1.4.2.min.js").generated?
|
||||
|
||||
# CoffeeScript-generated JS
|
||||
# TODO
|
||||
|
||||
# TypeScript-generated JS
|
||||
# TODO
|
||||
assert !sample_blob_memory("JavaScript/jquery-1.6.1.js").generated?
|
||||
assert sample_blob_memory("JavaScript/jquery-1.6.1.min.js").generated?
|
||||
assert sample_blob_memory("JavaScript/jquery-1.4.2.min.js").generated?
|
||||
|
||||
# Composer generated composer.lock file
|
||||
assert sample_blob("JSON/composer.lock").generated?
|
||||
assert sample_blob_memory("JSON/composer.lock").generated?
|
||||
|
||||
# PEG.js-generated parsers
|
||||
assert sample_blob("JavaScript/parser.js").generated?
|
||||
assert sample_blob_memory("JavaScript/parser.js").generated?
|
||||
|
||||
# Generated PostScript
|
||||
assert !sample_blob("PostScript/sierpinski.ps").generated?
|
||||
assert !sample_blob_memory("PostScript/sierpinski.ps").generated?
|
||||
|
||||
# These examples are too basic to tell
|
||||
assert !sample_blob("JavaScript/hello.js").generated?
|
||||
assert !sample_blob_memory("JavaScript/hello.js").generated?
|
||||
|
||||
assert sample_blob("JavaScript/intro-old.js").generated?
|
||||
assert sample_blob("JavaScript/classes-old.js").generated?
|
||||
assert sample_blob_memory("JavaScript/intro-old.js").generated?
|
||||
assert sample_blob_memory("JavaScript/classes-old.js").generated?
|
||||
|
||||
assert sample_blob("JavaScript/intro.js").generated?
|
||||
assert sample_blob("JavaScript/classes.js").generated?
|
||||
assert sample_blob_memory("JavaScript/intro.js").generated?
|
||||
assert sample_blob_memory("JavaScript/classes.js").generated?
|
||||
|
||||
# Protocol Buffer generated code
|
||||
assert sample_blob("C++/protocol-buffer.pb.h").generated?
|
||||
assert sample_blob("C++/protocol-buffer.pb.cc").generated?
|
||||
assert sample_blob("Java/ProtocolBuffer.java").generated?
|
||||
assert sample_blob("Python/protocol_buffer_pb2.py").generated?
|
||||
assert sample_blob("Go/api.pb.go").generated?
|
||||
assert sample_blob("Go/embedded.go").generated?
|
||||
assert sample_blob_memory("C++/protocol-buffer.pb.h").generated?
|
||||
assert sample_blob_memory("C++/protocol-buffer.pb.cc").generated?
|
||||
assert sample_blob_memory("Java/ProtocolBuffer.java").generated?
|
||||
assert sample_blob_memory("Python/protocol_buffer_pb2.py").generated?
|
||||
assert sample_blob_memory("Go/api.pb.go").generated?
|
||||
assert sample_blob_memory("Go/embedded.go").generated?
|
||||
|
||||
# Apache Thrift generated code
|
||||
assert sample_blob("Python/gen-py-linguist-thrift.py").generated?
|
||||
assert sample_blob("Go/gen-go-linguist-thrift.go").generated?
|
||||
assert sample_blob("Java/gen-java-linguist-thrift.java").generated?
|
||||
assert sample_blob("JavaScript/gen-js-linguist-thrift.js").generated?
|
||||
assert sample_blob("Ruby/gen-rb-linguist-thrift.rb").generated?
|
||||
assert sample_blob("Objective-C/gen-cocoa-linguist-thrift.m").generated?
|
||||
assert sample_blob_memory("Python/gen-py-linguist-thrift.py").generated?
|
||||
assert sample_blob_memory("Go/gen-go-linguist-thrift.go").generated?
|
||||
assert sample_blob_memory("Java/gen-java-linguist-thrift.java").generated?
|
||||
assert sample_blob_memory("JavaScript/gen-js-linguist-thrift.js").generated?
|
||||
assert sample_blob_memory("Ruby/gen-rb-linguist-thrift.rb").generated?
|
||||
assert sample_blob_memory("Objective-C/gen-cocoa-linguist-thrift.m").generated?
|
||||
|
||||
# Generated JNI
|
||||
assert sample_blob("C/jni_layer.h").generated?
|
||||
assert sample_blob_memory("C/jni_layer.h").generated?
|
||||
|
||||
# Minified CSS
|
||||
assert !sample_blob("CSS/bootstrap.css").generated?
|
||||
assert sample_blob("CSS/bootstrap.min.css").generated?
|
||||
assert !sample_blob_memory("CSS/bootstrap.css").generated?
|
||||
assert sample_blob_memory("CSS/bootstrap.min.css").generated?
|
||||
|
||||
# Generated VCR
|
||||
assert sample_blob("YAML/vcr_cassette.yml").generated?
|
||||
assert sample_blob_memory("YAML/vcr_cassette.yml").generated?
|
||||
|
||||
# Generated by Zephir
|
||||
assert sample_blob("Zephir/filenames/exception.zep.c").generated?
|
||||
assert sample_blob("Zephir/filenames/exception.zep.h").generated?
|
||||
assert sample_blob("Zephir/filenames/exception.zep.php").generated?
|
||||
assert !sample_blob("Zephir/Router.zep").generated?
|
||||
|
||||
assert sample_blob("node_modules/grunt/lib/grunt.js").generated?
|
||||
|
||||
# Godep saved dependencies
|
||||
assert sample_blob("Godeps/Godeps.json").generated?
|
||||
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").generated?
|
||||
assert !sample_blob_memory("Zephir/Router.zep").generated?
|
||||
|
||||
# Cython-generated C/C++
|
||||
assert sample_blob("C/sgd_fast.c").generated?
|
||||
assert sample_blob("C++/wrapper_inner.cpp").generated?
|
||||
assert sample_blob_memory("C/sgd_fast.c").generated?
|
||||
assert sample_blob_memory("C++/wrapper_inner.cpp").generated?
|
||||
|
||||
# Unity3D-generated metadata
|
||||
assert sample_blob("Unity3D Asset/Tiles.meta").generated?
|
||||
assert sample_blob_memory("Unity3D Asset/Tiles.meta").generated?
|
||||
|
||||
# Racc-generated Ruby
|
||||
assert sample_blob_memory("Ruby/racc.rb").generated?
|
||||
end
|
||||
|
||||
def test_vendored
|
||||
assert !fixture_blob("Data/README").vendored?
|
||||
assert !sample_blob("ext/extconf.rb").vendored?
|
||||
|
||||
# Dependencies
|
||||
assert sample_blob("dependencies/windows/headers/GL/glext.h").vendored?
|
||||
|
||||
# Node dependencies
|
||||
assert sample_blob("node_modules/coffee-script/lib/coffee-script.js").vendored?
|
||||
|
||||
# Bower Components
|
||||
assert sample_blob("bower_components/custom/custom.js").vendored?
|
||||
assert sample_blob("app/bower_components/custom/custom.js").vendored?
|
||||
assert sample_blob("vendor/assets/bower_components/custom/custom.js").vendored?
|
||||
|
||||
# Go dependencies
|
||||
assert !sample_blob("Godeps/Godeps.json").vendored?
|
||||
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").vendored?
|
||||
|
||||
# Rails vendor/
|
||||
assert sample_blob("vendor/plugins/will_paginate/lib/will_paginate.rb").vendored?
|
||||
|
||||
# Vendor/
|
||||
assert sample_blob("Vendor/my_great_file.h").vendored?
|
||||
|
||||
# 'thirdparty' directory
|
||||
assert sample_blob("thirdparty/lib/main.c").vendored?
|
||||
|
||||
# 'extern(al)' directory
|
||||
assert sample_blob("extern/util/__init__.py").vendored?
|
||||
assert sample_blob("external/jquery.min.js").vendored?
|
||||
|
||||
# C deps
|
||||
assert sample_blob("deps/http_parser/http_parser.c").vendored?
|
||||
assert sample_blob("deps/v8/src/v8.h").vendored?
|
||||
|
||||
assert sample_blob("tools/something/else.c").vendored?
|
||||
|
||||
# Chart.js
|
||||
assert sample_blob("some/vendored/path/Chart.js").vendored?
|
||||
assert !sample_blob("some/vendored/path/chart.js").vendored?
|
||||
|
||||
# Codemirror deps
|
||||
assert sample_blob("codemirror/mode/blah.js").vendored?
|
||||
|
||||
# Debian packaging
|
||||
assert sample_blob("debian/cron.d").vendored?
|
||||
|
||||
# Erlang
|
||||
assert sample_blob("rebar").vendored?
|
||||
|
||||
# Minified JavaScript and CSS
|
||||
assert sample_blob("foo.min.js").vendored?
|
||||
assert sample_blob("foo.min.css").vendored?
|
||||
assert sample_blob("foo-min.js").vendored?
|
||||
assert sample_blob("foo-min.css").vendored?
|
||||
assert !sample_blob("foomin.css").vendored?
|
||||
assert !sample_blob("foo.min.txt").vendored?
|
||||
|
||||
#.osx
|
||||
assert sample_blob(".osx").vendored?
|
||||
|
||||
# Prototype
|
||||
assert !sample_blob("public/javascripts/application.js").vendored?
|
||||
assert sample_blob("public/javascripts/prototype.js").vendored?
|
||||
assert sample_blob("public/javascripts/effects.js").vendored?
|
||||
assert sample_blob("public/javascripts/controls.js").vendored?
|
||||
assert sample_blob("public/javascripts/dragdrop.js").vendored?
|
||||
|
||||
# Samples
|
||||
assert sample_blob("Samples/Ruby/foo.rb").vendored?
|
||||
|
||||
# jQuery
|
||||
assert sample_blob("jquery.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery.min.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.7.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.7.min.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.5.2.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.6.1.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.6.1.min.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.10.1.js").vendored?
|
||||
assert sample_blob("public/javascripts/jquery-1.10.1.min.js").vendored?
|
||||
assert !sample_blob("public/javascripts/jquery.github.menu.js").vendored?
|
||||
|
||||
# jQuery UI
|
||||
assert sample_blob("themes/ui-lightness/jquery-ui.css").vendored?
|
||||
assert sample_blob("themes/ui-lightness/jquery-ui-1.8.22.custom.css").vendored?
|
||||
assert sample_blob("themes/ui-lightness/jquery.ui.accordion.css").vendored?
|
||||
assert sample_blob("ui/i18n/jquery.ui.datepicker-ar.js").vendored?
|
||||
assert sample_blob("ui/i18n/jquery-ui-i18n.js").vendored?
|
||||
assert sample_blob("ui/jquery.effects.blind.js").vendored?
|
||||
assert sample_blob("ui/jquery-ui-1.8.22.custom.js").vendored?
|
||||
assert sample_blob("ui/jquery-ui-1.8.22.custom.min.js").vendored?
|
||||
assert sample_blob("ui/jquery-ui-1.8.22.js").vendored?
|
||||
assert sample_blob("ui/jquery-ui-1.8.js").vendored?
|
||||
assert sample_blob("ui/jquery-ui.min.js").vendored?
|
||||
assert sample_blob("ui/jquery.ui.accordion.js").vendored?
|
||||
assert sample_blob("ui/minified/jquery.effects.blind.min.js").vendored?
|
||||
assert sample_blob("ui/minified/jquery.ui.accordion.min.js").vendored?
|
||||
|
||||
# MooTools
|
||||
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat.js").vendored?
|
||||
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat-yc.js").vendored?
|
||||
|
||||
# Dojo
|
||||
assert sample_blob("public/javascripts/dojo.js").vendored?
|
||||
|
||||
# MochiKit
|
||||
assert sample_blob("public/javascripts/MochiKit.js").vendored?
|
||||
|
||||
# YUI
|
||||
assert sample_blob("public/javascripts/yahoo-dom-event.js").vendored?
|
||||
assert sample_blob("public/javascripts/yahoo-min.js").vendored?
|
||||
assert sample_blob("public/javascripts/yuiloader-dom-event.js").vendored?
|
||||
|
||||
# WYS editors
|
||||
assert sample_blob("public/javascripts/ckeditor.js").vendored?
|
||||
assert sample_blob("public/javascripts/tiny_mce.js").vendored?
|
||||
assert sample_blob("public/javascripts/tiny_mce_popup.js").vendored?
|
||||
assert sample_blob("public/javascripts/tiny_mce_src.js").vendored?
|
||||
|
||||
# AngularJS
|
||||
assert sample_blob("public/javascripts/angular.js").vendored?
|
||||
assert sample_blob("public/javascripts/angular.min.js").vendored?
|
||||
|
||||
# D3.js
|
||||
assert sample_blob("public/javascripts/d3.v3.js").vendored?
|
||||
assert sample_blob("public/javascripts/d3.v3.min.js").vendored?
|
||||
|
||||
# Modernizr
|
||||
assert sample_blob("public/javascripts/modernizr-2.7.1.js").vendored?
|
||||
assert sample_blob("public/javascripts/modernizr.custom.01009.js").vendored?
|
||||
|
||||
# Fabric
|
||||
assert sample_blob("fabfile.py").vendored?
|
||||
|
||||
# WAF
|
||||
assert sample_blob("waf").vendored?
|
||||
|
||||
# Visual Studio IntelliSense
|
||||
assert sample_blob("Scripts/jquery-1.7-vsdoc.js").vendored?
|
||||
|
||||
# Microsoft Ajax
|
||||
assert sample_blob("Scripts/MicrosoftAjax.debug.js").vendored?
|
||||
assert sample_blob("Scripts/MicrosoftAjax.js").vendored?
|
||||
assert sample_blob("Scripts/MicrosoftMvcAjax.debug.js").vendored?
|
||||
assert sample_blob("Scripts/MicrosoftMvcAjax.js").vendored?
|
||||
assert sample_blob("Scripts/MicrosoftMvcValidation.debug.js").vendored?
|
||||
assert sample_blob("Scripts/MicrosoftMvcValidation.js").vendored?
|
||||
|
||||
# jQuery validation plugin (MS bundles this with asp.net mvc)
|
||||
assert sample_blob("Scripts/jquery.validate.js").vendored?
|
||||
assert sample_blob("Scripts/jquery.validate.min.js").vendored?
|
||||
assert sample_blob("Scripts/jquery.validate.unobtrusive.js").vendored?
|
||||
assert sample_blob("Scripts/jquery.validate.unobtrusive.min.js").vendored?
|
||||
assert sample_blob("Scripts/jquery.unobtrusive-ajax.js").vendored?
|
||||
assert sample_blob("Scripts/jquery.unobtrusive-ajax.min.js").vendored?
|
||||
|
||||
# NuGet Packages
|
||||
assert sample_blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?
|
||||
|
||||
# Font Awesome
|
||||
assert sample_blob("some/asset/path/font-awesome.min.css").vendored?
|
||||
assert sample_blob("some/asset/path/font-awesome.css").vendored?
|
||||
|
||||
# Normalize
|
||||
assert sample_blob("some/asset/path/normalize.css").vendored?
|
||||
|
||||
# Carthage
|
||||
assert sample_blob('Carthage/blah').vendored?
|
||||
|
||||
# Cocoapods
|
||||
assert sample_blob('Pods/blah').vendored?
|
||||
|
||||
# Html5shiv
|
||||
assert sample_blob("Scripts/html5shiv.js").vendored?
|
||||
assert sample_blob("Scripts/html5shiv.min.js").vendored?
|
||||
|
||||
# Test fixtures
|
||||
assert sample_blob("test/fixtures/random.rkt").vendored?
|
||||
assert sample_blob("Test/fixtures/random.rkt").vendored?
|
||||
assert sample_blob("tests/fixtures/random.rkt").vendored?
|
||||
|
||||
# Cordova/PhoneGap
|
||||
assert sample_blob("cordova.js").vendored?
|
||||
assert sample_blob("cordova.min.js").vendored?
|
||||
assert sample_blob("cordova-2.1.0.js").vendored?
|
||||
assert sample_blob("cordova-2.1.0.min.js").vendored?
|
||||
|
||||
# Foundation js
|
||||
assert sample_blob("foundation.js").vendored?
|
||||
assert sample_blob("foundation.min.js").vendored?
|
||||
assert sample_blob("foundation.abide.js").vendored?
|
||||
|
||||
# Vagrant
|
||||
assert sample_blob("Vagrantfile").vendored?
|
||||
|
||||
# Gradle
|
||||
assert sample_blob("gradlew").vendored?
|
||||
assert sample_blob("gradlew.bat").vendored?
|
||||
assert sample_blob("gradle/wrapper/gradle-wrapper.properties").vendored?
|
||||
assert sample_blob("subproject/gradlew").vendored?
|
||||
assert sample_blob("subproject/gradlew.bat").vendored?
|
||||
assert sample_blob("subproject/gradle/wrapper/gradle-wrapper.properties").vendored?
|
||||
|
||||
# Octicons
|
||||
assert sample_blob("octicons.css").vendored?
|
||||
assert sample_blob("public/octicons.min.css").vendored?
|
||||
assert sample_blob("public/octicons/sprockets-octicons.scss").vendored?
|
||||
|
||||
# Typesafe Activator
|
||||
assert sample_blob("activator").vendored?
|
||||
assert sample_blob("activator.bat").vendored?
|
||||
assert sample_blob("subproject/activator").vendored?
|
||||
assert sample_blob("subproject/activator.bat").vendored?
|
||||
|
||||
assert_predicate fixture_blob(".google_apis/bar.jar"), :vendored?
|
||||
assert_predicate fixture_blob("foo/.google_apis/bar.jar"), :vendored?
|
||||
|
||||
# Sphinx docs
|
||||
assert sample_blob("docs/_build/asset.doc").vendored?
|
||||
assert sample_blob("docs/theme/file.css").vendored?
|
||||
|
||||
# Vagrant
|
||||
assert sample_blob("puphpet/file.pp").vendored?
|
||||
|
||||
# Fabric.io
|
||||
assert sample_blob("Fabric.framework/Fabric.h").vendored?
|
||||
|
||||
# Crashlytics
|
||||
assert sample_blob("Crashlytics.framework/Crashlytics.h").vendored?
|
||||
end
|
||||
|
||||
def test_documentation
|
||||
assert_predicate fixture_blob("doc/foo.html"), :documentation?
|
||||
assert_predicate fixture_blob("docs/foo.html"), :documentation?
|
||||
refute_predicate fixture_blob("project/doc/foo.html"), :documentation?
|
||||
refute_predicate fixture_blob("project/docs/foo.html"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("Documentation/foo.md"), :documentation?
|
||||
assert_predicate fixture_blob("documentation/foo.md"), :documentation?
|
||||
    assert_predicate fixture_blob("project/Documentation/foo.md"), :documentation?
    assert_predicate fixture_blob("project/documentation/foo.md"), :documentation?

    assert_predicate fixture_blob("javadoc/foo.html"), :documentation?
    assert_predicate fixture_blob("project/javadoc/foo.html"), :documentation?

    assert_predicate fixture_blob("man/foo.html"), :documentation?
    refute_predicate fixture_blob("project/man/foo.html"), :documentation?

    assert_predicate fixture_blob("README"), :documentation?
    assert_predicate fixture_blob("README.md"), :documentation?
    assert_predicate fixture_blob("README.txt"), :documentation?
    assert_predicate fixture_blob("foo/README"), :documentation?

    assert_predicate fixture_blob("CHANGE"), :documentation?
    assert_predicate fixture_blob("CHANGE.md"), :documentation?
    assert_predicate fixture_blob("CHANGE.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGE"), :documentation?

    assert_predicate fixture_blob("CHANGELOG"), :documentation?
    assert_predicate fixture_blob("CHANGELOG.md"), :documentation?
    assert_predicate fixture_blob("CHANGELOG.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGELOG"), :documentation?

    assert_predicate fixture_blob("CHANGES"), :documentation?
    assert_predicate fixture_blob("CHANGES.md"), :documentation?
    assert_predicate fixture_blob("CHANGES.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGES"), :documentation?

    assert_predicate fixture_blob("CONTRIBUTING"), :documentation?
    assert_predicate fixture_blob("CONTRIBUTING.md"), :documentation?
    assert_predicate fixture_blob("CONTRIBUTING.txt"), :documentation?
    assert_predicate fixture_blob("foo/CONTRIBUTING"), :documentation?

    assert_predicate fixture_blob("LICENSE"), :documentation?
    assert_predicate fixture_blob("LICENCE.md"), :documentation?
    assert_predicate fixture_blob("LICENSE.txt"), :documentation?
    assert_predicate fixture_blob("foo/LICENSE"), :documentation?

    assert_predicate fixture_blob("COPYING"), :documentation?
    assert_predicate fixture_blob("COPYING.md"), :documentation?
    assert_predicate fixture_blob("COPYING.txt"), :documentation?
    assert_predicate fixture_blob("foo/COPYING"), :documentation?

    assert_predicate fixture_blob("INSTALL"), :documentation?
    assert_predicate fixture_blob("INSTALL.md"), :documentation?
    assert_predicate fixture_blob("INSTALL.txt"), :documentation?
    assert_predicate fixture_blob("foo/INSTALL"), :documentation?

    refute_predicate fixture_blob("foo.md"), :documentation?
    assert !fixture_blob_memory("Data/README").vendored?
  end

  def test_language
    Samples.each do |sample|
      blob = sample_blob(sample[:path])
      blob = sample_blob_memory(sample[:path])
      assert blob.language, "No language for #{sample[:path]}"
      assert_equal sample[:language], blob.language.name, blob.name
    end
@@ -572,7 +248,7 @@ class TestBlob < Minitest::Test
        filepath = File.join(dirname, filename)
        next unless File.file?(filepath)

        blob = fixture_blob(filepath)
        blob = fixture_blob_memory(filepath)
        if language == 'Data'
          assert blob.language.nil?, "A language was found for #{filepath}"
        elsif language == 'Generated'
@@ -586,7 +262,7 @@ class TestBlob < Minitest::Test
  end

  def test_minified_files_not_safe_to_highlight
    assert !sample_blob("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
    assert !sample_blob_memory("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
  end

  def test_empty
@@ -599,27 +275,19 @@ class TestBlob < Minitest::Test
  end

  def test_include_in_language_stats
    vendored = sample_blob("bower_components/custom/custom.js")
    assert_predicate vendored, :vendored?
    refute_predicate vendored, :include_in_language_stats?

    documentation = fixture_blob("README")
    assert_predicate documentation, :documentation?
    refute_predicate documentation, :include_in_language_stats?

    generated = sample_blob("CSS/bootstrap.min.css")
    generated = sample_blob_memory("CSS/bootstrap.min.css")
    assert_predicate generated, :generated?
    refute_predicate generated, :include_in_language_stats?

    data = sample_blob("Ant Build System/filenames/ant.xml")
    data = sample_blob_memory("Ant Build System/filenames/ant.xml")
    assert_equal :data, data.language.type
    refute_predicate data, :include_in_language_stats?

    prose = sample_blob("Markdown/tender.md")
    prose = sample_blob_memory("Markdown/tender.md")
    assert_equal :prose, prose.language.type
    refute_predicate prose, :include_in_language_stats?

    included = sample_blob("HTML/pages.html")
    included = sample_blob_memory("HTML/pages.html")
    assert_predicate included, :include_in_language_stats?
  end
end
@@ -1,9 +1,669 @@
require_relative "./helper"

class TestFileBlob < Minitest::Test
class TestBlob < Minitest::Test
  include Linguist

  def setup
    # git blobs are normally loaded as ASCII-8BIT since they may contain data
    # with arbitrary encoding not known ahead of time
    @original_external = Encoding.default_external
    Encoding.default_external = Encoding.find("ASCII-8BIT")
  end

  def teardown
    Encoding.default_external = @original_external
  end

  def script_blob(name)
    blob = sample_blob(name)
    blob.instance_variable_set(:@name, 'script')
    blob
  end

  def test_extensions
    assert_equal [".gitignore"], Linguist::FileBlob.new(".gitignore").extensions
    assert_equal [".xml"], Linguist::FileBlob.new("build.xml").extensions
    assert_equal [".html.erb", ".erb"], Linguist::FileBlob.new("dotted.dir/index.html.erb").extensions
  end

  def test_name
    assert_equal "foo.rb", sample_blob("foo.rb").name
  end

  def test_mime_type
    assert_equal "application/postscript", fixture_blob("Binary/octocat.ai").mime_type
    assert_equal "application/x-ruby", sample_blob("Ruby/grit.rb").mime_type
    assert_equal "application/x-sh", sample_blob("Shell/script.sh").mime_type
    assert_equal "application/xml", sample_blob("XML/bar.xml").mime_type
    assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").mime_type
    assert_equal "text/plain", fixture_blob("Data/README").mime_type
  end

  def test_content_type
    assert_equal "application/pdf", fixture_blob("Binary/foo.pdf").content_type
    assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").content_type
    assert_equal "image/png", fixture_blob("Binary/foo.png").content_type
    assert_equal "text/plain; charset=iso-8859-2", fixture_blob("Data/README").content_type
  end

  def test_disposition
    assert_equal "attachment; filename=foo+bar.jar", fixture_blob("Binary/foo bar.jar").disposition
    assert_equal "attachment; filename=foo.bin", fixture_blob("Binary/foo.bin").disposition
    assert_equal "attachment; filename=linguist.gem", fixture_blob("Binary/linguist.gem").disposition
    assert_equal "attachment; filename=octocat.ai", fixture_blob("Binary/octocat.ai").disposition
    assert_equal "inline", fixture_blob("Data/README").disposition
    assert_equal "inline", sample_blob("Text/foo.txt").disposition
    assert_equal "inline", sample_blob("Ruby/grit.rb").disposition
    assert_equal "inline", fixture_blob("Binary/octocat.png").disposition
  end

  def test_data
    assert_equal "module Foo\nend\n", sample_blob("Ruby/foo.rb").data
  end

  def test_lines
    assert_equal ["module Foo", "end", ""], sample_blob("Ruby/foo.rb").lines
    assert_equal ["line 1", "line 2", ""], sample_blob("Text/mac.txt").lines
    assert_equal 475, sample_blob("Emacs Lisp/ess-julia.el").lines.length
  end

  def test_lines_maintains_original_encoding
    # Even if the file's encoding is detected as something like UTF-16LE,
    # earlier versions of the gem made implicit guarantees that the encoding of
    # each `line` is in the same encoding as the file was originally read (in
    # practice, UTF-8 or ASCII-8BIT)
    assert_equal Encoding.default_external, fixture_blob("Data/utf16le").lines.first.encoding
  end

  def test_size
    assert_equal 15, sample_blob("Ruby/foo.rb").size
  end

  def test_loc
    assert_equal 3, sample_blob("Ruby/foo.rb").loc
  end

  def test_sloc
    assert_equal 2, sample_blob("Ruby/foo.rb").sloc
    assert_equal 3, fixture_blob("Data/utf16le-windows").sloc
    assert_equal 1, fixture_blob("Data/iso8859-8-i").sloc
  end

  def test_encoding
    assert_equal "ISO-8859-2", fixture_blob("Data/README").encoding
    assert_equal "ISO-8859-2", fixture_blob("Data/README").ruby_encoding
    assert_equal "UTF-8", sample_blob("Text/foo.txt").encoding
    assert_equal "UTF-8", sample_blob("Text/foo.txt").ruby_encoding
    assert_equal "UTF-16LE", fixture_blob("Data/utf16le").encoding
    assert_equal "UTF-16LE", fixture_blob("Data/utf16le").ruby_encoding
    assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").encoding
    assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").ruby_encoding
    assert_equal "ISO-2022-KR", sample_blob("Text/ISO-2022-KR.txt").encoding
    assert_equal "binary", sample_blob("Text/ISO-2022-KR.txt").ruby_encoding
    assert_nil fixture_blob("Binary/dog.o").encoding
  end

  def test_binary
    # Large blobs aren't loaded
    large_blob = sample_blob("git.exe")
    large_blob.instance_eval do
      def data; end
    end
    assert large_blob.binary?

    assert fixture_blob("Binary/git.deb").binary?
    assert fixture_blob("Binary/git.exe").binary?
    assert fixture_blob("Binary/hello.pbc").binary?
    assert fixture_blob("Binary/linguist.gem").binary?
    assert fixture_blob("Binary/octocat.ai").binary?
    assert fixture_blob("Binary/octocat.png").binary?
    assert fixture_blob("Binary/zip").binary?
    assert !fixture_blob("Data/README").binary?
    assert !sample_blob("Ruby/foo.rb").binary?
    assert !sample_blob("Perl/script.pl").binary?
  end

  def test_all_binary
    Samples.each do |sample|
      blob = sample_blob(sample[:path])
      assert ! (blob.likely_binary? || blob.binary?), "#{sample[:path]} is a binary file"
    end
  end

  def test_text
    assert fixture_blob("Data/README").text?
    assert fixture_blob("Data/md").text?
    assert sample_blob("Shell/script.sh").text?
    assert fixture_blob("Data/txt").text?
  end

  def test_image
    assert fixture_blob("Binary/octocat.gif").image?
    assert fixture_blob("Binary/octocat.jpeg").image?
    assert fixture_blob("Binary/octocat.jpg").image?
    assert fixture_blob("Binary/octocat.png").image?
    assert !fixture_blob("Binary/octocat.ai").image?
    assert !fixture_blob("Binary/octocat.psd").image?
  end

  def test_solid
    assert fixture_blob("Binary/cube.stl").solid?
    assert fixture_blob("Data/cube.stl").solid?
  end

  def test_csv
    assert fixture_blob("Data/cars.csv").csv?
  end

  def test_pdf
    assert fixture_blob("Binary/foo.pdf").pdf?
  end

  def test_viewable
    assert fixture_blob("Data/README").viewable?
    assert sample_blob("Ruby/foo.rb").viewable?
    assert sample_blob("Perl/script.pl").viewable?
    assert !fixture_blob("Binary/linguist.gem").viewable?
    assert !fixture_blob("Binary/octocat.ai").viewable?
    assert !fixture_blob("Binary/octocat.png").viewable?
  end
  def test_generated
    assert !fixture_blob("Data/README").generated?

    # Xcode project files
    assert !sample_blob("XML/MainMenu.xib").generated?
    assert fixture_blob("Binary/MainMenu.nib").generated?
    assert !sample_blob("XML/project.pbxproj").generated?

    # Gemfile.lock is NOT generated
    assert !sample_blob("Gemfile.lock").generated?

    # Generated .NET Docfiles
    assert sample_blob("XML/net_docfile.xml").generated?

    # Long line
    assert !sample_blob("JavaScript/uglify.js").generated?

    # Inlined JS, but mostly code
    assert !sample_blob("JavaScript/json2_backbone.js").generated?

    # Minified JS
    assert !sample_blob("JavaScript/jquery-1.6.1.js").generated?
    assert sample_blob("JavaScript/jquery-1.6.1.min.js").generated?
    assert sample_blob("JavaScript/jquery-1.4.2.min.js").generated?

    # CoffeeScript-generated JS
    # TODO

    # TypeScript-generated JS
    # TODO

    # Composer generated composer.lock file
    assert sample_blob("JSON/composer.lock").generated?

    # PEG.js-generated parsers
    assert sample_blob("JavaScript/parser.js").generated?

    # Generated PostScript
    assert !sample_blob("PostScript/sierpinski.ps").generated?

    # These examples are too basic to tell
    assert !sample_blob("JavaScript/hello.js").generated?

    assert sample_blob("JavaScript/intro-old.js").generated?
    assert sample_blob("JavaScript/classes-old.js").generated?

    assert sample_blob("JavaScript/intro.js").generated?
    assert sample_blob("JavaScript/classes.js").generated?

    # Protocol Buffer generated code
    assert sample_blob("C++/protocol-buffer.pb.h").generated?
    assert sample_blob("C++/protocol-buffer.pb.cc").generated?
    assert sample_blob("Java/ProtocolBuffer.java").generated?
    assert sample_blob("Python/protocol_buffer_pb2.py").generated?
    assert sample_blob("Go/api.pb.go").generated?
    assert sample_blob("Go/embedded.go").generated?

    # Apache Thrift generated code
    assert sample_blob("Python/gen-py-linguist-thrift.py").generated?
    assert sample_blob("Go/gen-go-linguist-thrift.go").generated?
    assert sample_blob("Java/gen-java-linguist-thrift.java").generated?
    assert sample_blob("JavaScript/gen-js-linguist-thrift.js").generated?
    assert sample_blob("Ruby/gen-rb-linguist-thrift.rb").generated?
    assert sample_blob("Objective-C/gen-cocoa-linguist-thrift.m").generated?

    # Generated JNI
    assert sample_blob("C/jni_layer.h").generated?

    # Minified CSS
    assert !sample_blob("CSS/bootstrap.css").generated?
    assert sample_blob("CSS/bootstrap.min.css").generated?

    # Generated VCR
    assert sample_blob("YAML/vcr_cassette.yml").generated?

    # Generated by Zephir
    assert sample_blob("Zephir/filenames/exception.zep.c").generated?
    assert sample_blob("Zephir/filenames/exception.zep.h").generated?
    assert sample_blob("Zephir/filenames/exception.zep.php").generated?
    assert !sample_blob("Zephir/Router.zep").generated?

    assert sample_blob("node_modules/grunt/lib/grunt.js").generated?

    # Godep saved dependencies
    assert sample_blob("Godeps/Godeps.json").generated?
    assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").generated?

    # Cython-generated C/C++
    assert sample_blob("C/sgd_fast.c").generated?
    assert sample_blob("C++/wrapper_inner.cpp").generated?

    # Unity3D-generated metadata
    assert sample_blob("Unity3D Asset/Tiles.meta").generated?
  end
  def test_vendored
    assert !fixture_blob("Data/README").vendored?
    assert !sample_blob("ext/extconf.rb").vendored?

    # Dependencies
    assert sample_blob("dependencies/windows/headers/GL/glext.h").vendored?

    # Node dependencies
    assert sample_blob("node_modules/coffee-script/lib/coffee-script.js").vendored?

    # Bower Components
    assert sample_blob("bower_components/custom/custom.js").vendored?
    assert sample_blob("app/bower_components/custom/custom.js").vendored?
    assert sample_blob("vendor/assets/bower_components/custom/custom.js").vendored?

    # Go dependencies
    assert !sample_blob("Godeps/Godeps.json").vendored?
    assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").vendored?

    # Rails vendor/
    assert sample_blob("vendor/plugins/will_paginate/lib/will_paginate.rb").vendored?

    # Vendor/
    assert sample_blob("Vendor/my_great_file.h").vendored?

    # 'thirdparty' directory
    assert sample_blob("thirdparty/lib/main.c").vendored?

    # 'extern(al)' directory
    assert sample_blob("extern/util/__init__.py").vendored?
    assert sample_blob("external/jquery.min.js").vendored?

    # C deps
    assert sample_blob("deps/http_parser/http_parser.c").vendored?
    assert sample_blob("deps/v8/src/v8.h").vendored?

    assert sample_blob("tools/something/else.c").vendored?

    # Chart.js
    assert sample_blob("some/vendored/path/Chart.js").vendored?
    assert !sample_blob("some/vendored/path/chart.js").vendored?

    # Codemirror deps
    assert sample_blob("codemirror/mode/blah.js").vendored?
    assert sample_blob("codemirror/5.0/mode/blah.js").vendored?

    # Debian packaging
    assert sample_blob("debian/cron.d").vendored?

    # Erlang
    assert sample_blob("rebar").vendored?

    # git config files

    assert_predicate fixture_blob("some/path/.gitattributes"), :vendored?
    assert_predicate fixture_blob(".gitignore"), :vendored?
    assert_predicate fixture_blob("special/path/.gitmodules"), :vendored?

    # Minified JavaScript and CSS
    assert sample_blob("foo.min.js").vendored?
    assert sample_blob("foo.min.css").vendored?
    assert sample_blob("foo-min.js").vendored?
    assert sample_blob("foo-min.css").vendored?
    assert !sample_blob("foomin.css").vendored?
    assert !sample_blob("foo.min.txt").vendored?

    #.osx
    assert sample_blob(".osx").vendored?

    # Prototype
    assert !sample_blob("public/javascripts/application.js").vendored?
    assert sample_blob("public/javascripts/prototype.js").vendored?
    assert sample_blob("public/javascripts/effects.js").vendored?
    assert sample_blob("public/javascripts/controls.js").vendored?
    assert sample_blob("public/javascripts/dragdrop.js").vendored?

    # jQuery
    assert sample_blob("jquery.js").vendored?
    assert sample_blob("public/javascripts/jquery.js").vendored?
    assert sample_blob("public/javascripts/jquery.min.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.7.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.7.min.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.5.2.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.6.1.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.6.1.min.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.10.1.js").vendored?
    assert sample_blob("public/javascripts/jquery-1.10.1.min.js").vendored?
    assert !sample_blob("public/javascripts/jquery.github.menu.js").vendored?

    # jQuery UI
    assert sample_blob("themes/ui-lightness/jquery-ui.css").vendored?
    assert sample_blob("themes/ui-lightness/jquery-ui-1.8.22.custom.css").vendored?
    assert sample_blob("themes/ui-lightness/jquery.ui.accordion.css").vendored?
    assert sample_blob("ui/i18n/jquery.ui.datepicker-ar.js").vendored?
    assert sample_blob("ui/i18n/jquery-ui-i18n.js").vendored?
    assert sample_blob("ui/jquery.effects.blind.js").vendored?
    assert sample_blob("ui/jquery-ui-1.8.22.custom.js").vendored?
    assert sample_blob("ui/jquery-ui-1.8.22.custom.min.js").vendored?
    assert sample_blob("ui/jquery-ui-1.8.22.js").vendored?
    assert sample_blob("ui/jquery-ui-1.8.js").vendored?
    assert sample_blob("ui/jquery-ui.min.js").vendored?
    assert sample_blob("ui/jquery.ui.accordion.js").vendored?
    assert sample_blob("ui/minified/jquery.effects.blind.min.js").vendored?
    assert sample_blob("ui/minified/jquery.ui.accordion.min.js").vendored?

    # jQuery Gantt
    assert sample_blob("web-app/jquery-gantt/js/jquery.fn.gantt.js").vendored?

    # jQuery fancyBox
    assert sample_blob("web-app/fancybox/jquery.fancybox.js").vendored?

    # Fuel UX
    assert sample_blob("web-app/fuelux/js/fuelux.js").vendored?

    # jQuery File Upload
    assert sample_blob("fileupload-9.0.0/jquery.fileupload-process.js").vendored?

    # Slick
    assert sample_blob("web-app/slickgrid/controls/slick.columnpicker.js").vendored?

    # Leaflet plugins
    assert sample_blob("leaflet-plugins/Leaflet.Coordinates-0.5.0.src.js").vendored?
    assert sample_blob("leaflet-plugins/leaflet.draw-src.js").vendored?
    assert sample_blob("leaflet-plugins/leaflet.spin.js").vendored?

    # MooTools
    assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat.js").vendored?
    assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat-yc.js").vendored?

    # Dojo
    assert sample_blob("public/javascripts/dojo.js").vendored?

    # MochiKit
    assert sample_blob("public/javascripts/MochiKit.js").vendored?

    # YUI
    assert sample_blob("public/javascripts/yahoo-dom-event.js").vendored?
    assert sample_blob("public/javascripts/yahoo-min.js").vendored?
    assert sample_blob("public/javascripts/yuiloader-dom-event.js").vendored?

    # WYS editors
    assert sample_blob("public/javascripts/ckeditor.js").vendored?
    assert sample_blob("public/javascripts/tiny_mce.js").vendored?
    assert sample_blob("public/javascripts/tiny_mce_popup.js").vendored?
    assert sample_blob("public/javascripts/tiny_mce_src.js").vendored?

    # AngularJS
    assert sample_blob("public/javascripts/angular.js").vendored?
    assert sample_blob("public/javascripts/angular.min.js").vendored?

    # D3.js
    assert sample_blob("public/javascripts/d3.v3.js").vendored?
    assert sample_blob("public/javascripts/d3.v3.min.js").vendored?

    # Modernizr
    assert sample_blob("public/javascripts/modernizr-2.7.1.js").vendored?
    assert sample_blob("public/javascripts/modernizr.custom.01009.js").vendored?

    # Fabric
    assert sample_blob("fabfile.py").vendored?

    # WAF
    assert sample_blob("waf").vendored?

    # Visual Studio IntelliSense
    assert sample_blob("Scripts/jquery-1.7-vsdoc.js").vendored?

    # Microsoft Ajax
    assert sample_blob("Scripts/MicrosoftAjax.debug.js").vendored?
    assert sample_blob("Scripts/MicrosoftAjax.js").vendored?
    assert sample_blob("Scripts/MicrosoftMvcAjax.debug.js").vendored?
    assert sample_blob("Scripts/MicrosoftMvcAjax.js").vendored?
    assert sample_blob("Scripts/MicrosoftMvcValidation.debug.js").vendored?
    assert sample_blob("Scripts/MicrosoftMvcValidation.js").vendored?

    # jQuery validation plugin (MS bundles this with asp.net mvc)
    assert sample_blob("Scripts/jquery.validate.js").vendored?
    assert sample_blob("Scripts/jquery.validate.min.js").vendored?
    assert sample_blob("Scripts/jquery.validate.unobtrusive.js").vendored?
    assert sample_blob("Scripts/jquery.validate.unobtrusive.min.js").vendored?
    assert sample_blob("Scripts/jquery.unobtrusive-ajax.js").vendored?
    assert sample_blob("Scripts/jquery.unobtrusive-ajax.min.js").vendored?

    # NuGet Packages
    assert sample_blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?

    # Font Awesome
    assert sample_blob("some/asset/path/font-awesome.min.css").vendored?
    assert sample_blob("some/asset/path/font-awesome.css").vendored?

    # Normalize
    assert sample_blob("some/asset/path/normalize.css").vendored?

    # Carthage
    assert sample_blob('Carthage/blah').vendored?

    # Cocoapods
    assert sample_blob('Pods/blah').vendored?

    # Html5shiv
    assert sample_blob("Scripts/html5shiv.js").vendored?
    assert sample_blob("Scripts/html5shiv.min.js").vendored?

    # Test fixtures
    assert sample_blob("test/fixtures/random.rkt").vendored?
    assert sample_blob("Test/fixtures/random.rkt").vendored?
    assert sample_blob("tests/fixtures/random.rkt").vendored?

    # Cordova/PhoneGap
    assert sample_blob("cordova.js").vendored?
    assert sample_blob("cordova.min.js").vendored?
    assert sample_blob("cordova-2.1.0.js").vendored?
    assert sample_blob("cordova-2.1.0.min.js").vendored?

    # Foundation js
    assert sample_blob("foundation.js").vendored?
    assert sample_blob("foundation.min.js").vendored?
    assert sample_blob("foundation.abide.js").vendored?

    # Vagrant
    assert sample_blob("Vagrantfile").vendored?

    # Gradle
    assert sample_blob("gradlew").vendored?
    assert sample_blob("gradlew.bat").vendored?
    assert sample_blob("gradle/wrapper/gradle-wrapper.properties").vendored?
    assert sample_blob("subproject/gradlew").vendored?
    assert sample_blob("subproject/gradlew.bat").vendored?
    assert sample_blob("subproject/gradle/wrapper/gradle-wrapper.properties").vendored?

    # Octicons
    assert sample_blob("octicons.css").vendored?
    assert sample_blob("public/octicons.min.css").vendored?
    assert sample_blob("public/octicons/sprockets-octicons.scss").vendored?

    # Typesafe Activator
    assert sample_blob("activator").vendored?
    assert sample_blob("activator.bat").vendored?
    assert sample_blob("subproject/activator").vendored?
    assert sample_blob("subproject/activator.bat").vendored?

    assert_predicate fixture_blob(".google_apis/bar.jar"), :vendored?
    assert_predicate fixture_blob("foo/.google_apis/bar.jar"), :vendored?

    # Sphinx docs
    assert sample_blob("docs/_build/asset.doc").vendored?
    assert sample_blob("docs/theme/file.css").vendored?

    # Vagrant
    assert sample_blob("puphpet/file.pp").vendored?

    # Fabric.io
    assert sample_blob("Fabric.framework/Fabric.h").vendored?

    # Crashlytics
    assert sample_blob("Crashlytics.framework/Crashlytics.h").vendored?
    assert sample_blob("myapp/My Template.xctemplate/___FILEBASENAME___.h").vendored?
    assert sample_blob("myapp/My Images.xcassets/some/stuff.imageset/Contents.json").vendored?
    assert !sample_blob("myapp/MyData.json").vendored?
  end
  def test_documentation
    assert_predicate fixture_blob("doc/foo.html"), :documentation?
    assert_predicate fixture_blob("docs/foo.html"), :documentation?
    refute_predicate fixture_blob("project/doc/foo.html"), :documentation?
    refute_predicate fixture_blob("project/docs/foo.html"), :documentation?

    assert_predicate fixture_blob("Documentation/foo.md"), :documentation?
    assert_predicate fixture_blob("documentation/foo.md"), :documentation?
    assert_predicate fixture_blob("project/Documentation/foo.md"), :documentation?
    assert_predicate fixture_blob("project/documentation/foo.md"), :documentation?

    assert_predicate fixture_blob("javadoc/foo.html"), :documentation?
    assert_predicate fixture_blob("project/javadoc/foo.html"), :documentation?

    assert_predicate fixture_blob("man/foo.html"), :documentation?
    refute_predicate fixture_blob("project/man/foo.html"), :documentation?

    assert_predicate fixture_blob("README"), :documentation?
    assert_predicate fixture_blob("README.md"), :documentation?
    assert_predicate fixture_blob("README.txt"), :documentation?
    assert_predicate fixture_blob("Readme"), :documentation?
    assert_predicate fixture_blob("readme"), :documentation?
    assert_predicate fixture_blob("foo/README"), :documentation?

    assert_predicate fixture_blob("CHANGE"), :documentation?
    assert_predicate fixture_blob("CHANGE.md"), :documentation?
    assert_predicate fixture_blob("CHANGE.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGE"), :documentation?

    assert_predicate fixture_blob("CHANGELOG"), :documentation?
    assert_predicate fixture_blob("CHANGELOG.md"), :documentation?
    assert_predicate fixture_blob("CHANGELOG.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGELOG"), :documentation?

    assert_predicate fixture_blob("CHANGES"), :documentation?
    assert_predicate fixture_blob("CHANGES.md"), :documentation?
    assert_predicate fixture_blob("CHANGES.txt"), :documentation?
    assert_predicate fixture_blob("foo/CHANGES"), :documentation?

    assert_predicate fixture_blob("CONTRIBUTING"), :documentation?
    assert_predicate fixture_blob("CONTRIBUTING.md"), :documentation?
    assert_predicate fixture_blob("CONTRIBUTING.txt"), :documentation?
    assert_predicate fixture_blob("foo/CONTRIBUTING"), :documentation?

    assert_predicate fixture_blob("examples/some-file.pl"), :documentation?
    assert_predicate fixture_blob("Examples/some-example-file.rb"), :documentation?

    assert_predicate fixture_blob("LICENSE"), :documentation?
    assert_predicate fixture_blob("LICENCE.md"), :documentation?
    assert_predicate fixture_blob("License.txt"), :documentation?
    assert_predicate fixture_blob("LICENSE.txt"), :documentation?
    assert_predicate fixture_blob("foo/LICENSE"), :documentation?

    assert_predicate fixture_blob("COPYING"), :documentation?
    assert_predicate fixture_blob("COPYING.md"), :documentation?
    assert_predicate fixture_blob("COPYING.txt"), :documentation?
    assert_predicate fixture_blob("foo/COPYING"), :documentation?

    assert_predicate fixture_blob("INSTALL"), :documentation?
    assert_predicate fixture_blob("INSTALL.md"), :documentation?
    assert_predicate fixture_blob("INSTALL.txt"), :documentation?
    assert_predicate fixture_blob("foo/INSTALL"), :documentation?

    refute_predicate fixture_blob("foo.md"), :documentation?

    # Samples
    assert sample_blob("Samples/Ruby/foo.rb").documentation?

    assert_predicate fixture_blob("INSTALL.txt"), :documentation?
  end
  def test_language
    Samples.each do |sample|
      blob = sample_blob(sample[:path])
      assert blob.language, "No language for #{sample[:path]}"
      assert_equal sample[:language], blob.language.name, blob.name
    end

    # Test language detection for files which shouldn't be used as samples
    root = File.expand_path('../fixtures', __FILE__)
    Dir.entries(root).each do |language|
      next if language == '.' || language == '..' || language == 'Binary' ||
        File.basename(language) == 'ace_modes.json'

      # Each directory contains test files of a language
      dirname = File.join(root, language)
      Dir.entries(dirname).each do |filename|
        # By default blob search the file in the samples;
        # thus, we need to give it the absolute path
        filepath = File.join(dirname, filename)
        next unless File.file?(filepath)

        blob = fixture_blob(filepath)
        if language == 'Data'
          assert blob.language.nil?, "A language was found for #{filepath}"
        elsif language == 'Generated'
          assert blob.generated?, "#{filepath} is not a generated file"
        else
          assert blob.language, "No language for #{filepath}"
          assert_equal language, blob.language.name, blob.name
        end
      end
    end
  end

  def test_minified_files_not_safe_to_highlight
    assert !sample_blob("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
  end

  def test_empty
    blob = Struct.new(:data) { include Linguist::BlobHelper }

    assert blob.new("").empty?
    assert blob.new(nil).empty?
    refute blob.new(" ").empty?
    refute blob.new("nope").empty?
  end

  def test_include_in_language_stats
    vendored = sample_blob("bower_components/custom/custom.js")
    assert_predicate vendored, :vendored?
    refute_predicate vendored, :include_in_language_stats?

    documentation = fixture_blob("README")
    assert_predicate documentation, :documentation?
    refute_predicate documentation, :include_in_language_stats?

    generated = sample_blob("CSS/bootstrap.min.css")
    assert_predicate generated, :generated?
    refute_predicate generated, :include_in_language_stats?

    data = sample_blob("Ant Build System/filenames/ant.xml")
    assert_equal :data, data.language.type
    refute_predicate data, :include_in_language_stats?

    prose = sample_blob("Markdown/tender.md")
    assert_equal :prose, prose.language.type
    refute_predicate prose, :include_in_language_stats?

    included = sample_blob("HTML/pages.html")
    assert_predicate included, :include_in_language_stats?
  end
end
@@ -3,7 +3,7 @@ require_relative "./helper"
class TestGrammars < Minitest::Test
  ROOT = File.expand_path("../..", __FILE__)

  LICENSE_WHITELIST = [
  PROJECT_WHITELIST = [
    # This grammar's MIT license is inside a subdirectory.
    "vendor/grammars/SublimePapyrus",

@@ -16,6 +16,23 @@ class TestGrammars < Minitest::Test
    "vendor/grammars/x86-assembly-textmate-bundle"
  ].freeze

  # List of allowed SPDX license names
  LICENSE_WHITELIST = %w[
    apache-2.0
    bsd-2-clause
    bsd-3-clause
    cc-by-sa-3.0
    gpl-2.0
    gpl-3.0
    lgpl-3.0
    mit
    mpl-2.0
    textmate
    unlicense
    wtfpl
    zlib
  ].freeze

  def setup
    @grammars = YAML.load(File.read(File.join(ROOT, "grammars.yml")))
  end
@@ -62,48 +79,38 @@ class TestGrammars < Minitest::Test
    end
  end

  def test_submodules_have_recognized_licenses
    unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::FSProject.new(k).license_file }
    unrecognized.reject! { |k,v| PROJECT_WHITELIST.include?(k) }
    message = "The following submodules have unrecognized licenses:\n* #{unrecognized.keys.join("\n* ")}\n"
    message << "Please ensure that the project's LICENSE file contains the full text of the license."
    assert_equal Hash.new, unrecognized, message
  end

  def test_submodules_have_licenses
    categories = submodule_paths.group_by do |submodule|
      files = Dir[File.join(ROOT, submodule, "*")]
      license = files.find { |path| File.basename(path) =~ /\b(un)?licen[cs]e\b/i } || files.find { |path| File.basename(path) =~ /\bcopying\b/i }
      if license.nil?
        if readme = files.find { |path| File.basename(path) =~ /\Areadme\b/i }
          license = readme if File.read(readme) =~ /\blicen[cs]e\b/i
        end
      end
      if license.nil?
        :unlicensed
      elsif classify_license(license)
        :licensed
      else
        :unrecognized
      end
    end
    unlicensed = submodule_licenses.select { |k,v| v.nil? }.reject { |k,v| PROJECT_WHITELIST.include?(k) }
    message = "The following submodules don't have licenses:\n* #{unlicensed.keys.join("\n* ")}\n"
    message << "Please ensure that the project has a LICENSE file, and that the LICENSE file contains the full text of the license."
    assert_equal Hash.new, unlicensed, message
  end

    unlicensed = categories[:unlicensed] || []
    unrecognized = categories[:unrecognized] || []
    disallowed_unlicensed = unlicensed - LICENSE_WHITELIST
    disallowed_unrecognized = unrecognized - LICENSE_WHITELIST
    extra_whitelist_entries = LICENSE_WHITELIST - (unlicensed | unrecognized)
  def test_submodules_have_approved_licenses
    unapproved = submodule_licenses.reject { |k,v| LICENSE_WHITELIST.include?(v) || PROJECT_WHITELIST.include?(k) }.map { |k,v| "#{k}: #{v}"}
    message = "The following submodules have unapproved licenses:\n* #{unapproved.join("\n* ")}\n"
    message << "The license must be added to the LICENSE_WHITELIST in /test/test_grammars.rb once approved."
    assert_equal [], unapproved, message
  end

    message = ""
    if disallowed_unlicensed.any?
      message << "The following grammar submodules don't seem to have a license. All grammars must have a license that permits redistribution.\n"
      message << disallowed_unlicensed.sort.join("\n")
    end
    if disallowed_unrecognized.any?
      message << "\n\n" unless message.empty?
      message << "The following grammar submodules have an unrecognized license. Please update #{__FILE__} to recognize the license.\n"
      message << disallowed_unrecognized.sort.join("\n")
    end
    if extra_whitelist_entries.any?
      message << "\n\n" unless message.empty?
      message << "The following grammar submodules are listed in LICENSE_WHITELIST but either have a license (yay!)\n"
      message << "or have been removed from the repository. Please remove them from the whitelist.\n"
      message << extra_whitelist_entries.sort.join("\n")
    end
  def test_submodules_whitelist_has_no_extra_entries
    extra_whitelist_entries = PROJECT_WHITELIST - submodule_licenses.select { |k,v| v.nil? }.keys
    not_present = extra_whitelist_entries.reject { |k,v| Dir.exists?(k) }
    licensed = extra_whitelist_entries.select { |k,v| submodule_licenses[k] }

    assert disallowed_unlicensed.empty? && disallowed_unrecognized.empty? && extra_whitelist_entries.empty?, message
    msg = "The following whitelisted submodules don't appear to be part of the project:\n* #{not_present.join("\n* ")}"
    assert_equal [], not_present, msg

    msg = "The following whitelisted submodules actually have licenses and don't need to be whitelisted:\n* #{licensed.join("\n* ")}"
    assert_equal [], licensed, msg
  end

  private
@@ -112,30 +119,57 @@ class TestGrammars < Minitest::Test
    @submodule_paths ||= `git config --list --file "#{File.join(ROOT, ".gitmodules")}"`.lines.grep(/\.path=/).map { |line| line.chomp.split("=", 2).last }
  end

  # Returns a hash of submodules in the form of submodule_path => license
  def submodule_licenses
    @@submodule_licenses ||= begin
      submodules = {}
      submodule_paths.each { |submodule| submodules[submodule] = submodule_license(submodule) }
      submodules
    end
  end

  # Given the path to a submodule, return its SPDX-compliant license key
  def submodule_license(submodule)
    # Prefer Licensee to detect a submodule's license
    project = Licensee::FSProject.new(submodule)
    return project.license.key if project.license

    # We know a license file exists, but Licensee wasn't able to detect the license,
    # Let's try our own more permissive regex method
    if project.license_file
      path = File.expand_path project.license_file.path, submodule
      license = classify_license(path)
      return license if license
    end

    # Neither Licensee nor our own regex was able to detect the license, let's check the readme
    files = Dir[File.join(ROOT, submodule, "*")]
    if readme = files.find { |path| File.basename(path) =~ /\Areadme\b/i }
      classify_license(readme)
    end
  end

  def classify_license(path)
    content = File.read(path)
    return unless content =~ /\blicen[cs]e\b/i
    if content.include?("Apache License") && content.include?("2.0")
      "Apache 2.0"
      "apache-2.0"
    elsif content.include?("GNU") && content =~ /general/i && content =~ /public/i
      if content =~ /version 2/i
        "GPLv2"
        "gpl-2.0"
      elsif content =~ /version 3/i
        "GPLv3"
        "gpl-3.0"
      end
    elsif content.include?("GPL") && content.include?("http://www.gnu.org/licenses/gpl.html")
      "GPLv3"
    elsif content.include?("Creative Commons")
      "CC"
      "gpl-3.0"
    elsif content.include?("Creative Commons Attribution-Share Alike 3.0")
      "cc-by-sa-3.0"
    elsif content.include?("tidy-license.txt") || content.include?("If not otherwise specified (see below)")
      "textmate"
    elsif content =~ /^\s*[*-]\s+Redistribution/ || content.include?("Redistributions of source code")
      "BSD"
    elsif content.include?("Permission is hereby granted") || content =~ /\bMIT\b/
      "MIT"
    elsif content.include?("unlicense.org")
      "unlicense"
      "mit"
    elsif content.include?("http://www.wtfpl.net/txt/copying/")
      "WTFPL"
      "wtfpl"
    elsif content.include?("zlib") && content.include?("license") && content.include?("2. Altered source versions must be plainly marked as such")
      "zlib"
    end
@@ -33,6 +33,101 @@ class TestHeuristcs < Minitest::Test
    end
  end

  def test_detect_still_works_if_nothing_matches
    blob = Linguist::FileBlob.new(File.join(samples_path, "Objective-C/hello.m"))
    match = Language.detect(blob)
    assert_equal Language["Objective-C"], match
  end

  # Candidate languages = ["AGS Script", "AsciiDoc", "Public Key"]
  def test_asc_by_heuristics
    assert_heuristics({
      "AsciiDoc" => all_fixtures("AsciiDoc", "*.asc"),
      "AGS Script" => all_fixtures("AGS Script", "*.asc"),
      "Public Key" => all_fixtures("Public Key", "*.asc")
    })
  end

  def test_bb_by_heuristics
    assert_heuristics({
      "BitBake" => all_fixtures("BitBake", "*.bb"),
      "BlitzBasic" => all_fixtures("BlitzBasic", "*.bb")
    })
  end

  def test_ch_by_heuristics
    assert_heuristics({
      "xBase" => all_fixtures("xBase", ".ch")
    })
  end

  def test_cl_by_heuristics
    assert_heuristics({
      "Common Lisp" => all_fixtures("Common Lisp", "*.cl"),
      "OpenCL" => all_fixtures("OpenCL", "*.cl")
    })
  end

  def test_cs_by_heuristics
    assert_heuristics({
      "C#" => all_fixtures("C#", "*.cs"),
      "Smalltalk" => all_fixtures("Smalltalk", "*.cs")
    })
  end

  # Candidate languages = ["ECL", "ECLiPSe"]
  def test_ecl_by_heuristics
    assert_heuristics({
      "ECL" => all_fixtures("ECL", "*.ecl"),
      "ECLiPSe" => all_fixtures("ECLiPSe", "*.ecl")
    })
  end

  def test_f_by_heuristics
    assert_heuristics({
      "FORTRAN" => all_fixtures("FORTRAN", "*.f") + all_fixtures("FORTRAN", "*.for"),
      "Forth" => all_fixtures("Forth", "*.f") + all_fixtures("Forth", "*.for")
    })
  end

  def test_fr_by_heuristics
    assert_heuristics({
      "Frege" => all_fixtures("Frege", "*.fr"),
      "Forth" => all_fixtures("Forth", "*.fr"),
      "Text" => all_fixtures("Text", "*.fr")
    })
  end

  def test_fs_by_heuristics
    assert_heuristics({
      "F#" => all_fixtures("F#", "*.fs"),
      "Forth" => all_fixtures("Forth", "*.fs"),
      "GLSL" => all_fixtures("GLSL", "*.fs")
    })
  end

  # Candidate languages = ["Hack", "PHP"]
  def test_hack_by_heuristics
    assert_heuristics({
      "Hack" => all_fixtures("Hack", "*.php"),
      "PHP" => all_fixtures("PHP", "*.php")
    })
  end

  def test_ls_by_heuristics
    assert_heuristics({
      "LiveScript" => all_fixtures("LiveScript", "*.ls"),
      "LoomScript" => all_fixtures("LoomScript", "*.ls")
    })
  end

  def test_lsp_by_heuristics
    assert_heuristics({
      "Common Lisp" => all_fixtures("Common Lisp", "*.lsp") + all_fixtures("Common Lisp", "*.lisp"),
      "NewLisp" => all_fixtures("NewLisp", "*.lsp") + all_fixtures("NewLisp", "*.lisp")
    })
  end
  # Candidate languages = ["C++", "Objective-C"]
  def test_obj_c_by_heuristics
    # Only calling out '.h' filenames as these are the ones causing issues
@@ -43,12 +138,6 @@ class TestHeuristcs < Minitest::Test
    })
  end

  def test_detect_still_works_if_nothing_matches
    blob = Linguist::FileBlob.new(File.join(samples_path, "Objective-C/hello.m"))
    match = Language.detect(blob)
    assert_equal Language["Objective-C"], match
  end

  # Candidate languages = ["Perl", "Perl6", "Prolog"]
  def test_pl_prolog_perl_by_heuristics
    assert_heuristics({
@@ -66,14 +155,6 @@ class TestHeuristcs < Minitest::Test
    })
  end

  # Candidate languages = ["ECL", "Prolog"]
  def test_ecl_prolog_by_heuristics
    assert_heuristics({
      "ECL" => all_fixtures("ECL", "*.ecl"),
      "Prolog" => all_fixtures("Prolog", "*.ecl")
    })
  end

  # Candidate languages = ["IDL", "Prolog", "QMake", "INI"]
  def test_pro_by_heuristics
    assert_heuristics({
@@ -84,34 +165,10 @@ class TestHeuristcs < Minitest::Test
    })
  end

  # Candidate languages = ["AGS Script", "AsciiDoc", "Public Key"]
  def test_asc_by_heuristics
  def test_r_by_heuristics
    assert_heuristics({
      "AsciiDoc" => all_fixtures("AsciiDoc", "*.asc"),
      "AGS Script" => all_fixtures("AGS Script", "*.asc"),
      "Public Key" => all_fixtures("Public Key", "*.asc")
    })
  end

  def test_cl_by_heuristics
    assert_heuristics({
      "Common Lisp" => all_fixtures("Common Lisp", "*.cl"),
      "OpenCL" => all_fixtures("OpenCL", "*.cl")
    })
  end

  def test_f_by_heuristics
    assert_heuristics({
      "FORTRAN" => all_fixtures("FORTRAN", "*.f") + all_fixtures("FORTRAN", "*.for"),
      "Forth" => all_fixtures("Forth", "*.f") + all_fixtures("Forth", "*.for")
    })
  end

  # Candidate languages = ["Hack", "PHP"]
  def test_hack_by_heuristics
    assert_heuristics({
      "Hack" => all_fixtures("Hack", "*.php"),
      "PHP" => all_fixtures("PHP", "*.php")
      "R" => all_fixtures("R", "*.r") + all_fixtures("R", "*.R"),
      "Rebol" => all_fixtures("Rebol", "*.r")
    })
  end

@@ -123,47 +180,13 @@ class TestHeuristcs < Minitest::Test
    })
  end

  def test_fs_by_heuristics
  # Candidate languages = ["Perl", "Perl6"]
  def test_t_perl_by_heuristics
    assert_heuristics({
      "F#" => all_fixtures("F#", "*.fs"),
      "Forth" => all_fixtures("Forth", "*.fs"),
      "GLSL" => all_fixtures("GLSL", "*.fs")
    })
  end

  def test_fr_by_heuristics
    assert_heuristics({
      "Frege" => all_fixtures("Frege", "*.fr"),
      "Forth" => all_fixtures("Forth", "*.fr"),
      "Text" => all_fixtures("Text", "*.fr")
    })
  end

  def test_bb_by_heuristics
    assert_heuristics({
      "BitBake" => all_fixtures("BitBake", "*.bb"),
      "BlitzBasic" => all_fixtures("BlitzBasic", "*.bb")
    })
  end

  def test_lsp_by_heuristics
    assert_heuristics({
      "Common Lisp" => all_fixtures("Common Lisp", "*.lsp") + all_fixtures("Common Lisp", "*.lisp"),
      "NewLisp" => all_fixtures("NewLisp", "*.lsp") + all_fixtures("NewLisp", "*.lisp")
    })
  end

  def test_cs_by_heuristics
    assert_heuristics({
      "C#" => all_fixtures("C#", "*.cs"),
      "Smalltalk" => all_fixtures("Smalltalk", "*.cs")
    })
  end

  def test_ls_by_heuristics
    assert_heuristics({
      "LiveScript" => all_fixtures("LiveScript", "*.ls"),
      "LoomScript" => all_fixtures("LoomScript", "*.ls")
      "Perl" => all_fixtures("Perl", "*.t"),
      "Perl6" => ["Perl6/01-dash-uppercase-i.t", "Perl6/01-parse.t", "Perl6/advent2009-day16.t",
                  "Perl6/basic-open.t", "Perl6/calendar.t", "Perl6/for.t", "Perl6/hash.t",
                  "Perl6/listquote-whitespace.t"]
    })
  end

@@ -173,17 +196,4 @@ class TestHeuristcs < Minitest::Test
      "XML" => all_fixtures("XML", "*.ts")
    })
  end

  def test_ch_by_heuristics
    assert_heuristics({
      "xBase" => all_fixtures("xBase", ".ch")
    })
  end

  def test_r_by_heuristics
    assert_heuristics({
      "R" => all_fixtures("R", "*.r") + all_fixtures("R", "*.R"),
      "Rebol" => all_fixtures("Rebol", "*.r")
    })
  end
end
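The heuristics tests above rely on two helpers, assert_heuristics and all_fixtures, which live in the shared test helper rather than in this diff. As a rough, hypothetical sketch only (the project's actual helper may be written quite differently), such an assertion could be built on the same Language.detect entry point already used in test_detect_still_works_if_nothing_matches:

  # Hypothetical sketch, not the project's actual helper.
  # For each expected language, detect every fixture file and assert the winner.
  def assert_heuristics(expectations)
    expectations.each do |expected, fixtures|
      Array(fixtures).each do |path|
        blob = Linguist::FileBlob.new(path)
        detected = Linguist::Language.detect(blob)
        assert_equal Linguist::Language[expected], detected, "Expected #{expected} for #{path}"
      end
    end
  end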
Some files were not shown because too many files have changed in this diff.