mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Compare commits
350 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0c071990cb | ||
|
|
77dfb19a50 | ||
|
|
49254f1f74 | ||
|
|
9dd952c175 | ||
|
|
0b9897db1f | ||
|
|
470bd34349 | ||
|
|
65087dd7b8 | ||
|
|
89c5361f25 | ||
|
|
f82cc26e4f | ||
|
|
1b2f574af2 | ||
|
|
ca4bc6110f | ||
|
|
a944769d61 | ||
|
|
98a31515ef | ||
|
|
3e665099ac | ||
|
|
5400b534e4 | ||
|
|
6f2d4dc195 | ||
|
|
4f7fac3ba1 | ||
|
|
fd6569830a | ||
|
|
5d4cad6394 | ||
|
|
b790a49282 | ||
|
|
537b83c759 | ||
|
|
c0b9e2c3f4 | ||
|
|
f488b9b9f7 | ||
|
|
7060b116f4 | ||
|
|
0f4cf11294 | ||
|
|
048496723b | ||
|
|
70068f74f1 | ||
|
|
6f197bacc7 | ||
|
|
24a89d2d75 | ||
|
|
1ac16cbec7 | ||
|
|
8144438f39 | ||
|
|
f141abbc73 | ||
|
|
5329b96793 | ||
|
|
7a70931066 | ||
|
|
9a070d7bb3 | ||
|
|
189b2d684b | ||
|
|
71dfac26fe | ||
|
|
efdc790374 | ||
|
|
a3613dc438 | ||
|
|
97afedd861 | ||
|
|
00a436f175 | ||
|
|
aa2e3000cd | ||
|
|
10e0fa4360 | ||
|
|
0473af368f | ||
|
|
b82f563c38 | ||
|
|
344297895b | ||
|
|
d3c525645b | ||
|
|
59d02e5138 | ||
|
|
8522dc1d33 | ||
|
|
53d238f310 | ||
|
|
92ed2d189e | ||
|
|
7133c3b11a | ||
|
|
d72114083b | ||
|
|
683925fcd5 | ||
|
|
1c8bf32d35 | ||
|
|
28913833f4 | ||
|
|
c6752be546 | ||
|
|
395e474cad | ||
|
|
16ea189aa6 | ||
|
|
ac81fc5da9 | ||
|
|
58191c10b3 | ||
|
|
c2ca23d580 | ||
|
|
b5dfb40c7d | ||
|
|
7e647fd915 | ||
|
|
5b9f9bc0e6 | ||
|
|
b1c057fa30 | ||
|
|
ca348dd373 | ||
|
|
b802045c5c | ||
|
|
3c244a9501 | ||
|
|
832a7b9b06 | ||
|
|
e7d856345f | ||
|
|
ac559effaa | ||
|
|
95d0daba80 | ||
|
|
a0ad1523a1 | ||
|
|
06c049b8c0 | ||
|
|
24c7380765 | ||
|
|
73ef1bf156 | ||
|
|
c8b30a62f9 | ||
|
|
48dfdd2dfe | ||
|
|
68727f724a | ||
|
|
f46e053633 | ||
|
|
d2e739ba8c | ||
|
|
37174e1d2c | ||
|
|
cdb5206def | ||
|
|
d636eaf1e3 | ||
|
|
49f3eb1286 | ||
|
|
8ab94a8643 | ||
|
|
f72c337c5b | ||
|
|
d22321de07 | ||
|
|
473e5db51f | ||
|
|
8b9fc4683a | ||
|
|
3b4415cc3c | ||
|
|
2afce1754a | ||
|
|
f232b93214 | ||
|
|
db64f192fa | ||
|
|
ca96ecdc55 | ||
|
|
2a06d1aa19 | ||
|
|
b2fa2a1f46 | ||
|
|
6839516b5c | ||
|
|
7247c80fac | ||
|
|
01d05d1d4e | ||
|
|
8db4cc482e | ||
|
|
074f17ed98 | ||
|
|
68b553ea55 | ||
|
|
fd5da9cb15 | ||
|
|
d081f687b0 | ||
|
|
34abe5b983 | ||
|
|
5765f1faf1 | ||
|
|
550b67215c | ||
|
|
d7b2826113 | ||
|
|
b4f94c7c25 | ||
|
|
d24677adbb | ||
|
|
04f981eeac | ||
|
|
b53d84d8ff | ||
|
|
fea8bb21a0 | ||
|
|
996ed8a8b1 | ||
|
|
daef164163 | ||
|
|
61a5cab1f2 | ||
|
|
2fddaaf3d7 | ||
|
|
741d246581 | ||
|
|
1b23e81541 | ||
|
|
362d300cb0 | ||
|
|
7b185cc2f3 | ||
|
|
932726863f | ||
|
|
a35a3e98ce | ||
|
|
7c404e72d2 | ||
|
|
8736e2305d | ||
|
|
a13664698f | ||
|
|
6e934067a8 | ||
|
|
3040642f97 | ||
|
|
dd7e44f957 | ||
|
|
6b56a243e7 | ||
|
|
5d08605aef | ||
|
|
bf705cbaf2 | ||
|
|
fe827896e0 | ||
|
|
317219e479 | ||
|
|
64e7df7596 | ||
|
|
b3ff84872b | ||
|
|
5d176a781c | ||
|
|
e0c97f97ba | ||
|
|
83f358976e | ||
|
|
9ee6153891 | ||
|
|
825e640061 | ||
|
|
e5ae213839 | ||
|
|
74e034c689 | ||
|
|
a55a60a161 | ||
|
|
9d865ec018 | ||
|
|
8b01e3dead | ||
|
|
9e3cc01715 | ||
|
|
0f204767a9 | ||
|
|
11e3251efd | ||
|
|
1f1416a5f7 | ||
|
|
b3786f3825 | ||
|
|
09c2eee91e | ||
|
|
dc78b14902 | ||
|
|
500ce0959a | ||
|
|
aa0c9e3572 | ||
|
|
e6de75d48a | ||
|
|
a5ad0a34f8 | ||
|
|
6e609cc4e3 | ||
|
|
27727a927f | ||
|
|
5ff580df0a | ||
|
|
b34acac722 | ||
|
|
37840856ed | ||
|
|
acfad4371f | ||
|
|
ae42cc0307 | ||
|
|
d06860df37 | ||
|
|
7d5d162f6b | ||
|
|
d39a75b68b | ||
|
|
d72f7311f6 | ||
|
|
3f81b7c179 | ||
|
|
33d1255a5a | ||
|
|
0ebea6a0ff | ||
|
|
c28da3a4a9 | ||
|
|
538f479b60 | ||
|
|
88cc73fa49 | ||
|
|
d6d368a65d | ||
|
|
c5be9cc3e9 | ||
|
|
c462c2bd31 | ||
|
|
3426165621 | ||
|
|
98b99e38bb | ||
|
|
d8e3bec499 | ||
|
|
7c759d4d29 | ||
|
|
41d438b47e | ||
|
|
41911d6921 | ||
|
|
dca18d77cb | ||
|
|
040af5dad2 | ||
|
|
01bb6c37ab | ||
|
|
c624d68628 | ||
|
|
4867c49bd9 | ||
|
|
a354eddf4b | ||
|
|
9b78c533a5 | ||
|
|
090ea576b9 | ||
|
|
6a2d33a4b3 | ||
|
|
b54a9c7412 | ||
|
|
2c62da7834 | ||
|
|
0145a0adb2 | ||
|
|
473282d64c | ||
|
|
c2c068e9db | ||
|
|
13d1f662d1 | ||
|
|
bdd57f58a0 | ||
|
|
b1bcabd6e6 | ||
|
|
e128c3fa82 | ||
|
|
efac9fe750 | ||
|
|
2b8545a8fa | ||
|
|
b275b5d728 | ||
|
|
1f46cfafa7 | ||
|
|
b1dcdf3418 | ||
|
|
4bfd65deb8 | ||
|
|
213cf322f5 | ||
|
|
61102812a0 | ||
|
|
580cfce7fb | ||
|
|
f1383d7a45 | ||
|
|
e4ce5bfe39 | ||
|
|
6ed64f25a2 | ||
|
|
114a331106 | ||
|
|
9aa24a216a | ||
|
|
13702451ab | ||
|
|
f0242f6f97 | ||
|
|
9775820398 | ||
|
|
7daf26bcd0 | ||
|
|
231f705098 | ||
|
|
893ab8fd8d | ||
|
|
5afdd2c533 | ||
|
|
e4f5c0066a | ||
|
|
a167f852dd | ||
|
|
b428bce126 | ||
|
|
e62d0e19a5 | ||
|
|
9b8bf9068f | ||
|
|
6e05edc350 | ||
|
|
dd8eaf2893 | ||
|
|
ecc750f445 | ||
|
|
cf5268a7d4 | ||
|
|
e24efad5ff | ||
|
|
58a34cdb7d | ||
|
|
b1c6b330e9 | ||
|
|
7c3e265033 | ||
|
|
13695a716c | ||
|
|
c9e43804d6 | ||
|
|
1535e3553e | ||
|
|
0ac05bbbeb | ||
|
|
d3f979d640 | ||
|
|
0e9ded45dc | ||
|
|
a2ca886510 | ||
|
|
25a1af3775 | ||
|
|
0d8e0a2970 | ||
|
|
c0fff6c8a8 | ||
|
|
e6b4428614 | ||
|
|
4e6e69833d | ||
|
|
1d9faff4c6 | ||
|
|
7025cbe760 | ||
|
|
e922b7c2ca | ||
|
|
96518d2d0f | ||
|
|
1241b20ba1 | ||
|
|
f03f5c1628 | ||
|
|
cb550a3662 | ||
|
|
d1f90d61c5 | ||
|
|
16e65fe189 | ||
|
|
62a0faa729 | ||
|
|
fbb3ab2292 | ||
|
|
b3b75e5ef8 | ||
|
|
8b36210db5 | ||
|
|
a74f3b3e46 | ||
|
|
e214a52de5 | ||
|
|
0624a9395c | ||
|
|
b2e7f7ffa6 | ||
|
|
b312b39a10 | ||
|
|
80e2d112b2 | ||
|
|
519b169df0 | ||
|
|
5c2cfbc334 | ||
|
|
7d91e4959a | ||
|
|
0c5aa2a7eb | ||
|
|
0d7a264981 | ||
|
|
52ff2d2e74 | ||
|
|
8a7ceaa845 | ||
|
|
fd9ce2d1cf | ||
|
|
2c2b37bec3 | ||
|
|
c777f2d388 | ||
|
|
eca10056a8 | ||
|
|
c7bab11ebe | ||
|
|
6995fc28b6 | ||
|
|
102f14d0e9 | ||
|
|
aac168402b | ||
|
|
152d49513f | ||
|
|
d5564c808d | ||
|
|
82410e07b2 | ||
|
|
94d90b30b5 | ||
|
|
06997f0da2 | ||
|
|
55aafa416d | ||
|
|
6226a46988 | ||
|
|
8d216f0c43 | ||
|
|
7f5bb25542 | ||
|
|
5fcdf6adc2 | ||
|
|
6a565a849b | ||
|
|
66fc67e34c | ||
|
|
7cf140940e | ||
|
|
60e90bab23 | ||
|
|
4f58258186 | ||
|
|
03e2904ebf | ||
|
|
bea90b256e | ||
|
|
8eb37ba956 | ||
|
|
8d20c1fb59 | ||
|
|
9a1abf0c49 | ||
|
|
5aae7a4000 | ||
|
|
d9509a1750 | ||
|
|
978c448fb8 | ||
|
|
997c0fca10 | ||
|
|
3ae6e68492 | ||
|
|
851c93a1f7 | ||
|
|
a5f7355e16 | ||
|
|
18ffdbaa65 | ||
|
|
c089222bc6 | ||
|
|
37f9535d27 | ||
|
|
4650368bc2 | ||
|
|
88b14ed455 | ||
|
|
54a2a47bc0 | ||
|
|
ffcc970140 | ||
|
|
7a811e39e0 | ||
|
|
11f158cbb3 | ||
|
|
5d5550c48b | ||
|
|
fd570d906a | ||
|
|
deab0662f9 | ||
|
|
7238f50a6b | ||
|
|
499fcd1f3f | ||
|
|
dc0ddc82d6 | ||
|
|
436fc34cb9 | ||
|
|
f072cd96e3 | ||
|
|
3441a001c7 | ||
|
|
bc747844ea | ||
|
|
a887f58bcc | ||
|
|
f42afef6e0 | ||
|
|
18eaf22cb9 | ||
|
|
d94f427e12 | ||
|
|
b94eb42db6 | ||
|
|
d2297f5516 | ||
|
|
ef6f58b828 | ||
|
|
eb0bf16cce | ||
|
|
5a646384f6 | ||
|
|
8917f1a91a | ||
|
|
bc8d65e7d3 | ||
|
|
3180c5d554 | ||
|
|
03369b8a6c | ||
|
|
3b2ddb1a18 | ||
|
|
1e20b12241 | ||
|
|
81c41df15c | ||
|
|
8b736189e0 | ||
|
|
188d2367df | ||
|
|
5aeac500da | ||
|
|
5730ab28ab | ||
|
|
1c56b03a28 |
67
.gitmodules
vendored
67
.gitmodules
vendored
@@ -25,9 +25,6 @@
|
||||
[submodule "vendor/grammars/Sublime-REBOL"]
|
||||
path = vendor/grammars/Sublime-REBOL
|
||||
url = https://github.com/Oldes/Sublime-REBOL
|
||||
[submodule "vendor/grammars/Sublime-Inform"]
|
||||
path = vendor/grammars/Sublime-Inform
|
||||
url = https://github.com/PogiNate/Sublime-Inform
|
||||
[submodule "vendor/grammars/autoitv3-tmbundle"]
|
||||
path = vendor/grammars/autoitv3-tmbundle
|
||||
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
|
||||
@@ -85,6 +82,9 @@
|
||||
[submodule "vendor/grammars/language-shellscript"]
|
||||
path = vendor/grammars/language-shellscript
|
||||
url = https://github.com/atom/language-shellscript
|
||||
[submodule "vendor/grammars/language-supercollider"]
|
||||
path = vendor/grammars/language-supercollider
|
||||
url = https://github.com/supercollider/language-supercollider
|
||||
[submodule "vendor/grammars/language-yaml"]
|
||||
path = vendor/grammars/language-yaml
|
||||
url = https://github.com/atom/language-yaml
|
||||
@@ -169,9 +169,6 @@
|
||||
[submodule "vendor/grammars/sublime-idris"]
|
||||
path = vendor/grammars/sublime-idris
|
||||
url = https://github.com/laughedelic/sublime-idris
|
||||
[submodule "vendor/grammars/sublime-better-typescript"]
|
||||
path = vendor/grammars/sublime-better-typescript
|
||||
url = https://github.com/lavrton/sublime-better-typescript
|
||||
[submodule "vendor/grammars/moonscript-tmbundle"]
|
||||
path = vendor/grammars/moonscript-tmbundle
|
||||
url = https://github.com/leafo/moonscript-tmbundle
|
||||
@@ -256,9 +253,6 @@
|
||||
[submodule "vendor/grammars/SublimeXtend"]
|
||||
path = vendor/grammars/SublimeXtend
|
||||
url = https://github.com/staltz/SublimeXtend
|
||||
[submodule "vendor/grammars/Stata.tmbundle"]
|
||||
path = vendor/grammars/Stata.tmbundle
|
||||
url = https://github.com/statatmbundle/Stata.tmbundle
|
||||
[submodule "vendor/grammars/Vala-TMBundle"]
|
||||
path = vendor/grammars/Vala-TMBundle
|
||||
url = https://github.com/technosophos/Vala-TMBundle
|
||||
@@ -397,9 +391,6 @@
|
||||
[submodule "vendor/grammars/processing.tmbundle"]
|
||||
path = vendor/grammars/processing.tmbundle
|
||||
url = https://github.com/textmate/processing.tmbundle
|
||||
[submodule "vendor/grammars/prolog.tmbundle"]
|
||||
path = vendor/grammars/prolog.tmbundle
|
||||
url = https://github.com/textmate/prolog.tmbundle
|
||||
[submodule "vendor/grammars/python-django.tmbundle"]
|
||||
path = vendor/grammars/python-django.tmbundle
|
||||
url = https://github.com/textmate/python-django.tmbundle
|
||||
@@ -655,7 +646,7 @@
|
||||
url = https://github.com/SRI-CSL/SMT.tmbundle.git
|
||||
[submodule "vendor/grammars/language-crystal"]
|
||||
path = vendor/grammars/language-crystal
|
||||
url = https://github.com/k2b6s9j/language-crystal
|
||||
url = https://github.com/atom-crystal/language-crystal
|
||||
[submodule "vendor/grammars/language-xbase"]
|
||||
path = vendor/grammars/language-xbase
|
||||
url = https://github.com/hernad/atom-language-harbour
|
||||
@@ -664,7 +655,55 @@
|
||||
url = https://github.com/rpavlick/language-ncl.git
|
||||
[submodule "vendor/grammars/atom-language-purescript"]
|
||||
path = vendor/grammars/atom-language-purescript
|
||||
url = https://github.com/freebroccolo/atom-language-purescript
|
||||
url = https://github.com/purescript-contrib/atom-language-purescript
|
||||
[submodule "vendor/grammars/vue-syntax-highlight"]
|
||||
path = vendor/grammars/vue-syntax-highlight
|
||||
url = https://github.com/vuejs/vue-syntax-highlight
|
||||
[submodule "vendor/grammars/st2-zonefile"]
|
||||
path = vendor/grammars/st2-zonefile
|
||||
url = https://github.com/sixty4k/st2-zonefile
|
||||
[submodule "vendor/grammars/sublimeprolog"]
|
||||
path = vendor/grammars/sublimeprolog
|
||||
url = https://github.com/alnkpa/sublimeprolog
|
||||
[submodule "vendor/grammars/sublime-aspectj"]
|
||||
path = vendor/grammars/sublime-aspectj
|
||||
url = https://github.com/pchaigno/sublime-aspectj
|
||||
[submodule "vendor/grammars/sublime-typescript"]
|
||||
path = vendor/grammars/sublime-typescript
|
||||
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
|
||||
[submodule "vendor/grammars/sublime-pony"]
|
||||
path = vendor/grammars/sublime-pony
|
||||
url = https://github.com/CausalityLtd/sublime-pony
|
||||
[submodule "vendor/grammars/X10"]
|
||||
path = vendor/grammars/X10
|
||||
url = git@github.com:x10-lang/x10-highlighting.git
|
||||
[submodule "vendor/grammars/language-babel"]
|
||||
path = vendor/grammars/language-babel
|
||||
url = https://github.com/gandm/language-babel
|
||||
[submodule "vendor/grammars/UrWeb-Language-Definition"]
|
||||
path = vendor/grammars/UrWeb-Language-Definition
|
||||
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
|
||||
[submodule "vendor/grammars/Stata.tmbundle"]
|
||||
path = vendor/grammars/Stata.tmbundle
|
||||
url = https://github.com/pschumm/Stata.tmbundle
|
||||
[submodule "vendor/grammars/FreeMarker.tmbundle"]
|
||||
path = vendor/grammars/FreeMarker.tmbundle
|
||||
url = https://github.com/freemarker/FreeMarker.tmbundle
|
||||
[submodule "vendor/grammars/MagicPython"]
|
||||
path = vendor/grammars/MagicPython
|
||||
url = git@github.com:MagicStack/MagicPython.git
|
||||
[submodule "vendor/grammars/language-click"]
|
||||
path = vendor/grammars/language-click
|
||||
url = https://github.com/stenverbois/language-click.git
|
||||
[submodule "vendor/grammars/language-maxscript"]
|
||||
path = vendor/grammars/language-maxscript
|
||||
url = https://github.com/Alhadis/language-maxscript
|
||||
[submodule "vendor/grammars/language-renpy"]
|
||||
path = vendor/grammars/language-renpy
|
||||
url = https://github.com/williamd1k0/language-renpy.git
|
||||
[submodule "vendor/grammars/language-inform7"]
|
||||
path = vendor/grammars/language-inform7
|
||||
url = https://github.com/erkyrath/language-inform7
|
||||
[submodule "vendor/grammars/atom-language-stan"]
|
||||
path = vendor/grammars/atom-language-stan
|
||||
url = git@github.com:jrnold/atom-language-stan.git
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
language: ruby
|
||||
sudo: false
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libicu-dev
|
||||
- libicu48
|
||||
before_install: script/travis/before_install
|
||||
rvm:
|
||||
- 1.9.3
|
||||
- 2.0.0
|
||||
- 2.1
|
||||
- 2.2
|
||||
|
||||
@@ -12,7 +12,7 @@ We try only to add new extensions once they have some usage on GitHub. In most c
|
||||
|
||||
To add support for a new extension:
|
||||
|
||||
0. Add your extension to the language entry in [`languages.yml`][languages].
|
||||
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
|
||||
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
|
||||
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
|
||||
|
||||
329
HACKING.rst.txt
Normal file
329
HACKING.rst.txt
Normal file
@@ -0,0 +1,329 @@
|
||||
Contributing to SciPy
|
||||
=====================
|
||||
|
||||
This document aims to give an overview of how to contribute to SciPy. It
|
||||
tries to answer commonly asked questions, and provide some insight into how the
|
||||
community process works in practice. Readers who are familiar with the SciPy
|
||||
community and are experienced Python coders may want to jump straight to the
|
||||
`git workflow`_ documentation.
|
||||
|
||||
|
||||
Contributing new code
|
||||
---------------------
|
||||
|
||||
If you have been working with the scientific Python toolstack for a while, you
|
||||
probably have some code lying around of which you think "this could be useful
|
||||
for others too". Perhaps it's a good idea then to contribute it to SciPy or
|
||||
another open source project. The first question to ask is then, where does
|
||||
this code belong? That question is hard to answer here, so we start with a
|
||||
more specific one: *what code is suitable for putting into SciPy?*
|
||||
Almost all of the new code added to scipy has in common that it's potentially
|
||||
useful in multiple scientific domains and it fits in the scope of existing
|
||||
scipy submodules. In principle new submodules can be added too, but this is
|
||||
far less common. For code that is specific to a single application, there may
|
||||
be an existing project that can use the code. Some scikits (`scikit-learn`_,
|
||||
`scikits-image`_, `statsmodels`_, etc.) are good examples here; they have a
|
||||
narrower focus and because of that more domain-specific code than SciPy.
|
||||
|
||||
Now if you have code that you would like to see included in SciPy, how do you
|
||||
go about it? After checking that your code can be distributed in SciPy under a
|
||||
compatible license (see FAQ for details), the first step is to discuss on the
|
||||
scipy-dev mailing list. All new features, as well as changes to existing code,
|
||||
are discussed and decided on there. You can, and probably should, already
|
||||
start this discussion before your code is finished.
|
||||
|
||||
Assuming the outcome of the discussion on the mailing list is positive and you
|
||||
have a function or piece of code that does what you need it to do, what next?
|
||||
Before code is added to SciPy, it at least has to have good documentation, unit
|
||||
tests and correct code style.
|
||||
|
||||
1. Unit tests
|
||||
In principle you should aim to create unit tests that exercise all the code
|
||||
that you are adding. This gives some degree of confidence that your code
|
||||
runs correctly, also on Python versions and hardware or OSes that you don't
|
||||
have available yourself. An extensive description of how to write unit
|
||||
tests is given in the NumPy `testing guidelines`_.
|
||||
|
||||
2. Documentation
|
||||
Clear and complete documentation is essential in order for users to be able
|
||||
to find and understand the code. Documentation for individual functions
|
||||
and classes -- which includes at least a basic description, type and
|
||||
meaning of all parameters and returns values, and usage examples in
|
||||
`doctest`_ format -- is put in docstrings. Those docstrings can be read
|
||||
within the interpreter, and are compiled into a reference guide in html and
|
||||
pdf format. Higher-level documentation for key (areas of) functionality is
|
||||
provided in tutorial format and/or in module docstrings. A guide on how to
|
||||
write documentation is given in `how to document`_.
|
||||
|
||||
3. Code style
|
||||
Uniformity of style in which code is written is important to others trying
|
||||
to understand the code. SciPy follows the standard Python guidelines for
|
||||
code style, `PEP8`_. In order to check that your code conforms to PEP8,
|
||||
you can use the `pep8 package`_ style checker. Most IDEs and text editors
|
||||
have settings that can help you follow PEP8, for example by translating
|
||||
tabs by four spaces. Using `pyflakes`_ to check your code is also a good
|
||||
idea.
|
||||
|
||||
At the end of this document a checklist is given that may help to check if your
|
||||
code fulfills all requirements for inclusion in SciPy.
|
||||
|
||||
Another question you may have is: *where exactly do I put my code*? To answer
|
||||
this, it is useful to understand how the SciPy public API (application
|
||||
programming interface) is defined. For most modules the API is two levels
|
||||
deep, which means your new function should appear as
|
||||
``scipy.submodule.my_new_func``. ``my_new_func`` can be put in an existing or
|
||||
new file under ``/scipy/<submodule>/``, its name is added to the ``__all__``
|
||||
dict in that file (which lists all public functions in the file), and those
|
||||
public functions are then imported in ``/scipy/<submodule>/__init__.py``. Any
|
||||
private functions/classes should have a leading underscore (``_``) in their
|
||||
name. A more detailed description of what the public API of SciPy is, is given
|
||||
in `SciPy API`_.
|
||||
|
||||
Once you think your code is ready for inclusion in SciPy, you can send a pull
|
||||
request (PR) on Github. We won't go into the details of how to work with git
|
||||
here, this is described well in the `git workflow`_ section of the NumPy
|
||||
documentation and in the Github help pages. When you send the PR for a new
|
||||
feature, be sure to also mention this on the scipy-dev mailing list. This can
|
||||
prompt interested people to help review your PR. Assuming that you already got
|
||||
positive feedback before on the general idea of your code/feature, the purpose
|
||||
of the code review is to ensure that the code is correct, efficient and meets
|
||||
the requirements outlined above. In many cases the code review happens
|
||||
relatively quickly, but it's possible that it stalls. If you have addressed
|
||||
all feedback already given, it's perfectly fine to ask on the mailing list
|
||||
again for review (after a reasonable amount of time, say a couple of weeks, has
|
||||
passed). Once the review is completed, the PR is merged into the "master"
|
||||
branch of SciPy.
|
||||
|
||||
The above describes the requirements and process for adding code to SciPy. It
|
||||
doesn't yet answer the question though how decisions are made exactly. The
|
||||
basic answer is: decisions are made by consensus, by everyone who chooses to
|
||||
participate in the discussion on the mailing list. This includes developers,
|
||||
other users and yourself. Aiming for consensus in the discussion is important
|
||||
-- SciPy is a project by and for the scientific Python community. In those
|
||||
rare cases that agreement cannot be reached, the `maintainers`_ of the module
|
||||
in question can decide the issue.
|
||||
|
||||
|
||||
Contributing by helping maintain existing code
|
||||
----------------------------------------------
|
||||
|
||||
The previous section talked specifically about adding new functionality to
|
||||
SciPy. A large part of that discussion also applies to maintenance of existing
|
||||
code. Maintenance means fixing bugs, improving code quality or style,
|
||||
documenting existing functionality better, adding missing unit tests, keeping
|
||||
build scripts up-to-date, etc. The SciPy `Trac`_ bug tracker contains all
|
||||
reported bugs, build/documentation issues, etc. Fixing issues described in
|
||||
Trac tickets helps improve the overall quality of SciPy, and is also a good way
|
||||
of getting familiar with the project. You may also want to fix a bug because
|
||||
you ran into it and need the function in question to work correctly.
|
||||
|
||||
The discussion on code style and unit testing above applies equally to bug
|
||||
fixes. It is usually best to start by writing a unit test that shows the
|
||||
problem, i.e. it should pass but doesn't. Once you have that, you can fix the
|
||||
code so that the test does pass. That should be enough to send a PR for this
|
||||
issue. Unlike when adding new code, discussing this on the mailing list may
|
||||
not be necessary - if the old behavior of the code is clearly incorrect, no one
|
||||
will object to having it fixed. It may be necessary to add some warning or
|
||||
deprecation message for the changed behavior. This should be part of the
|
||||
review process.
|
||||
|
||||
|
||||
Other ways to contribute
|
||||
------------------------
|
||||
|
||||
There are many ways to contribute other than contributing code. Participating
|
||||
in discussions on the scipy-user and scipy-dev *mailing lists* is a contribution
|
||||
in itself. The `scipy.org`_ *website* contains a lot of information on the
|
||||
SciPy community and can always use a new pair of hands. A redesign of this
|
||||
website is ongoing, see `scipy.github.com`_. The redesigned website is a
|
||||
static site based on Sphinx, the sources for it are
|
||||
also on Github at `scipy.org-new`_.
|
||||
|
||||
The SciPy *documentation* is constantly being improved by many developers and
|
||||
users. You can contribute by sending a PR on Github that improves the
|
||||
documentation, but there's also a `documentation wiki`_ that is very convenient
|
||||
for making edits to docstrings (and doesn't require git knowledge). Anyone can
|
||||
register a username on that wiki, ask on the scipy-dev mailing list for edit
|
||||
rights and make edits. The documentation there is updated every day with the
|
||||
latest changes in the SciPy master branch, and wiki edits are regularly
|
||||
reviewed and merged into master. Another advantage of the documentation wiki
|
||||
is that you can immediately see how the reStructuredText (reST) of docstrings
|
||||
and other docs is rendered as html, so you can easily catch formatting errors.
|
||||
|
||||
Code that doesn't belong in SciPy itself or in another package but helps users
|
||||
accomplish a certain task is valuable. `SciPy Central`_ is the place to share
|
||||
this type of code (snippets, examples, plotting code, etc.).
|
||||
|
||||
|
||||
Useful links, FAQ, checklist
|
||||
----------------------------
|
||||
|
||||
Checklist before submitting a PR
|
||||
````````````````````````````````
|
||||
|
||||
- Are there unit tests with good code coverage?
|
||||
- Do all public function have docstrings including examples?
|
||||
- Is the code style correct (PEP8, pyflakes)
|
||||
- Is the new functionality tagged with ``.. versionadded:: X.Y.Z`` (with
|
||||
X.Y.Z the version number of the next release - can be found in setup.py)?
|
||||
- Is the new functionality mentioned in the release notes of the next
|
||||
release?
|
||||
- Is the new functionality added to the reference guide?
|
||||
- In case of larger additions, is there a tutorial or more extensive
|
||||
module-level description?
|
||||
- In case compiled code is added, is it integrated correctly via setup.py
|
||||
(and preferably also Bento/Numscons configuration files)?
|
||||
- If you are a first-time contributor, did you add yourself to THANKS.txt?
|
||||
Please note that this is perfectly normal and desirable - the aim is to
|
||||
give every single contributor credit, and if you don't add yourself it's
|
||||
simply extra work for the reviewer (or worse, the reviewer may forget).
|
||||
- Did you check that the code can be distributed under a BSD license?
|
||||
|
||||
|
||||
Useful SciPy documents
|
||||
``````````````````````
|
||||
|
||||
- The `how to document`_ guidelines
|
||||
- NumPy/SciPy `testing guidelines`_
|
||||
- `SciPy API`_
|
||||
- SciPy `maintainers`_
|
||||
- NumPy/SciPy `git workflow`_
|
||||
|
||||
|
||||
FAQ
|
||||
```
|
||||
|
||||
*I based my code on existing Matlab/R/... code I found online, is this OK?*
|
||||
|
||||
It depends. SciPy is distributed under a BSD license, so if the code that you
|
||||
based your code on is also BSD licensed or has a BSD-compatible license (MIT,
|
||||
Apache, ...) then it's OK. Code which is GPL-licensed, has no clear license,
|
||||
requires citation or is free for academic use only can't be included in SciPy.
|
||||
Therefore if you copied existing code with such a license or made a direct
|
||||
translation to Python of it, your code can't be included. See also `license
|
||||
compatibility`_.
|
||||
|
||||
|
||||
*How do I set up SciPy so I can edit files, run the tests and make commits?*
|
||||
|
||||
The simplest method is setting up an in-place build. To create your local git
|
||||
repo and do the in-place build::
|
||||
|
||||
$ git clone https://github.com/scipy/scipy.git scipy
|
||||
$ cd scipy
|
||||
$ python setup.py build_ext -i
|
||||
|
||||
Then you need to either set up a symlink in your site-packages or add this
|
||||
directory to your PYTHONPATH environment variable, so Python can find it. Some
|
||||
IDEs (Spyder for example) have utilities to manage PYTHONPATH. On Linux and OS
|
||||
X, you can for example edit your .bash_login file to automatically add this dir
|
||||
on startup of your terminal. Add the line::
|
||||
|
||||
export PYTHONPATH="$HOME/scipy:${PYTHONPATH}"
|
||||
|
||||
Alternatively, to set up the symlink, use (prefix only necessary if you want to
|
||||
use your local instead of global site-packages dir)::
|
||||
|
||||
$ python setupegg.py develop --prefix=${HOME}
|
||||
|
||||
To test that everything works, start the interpreter (not inside the scipy/
|
||||
source dir) and run the tests::
|
||||
|
||||
$ python
|
||||
>>> import scipy as sp
|
||||
>>> sp.test()
|
||||
|
||||
Now editing a Python source file in SciPy allows you to immediately test and
|
||||
use your changes, by simply restarting the interpreter.
|
||||
|
||||
Note that while the above procedure is the most straightforward way to get
|
||||
started, you may want to look into using Bento or numscons for faster and more
|
||||
flexible building, or virtualenv to maintain development environments for
|
||||
multiple Python versions.
|
||||
|
||||
|
||||
*How do I set up a development version of SciPy in parallel to a released
|
||||
version that I use to do my job/research?*
|
||||
|
||||
One simple way to achieve this is to install the released version in
|
||||
site-packages, by using a binary installer or pip for example, and set up the
|
||||
development version with an in-place build in a virtualenv. First install
|
||||
`virtualenv`_ and `virtualenvwrapper`_, then create your virtualenv (named
|
||||
scipy-dev here) with::
|
||||
|
||||
$ mkvirtualenv scipy-dev
|
||||
|
||||
Now, whenever you want to switch to the virtual environment, you can use the
|
||||
command ``workon scipy-dev``, while the command ``deactivate`` exits from the
|
||||
virtual environment and brings back your previous shell. With scipy-dev
|
||||
activated, follow the in-place build with the symlink install above to actually
|
||||
install your development version of SciPy.
|
||||
|
||||
|
||||
*Can I use a programming language other than Python to speed up my code?*
|
||||
|
||||
Yes. The languages used in SciPy are Python, Cython, C, C++ and Fortran. All
|
||||
of these have their pros and cons. If Python really doesn't offer enough
|
||||
performance, one of those languages can be used. Important concerns when
|
||||
using compiled languages are maintainability and portability. For
|
||||
maintainability, Cython is clearly preferred over C/C++/Fortran. Cython and C
|
||||
are more portable than C++/Fortran. A lot of the existing C and Fortran code
|
||||
in SciPy is older, battle-tested code that was only wrapped in (but not
|
||||
specifically written for) Python/SciPy. Therefore the basic advice is: use
|
||||
Cython. If there's specific reasons why C/C++/Fortran should be preferred,
|
||||
please discuss those reasons first.
|
||||
|
||||
|
||||
*There's overlap between Trac and Github, which do I use for what?*
|
||||
|
||||
Trac_ is the bug tracker, Github_ the code repository. Before the SciPy code
|
||||
repository moved to Github, the preferred way to contribute code was to create
|
||||
a patch and attach it to a Trac ticket. The overhead of this approach is much
|
||||
larger than sending a PR on Github, so please don't do this anymore. Use Trac
|
||||
for bug reports, Github for patches.
|
||||
|
||||
|
||||
.. _scikit-learn: http://scikit-learn.org
|
||||
|
||||
.. _scikits-image: http://scikits-image.org/
|
||||
|
||||
.. _statsmodels: http://statsmodels.sourceforge.net/
|
||||
|
||||
.. _testing guidelines: https://github.com/numpy/numpy/blob/master/doc/TESTS.rst.txt
|
||||
|
||||
.. _how to document: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
|
||||
|
||||
.. _PEP8: http://www.python.org/dev/peps/pep-0008/
|
||||
|
||||
.. _pep8 package: http://pypi.python.org/pypi/pep8
|
||||
|
||||
.. _pyflakes: http://pypi.python.org/pypi/pyflakes
|
||||
|
||||
.. _SciPy API: http://docs.scipy.org/doc/scipy/reference/api.html
|
||||
|
||||
.. _git workflow: http://docs.scipy.org/doc/numpy/dev/gitwash/index.html
|
||||
|
||||
.. _maintainers: https://github.com/scipy/scipy/blob/master/doc/MAINTAINERS.rst.txt
|
||||
|
||||
.. _Trac: http://projects.scipy.org/scipy/timeline
|
||||
|
||||
.. _Github: https://github.com/scipy/scipy
|
||||
|
||||
.. _scipy.org: http://scipy.org/
|
||||
|
||||
.. _scipy.github.com: http://scipy.github.com/
|
||||
|
||||
.. _scipy.org-new: https://github.com/scipy/scipy.org-new
|
||||
|
||||
.. _documentation wiki: http://docs.scipy.org/scipy/Front%20Page/
|
||||
|
||||
.. _SciPy Central: http://scipy-central.org/
|
||||
|
||||
.. _license compatibility: http://www.scipy.org/License_Compatibility
|
||||
|
||||
.. _doctest: http://www.doughellmann.com/PyMOTW/doctest/
|
||||
|
||||
.. _virtualenv: http://www.virtualenv.org/
|
||||
|
||||
.. _virtualenvwrapper: http://www.doughellmann.com/projects/virtualenvwrapper/
|
||||
|
||||
2
LICENSE
2
LICENSE
@@ -1,4 +1,4 @@
|
||||
Copyright (c) 2011-2015 GitHub, Inc.
|
||||
Copyright (c) 2011-2016 GitHub, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
|
||||
10
README.md
10
README.md
@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md
|
||||
|
||||

|
||||
|
||||
The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
|
||||
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:
|
||||
|
||||
0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
|
||||
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
|
||||
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
|
||||
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
|
||||
|
||||
## Overrides
|
||||
@@ -33,9 +33,9 @@ $ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
```
|
||||
|
||||
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
|
||||
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.
|
||||
|
||||
Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
|
||||
Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
|
||||
138
bin/git-linguist
Executable file
138
bin/git-linguist
Executable file
@@ -0,0 +1,138 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'optparse'
|
||||
require 'json'
|
||||
require 'tmpdir'
|
||||
require 'zlib'
|
||||
|
||||
class GitLinguist
|
||||
def initialize(path, commit_oid, incremental = true)
|
||||
@repo_path = path
|
||||
@commit_oid = commit_oid
|
||||
@incremental = incremental
|
||||
end
|
||||
|
||||
def linguist
|
||||
if @commit_oid.nil?
|
||||
raise "git-linguist must be called with a specific commit OID to perform language computation"
|
||||
end
|
||||
repo = Linguist::Repository.new(rugged, @commit_oid)
|
||||
|
||||
if @incremental && stats = load_language_stats
|
||||
old_commit_oid, old_stats = stats
|
||||
|
||||
# A cache with NULL oid means that we want to froze
|
||||
# these language stats in place and stop computing
|
||||
# them (for performance reasons)
|
||||
return old_stats if old_commit_oid == NULL_OID
|
||||
repo.load_existing_stats(old_commit_oid, old_stats)
|
||||
end
|
||||
|
||||
result = yield repo
|
||||
|
||||
save_language_stats(@commit_oid, repo.cache)
|
||||
result
|
||||
end
|
||||
|
||||
def load_language_stats
|
||||
version, oid, stats = load_cache
|
||||
if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
|
||||
[oid, stats]
|
||||
end
|
||||
end
|
||||
|
||||
def save_language_stats(oid, stats)
|
||||
cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
|
||||
write_cache(cache)
|
||||
end
|
||||
|
||||
def clear_language_stats
|
||||
File.unlink(cache_file)
|
||||
rescue Errno::ENOENT
|
||||
end
|
||||
|
||||
def disable_language_stats
|
||||
save_language_stats(NULL_OID, {})
|
||||
end
|
||||
|
||||
protected
|
||||
NULL_OID = ("0" * 40).freeze
|
||||
|
||||
LANGUAGE_STATS_CACHE = 'language-stats.cache'
|
||||
LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"
|
||||
|
||||
def rugged
|
||||
@rugged ||= Rugged::Repository.bare(@repo_path)
|
||||
end
|
||||
|
||||
def cache_file
|
||||
File.join(@repo_path, LANGUAGE_STATS_CACHE)
|
||||
end
|
||||
|
||||
def write_cache(object)
|
||||
return unless File.directory? @repo_path
|
||||
|
||||
begin
|
||||
tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
|
||||
File.open(tmp_path, "wb") do |f|
|
||||
marshal = Marshal.dump(object)
|
||||
f.write(Zlib::Deflate.deflate(marshal))
|
||||
end
|
||||
|
||||
File.rename(tmp_path, cache_file)
|
||||
rescue => e
|
||||
(File.unlink(tmp_path) rescue nil)
|
||||
raise e
|
||||
end
|
||||
end
|
||||
|
||||
def load_cache
|
||||
marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
|
||||
Marshal.load(marshal)
|
||||
rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
def git_linguist(args)
|
||||
incremental = true
|
||||
commit = nil
|
||||
|
||||
parser = OptionParser.new do |opts|
|
||||
opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
|
||||
opts.on("-f", "--force", "Force a full rescan") { incremental = false }
|
||||
opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
end
|
||||
|
||||
parser.parse!(args)
|
||||
|
||||
git_dir = `git rev-parse --git-dir`.strip
|
||||
raise "git-linguist must be ran in a Git repository" unless $?.success?
|
||||
wrapper = GitLinguist.new(git_dir, commit, incremental)
|
||||
|
||||
case args.pop
|
||||
when "stats"
|
||||
wrapper.linguist do |linguist|
|
||||
puts JSON.dump(linguist.languages)
|
||||
end
|
||||
when "breakdown"
|
||||
wrapper.linguist do |linguist|
|
||||
puts JSON.dump(linguist.breakdown_by_file)
|
||||
end
|
||||
when "dump-cache"
|
||||
puts JSON.dump(wrapper.load_language_stats)
|
||||
when "clear"
|
||||
wrapper.clear_language_stats
|
||||
when "disable"
|
||||
wrapper.disable_language_stats
|
||||
else
|
||||
$stderr.print(parser.help)
|
||||
exit 1
|
||||
end
|
||||
end
|
||||
|
||||
git_linguist(ARGV)
|
||||
@@ -10,8 +10,8 @@ Gem::Specification.new do |s|
|
||||
s.homepage = "https://github.com/github/linguist"
|
||||
s.license = "MIT"
|
||||
|
||||
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
|
||||
s.executables << 'linguist'
|
||||
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb'] + ['LICENSE']
|
||||
s.executables = ['linguist', 'git-linguist']
|
||||
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.3'
|
||||
s.add_dependency 'escape_utils', '~> 1.1.0'
|
||||
@@ -24,4 +24,6 @@ Gem::Specification.new do |s|
|
||||
s.add_development_dependency 'rake'
|
||||
s.add_development_dependency 'yajl-ruby'
|
||||
s.add_development_dependency 'color-proximity', '~> 0.2.1'
|
||||
s.add_development_dependency 'licensee', '6.0.0b1'
|
||||
|
||||
end
|
||||
|
||||
52
grammars.yml
52
grammars.yml
@@ -42,6 +42,8 @@ vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
vendor/grammars/Elm.tmLanguage:
|
||||
- source.elm
|
||||
vendor/grammars/FreeMarker.tmbundle:
|
||||
- text.html.ftl
|
||||
vendor/grammars/G-Code/:
|
||||
- source.LS
|
||||
- source.MCPOST
|
||||
@@ -67,6 +69,9 @@ vendor/grammars/Lean.tmbundle:
|
||||
- source.lean
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/MagicPython:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
vendor/grammars/Modelica/:
|
||||
- source.modelica
|
||||
vendor/grammars/NSIS:
|
||||
@@ -92,7 +97,7 @@ vendor/grammars/Scalate.tmbundle:
|
||||
- text.html.ssp
|
||||
vendor/grammars/Slash.tmbundle:
|
||||
- text.html.slash
|
||||
vendor/grammars/Stata.tmbundle:
|
||||
vendor/grammars/Stata.tmbundle/:
|
||||
- source.mata
|
||||
- source.stata
|
||||
vendor/grammars/Stylus/:
|
||||
@@ -101,8 +106,6 @@ vendor/grammars/Sublime-Coq:
|
||||
- source.coq
|
||||
vendor/grammars/Sublime-HTTP:
|
||||
- source.httpspec
|
||||
vendor/grammars/Sublime-Inform:
|
||||
- source.Inform7
|
||||
vendor/grammars/Sublime-Lasso:
|
||||
- file.lasso
|
||||
vendor/grammars/Sublime-Logos:
|
||||
@@ -140,10 +143,14 @@ vendor/grammars/TXL/:
|
||||
- source.txl
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/UrWeb-Language-Definition:
|
||||
- source.ur
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
- source.vbnet
|
||||
vendor/grammars/Vala-TMBundle:
|
||||
- source.vala
|
||||
vendor/grammars/X10:
|
||||
- source.x10
|
||||
vendor/grammars/abap.tmbundle:
|
||||
- source.abap
|
||||
vendor/grammars/actionscript3-tmbundle:
|
||||
@@ -176,8 +183,13 @@ vendor/grammars/assembly.tmbundle:
|
||||
- source.x86asm
|
||||
vendor/grammars/atom-fsharp/:
|
||||
- source.fsharp
|
||||
- source.fsharp.fsi
|
||||
- source.fsharp.fsl
|
||||
- source.fsharp.fsx
|
||||
vendor/grammars/atom-language-purescript/:
|
||||
- source.purescript
|
||||
vendor/grammars/atom-language-stan/:
|
||||
- source.stan
|
||||
vendor/grammars/atom-salt:
|
||||
- source.python.salt
|
||||
- source.yaml.salt
|
||||
@@ -295,8 +307,8 @@ vendor/grammars/io.tmbundle:
|
||||
vendor/grammars/ioke-outdated:
|
||||
- source.ioke
|
||||
vendor/grammars/jade-tmbundle:
|
||||
- source.jade
|
||||
- source.pyjade
|
||||
- text.jade
|
||||
vendor/grammars/jasmin-sublime:
|
||||
- source.jasmin
|
||||
vendor/grammars/java.tmbundle:
|
||||
@@ -312,6 +324,11 @@ vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-babel/:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-click/:
|
||||
- source.click
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
vendor/grammars/language-coffee-script:
|
||||
@@ -328,22 +345,29 @@ vendor/grammars/language-gfm:
|
||||
- source.gfm
|
||||
vendor/grammars/language-hy:
|
||||
- source.hy
|
||||
vendor/grammars/language-inform7:
|
||||
- source.inform7
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
- source.js.regexp.replacement
|
||||
vendor/grammars/language-jsoniq/:
|
||||
- source.jq
|
||||
- source.xq
|
||||
vendor/grammars/language-maxscript:
|
||||
- source.maxscript
|
||||
vendor/grammars/language-ncl:
|
||||
- source.ncl
|
||||
vendor/grammars/language-python:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-renpy:
|
||||
- source.renpy
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-supercollider:
|
||||
- source.supercollider
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-yaml:
|
||||
@@ -431,8 +455,6 @@ vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
vendor/grammars/prolog.tmbundle:
|
||||
- source.prolog
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
@@ -470,6 +492,8 @@ vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/st2-zonefile:
|
||||
- text.zone_file
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
- source.cm
|
||||
- source.ml
|
||||
@@ -477,10 +501,10 @@ vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-aspectj/:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
- source.ts
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
@@ -504,6 +528,8 @@ vendor/grammars/sublime-nix:
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-pony:
|
||||
- source.pony
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
@@ -519,6 +545,9 @@ vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime-typescript/:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -529,6 +558,9 @@ vendor/grammars/sublime_cobol:
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimeprolog/:
|
||||
- source.prolog
|
||||
- source.prolog.eclipse
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
- source.cuda-c++
|
||||
vendor/grammars/swift.tmbundle:
|
||||
|
||||
@@ -13,8 +13,8 @@ class << Linguist
|
||||
def instrument(*args, &bk)
|
||||
if instrumenter
|
||||
instrumenter.instrument(*args, &bk)
|
||||
else
|
||||
yield if block_given?
|
||||
elsif block_given?
|
||||
yield
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
73
lib/linguist/blob.rb
Normal file
73
lib/linguist/blob.rb
Normal file
@@ -0,0 +1,73 @@
|
||||
require 'linguist/blob_helper'
|
||||
|
||||
module Linguist
|
||||
# A Blob is a wrapper around the content of a file to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new Blob.
|
||||
#
|
||||
# path - A path String (does not necessarily exists on the file system).
|
||||
# content - Content of the file.
|
||||
#
|
||||
# Returns a Blob.
|
||||
def initialize(path, content)
|
||||
@path = path
|
||||
@content = content
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Blob.new("/path/to/linguist/lib/linguist.rb", "").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@path)
|
||||
end
|
||||
|
||||
# Public: File contents.
|
||||
#
|
||||
# Returns a String.
|
||||
def data
|
||||
@content
|
||||
end
|
||||
|
||||
# Public: Get byte size
|
||||
#
|
||||
# Returns an Integer.
|
||||
def size
|
||||
@content.bytesize
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::Blob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -13,6 +13,7 @@
|
||||
- (^|/)[Dd]ocumentation/
|
||||
- (^|/)javadoc/
|
||||
- ^man/
|
||||
- ^[Ee]xamples/
|
||||
|
||||
## Documentation files ##
|
||||
|
||||
@@ -21,4 +22,9 @@
|
||||
- (^|/)COPYING(\.|$)
|
||||
- (^|/)INSTALL(\.|$)
|
||||
- (^|/)LICEN[CS]E(\.|$)
|
||||
- (^|/)[Ll]icen[cs]e(\.|$)
|
||||
- (^|/)README(\.|$)
|
||||
- (^|/)[Rr]eadme(\.|$)
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/blob'
|
||||
|
||||
module Linguist
|
||||
# A FileBlob is a wrapper around a File object to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class FileBlob
|
||||
class FileBlob < Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new FileBlob from a path
|
||||
@@ -18,20 +19,6 @@ module Linguist
|
||||
@path = base_path ? path.sub("#{base_path}/", '') : path
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb",
|
||||
# "/path/to/linguist").path
|
||||
# # => "lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: Read file permissions
|
||||
#
|
||||
# Returns a String like '100644'
|
||||
@@ -39,13 +26,6 @@ module Linguist
|
||||
File.stat(@fullpath).mode.to_s(8)
|
||||
end
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Read file contents.
|
||||
#
|
||||
# Returns a String.
|
||||
@@ -59,26 +39,5 @@ module Linguist
|
||||
def size
|
||||
File.size(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -71,7 +71,8 @@ module Linguist
|
||||
generated_jni_header? ||
|
||||
vcr_cassette? ||
|
||||
generated_module? ||
|
||||
generated_unity3d_meta?
|
||||
generated_unity3d_meta? ||
|
||||
generated_racc?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
@@ -241,22 +242,26 @@ module Linguist
|
||||
return lines[0].include?("Code generated by")
|
||||
end
|
||||
|
||||
PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob a C++, Java or Python source file generated by the
|
||||
# Protocol Buffer compiler?
|
||||
#
|
||||
# Returns true of false.
|
||||
def generated_protocol_buffer?
|
||||
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless PROTOBUF_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob generated by Apache Thrift compiler?
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_apache_thrift?
|
||||
return false unless ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
|
||||
@@ -355,5 +360,18 @@ module Linguist
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("fileFormatVersion: ")
|
||||
end
|
||||
|
||||
# Internal: Is this a Racc-generated file?
|
||||
#
|
||||
# A Racc-generated file contains:
|
||||
# # This file is automatically generated by Racc x.y.z
|
||||
# on the third line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_racc?
|
||||
return false unless extname == '.rb'
|
||||
return false unless lines.count > 2
|
||||
return lines[2].start_with?("# This file is automatically generated by Racc")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -33,7 +33,7 @@ module Linguist
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# disambiguate "Perl", "Prolog" do |data|
|
||||
# disambiguate ".pm" do |data|
|
||||
# if data.include?("use strict")
|
||||
# Language["Perl"]
|
||||
# elsif /^[^#]+:-/.match(data)
|
||||
@@ -56,6 +56,7 @@ module Linguist
|
||||
|
||||
# Internal: Check if this heuristic matches the candidate languages.
|
||||
def matches?(filename)
|
||||
filename = filename.downcase
|
||||
@extensions.any? { |ext| filename.end_with?(ext) }
|
||||
end
|
||||
|
||||
@@ -65,7 +66,17 @@ module Linguist
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
|
||||
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".bb" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
@@ -75,67 +86,9 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pl" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm" do |data|
|
||||
if /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -149,40 +102,50 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["ECLiPSe"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for", ".f" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fs" do |data|
|
||||
if /^(: |new-device)/.match(data)
|
||||
Language["Forth"]
|
||||
@@ -195,6 +158,47 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".m" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
@@ -213,41 +217,115 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
|
||||
Language["GAS"]
|
||||
else
|
||||
Language["MAXScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
disambiguate ".pl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm", ".t" do |data|
|
||||
if /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -267,95 +345,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
Language["TypeScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
|
||||
Language["GAS"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r", ".R" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -150,7 +150,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_name(name)
|
||||
name && @name_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Language by one of its aliases.
|
||||
@@ -164,7 +165,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
name && @alias_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Languages by filename.
|
||||
@@ -240,7 +242,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.[](name)
|
||||
name && @index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@index[name.downcase] || @index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: A List of popular languages
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
# Use "text" if a mode does not exist.
|
||||
# wrap - Boolean wrap to enable line wrapping (default: false)
|
||||
# extensions - An Array of associated extensions (the first one is
|
||||
# considered the primary extension)
|
||||
# considered the primary extension, the others should be
|
||||
# listed alphabetically)
|
||||
# interpreters - An Array of associated interpreters
|
||||
# searchable - Boolean flag to enable searching (defaults to true)
|
||||
# search_term - Deprecated: Some languages maybe indexed under a
|
||||
@@ -141,7 +142,7 @@ Agda:
|
||||
|
||||
Alloy:
|
||||
type: programming # 'modeling' would be more appropiate
|
||||
color: "#cc5c24"
|
||||
color: "#64C800"
|
||||
extensions:
|
||||
- .als
|
||||
ace_mode: text
|
||||
@@ -214,7 +215,7 @@ AspectJ:
|
||||
color: "#a957b0"
|
||||
extensions:
|
||||
- .aj
|
||||
tm_scope: none
|
||||
tm_scope: source.aspectj
|
||||
ace_mode: text
|
||||
|
||||
Assembly:
|
||||
@@ -392,6 +393,7 @@ C#:
|
||||
- csharp
|
||||
extensions:
|
||||
- .cs
|
||||
- .cake
|
||||
- .cshtml
|
||||
- .csx
|
||||
|
||||
@@ -539,6 +541,14 @@ Clean:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
Click:
|
||||
type: programming
|
||||
color: "#E4E6F3"
|
||||
extensions:
|
||||
- .click
|
||||
tm_scope: source.click
|
||||
ace_mode: text
|
||||
|
||||
Clojure:
|
||||
type: programming
|
||||
ace_mode: clojure
|
||||
@@ -567,6 +577,7 @@ CoffeeScript:
|
||||
extensions:
|
||||
- .coffee
|
||||
- ._coffee
|
||||
- .cake
|
||||
- .cjsx
|
||||
- .cson
|
||||
- .iced
|
||||
@@ -616,6 +627,7 @@ Common Lisp:
|
||||
- .lsp
|
||||
- .ny
|
||||
- .podsl
|
||||
- .sexp
|
||||
interpreters:
|
||||
- lisp
|
||||
- sbcl
|
||||
@@ -718,7 +730,7 @@ Cython:
|
||||
|
||||
D:
|
||||
type: programming
|
||||
color: "#fcd46d"
|
||||
color: "#ba595e"
|
||||
extensions:
|
||||
- .d
|
||||
- .di
|
||||
@@ -750,6 +762,14 @@ DM:
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
DNS Zone:
|
||||
type: data
|
||||
extensions:
|
||||
- .zone
|
||||
- .arpa
|
||||
tm_scope: text.zone_file
|
||||
ace_mode: text
|
||||
|
||||
DTrace:
|
||||
type: programming
|
||||
aliases:
|
||||
@@ -834,6 +854,14 @@ ECL:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
ECLiPSe:
|
||||
type: programming
|
||||
group: prolog
|
||||
extensions:
|
||||
- .ecl
|
||||
tm_scope: source.prolog.eclipse
|
||||
ace_mode: prolog
|
||||
|
||||
Eagle:
|
||||
type: markup
|
||||
color: "#814C05"
|
||||
@@ -867,6 +895,8 @@ Elixir:
|
||||
ace_mode: elixir
|
||||
filenames:
|
||||
- mix.lock
|
||||
interpreters:
|
||||
- elixir
|
||||
|
||||
Elm:
|
||||
type: programming
|
||||
@@ -994,6 +1024,7 @@ Formatted:
|
||||
type: data
|
||||
extensions:
|
||||
- .for
|
||||
- .eam.fs
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
@@ -1011,6 +1042,16 @@ Forth:
|
||||
- .fs
|
||||
ace_mode: forth
|
||||
|
||||
FreeMarker:
|
||||
type: programming
|
||||
color: "#0050b2"
|
||||
aliases:
|
||||
- ftl
|
||||
extensions:
|
||||
- .ftl
|
||||
tm_scope: text.html.ftl
|
||||
ace_mode: ftl
|
||||
|
||||
Frege:
|
||||
type: programming
|
||||
color: "#00cafe"
|
||||
@@ -1300,11 +1341,22 @@ HTML+Django:
|
||||
- .mustache
|
||||
- .jinja
|
||||
aliases:
|
||||
- django
|
||||
- html+django/jinja
|
||||
- html+jinja
|
||||
- htmldjango
|
||||
ace_mode: django
|
||||
|
||||
HTML+EEX:
|
||||
type: markup
|
||||
tm_scope: text.html.elixir
|
||||
group: HTML
|
||||
aliases:
|
||||
- eex
|
||||
extensions:
|
||||
- .eex
|
||||
ace_mode: text
|
||||
|
||||
HTML+ERB:
|
||||
type: markup
|
||||
tm_scope: text.html.erb
|
||||
@@ -1314,7 +1366,7 @@ HTML+ERB:
|
||||
extensions:
|
||||
- .erb
|
||||
- .erb.deface
|
||||
ace_mode: html_ruby
|
||||
ace_mode: text
|
||||
|
||||
HTML+PHP:
|
||||
type: markup
|
||||
@@ -1350,6 +1402,7 @@ Haml:
|
||||
Handlebars:
|
||||
type: markup
|
||||
color: "#01a9d6"
|
||||
group: HTML
|
||||
aliases:
|
||||
- hbs
|
||||
- htmlbars
|
||||
@@ -1457,7 +1510,7 @@ Inform 7:
|
||||
extensions:
|
||||
- .ni
|
||||
- .i7x
|
||||
tm_scope: source.Inform7
|
||||
tm_scope: source.inform7
|
||||
aliases:
|
||||
- i7
|
||||
- inform7
|
||||
@@ -1528,7 +1581,9 @@ JSON:
|
||||
searchable: false
|
||||
extensions:
|
||||
- .json
|
||||
- .geojson
|
||||
- .lock
|
||||
- .topojson
|
||||
filenames:
|
||||
- .jshintrc
|
||||
- composer.lock
|
||||
@@ -1556,12 +1611,20 @@ JSONiq:
|
||||
- .jq
|
||||
tm_scope: source.jq
|
||||
|
||||
JSX:
|
||||
type: programming
|
||||
group: JavaScript
|
||||
extensions:
|
||||
- .jsx
|
||||
tm_scope: source.js.jsx
|
||||
ace_mode: javascript
|
||||
|
||||
Jade:
|
||||
group: HTML
|
||||
type: markup
|
||||
extensions:
|
||||
- .jade
|
||||
tm_scope: source.jade
|
||||
tm_scope: text.jade
|
||||
ace_mode: jade
|
||||
|
||||
Jasmin:
|
||||
@@ -1606,10 +1669,10 @@ JavaScript:
|
||||
- .gs
|
||||
- .jake
|
||||
- .jsb
|
||||
- .jscad
|
||||
- .jsfl
|
||||
- .jsm
|
||||
- .jss
|
||||
- .jsx
|
||||
- .njs
|
||||
- .pac
|
||||
- .sjs
|
||||
@@ -1641,6 +1704,18 @@ Julia:
|
||||
color: "#a270ba"
|
||||
ace_mode: julia
|
||||
|
||||
Jupyter Notebook:
|
||||
type: markup
|
||||
ace_mode: json
|
||||
tm_scope: source.json
|
||||
color: "#DA5B0B"
|
||||
extensions:
|
||||
- .ipynb
|
||||
filenames:
|
||||
- Notebook
|
||||
aliases:
|
||||
- IPython Notebook
|
||||
|
||||
KRL:
|
||||
type: programming
|
||||
color: "#28431f"
|
||||
@@ -1653,6 +1728,7 @@ KiCad:
|
||||
type: programming
|
||||
extensions:
|
||||
- .sch
|
||||
- .brd
|
||||
- .kicad_pcb
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
@@ -1702,6 +1778,7 @@ LSL:
|
||||
ace_mode: lsl
|
||||
extensions:
|
||||
- .lsl
|
||||
- .lslp
|
||||
interpreters:
|
||||
- lsl
|
||||
color: '#3d9970'
|
||||
@@ -1710,8 +1787,8 @@ LabVIEW:
|
||||
type: programming
|
||||
extensions:
|
||||
- .lvproj
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
tm_scope: text.xml
|
||||
ace_mode: xml
|
||||
|
||||
Lasso:
|
||||
type: programming
|
||||
@@ -1901,6 +1978,15 @@ M:
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
|
||||
MAXScript:
|
||||
type: programming
|
||||
color: "#00a6a6"
|
||||
extensions:
|
||||
- .ms
|
||||
- .mcr
|
||||
tm_scope: source.maxscript
|
||||
ace_mode: text
|
||||
|
||||
MTML:
|
||||
type: markup
|
||||
color: "#b7e1f4"
|
||||
@@ -1933,6 +2019,7 @@ Makefile:
|
||||
- GNUmakefile
|
||||
- Kbuild
|
||||
- Makefile
|
||||
- Makefile.inc
|
||||
- makefile
|
||||
interpreters:
|
||||
- make
|
||||
@@ -1974,6 +2061,7 @@ Mathematica:
|
||||
- .cdf
|
||||
- .m
|
||||
- .ma
|
||||
- .mt
|
||||
- .nb
|
||||
- .nbp
|
||||
- .wl
|
||||
@@ -1985,6 +2073,8 @@ Mathematica:
|
||||
Matlab:
|
||||
type: programming
|
||||
color: "#bb92ac"
|
||||
aliases:
|
||||
- octave
|
||||
extensions:
|
||||
- .matlab
|
||||
- .m
|
||||
@@ -2018,6 +2108,7 @@ MediaWiki:
|
||||
wrap: true
|
||||
extensions:
|
||||
- .mediawiki
|
||||
- .wiki
|
||||
tm_scope: text.html.mediawiki
|
||||
ace_mode: text
|
||||
|
||||
@@ -2033,6 +2124,14 @@ Mercury:
|
||||
tm_scope: source.mercury
|
||||
ace_mode: prolog
|
||||
|
||||
Metal:
|
||||
type: programming
|
||||
color: "#8f14e9"
|
||||
extensions:
|
||||
- .metal
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
MiniD: # Legacy
|
||||
type: programming
|
||||
searchable: false
|
||||
@@ -2108,7 +2207,7 @@ Myghty:
|
||||
|
||||
NCL:
|
||||
type: programming
|
||||
color: #28431f
|
||||
color: "#28431f"
|
||||
extensions:
|
||||
- .ncl
|
||||
tm_scope: source.ncl
|
||||
@@ -2411,6 +2510,7 @@ PHP:
|
||||
- .php3
|
||||
- .php4
|
||||
- .php5
|
||||
- .phps
|
||||
- .phpt
|
||||
filenames:
|
||||
- Phakefile
|
||||
@@ -2424,8 +2524,10 @@ PLSQL:
|
||||
type: programming
|
||||
ace_mode: sql
|
||||
tm_scope: source.plsql.oracle
|
||||
color: "#dad8d8"
|
||||
extensions:
|
||||
- .pls
|
||||
- .pck
|
||||
- .pkb
|
||||
- .pks
|
||||
- .plb
|
||||
@@ -2542,6 +2644,13 @@ Perl6:
|
||||
tm_scope: source.perl.6
|
||||
ace_mode: perl
|
||||
|
||||
Pickle:
|
||||
type: data
|
||||
extensions:
|
||||
- .pkl
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
PicoLisp:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -2586,6 +2695,13 @@ PogoScript:
|
||||
tm_scope: source.pogoscript
|
||||
ace_mode: text
|
||||
|
||||
Pony:
|
||||
type: programming
|
||||
extensions:
|
||||
- .pony
|
||||
tm_scope: source.pony
|
||||
ace_mode: text
|
||||
|
||||
PostScript:
|
||||
type: markup
|
||||
extensions:
|
||||
@@ -2618,11 +2734,11 @@ Prolog:
|
||||
color: "#74283c"
|
||||
extensions:
|
||||
- .pl
|
||||
- .ecl
|
||||
- .pro
|
||||
- .prolog
|
||||
interpreters:
|
||||
- swipl
|
||||
tm_scope: source.prolog
|
||||
ace_mode: prolog
|
||||
|
||||
Propeller Spin:
|
||||
@@ -2653,7 +2769,7 @@ Public Key:
|
||||
|
||||
Puppet:
|
||||
type: programming
|
||||
color: "#332A77"
|
||||
color: "#302B6D"
|
||||
extensions:
|
||||
- .pp
|
||||
filenames:
|
||||
@@ -2760,7 +2876,7 @@ R:
|
||||
ace_mode: r
|
||||
|
||||
RAML:
|
||||
type: data
|
||||
type: markup
|
||||
ace_mode: yaml
|
||||
tm_scope: source.yaml
|
||||
color: "#77d9fb"
|
||||
@@ -2803,7 +2919,7 @@ RMarkdown:
|
||||
ace_mode: markdown
|
||||
extensions:
|
||||
- .rmd
|
||||
tm_scope: none
|
||||
tm_scope: source.gfm
|
||||
|
||||
Racket:
|
||||
type: programming
|
||||
@@ -2869,6 +2985,17 @@ Redcode:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
Ren'Py:
|
||||
type: programming
|
||||
group: Python
|
||||
aliases:
|
||||
- renpy
|
||||
color: "#ff7f7f"
|
||||
extensions:
|
||||
- .rpy
|
||||
tm_scope: source.renpy
|
||||
ace_mode: python
|
||||
|
||||
RenderScript:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -2933,6 +3060,7 @@ Ruby:
|
||||
- .pryrc
|
||||
- Appraisals
|
||||
- Berksfile
|
||||
- Brewfile
|
||||
- Buildfile
|
||||
- Deliverfile
|
||||
- Fastfile
|
||||
@@ -2953,6 +3081,7 @@ Rust:
|
||||
color: "#dea584"
|
||||
extensions:
|
||||
- .rs
|
||||
- .rs.in
|
||||
ace_mode: rust
|
||||
|
||||
SAS:
|
||||
@@ -3076,7 +3205,7 @@ Sass:
|
||||
Scala:
|
||||
type: programming
|
||||
ace_mode: scala
|
||||
color: "#7dd3b0"
|
||||
color: "#DC322F"
|
||||
extensions:
|
||||
- .scala
|
||||
- .sbt
|
||||
@@ -3181,6 +3310,7 @@ Slim:
|
||||
color: "#ff8f77"
|
||||
extensions:
|
||||
- .slim
|
||||
tm_scope: text.slim
|
||||
ace_mode: text
|
||||
|
||||
Smali:
|
||||
@@ -3227,6 +3357,14 @@ Squirrel:
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
Stan:
|
||||
type: programming
|
||||
color: "#b2011d"
|
||||
extensions:
|
||||
- .stan
|
||||
ace_mode: text
|
||||
tm_scope: source.stan
|
||||
|
||||
Standard ML:
|
||||
type: programming
|
||||
color: "#dc566d"
|
||||
@@ -3264,9 +3402,12 @@ SuperCollider:
|
||||
type: programming
|
||||
color: "#46390b"
|
||||
extensions:
|
||||
- .scd
|
||||
- .sc
|
||||
tm_scope: none
|
||||
- .scd
|
||||
interpreters:
|
||||
- sclang
|
||||
- scsynth
|
||||
tm_scope: source.supercollider
|
||||
ace_mode: text
|
||||
|
||||
Swift:
|
||||
@@ -3409,6 +3550,7 @@ TypeScript:
|
||||
- ts
|
||||
extensions:
|
||||
- .ts
|
||||
- .tsx
|
||||
tm_scope: source.ts
|
||||
ace_mode: typescript
|
||||
|
||||
@@ -3442,6 +3584,17 @@ UnrealScript:
|
||||
tm_scope: source.java
|
||||
ace_mode: java
|
||||
|
||||
UrWeb:
|
||||
type: programming
|
||||
aliases:
|
||||
- Ur/Web
|
||||
- Ur
|
||||
extensions:
|
||||
- .ur
|
||||
- .urs
|
||||
tm_scope: source.ur
|
||||
ace_mode: text
|
||||
|
||||
VCL:
|
||||
group: Perl
|
||||
type: programming
|
||||
@@ -3547,6 +3700,16 @@ WebIDL:
|
||||
tm_scope: source.webidl
|
||||
ace_mode: text
|
||||
|
||||
X10:
|
||||
type: programming
|
||||
aliases:
|
||||
- xten
|
||||
ace_mode: text
|
||||
extensions:
|
||||
- .x10
|
||||
color: "#4B6BEF"
|
||||
tm_scope: source.x10
|
||||
|
||||
XC:
|
||||
type: programming
|
||||
color: "#99DA07"
|
||||
@@ -3569,6 +3732,7 @@ XML:
|
||||
- .ccxml
|
||||
- .clixml
|
||||
- .cproject
|
||||
- .csl
|
||||
- .csproj
|
||||
- .ct
|
||||
- .dita
|
||||
@@ -3584,6 +3748,7 @@ XML:
|
||||
- .iml
|
||||
- .ivy
|
||||
- .jelly
|
||||
- .jsproj
|
||||
- .kml
|
||||
- .launch
|
||||
- .mdpolicy
|
||||
@@ -3614,6 +3779,7 @@ XML:
|
||||
- .tmSnippet
|
||||
- .tmTheme
|
||||
- .ts
|
||||
- .tsx
|
||||
- .ui
|
||||
- .urdf
|
||||
- .vbproj
|
||||
@@ -3716,7 +3882,9 @@ YAML:
|
||||
- .yml
|
||||
- .reek
|
||||
- .rviz
|
||||
- .syntax
|
||||
- .yaml
|
||||
- .yaml-tmlanguage
|
||||
ace_mode: yaml
|
||||
|
||||
Yacc:
|
||||
@@ -3808,6 +3976,8 @@ reStructuredText:
|
||||
extensions:
|
||||
- .rst
|
||||
- .rest
|
||||
- .rest.txt
|
||||
- .rst.txt
|
||||
ace_mode: text
|
||||
|
||||
wisp:
|
||||
@@ -3821,8 +3991,13 @@ wisp:
|
||||
xBase:
|
||||
type: programming
|
||||
color: "#403a40"
|
||||
aliases:
|
||||
- advpl
|
||||
- clipper
|
||||
- foxpro
|
||||
extensions:
|
||||
- .prg
|
||||
- .ch
|
||||
- .prw
|
||||
tm_scope: source.harbour
|
||||
ace_mode: text
|
||||
|
||||
@@ -4,7 +4,11 @@ require 'rugged'
|
||||
|
||||
module Linguist
|
||||
class LazyBlob
|
||||
GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
|
||||
GIT_ATTR = ['linguist-documentation',
|
||||
'linguist-language',
|
||||
'linguist-vendored',
|
||||
'linguist-generated']
|
||||
|
||||
GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
|
||||
GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
|
||||
|
||||
@@ -31,14 +35,6 @@ module Linguist
|
||||
name, GIT_ATTR, GIT_ATTR_FLAGS)
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
return super
|
||||
end
|
||||
end
|
||||
|
||||
def documentation?
|
||||
if attr = git_attributes['linguist-documentation']
|
||||
boolean_attribute(attr)
|
||||
@@ -47,6 +43,22 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def generated?
|
||||
if attr = git_attributes['linguist-generated']
|
||||
boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def language
|
||||
return @language if defined?(@language)
|
||||
|
||||
@@ -67,11 +79,15 @@ module Linguist
|
||||
@size
|
||||
end
|
||||
|
||||
def cleanup!
|
||||
@data.clear if @data
|
||||
end
|
||||
|
||||
protected
|
||||
|
||||
# Returns true if the attribute is present and not the string "false".
|
||||
def boolean_attribute(attr)
|
||||
attr != "false"
|
||||
def boolean_attribute(attribute)
|
||||
attribute != "false"
|
||||
end
|
||||
|
||||
def load_blob!
|
||||
|
||||
@@ -126,12 +126,13 @@ module Linguist
|
||||
end
|
||||
|
||||
protected
|
||||
MAX_TREE_SIZE = 100_000
|
||||
|
||||
def compute_stats(old_commit_oid, cache = nil)
|
||||
return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE
|
||||
|
||||
old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree
|
||||
|
||||
read_index
|
||||
|
||||
diff = Rugged::Tree.diff(repository, old_tree, current_tree)
|
||||
|
||||
# Clear file map and fetch full diff if any .gitattributes files are changed
|
||||
@@ -157,8 +158,11 @@ module Linguist
|
||||
|
||||
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
|
||||
|
||||
next unless blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
if blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
end
|
||||
|
||||
blob.cleanup!
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ module Linguist
|
||||
module Strategy
|
||||
class Modeline
|
||||
EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
|
||||
VimModeline = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
|
||||
VimModeline = /vim:\s*set.*\s(?:ft|filetype)=(\w+)\s?.*:/i
|
||||
|
||||
# Public: Detects language based on Vim and Emacs modelines
|
||||
#
|
||||
|
||||
@@ -86,13 +86,13 @@ module Linguist
|
||||
if s.peek(1) == "\""
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/[^\\]"/)
|
||||
s.skip_until(/(?<!\\)"/)
|
||||
end
|
||||
elsif s.scan(/'/)
|
||||
if s.peek(1) == "'"
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/[^\\]'/)
|
||||
s.skip_until(/(?<!\\)'/)
|
||||
end
|
||||
|
||||
# Skip number literals
|
||||
|
||||
@@ -78,6 +78,9 @@
|
||||
# Haxelib projects often contain a neko bytecode file named run.n
|
||||
- run.n$
|
||||
|
||||
# Bootstrap Datepicker
|
||||
- bootstrap-datepicker/
|
||||
|
||||
## Commonly Bundled JavaScript frameworks ##
|
||||
|
||||
# jQuery
|
||||
@@ -88,6 +91,34 @@
|
||||
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?\.(js|css)$
|
||||
- (^|/)jquery\.(ui|effects)\.([^.]*)\.(js|css)$
|
||||
|
||||
# jQuery Gantt
|
||||
- jquery.fn.gantt.js
|
||||
|
||||
# jQuery fancyBox
|
||||
- jquery.fancybox.(js|css)
|
||||
|
||||
# Fuel UX
|
||||
- fuelux.js
|
||||
|
||||
# jQuery File Upload
|
||||
- (^|/)jquery\.fileupload(-\w+)?\.js$
|
||||
|
||||
# Slick
|
||||
- (^|/)slick\.\w+.js$
|
||||
|
||||
# Leaflet plugins
|
||||
- (^|/)Leaflet\.Coordinates-\d+\.\d+\.\d+\.src\.js$
|
||||
- leaflet.draw-src.js
|
||||
- leaflet.draw.css
|
||||
- Control.FullScreen.css
|
||||
- Control.FullScreen.js
|
||||
- leaflet.spin.js
|
||||
- wicket-leaflet.js
|
||||
|
||||
# Sublime Text workspace files
|
||||
- .sublime-project
|
||||
- .sublime-workspace
|
||||
|
||||
# Prototype
|
||||
- (^|/)prototype(.*)\.js$
|
||||
- (^|/)effects\.js$
|
||||
@@ -122,7 +153,7 @@
|
||||
- (^|/)Chart\.js$
|
||||
|
||||
# Codemirror
|
||||
- (^|/)[Cc]ode[Mm]irror/(lib|mode|theme|addon|keymap)
|
||||
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)
|
||||
|
||||
# SyntaxHighlighter - http://alexgorbatchev.com/
|
||||
- (^|/)shBrush([^.]*)\.js$
|
||||
@@ -164,6 +195,11 @@
|
||||
|
||||
## Obj-C ##
|
||||
|
||||
# Xcode
|
||||
|
||||
- \.xctemplate/
|
||||
- \.imageset/
|
||||
|
||||
# Carthage
|
||||
- ^Carthage/
|
||||
|
||||
@@ -179,6 +215,10 @@
|
||||
# Fabric
|
||||
- Fabric.framework/
|
||||
|
||||
# git config files
|
||||
- gitattributes$
|
||||
- gitignore$
|
||||
- gitmodules$
|
||||
|
||||
## Groovy ##
|
||||
|
||||
@@ -224,21 +264,9 @@
|
||||
# Html5shiv
|
||||
- (^|/)html5shiv\.js$
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
|
||||
# LICENSE, README, git config files
|
||||
- ^COPYING$
|
||||
- LICENSE$
|
||||
- License$
|
||||
- gitattributes$
|
||||
- gitignore$
|
||||
- gitmodules$
|
||||
- ^README$
|
||||
- ^readme$
|
||||
|
||||
# Test fixtures
|
||||
- ^[Tt]ests?/fixtures/
|
||||
- ^[Ss]pecs?/fixtures/
|
||||
|
||||
# PhoneGap/Cordova
|
||||
- (^|/)cordova([^.]*)\.js$
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
module Linguist
|
||||
VERSION = "4.5.11"
|
||||
VERSION = "4.7.4"
|
||||
end
|
||||
|
||||
86
samples/C#/build.cake
Normal file
86
samples/C#/build.cake
Normal file
@@ -0,0 +1,86 @@
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// ARGUMENTS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var target = Argument<string>("target", "Default");
|
||||
var configuration = Argument<string>("configuration", "Release");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// GLOBAL VARIABLES
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var solutions = GetFiles("./**/*.sln");
|
||||
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// SETUP / TEARDOWN
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Setup(() =>
|
||||
{
|
||||
// Executed BEFORE the first task.
|
||||
Information("Running tasks...");
|
||||
});
|
||||
|
||||
Teardown(() =>
|
||||
{
|
||||
// Executed AFTER the last task.
|
||||
Information("Finished running tasks.");
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TASK DEFINITIONS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
// Clean solution directories.
|
||||
foreach(var path in solutionPaths)
|
||||
{
|
||||
Information("Cleaning {0}", path);
|
||||
CleanDirectories(path + "/**/bin/" + configuration);
|
||||
CleanDirectories(path + "/**/obj/" + configuration);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Restore all NuGet packages.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Restoring {0}...", solution);
|
||||
NuGetRestore(solution);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Build all solutions.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Building {0}", solution);
|
||||
MSBuild(solution, settings =>
|
||||
settings.SetPlatformTarget(PlatformTarget.MSIL)
|
||||
.WithProperty("TreatWarningsAsErrors","true")
|
||||
.WithTarget("Build")
|
||||
.SetConfiguration(configuration));
|
||||
}
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TARGETS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// EXECUTION
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
RunTarget(target);
|
||||
133
samples/Click/sr2.click
Normal file
133
samples/Click/sr2.click
Normal file
@@ -0,0 +1,133 @@
|
||||
rates :: AvailableRates
|
||||
elementclass sr2 {
|
||||
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
|
||||
|
||||
|
||||
arp :: ARPTable();
|
||||
lt :: LinkTable(IP $sr2_ip);
|
||||
|
||||
|
||||
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
PERIOD 15,
|
||||
GW $gateway);
|
||||
|
||||
|
||||
gw -> SR2SetChecksum -> [0] output;
|
||||
|
||||
set_gw :: SR2SetGateway(SEL gw);
|
||||
|
||||
|
||||
es :: SR2ETTStat(ETHTYPE 0x0641,
|
||||
ETH $wireless_mac,
|
||||
IP $sr2_ip,
|
||||
PERIOD 30000,
|
||||
TAU 300000,
|
||||
ARP arp,
|
||||
PROBES $probes,
|
||||
ETT metric,
|
||||
RT rates);
|
||||
|
||||
|
||||
metric :: SR2ETTMetric(LT lt);
|
||||
|
||||
|
||||
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
ARP arp,
|
||||
LT lt);
|
||||
|
||||
|
||||
querier :: SR2Querier(ETH $wireless_mac,
|
||||
SR forwarder,
|
||||
LT lt,
|
||||
ROUTE_DAMPENING true,
|
||||
TIME_BEFORE_SWITCH 5,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG false);
|
||||
|
||||
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_responder -> SR2SetChecksum -> [0] output;
|
||||
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
|
||||
query_forwarder [1] -> query_responder;
|
||||
|
||||
data_ck :: SR2SetChecksum()
|
||||
|
||||
input [1]
|
||||
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> querier
|
||||
-> data_ck;
|
||||
|
||||
|
||||
host_cl [1] -> [0] set_gw [0] -> querier;
|
||||
|
||||
forwarder[0]
|
||||
-> dt ::DecIPTTL
|
||||
-> data_ck
|
||||
-> [2] output;
|
||||
|
||||
|
||||
dt[1]
|
||||
-> Print(ttl-error)
|
||||
-> ICMPError($sr2_ip, timeexceeded, 0)
|
||||
-> querier;
|
||||
|
||||
|
||||
// queries
|
||||
querier [1] -> [1] query_forwarder;
|
||||
es -> SetTimestamp() -> [1] output;
|
||||
|
||||
|
||||
forwarder[1] //ip packets to me
|
||||
-> SR2StripHeader()
|
||||
-> CheckIPHeader()
|
||||
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> [3] output;
|
||||
|
||||
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
|
||||
|
||||
input [0]
|
||||
-> ncl :: Classifier(
|
||||
12/0643 , //sr2_forwarder
|
||||
12/0644 , //sr2
|
||||
12/0645 , //replies
|
||||
12/0641 , //sr2_es
|
||||
12/062c , //sr2_gw
|
||||
);
|
||||
|
||||
|
||||
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
|
||||
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
|
||||
ncl[2] -> SR2CheckHeader() -> query_responder;
|
||||
ncl[3] -> es;
|
||||
ncl[4] -> SR2CheckHeader() -> gw;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
|
||||
Idle -> [1] s;
|
||||
s[1] -> Discard;
|
||||
s[2] -> Discard;
|
||||
s[3] -> Discard;
|
||||
142
samples/Click/thomer-nat.click
Normal file
142
samples/Click/thomer-nat.click
Normal file
@@ -0,0 +1,142 @@
|
||||
// This Click configuration implements a firewall and NAT, roughly based on the
|
||||
// mazu-nat.click example.
|
||||
//
|
||||
// This example assumes there is one interface that is IP-aliased. In this
|
||||
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
|
||||
// respectively. There is a local network, 192.168.1.0/24, and an upstream
|
||||
// gateway, 66.58.65.89. Traffic from the local network is NATed.
|
||||
//
|
||||
// Connections can be initiated from the NAT box itself, also.
|
||||
//
|
||||
// For bugs, suggestions, and, corrections, please email me.
|
||||
//
|
||||
// Author: Thomer M. Gil (click@thomer.com)
|
||||
|
||||
AddressInfo(
|
||||
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
|
||||
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
|
||||
gw-addr 66.58.65.89 00:20:6f:14:54:c2
|
||||
);
|
||||
|
||||
|
||||
elementclass SniffGatewayDevice {
|
||||
$device |
|
||||
from :: FromDevice($device)
|
||||
-> t1 :: Tee
|
||||
-> output;
|
||||
input -> q :: Queue(1024)
|
||||
-> t2 :: PullTee
|
||||
-> to :: ToDevice($device);
|
||||
t1[1] -> ToHostSniffers;
|
||||
t2[1] -> ToHostSniffers($device);
|
||||
ScheduleInfo(from .1, to 1);
|
||||
}
|
||||
|
||||
|
||||
device :: SniffGatewayDevice(eth0);
|
||||
arpq_in :: ARPQuerier(eth0-in) -> device;
|
||||
ip_to_extern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> EtherEncap(0x800, eth0-ex, gw-addr)
|
||||
-> device;
|
||||
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
|
||||
-> ToHost;
|
||||
ip_to_intern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> arpq_in;
|
||||
|
||||
|
||||
arp_class :: Classifier(
|
||||
12/0806 20/0001, // [0] ARP requests
|
||||
12/0806 20/0002, // [1] ARP replies to host
|
||||
12/0800); // [2] IP packets
|
||||
|
||||
device -> arp_class;
|
||||
|
||||
// ARP crap
|
||||
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
|
||||
arp_class[1] -> arp_t :: Tee;
|
||||
arp_t[0] -> ToHost;
|
||||
arp_t[1] -> [1]arpq_in;
|
||||
|
||||
|
||||
// IP packets
|
||||
arp_class[2] -> Strip(14)
|
||||
-> CheckIPHeader
|
||||
-> ipclass :: IPClassifier(dst host eth0-ex,
|
||||
dst host eth0-in,
|
||||
src net eth0-in);
|
||||
|
||||
// Define pattern NAT
|
||||
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
|
||||
|
||||
// Rewriting rules for UDP/TCP packets
|
||||
// output[0] rewritten to go into the wild
|
||||
// output[1] rewritten to come back from the wild or no match
|
||||
rw :: IPRewriter(pattern NAT 0 1,
|
||||
pass 1);
|
||||
|
||||
// Rewriting rules for ICMP packets
|
||||
irw :: ICMPPingRewriter(eth0-ex, -);
|
||||
irw[0] -> ip_to_extern;
|
||||
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
|
||||
icmp_me_or_intern[0] -> ip_to_host;
|
||||
icmp_me_or_intern[1] -> ip_to_intern;
|
||||
|
||||
// Rewriting rules for ICMP error packets
|
||||
ierw :: ICMPRewriter(rw irw);
|
||||
ierw[0] -> icmp_me_or_intern;
|
||||
ierw[1] -> icmp_me_or_intern;
|
||||
|
||||
|
||||
// Packets directed at eth0-ex.
|
||||
// Send it through IPRewriter(pass). If there was a mapping, it will be
|
||||
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
|
||||
// eth0-ex.
|
||||
ipclass[0] -> [1]rw;
|
||||
|
||||
// packets that were rewritten, heading into the wild world.
|
||||
rw[0] -> ip_to_extern;
|
||||
|
||||
// packets that come back from the wild or are not part of an established
|
||||
// connection.
|
||||
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
|
||||
dst net eth0-in);
|
||||
|
||||
// not established yet or returning packets for a connection that was
|
||||
// established from this host itself.
|
||||
established_class[0] ->
|
||||
firewall :: IPClassifier(dst tcp port ssh,
|
||||
dst tcp port smtp,
|
||||
dst tcp port domain,
|
||||
dst udp port domain,
|
||||
icmp type echo-reply,
|
||||
proto icmp,
|
||||
port > 4095,
|
||||
-);
|
||||
|
||||
firewall[0] -> ip_to_host; // ssh
|
||||
firewall[1] -> ip_to_host; // smtp
|
||||
firewall[2] -> ip_to_host; // domain (t)
|
||||
firewall[3] -> ip_to_host; // domain (u)
|
||||
firewall[4] -> [0]irw; // icmp reply
|
||||
firewall[5] -> [0]ierw; // other icmp
|
||||
firewall[6] -> ip_to_host; // port > 4095, probably for connection
|
||||
// originating from host itself
|
||||
firewall[7] -> Discard; // don't allow incoming for port <= 4095
|
||||
|
||||
// established connection
|
||||
established_class[1] -> ip_to_intern;
|
||||
|
||||
// To eth0-in. Only accept from inside network.
|
||||
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
|
||||
|
||||
// Packets from eth0-in:net either stay on local network or go to the wild.
|
||||
// Those that go into the wild need to go through the appropriate rewriting
|
||||
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
|
||||
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
|
||||
inter_class[0] -> ip_to_intern;
|
||||
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
|
||||
icmp type echo);
|
||||
ip_udp_class[0] -> [0]rw;
|
||||
ip_udp_class[1] -> [0]irw;
|
||||
17
samples/CoffeeScript/build.cake
Normal file
17
samples/CoffeeScript/build.cake
Normal file
@@ -0,0 +1,17 @@
|
||||
fs = require 'fs'
|
||||
|
||||
{print} = require 'sys'
|
||||
{spawn} = require 'child_process'
|
||||
|
||||
build = (callback) ->
|
||||
coffee = spawn 'coffee', ['-c', '-o', '.', '.']
|
||||
coffee.stderr.on 'data', (data) ->
|
||||
process.stderr.write data.toString()
|
||||
coffee.stdout.on 'data', (data) ->
|
||||
print data.toString()
|
||||
coffee.on 'exit', (code) ->
|
||||
callback?() if code is 0
|
||||
|
||||
task 'build', 'Build from source', ->
|
||||
build()
|
||||
|
||||
2
samples/Common Lisp/config.sexp
Normal file
2
samples/Common Lisp/config.sexp
Normal file
@@ -0,0 +1,2 @@
|
||||
((exe_name hello)
|
||||
(link_order (world hello)))
|
||||
103
samples/Common Lisp/rss.sexp
Normal file
103
samples/Common Lisp/rss.sexp
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
(:TURTLE
|
||||
|
||||
(:@PREFIX "rdf:" "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>")
|
||||
(:@PREFIX "owl:" "<http://www.w3.org/2002/07/owl#>")
|
||||
(:@PREFIX "dc:" "<http://purl.org/dc/elements/1.1/>")
|
||||
(:@PREFIX "xsd:" "<http://www.w3.org/2001/XMLSchema#>")
|
||||
(:@PREFIX "rdfs:" "<http://www.w3.org/2000/01/rdf-schema#>")
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/channel>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1="<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")
|
||||
(:OBJECTS
|
||||
(:QNAME "rdfs:Class")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "An RSS information channel.")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS
|
||||
(:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:label")
|
||||
(:OBJECTS
|
||||
(:STRING "Channel"))))
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/description>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1#)
|
||||
(:OBJECTS
|
||||
(:QNAME "rdf:Property")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A short text description of the subject.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Description")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:description"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/image>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS image.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Image"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/item>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS item.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Item"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/items>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "Points to a list of rss:item elements that are members of the subject channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Items"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/link>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The URL to which an HTML rendering of the subject will link.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Link")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/name>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The text input field's (variable) name.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Name"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/textinput>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS text input.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Text Input"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/title>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A descriptive title for the channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Title")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:title"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/url>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING
|
||||
"The URL of the image to used in the 'src' attribute of the channel's image tag when rendered as HTML.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "URL")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier")))))
|
||||
13
samples/DNS Zone/sample.arpa
Normal file
13
samples/DNS Zone/sample.arpa
Normal file
@@ -0,0 +1,13 @@
|
||||
$ORIGIN 0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3.ip6.arpa.
|
||||
$TTL 60
|
||||
@ IN SOA ns root (
|
||||
2002042901 ; SERIAL
|
||||
7200 ; REFRESH
|
||||
600 ; RETRY
|
||||
36000000 ; EXPIRE
|
||||
120 ; MINIMUM
|
||||
)
|
||||
|
||||
NS ns.example.com.
|
||||
|
||||
c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0 PTR sip01.example.com.
|
||||
12
samples/DNS Zone/sneaky.net.zone
Normal file
12
samples/DNS Zone/sneaky.net.zone
Normal file
@@ -0,0 +1,12 @@
|
||||
$TTL 3d
|
||||
@ IN SOA root.localhost. root.sneaky.net. (
|
||||
2015042907 ; serial
|
||||
3d ; refresh
|
||||
1h ; retry
|
||||
12d ; expire
|
||||
2h ; negative response TTL
|
||||
)
|
||||
IN NS root.localhost.
|
||||
IN NS localhost. ; secondary name server is preferably externally maintained
|
||||
|
||||
www IN A 3.141.59.26
|
||||
3608
samples/Formatted/NiAlH_jea.eam.fs
Normal file
3608
samples/Formatted/NiAlH_jea.eam.fs
Normal file
File diff suppressed because it is too large
Load Diff
31
samples/FreeMarker/example.ftl
Normal file
31
samples/FreeMarker/example.ftl
Normal file
@@ -0,0 +1,31 @@
|
||||
<#import "layout.ftl" as layout>
|
||||
|
||||
<#assign results = [
|
||||
{
|
||||
"title": "Example Result",
|
||||
"description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
|
||||
}
|
||||
] />
|
||||
|
||||
<@layout.page title="FreeMarker Example">
|
||||
<#if results?size == 0>
|
||||
There were no results.
|
||||
<#else>
|
||||
<ul>
|
||||
<#list results as result>
|
||||
<li>
|
||||
<strong>${result.title}</strong>
|
||||
<p>${result.description}</p>
|
||||
</li>
|
||||
</#list>
|
||||
</ul>
|
||||
</#if>
|
||||
|
||||
<#-- This is a FreeMarker comment -->
|
||||
<@currentTime />
|
||||
</@layout.page>
|
||||
|
||||
|
||||
<#macro currentTime>
|
||||
${.now?string.full}
|
||||
</#macro>
|
||||
32
samples/FreeMarker/layout.ftl
Normal file
32
samples/FreeMarker/layout.ftl
Normal file
@@ -0,0 +1,32 @@
|
||||
<#ftl strip_text=true />
|
||||
|
||||
<#macro page title>
|
||||
<!doctype html>
|
||||
<html lang="${.lang}">
|
||||
<head>
|
||||
<title>${title}</title>
|
||||
<@metaTags />
|
||||
</head>
|
||||
<body>
|
||||
<#nested />
|
||||
<@footer />
|
||||
</body>
|
||||
</html>
|
||||
</#macro>
|
||||
|
||||
|
||||
<#---
|
||||
Default meta tags
|
||||
-->
|
||||
<#macro metaTags>
|
||||
<#compress>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
</#compress>
|
||||
</#macro>
|
||||
|
||||
<#macro footer>
|
||||
<p>This page is using FreeMarker v${.version}</p>
|
||||
</#macro>
|
||||
26
samples/HTML+EEX/index.html.eex
Normal file
26
samples/HTML+EEX/index.html.eex
Normal file
@@ -0,0 +1,26 @@
|
||||
<h1>Listing Books</h1>
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<th>Title</th>
|
||||
<th>Summary</th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
</tr>
|
||||
|
||||
<%= for book <- @books do %>
|
||||
<tr>
|
||||
<%# comment %>
|
||||
<td><%= book.title %></td>
|
||||
<td><%= book.content %></td>
|
||||
<td><%= link "Show", to: book_path(@conn, :show, book) %></td>
|
||||
<td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
|
||||
<td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
|
||||
</tr>
|
||||
<% end %>
|
||||
</table>
|
||||
|
||||
<br />
|
||||
|
||||
<%= link "New book", to: book_path(@conn, :new) %>
|
||||
@@ -1,6 +1,74 @@
|
||||
Version 1 of Trivial Extension by Andrew Plotkin begins here.
|
||||
Version 2 of Trivial Extension by Andrew Plotkin begins here.
|
||||
|
||||
"This is the rubric of the extension."
|
||||
|
||||
"provided for the Linguist package by Andrew Plotkin"
|
||||
|
||||
[Note the two special quoted lines above.]
|
||||
|
||||
A cow is a kind of animal. A cow can be purple.
|
||||
|
||||
Understand "cow" as a cow.
|
||||
Understand "purple" as a purple cow.
|
||||
|
||||
Check pushing a cow:
|
||||
instead say "Cow-tipping, at your age?[paragraph break]Inconceivable."
|
||||
|
||||
[Here are the possible levels of heading:]
|
||||
|
||||
Volume One
|
||||
|
||||
Text-line is always "A line of text."
|
||||
|
||||
Book 2
|
||||
|
||||
Part the third - indented headings still count
|
||||
|
||||
Chapter IV - not for release
|
||||
|
||||
[Heading labels are case-insensitive.]
|
||||
|
||||
section foobar
|
||||
|
||||
[A line beginning "Volume" that does not have blank lines before and after it is *not* a header line. So the following should all be part of section foobar. Sadly, the "Volume is..." line gets colored as a header, because Atom's regexp model can't recognize "thing with blank lines before and after"!]
|
||||
|
||||
Measure is a kind of value.
|
||||
Volume is a measure. Length is a measure.
|
||||
Area is a measure.
|
||||
|
||||
[And now some Inform 6 inclusions.]
|
||||
|
||||
To say em -- running on:
|
||||
(- style underline; -).
|
||||
To say /em -- running on:
|
||||
(- style roman; -).
|
||||
|
||||
Include (-
|
||||
|
||||
! Inform 6 comments start with a ! mark and run to the end of the line.
|
||||
Global cowcount;
|
||||
|
||||
[ inform6func arg;
|
||||
print "Here is some text; ", (address) 'dictword', ".^";
|
||||
cowcount++; ! increment this variable
|
||||
];
|
||||
|
||||
Object i6cow
|
||||
with name 'cow' 'animal',
|
||||
with description "It looks like a cow.",
|
||||
has animate scenery;
|
||||
|
||||
-) after "Global Variables" in "Output.i6t".
|
||||
|
||||
Trivial Extension ends here.
|
||||
|
||||
---- DOCUMENTATION ----
|
||||
|
||||
Everything after the "---- DOCUMENTATION ----" line is documentation, so it should have the comment style.
|
||||
|
||||
However, tab-indented lines are sample Inform code within the documentation:
|
||||
|
||||
Horns are a kind of thing. Every cow has horns.
|
||||
say "Moo[if the noun is purple] indigo[end if]."
|
||||
|
||||
So we need to allow for that.
|
||||
|
||||
@@ -2,11 +2,61 @@
|
||||
|
||||
Include Trivial Extension by Andrew Plotkin.
|
||||
|
||||
Volume 1 - overview
|
||||
|
||||
Chapter - setting the scene
|
||||
|
||||
The Kitchen is a room.
|
||||
|
||||
[This kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
|
||||
[Comment: this kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
|
||||
|
||||
Section - the kitchen table
|
||||
|
||||
The spicerack is a container in the Kitchen.
|
||||
|
||||
Table of Spices
|
||||
Name Flavor
|
||||
"cinnamon" 5
|
||||
"nutmeg" 4
|
||||
"szechuan pepper" 8
|
||||
|
||||
The description of the spicerack is "It's mostly empty."
|
||||
|
||||
Chapter - a character
|
||||
|
||||
A purple cow called Gelett is in the Kitchen.
|
||||
|
||||
[This comment spans multiple lines..
|
||||
|
||||
...and this line contains [nested square[] brackets]...
|
||||
|
||||
...which is legal in Inform 7.]
|
||||
|
||||
Instead of examining Gelett:
|
||||
say "You'd rather see than be one."
|
||||
|
||||
Instead of examining Gelett:
|
||||
say "You'd rather see than be one."
|
||||
|
||||
Check smelling Gelett:
|
||||
say "This text contains several lines.
|
||||
|
||||
A blank line is displayed as a paragraph break,
|
||||
but a simple line break is not.";
|
||||
stop the action.
|
||||
|
||||
Section - cow catching
|
||||
|
||||
Gelett has a number called the mooness.
|
||||
|
||||
Instead of taking Gelett:
|
||||
increment the mooness of Gelett;
|
||||
if the mooness of Gelett is one:
|
||||
say "Gelett moos once.";
|
||||
else:
|
||||
say "Gelett moos [mooness of Gelett in words] times.";
|
||||
|
||||
Volume 2 - the turn cycle
|
||||
|
||||
Every turn:
|
||||
say "A turn passes[one of][or] placidly[or] idly[or] tediously[at random]."
|
||||
|
||||
82
samples/JSON/geo.geojson
Normal file
82
samples/JSON/geo.geojson
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"type": "FeatureCollection",
|
||||
"features": [
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "Australia Post - North Ryde BC",
|
||||
"geo": [-33.787792, 151.13288],
|
||||
"streetAddress": "11 Waterloo Road",
|
||||
"addressLocality": "Macquarie Park",
|
||||
"addressRegion": "New South Wales",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "2113"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [151.13288, -33.787792, 0]
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "George Weston Foods Limited",
|
||||
"geo": [-37.8263884, 144.9105381],
|
||||
"streetAddress": "Level 3, 187 Todd Road",
|
||||
"addressLocality": "Port Melbourne",
|
||||
"addressRegion": "Victoria",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "3207"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[144.9097088901841, -37.82622654171794, 0],
|
||||
[144.9099724266943, -37.82679388891783, 0],
|
||||
[144.9110127325916, -37.82651526396403, 0],
|
||||
[144.9112227645738, -37.82655667152123, 0],
|
||||
[144.9113739439796, -37.82618552508767, 0],
|
||||
[144.9112740633105, -37.82615750100924, 0],
|
||||
[144.9111355846674, -37.82584493693527, 0],
|
||||
[144.9097088901841, -37.82622654171794, 0]
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"name": "George Weston Foods Limited",
|
||||
"geo": [-37.05202791502396, 144.2085614999388],
|
||||
"streetAddress": "67 Richards Road",
|
||||
"addressLocality": "Castlemaine",
|
||||
"addressRegion": "Victoria",
|
||||
"addressCountry": "Australia",
|
||||
"postalCode": "3450"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Polygon",
|
||||
"coordinates": [
|
||||
[
|
||||
[144.2052428913937, -37.04906391287216, 0],
|
||||
[144.205540392692, -37.05049727485623, 0],
|
||||
[144.2059800881858, -37.05066835966983, 0],
|
||||
[144.206490656024, -37.05279538900776, 0],
|
||||
[144.2064525845008, -37.05366195881602, 0],
|
||||
[144.2084322301922, -37.0538920493147, 0],
|
||||
[144.2084811895712, -37.05266519735124, 0],
|
||||
[144.2079784002005, -37.05041270555773, 0],
|
||||
[144.2074017905817, -37.04817406993293, 0],
|
||||
[144.2061363939852, -37.04834972871226, 0],
|
||||
[144.2052428913937, -37.04906391287216, 0]
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
1
samples/JSON/switzerland.topojson
Normal file
1
samples/JSON/switzerland.topojson
Normal file
File diff suppressed because one or more lines are too long
23
samples/JSX/sample.jsx
Normal file
23
samples/JSX/sample.jsx
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
const React = require('react')
|
||||
|
||||
module.exports = React.createClass({
|
||||
render: function() {
|
||||
let {feeds, log} = this.props;
|
||||
|
||||
log.info(feeds);
|
||||
return <div className="feed-list">
|
||||
<h3>News Feed's</h3>
|
||||
<ul>
|
||||
{feeds.map(function(feed) {
|
||||
return <li key={feed.name} className={feed.fetched ? 'loaded' : 'loading'}>
|
||||
{feed.data && feed.data.length > 0 ?
|
||||
<span>{feed.name} <span className='light'>({feed.data.length})</span></span>
|
||||
: 'feed.name' }
|
||||
</li>
|
||||
})}
|
||||
</ul>
|
||||
</div>;
|
||||
}
|
||||
});
|
||||
19
samples/JavaScript/logo.jscad
Normal file
19
samples/JavaScript/logo.jscad
Normal file
@@ -0,0 +1,19 @@
|
||||
// title : OpenJSCAD.org Logo
|
||||
// author : Rene K. Mueller
|
||||
// license : MIT License
|
||||
// revision : 0.003
|
||||
// tags : Logo,Intersection,Sphere,Cube
|
||||
// file : logo.jscad
|
||||
|
||||
function main() {
|
||||
return union(
|
||||
difference(
|
||||
cube({size: 3, center: true}),
|
||||
sphere({r:2, center: true})
|
||||
),
|
||||
intersection(
|
||||
sphere({r: 1.3, center: true}),
|
||||
cube({size: 2.1, center: true})
|
||||
)
|
||||
).translate([0,0,1.5]).scale(10);
|
||||
}
|
||||
210
samples/Jupyter Notebook/JupyterNotebook.ipynb
Normal file
210
samples/Jupyter Notebook/JupyterNotebook.ipynb
Normal file
File diff suppressed because one or more lines are too long
14069
samples/KiCad/tc14badge.brd
Normal file
14069
samples/KiCad/tc14badge.brd
Normal file
File diff suppressed because it is too large
Load Diff
74
samples/LSL/LSL.lslp
Normal file
74
samples/LSL/LSL.lslp
Normal file
@@ -0,0 +1,74 @@
|
||||
/*
|
||||
Testing syntax highlighting
|
||||
for the Linden Scripting Language
|
||||
*/
|
||||
|
||||
integer someIntNormal = 3672;
|
||||
integer someIntHex = 0x00000000;
|
||||
integer someIntMath = PI_BY_TWO;
|
||||
|
||||
integer event = 5673;// 'event' is invalid.illegal
|
||||
|
||||
key someKeyTexture = TEXTURE_DEFAULT;
|
||||
string someStringSpecial = EOF;
|
||||
|
||||
some_user_defined_function_without_return_type(string inputAsString)
|
||||
{
|
||||
llSay(PUBLIC_CHANNEL, inputAsString);
|
||||
}
|
||||
|
||||
string user_defined_function_returning_a_string(key inputAsKey)
|
||||
{
|
||||
return (string)inputAsKey;
|
||||
}
|
||||
|
||||
default
|
||||
{
|
||||
state_entry()
|
||||
{
|
||||
key someKey = NULL_KEY;
|
||||
someKey = llGetOwner();
|
||||
|
||||
string someString = user_defined_function_returning_a_string(someKey);
|
||||
|
||||
some_user_defined_function_without_return_type(someString);
|
||||
}
|
||||
|
||||
touch_start(integer num_detected)
|
||||
{
|
||||
list agentsInRegion = llGetAgentList(AGENT_LIST_REGION, []);
|
||||
integer numOfAgents = llGetListLength(agentsInRegion);
|
||||
|
||||
integer index; // defaults to 0
|
||||
for (; index <= numOfAgents - 1; index++) // for each agent in region
|
||||
{
|
||||
llRegionSayTo(llList2Key(agentsInRegion, index), PUBLIC_CHANNEL, "Hello, Avatar!");
|
||||
}
|
||||
}
|
||||
|
||||
touch_end(integer num_detected)
|
||||
{
|
||||
someIntNormal = 3672;
|
||||
someIntHex = 0x00000000;
|
||||
someIntMath = PI_BY_TWO;
|
||||
|
||||
event = 5673;// 'event' is invalid.illegal
|
||||
|
||||
someKeyTexture = TEXTURE_DEFAULT;
|
||||
someStringSpecial = EOF;
|
||||
|
||||
llSetInventoryPermMask("some item", MASK_NEXT, PERM_ALL);// 'llSetInventoryPermMask' is reserved.godmode
|
||||
|
||||
llWhisper(PUBLIC_CHANNEL, "Leaving \"default\" now...");
|
||||
state other;
|
||||
}
|
||||
}
|
||||
|
||||
state other
|
||||
{
|
||||
state_entry()
|
||||
{
|
||||
llWhisper(PUBLIC_CHANNEL, "Entered \"state other\", returning to \"default\" again...");
|
||||
state default;
|
||||
}
|
||||
}
|
||||
29
samples/MAXScript/macro-1.mcr
Normal file
29
samples/MAXScript/macro-1.mcr
Normal file
@@ -0,0 +1,29 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_84E24969_C175_4389_B9A6_3B2699B66785_htm
|
||||
|
||||
macroscript MoveToSurface
|
||||
category: "HowTo"
|
||||
(
|
||||
fn g_filter o = superclassof o == Geometryclass
|
||||
fn find_intersection z_node node_to_z = (
|
||||
local testRay = ray node_to_z.pos [0,0,-1]
|
||||
local nodeMaxZ = z_node.max.z
|
||||
testRay.pos.z = nodeMaxZ + 0.0001 * abs nodeMaxZ
|
||||
intersectRay z_node testRay
|
||||
)
|
||||
|
||||
on isEnabled return selection.count > 0
|
||||
|
||||
on Execute do (
|
||||
target_mesh = pickObject message:"Pick Target Surface:" filter:g_filter
|
||||
|
||||
if isValidNode target_mesh then (
|
||||
undo "MoveToSurface" on (
|
||||
for i in selection do (
|
||||
int_point = find_intersection target_mesh i
|
||||
if int_point != undefined then i.pos = int_point.pos
|
||||
)--end i loop
|
||||
)--end undo
|
||||
)--end if
|
||||
)--end execute
|
||||
)--end script
|
||||
53
samples/MAXScript/macro-2.mcr
Normal file
53
samples/MAXScript/macro-2.mcr
Normal file
@@ -0,0 +1,53 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_0876DF46_FAA3_4131_838D_5739A67FF2C1_htm
|
||||
|
||||
macroscript FreeSpline category:"HowTo" tooltip:"FreeSpline" (
|
||||
local old_pos
|
||||
local new_spline
|
||||
local second_knot_set
|
||||
|
||||
fn get_mouse_pos pen_pos old_pen_pos = (
|
||||
if old_pos == undefined then old_pos = old_pen_pos
|
||||
if distance pen_pos old_pos > 10 then
|
||||
(
|
||||
if second_knot_set then
|
||||
addKnot new_spline 1 #smooth #curve pen_pos
|
||||
else
|
||||
(
|
||||
setKnotPoint new_spline 1 2 pen_pos
|
||||
second_knot_set = true
|
||||
)
|
||||
old_pos = pen_pos
|
||||
updateShape new_spline
|
||||
)-- end if
|
||||
)-- end fn
|
||||
|
||||
fn draw_new_line old_pen_pos = (
|
||||
pickPoint mouseMoveCallback:#(get_mouse_pos,old_pen_pos)
|
||||
)
|
||||
|
||||
undo"Free Spline"on(
|
||||
new_spline = splineShape ()
|
||||
old_pen_pos = pickPoint ()
|
||||
|
||||
if old_pen_pos == #RightClick then
|
||||
delete new_spline
|
||||
else
|
||||
(
|
||||
select new_spline
|
||||
new_spline.pos = old_pen_pos
|
||||
addNewSpline new_spline
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
second_knot_set = false
|
||||
draw_new_line old_pen_pos
|
||||
q = querybox "Close Spline?" title:"Free Spline"
|
||||
if q then
|
||||
(
|
||||
close new_spline 1
|
||||
updateshape new_spline
|
||||
)
|
||||
select new_spline
|
||||
)--end else
|
||||
)--end undo
|
||||
)--end script
|
||||
64
samples/MAXScript/svg-renderer.ms
Normal file
64
samples/MAXScript/svg-renderer.ms
Normal file
@@ -0,0 +1,64 @@
|
||||
-- Taken from a 3-part tutorial from Autodesk's MAXScript reference
|
||||
-- Source: http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_6B5EDC11_A154_4AA7_A972_A11AC36949E9_htm
|
||||
|
||||
fn ColourToHex col = (
|
||||
local theComponents = #(bit.intAsHex col.r, bit.intAsHex col.g, bit.intAsHex col.b)
|
||||
local theValue = "#"
|
||||
for i in theComponents do
|
||||
theValue += (if i.count == 1 then "0" else "") + i
|
||||
theValue
|
||||
)
|
||||
|
||||
local st = timestamp()
|
||||
local theFileName = (getDir #userscripts + "\\PolygonRendering3.svg")
|
||||
local theSVGfile = createFile theFileName
|
||||
format "<svg xmlns=\"http://www.w3.org/2000/svg\"\n" to:theSVGfile
|
||||
format "\t\txmlns:xlink=\"http://www.w3.org/1999/xlink\">\n" to:theSVGfile
|
||||
|
||||
local theViewTM = viewport.getTM()
|
||||
theViewTM.row4 = [0,0,0]
|
||||
|
||||
local theViewTM2 = viewport.getTM()
|
||||
local theViewSize = getViewSize()
|
||||
local theViewScale = getViewSize()
|
||||
theViewScale.x /= 1024.0
|
||||
theViewScale.y /= 1024.0
|
||||
|
||||
local theStrokeThickness = 3
|
||||
|
||||
gw.setTransform (matrix3 1)
|
||||
for o in Geometry where not o.isHiddenInVpt and classof o != TargetObject do (
|
||||
local theStrokeColour = white
|
||||
local theFillColour = o.wirecolor
|
||||
|
||||
local theMesh = snapshotAsMesh o
|
||||
for f = 1 to theMesh.numfaces do (
|
||||
local theNormal = normalize (getFaceNormal theMesh f)
|
||||
|
||||
if (theNormal*theViewTM).z > 0 do
|
||||
(
|
||||
local theFace = getFace theMesh f
|
||||
local v1 = gw.transPoint (getVert theMesh theFace.x)
|
||||
local v2 = gw.transPoint (getVert theMesh theFace.y)
|
||||
local v3 = gw.transPoint (getVert theMesh theFace.z)
|
||||
|
||||
v1.x /= theViewScale.x
|
||||
v1.y /= theViewScale.y
|
||||
v2.x /= theViewScale.x
|
||||
v2.y /= theViewScale.y
|
||||
v3.x /= theViewScale.x
|
||||
v3.y /= theViewScale.y
|
||||
|
||||
format "\t<polygon points='%,% %,% %,%' \n" v1.x v1.y v2.x v2.y v3.x v3.y to:theSVGfile
|
||||
format "\tstyle='stroke:%; fill:%; stroke-width:%'/>\n" (ColourToHex theStrokeColour) (ColourToHex theFillColour) theStrokeThickness to:theSVGfile
|
||||
)--end if normal positive
|
||||
)--end f loop
|
||||
)--end o loop
|
||||
|
||||
format "</svg>\n" to:theSVGfile
|
||||
close theSVGfile
|
||||
local theSVGMap = VectorMap vectorFile:theFileName alphasource:0
|
||||
local theBitmap = bitmap theViewSize.x theViewSize.y
|
||||
renderMap theSVGMap into:theBitmap filter:true
|
||||
display theBitmap
|
||||
format "Render Time: % sec.\n" ((timestamp()-st)/1000.0)
|
||||
22
samples/MAXScript/volume-calc.ms
Normal file
22
samples/MAXScript/volume-calc.ms
Normal file
@@ -0,0 +1,22 @@
|
||||
fn CalculateVolumeAndCentreOfMass obj =
|
||||
(
|
||||
local Volume= 0.0
|
||||
local Centre= [0.0, 0.0, 0.0]
|
||||
local theMesh = snapshotasmesh obj
|
||||
local numFaces = theMesh.numfaces
|
||||
for i = 1 to numFaces do
|
||||
(
|
||||
local Face= getFace theMesh i
|
||||
local vert2 = getVert theMesh Face.z
|
||||
local vert1 = getVert theMesh Face.y
|
||||
local vert0 = getVert theMesh Face.x
|
||||
local dV = Dot (Cross (vert1 - vert0) (vert2 - vert0)) vert0
|
||||
Volume+= dV
|
||||
Centre+= (vert0 + vert1 + vert2) * dV
|
||||
)
|
||||
delete theMesh
|
||||
Volume /= 6
|
||||
Centre /= 24
|
||||
Centre /= Volume
|
||||
#(Volume,Centre)
|
||||
)
|
||||
31
samples/Makefile/filenames/Makefile.inc
Normal file
31
samples/Makefile/filenames/Makefile.inc
Normal file
@@ -0,0 +1,31 @@
|
||||
# $OpenBSD: Makefile.inc,v 1.2 2003/11/14 20:09:20 drahn Exp $
|
||||
# $NetBSD: Makefile.inc,v 1.1 1996/09/30 16:34:59 ws Exp $
|
||||
|
||||
.if !defined(__stand_makefile_inc)
|
||||
__stand_makefile_inc=1
|
||||
|
||||
KERN_AS= library
|
||||
|
||||
S=$(.CURDIR)/../../../$(R)
|
||||
|
||||
.if !make(libdep) && !make(sadep) && !make(salibdir) && !make(kernlibdir) && !make(obj) && !defined(NOMACHINE)
|
||||
.BEGIN:
|
||||
@([ -h machine ] || ln -s $(S)/arch/$(MACHINE)/include machine)
|
||||
.endif
|
||||
|
||||
#
|
||||
EXTRACFLAGS= -msoft-float
|
||||
REAL_VIRT?= -v
|
||||
ENTRY?= _start
|
||||
|
||||
INCLUDES+= -I. -I$(.OBJDIR) -I$(.CURDIR)/.. -I$(S)/arch -I$(S)
|
||||
INCLUDES+= -I$(S)/lib/libsa
|
||||
DEFS+= -DSTANDALONE
|
||||
CFLAGS+= $(INCLUDES) $(DEFS) $(EXTRACFLAGS)
|
||||
CFLAGS+= -fno-stack-protector
|
||||
LDFLAGS?= -X -N -Ttext $(RELOC) -e $(ENTRY)
|
||||
|
||||
cleandir:
|
||||
rm -rf lib machine
|
||||
|
||||
.endif
|
||||
1
samples/Mathematica/TestArithmetic.mt
Normal file
1
samples/Mathematica/TestArithmetic.mt
Normal file
@@ -0,0 +1 @@
|
||||
Test[1 + 2, 3, TestID -> "One plus two"]
|
||||
1
samples/Mathematica/TestString.mt
Normal file
1
samples/Mathematica/TestString.mt
Normal file
@@ -0,0 +1 @@
|
||||
Test["a" <> "b", "ab", TestID -> "Concat \"a\" and \"b\""]
|
||||
5
samples/Mathematica/TestSuite.mt
Normal file
5
samples/Mathematica/TestSuite.mt
Normal file
@@ -0,0 +1,5 @@
|
||||
TestSuite[
|
||||
{ "TestArithmetic.mt"
|
||||
, "TestString.mt"
|
||||
}
|
||||
]
|
||||
694
samples/MediaWiki/README.wiki
Normal file
694
samples/MediaWiki/README.wiki
Normal file
@@ -0,0 +1,694 @@
|
||||
= Name =
|
||||
|
||||
'''nginx_tcp_proxy_module''' - support TCP proxy with Nginx
|
||||
|
||||
= Installation =
|
||||
|
||||
Download the latest stable version of the release tarball of this module from [http://github.com/yaoweibin/nginx_tcp_proxy_module github]
|
||||
|
||||
Grab the nginx source code from [http://nginx.org/ nginx.org], for example, the version 1.2.1 (see nginx compatibility), and then build the source with this module:
|
||||
|
||||
<geshi lang="bash">
|
||||
$ wget 'http://nginx.org/download/nginx-1.2.1.tar.gz'
|
||||
$ tar -xzvf nginx-1.2.1.tar.gz
|
||||
$ cd nginx-1.2.1/
|
||||
$ patch -p1 < /path/to/nginx_tcp_proxy_module/tcp.patch
|
||||
|
||||
$ ./configure --add-module=/path/to/nginx_tcp_proxy_module
|
||||
|
||||
$ make
|
||||
$ make install
|
||||
</geshi>
|
||||
|
||||
|
||||
= Synopsis =
|
||||
|
||||
<geshi lang="nginx">
|
||||
http {
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
location /status {
|
||||
tcp_check_status;
|
||||
}
|
||||
}
|
||||
}
|
||||
</geshi>
|
||||
|
||||
<geshi lang="nginx">
|
||||
|
||||
#You can also include tcp_proxy.conf file individually
|
||||
|
||||
#include /path/to/tcp_proxy.conf;
|
||||
|
||||
tcp {
|
||||
|
||||
upstream cluster {
|
||||
# simple round-robin
|
||||
server 192.168.0.1:80;
|
||||
server 192.168.0.2:80;
|
||||
|
||||
check interval=3000 rise=2 fall=5 timeout=1000;
|
||||
|
||||
#check interval=3000 rise=2 fall=5 timeout=1000 type=ssl_hello;
|
||||
|
||||
#check interval=3000 rise=2 fall=5 timeout=1000 type=http;
|
||||
#check_http_send "GET / HTTP/1.0\r\n\r\n";
|
||||
#check_http_expect_alive http_2xx http_3xx;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 8888;
|
||||
|
||||
proxy_pass cluster;
|
||||
}
|
||||
}
|
||||
</geshi>
|
||||
|
||||
= Description =
|
||||
|
||||
This module actually include many modules: ngx_tcp_module, ngx_tcp_core_module, ngx_tcp_upstream_module, ngx_tcp_proxy_module, ngx_tcp_websocket_module, ngx_tcp_ssl_module, ngx_tcp_upstream_ip_hash_module. All these modules work together to support TCP proxy with Nginx. I also added other features: ip_hash, upstream server health check, status monitor.
|
||||
|
||||
The motivation of writing these modules is Nginx's high performance and robustness. At first, I developed this module just for general TCP proxy. And now, this module is frequently used in websocket reverse proxying.
|
||||
|
||||
Note, You can't use the same listening port with HTTP modules.
|
||||
|
||||
= Directives =
|
||||
|
||||
== ngx_tcp_moodule ==
|
||||
|
||||
=== tcp ===
|
||||
|
||||
'''syntax:''' ''tcp {...}''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''main''
|
||||
|
||||
'''description:''' All the tcp related directives are contained in the tcp block.
|
||||
|
||||
|
||||
'''ngx_tcp_core_moodule'''
|
||||
|
||||
=== server ===
|
||||
|
||||
'''syntax:''' ''server {...}''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp''
|
||||
|
||||
'''description:''' All the specific server directives are contained in the server block.
|
||||
|
||||
=== listen ===
|
||||
|
||||
'''syntax:''' ''listen address:port [ bind | ssl | default]''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''server''
|
||||
|
||||
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#listen listen]. The parameter of default means the default server if you have several server blocks with the same port.
|
||||
|
||||
=== access_log ===
|
||||
|
||||
'''syntax:''' ''access_log path [buffer=size] | off''
|
||||
|
||||
'''default:''' ''access_log logs/tcp_access.log''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' Set the access.log. Each record's format is like this:
|
||||
|
||||
<pre>
|
||||
|
||||
log_time worker_process_pid client_ip host_ip accept_time upstream_ip bytes_read bytes_write
|
||||
|
||||
2011/08/02 06:19:07 [5972] 127.0.0.1 0.0.0.0:1982 2011/08/02 06:18:19 172.19.0.129:80 80 236305
|
||||
|
||||
</pre>
|
||||
|
||||
* ''log_time'': The current time when writing this log. The log action is called when the proxy session is closed.
|
||||
* ''worker_process_pid'': the pid of worker process
|
||||
* ''client_ip'': the client ip
|
||||
* ''host_ip'': the server ip and port
|
||||
* ''accept_time'': the time when the server accepts client's connection
|
||||
* ''upstream_ip'': the upstream server's ip
|
||||
* ''bytes_read'': the bytes read from client
|
||||
* ''bytes_write'': the bytes written to client
|
||||
|
||||
=== allow ===
|
||||
|
||||
'''syntax:''' ''allow [ address | CIDR | all ]''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''server''
|
||||
|
||||
'''description:''' Directive grants access for the network or addresses indicated.
|
||||
|
||||
=== deny ===
|
||||
|
||||
'''syntax:''' ''deny [ address | CIDR | all ]''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''server''
|
||||
|
||||
'''description:''' Directive grants access for the network or addresses indicated.
|
||||
|
||||
=== so_keepalive ===
|
||||
|
||||
'''syntax:''' ''so_keepalive on|off''
|
||||
|
||||
'''default:''' ''off''
|
||||
|
||||
'''context:''' ''main, server''
|
||||
|
||||
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#so_keepalive so_keepalive].
|
||||
|
||||
=== tcp_nodelay ===
|
||||
|
||||
'''syntax:''' ''tcp_nodelay on|off''
|
||||
|
||||
'''default:''' ''on''
|
||||
|
||||
'''context:''' ''main, server''
|
||||
|
||||
'''description:''' The same as [http://wiki.nginx.org/NginxHttpCoreModule#tcp_nodelay tcp_nodelay].
|
||||
|
||||
=== timeout ===
|
||||
|
||||
'''syntax:''' ''timeout milliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''main, server''
|
||||
|
||||
'''description:''' set the timeout value with clients.
|
||||
|
||||
=== server_name ===
|
||||
|
||||
'''syntax:''' ''server_name name''
|
||||
|
||||
'''default:''' ''The name of the host, obtained through gethostname()''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#server_name server_name]. You can specify several server name in different server block with the same port. They can be used in websocket module.
|
||||
|
||||
=== resolver ===
|
||||
|
||||
'''syntax:''' ''resolver address''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' DNS server
|
||||
|
||||
=== resolver_timeout ===
|
||||
|
||||
'''syntax:''' ''resolver_timeout time''
|
||||
|
||||
'''default:''' ''30s''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' Resolver timeout in seconds.
|
||||
|
||||
|
||||
== ngx_tcp_upstream_module ==
|
||||
|
||||
=== upstream ===
|
||||
|
||||
'''syntax:''' ''upstream {...}''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp''
|
||||
|
||||
'''description:''' All the upstream directives are contained in this block. The upstream server will be dispatched with round robin by default.
|
||||
|
||||
=== server ===
|
||||
|
||||
'''syntax:''' ''server name [parameters]''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' Most of the parameters are the same as [http://wiki.nginx.org/NginxHttpUpstreamModule#server server]. Default port is 80.
|
||||
|
||||
=== check ===
|
||||
|
||||
'''syntax:''' ''check interval=milliseconds [fall=count] [rise=count] [timeout=milliseconds] [type=tcp|ssl_hello|smtp|mysql|pop3|imap]''
|
||||
|
||||
'''default:''' ''none, if parameters omitted, default parameters are interval=30000 fall=5 rise=2 timeout=1000''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' Add the health check for the upstream servers. At present, the check method is a simple tcp connect.
|
||||
|
||||
The parameters' meanings are:
|
||||
|
||||
* ''interval'': the check request's interval time.
|
||||
* ''fall''(fall_count): After fall_count check failures, the server is marked down.
|
||||
* ''rise''(rise_count): After rise_count check success, the server is marked up.
|
||||
* ''timeout'': the check request's timeout.
|
||||
* ''type'': the check protocol type:
|
||||
# ''tcp'' is a simple tcp socket connect and peek one byte.
|
||||
# ''ssl_hello'' sends a client ssl hello packet and receives the server ssl hello packet.
|
||||
# ''http'' sends a http request packet, receives and parses the http response to diagnose if the upstream server is alive.
|
||||
# ''smtp'' sends a smtp request packet, receives and parses the smtp response to diagnose if the upstream server is alive. The response begins with '2' should be an OK response.
|
||||
# ''mysql'' connects to the mysql server, receives the greeting response to diagnose if the upstream server is alive.
|
||||
# ''pop3'' receives and parses the pop3 response to diagnose if the upstream server is alive. The response begins with '+' should be an OK response.
|
||||
# ''imap'' connects to the imap server, receives the greeting response to diagnose if the upstream server is alive.
|
||||
|
||||
=== check_http_send ===
|
||||
|
||||
'''syntax:''' ''check_http_send http_packet''
|
||||
|
||||
'''default:''' ''"GET / HTTP/1.0\r\n\r\n"''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' If you set the check type is http, then the check function will sends this http packet to check the upstream server.
|
||||
|
||||
=== check_http_expect_alive ===
|
||||
|
||||
'''syntax:''' ''check_http_expect_alive [ http_2xx | http_3xx | http_4xx | http_5xx ]''
|
||||
|
||||
'''default:''' ''http_2xx | http_3xx''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' These status codes indicate the upstream server's http response is OK, the backend is alive.
|
||||
|
||||
=== check_smtp_send ===
|
||||
|
||||
'''syntax:''' ''check_smtp_send smtp_packet''
|
||||
|
||||
'''default:''' ''"HELO smtp.localdomain\r\n"''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' If you set the check type is smtp, then the check function will sends this smtp packet to check the upstream server.
|
||||
|
||||
=== check_smtp_expect_alive ===
|
||||
|
||||
'''syntax:''' ''check_smtp_expect_alive [smtp_2xx | smtp_3xx | smtp_4xx | smtp_5xx]''
|
||||
|
||||
'''default:''' ''smtp_2xx''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' These status codes indicate the upstream server's smtp response is OK, the backend is alive.
|
||||
|
||||
=== check_shm_size ===
|
||||
|
||||
'''syntax:''' ''check_shm_size size''
|
||||
|
||||
'''default:''' ''(number_of_checked_upstream_blocks + 1) * pagesize''
|
||||
|
||||
'''context:''' ''tcp''
|
||||
|
||||
'''description:''' If you store hundreds of servers in one upstream block, the shared memory for health check may be not enough, you can enlarged it by this directive.
|
||||
|
||||
=== tcp_check_status ===
|
||||
|
||||
'''syntax:''' ''tcp_check_status''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''location''
|
||||
|
||||
'''description:''' Display the health checking servers' status by HTTP. This directive is set in the http block.
|
||||
|
||||
The table field meanings are:
|
||||
|
||||
* ''Index'': The server index in the check table
|
||||
* ''Name'' : The upstream server name
|
||||
* ''Status'': The marked status of the server.
|
||||
* ''Busyness'': The number of connections which are connecting to the server.
|
||||
* ''Rise counts'': Count the successful checking
|
||||
* ''Fall counts'': Count the unsuccessful checking
|
||||
* ''Access counts'': Count the times accessing to this server
|
||||
* ''Check type'': The type of the check packet
|
||||
|
||||
|
||||
'''ngx_tcp_upstream_busyness_module'''
|
||||
|
||||
=== busyness ===
|
||||
|
||||
'''syntax:''' ''busyness''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' the upstream server will be dispatched by backend servers' busyness.
|
||||
|
||||
|
||||
'''ngx_tcp_upstream_ip_hash_module'''
|
||||
|
||||
=== ip_hash ===
|
||||
|
||||
'''syntax:''' ''ip_hash''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''upstream''
|
||||
|
||||
'''description:''' the upstream server will be dispatched by ip_hash.
|
||||
|
||||
|
||||
== ngx_tcp_proxy_module ==
|
||||
|
||||
=== proxy_pass ===
|
||||
|
||||
'''syntax:''' ''proxy_pass host:port''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''server''
|
||||
|
||||
'''description:''' proxy the request to the backend server. Default port is 80.
|
||||
|
||||
=== proxy_buffer ===
|
||||
|
||||
'''syntax:''' ''proxy_buffer size''
|
||||
|
||||
'''default:''' ''4k''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the size of proxy buffer.
|
||||
|
||||
=== proxy_connect_timeout ===
|
||||
|
||||
'''syntax:''' ''proxy_connect_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of connection to backends.
|
||||
|
||||
=== proxy_read_timeout ===
|
||||
|
||||
'''syntax:''' ''proxy_read_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of reading from backends.
|
||||
|
||||
=== proxy_send_timeout ===
|
||||
|
||||
'''syntax:''' ''proxy_send_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of sending to backends.
|
||||
|
||||
|
||||
== ngx_tcp_websocket_module ==
|
||||
|
||||
=== websocket_pass ===
|
||||
|
||||
'''syntax:''' ''websocket_pass [path] host:port''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''server''
|
||||
|
||||
'''description:''' proxy the websocket request to the backend server. Default port is 80. You can specify several different paths in the same server block.
|
||||
|
||||
=== websocket_buffer ===
|
||||
|
||||
'''syntax:''' ''websocket_buffer size''
|
||||
|
||||
'''default:''' ''4k''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the size of proxy buffer.
|
||||
|
||||
=== websocket_connect_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_connect_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of connection to backends.
|
||||
|
||||
=== websocket_read_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_read_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of reading from backends. Your timeout will be the minimum of this and the *timeout* parameter, so if you want a long timeout for your websockets, make sure to set both paramaters.
|
||||
|
||||
=== websocket_send_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_send_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of sending to backends.
|
||||
|
||||
|
||||
== ngx_tcp_ssl_module ==
|
||||
|
||||
The default config file includes this ngx_tcp_ssl_module. If you want to just compile nginx without ngx_tcp_ssl_module, copy the ngx_tcp_proxy_module/config_without_ssl to ngx_tcp_proxy_module/config, reconfigrure and compile nginx.
|
||||
|
||||
=== ssl ===
|
||||
|
||||
'''syntax:''' ''ssl [on|off] ''
|
||||
|
||||
'''default:''' ''ssl off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
Enables SSL for a server.
|
||||
|
||||
=== ssl_certificate ===
|
||||
|
||||
'''syntax:''' ''ssl_certificate file''
|
||||
|
||||
'''default:''' ''ssl_certificate cert.pem''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the certificate, in PEM format. This file can contain also other certificates and the server private key.
|
||||
|
||||
=== ssl_certificate_key ===
|
||||
|
||||
'''syntax:''' ''ssl_certificate_key file''
|
||||
|
||||
'''default:''' ''ssl_certificate_key cert.pem''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the private key, in PEM format.
|
||||
|
||||
=== ssl_client_certificate ===
|
||||
|
||||
'''syntax:''' ''ssl_client_certificate file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the CA (root) certificate, in PEM format, that is used for validating client certificates.
|
||||
|
||||
=== ssl_dhparam ===
|
||||
|
||||
'''syntax:''' ''ssl_dhparam file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies a file containing Diffie-Hellman key agreement protocol cryptographic parameters, in PEM format, utilized for exchanging session keys between server and client.
|
||||
|
||||
=== ssl_ciphers ===
|
||||
|
||||
'''syntax:''' ''ssl_ciphers openssl_cipherlist_spec''
|
||||
|
||||
'''default:''' ''ssl_ciphers HIGH:!aNULL:!MD5''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive describes the list of cipher suites the server supports for establishing a secure connection. Cipher suites are specified in the [http://openssl.org/docs/apps/ciphers.html OpenSSL] cipherlist format, for example:
|
||||
|
||||
<geshi lang="nginx">
|
||||
ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2:+EXP;
|
||||
</geshi>
|
||||
|
||||
The complete cipherlist supported by the currently installed version of OpenSSL in your platform can be obtained by issuing the command:
|
||||
<pre>
|
||||
openssl ciphers
|
||||
</pre>
|
||||
|
||||
=== ssl_crl ===
|
||||
|
||||
'''syntax:''' ''ssl_crl file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the filename of a Certificate Revocation List, in PEM format, which is used to check the revocation status of certificates.
|
||||
|
||||
=== ssl_prefer_server_ciphers ===
|
||||
|
||||
'''syntax:''' ''ssl_prefer_server_ciphers [on|off] ''
|
||||
|
||||
'''default:''' ''ssl_prefer_server_ciphers off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
The server requires that the cipher suite list for protocols SSLv3 and TLSv1 are to be preferred over the client supported cipher suite list.
|
||||
|
||||
=== ssl_protocols ===
|
||||
|
||||
'''syntax:''' ''ssl_protocols [SSLv2] [SSLv3] [TLSv1] [TLSv1.1] [TLSv1.2]''
|
||||
|
||||
'''default:''' ''ssl_protocols SSLv3 TLSv1 TLSv1.1 TLSv1.2''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive enables the protocol versions specified.
|
||||
|
||||
=== ssl_verify_client ===
|
||||
|
||||
'''syntax:''' ''ssl_verify_client on|off|optional''
|
||||
|
||||
'''default:''' ''ssl_verify_client off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive enables the verification of the client identity. Parameter 'optional' checks the client identity using its certificate in case it was made available to the server.
|
||||
|
||||
=== ssl_verify_depth ===
|
||||
|
||||
'''syntax:''' ''ssl_verify_depth number''
|
||||
|
||||
'''default:''' ''ssl_verify_depth 1''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive sets how deep the server should go in the client provided certificate chain in order to verify the client identity.
|
||||
|
||||
=== ssl_session_cache ===
|
||||
|
||||
'''syntax:''' ''ssl_session_cache off|none|builtin:size and/or shared:name:size''
|
||||
|
||||
'''default:''' ''ssl_session_cache off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
The directive sets the types and sizes of caches to store the SSL sessions.
|
||||
|
||||
The cache types are:
|
||||
|
||||
* off -- Hard off: nginx says explicitly to a client that sessions can not reused.
|
||||
* none -- Soft off: nginx says to a client that session can be resued, but nginx actually never reuses them. This is workaround for some mail clients as ssl_session_cache may be used in mail proxy as well as in HTTP server.
|
||||
* builtin -- the OpenSSL builtin cache, is used inside one worker process only. The cache size is assigned in the number of the sessions. Note: there appears to be a memory fragmentation issue using this method, please take that into consideration when using this. See "References" below.
|
||||
* shared -- the cache is shared between all worker processes. The size of the cache is assigned in bytes: 1 MB cache can contain roughly 4000 sessions. Each shared cache must be given an arbitrary name. A shared cache with a given name can be used in several virtual hosts.
|
||||
It's possible to use both types of cache — builtin and shared — simultaneously, for example:
|
||||
|
||||
<geshi lang="nginx">
|
||||
ssl_session_cache builtin:1000 shared:SSL:10m;
|
||||
</geshi>
|
||||
|
||||
Bear in mind however, that using only shared cache, i.e., without builtin, should be more effective.
|
||||
|
||||
=== ssl_session_timeout ===
|
||||
|
||||
'''syntax:''' ''ssl_session_timeout time''
|
||||
|
||||
'''default:''' ''ssl_session_timeout 5m''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive defines the maximum time during which the client can re-use the previously negotiated cryptographic parameters of the secure session that is stored in the SSL cache.
|
||||
|
||||
= Compatibility =
|
||||
|
||||
* My test bed is 0.7.65+
|
||||
|
||||
= Notes =
|
||||
|
||||
The http_response_parse.rl and smtp_response_parse.rl are [http://www.complang.org/ragel/ ragel] scripts , you can edit the script and compile it like this:
|
||||
|
||||
<geshi lang="bash">
|
||||
$ ragel -G2 http_response_parse.rl
|
||||
$ ragel -G2 smtp_response_parse.rl
|
||||
</geshi>
|
||||
|
||||
= TODO =
|
||||
|
||||
* refact this module, make it more extendable for adding third-party modules
|
||||
* manipulate header like http module's proxy_set_header
|
||||
* built-in variable support
|
||||
* custom log format
|
||||
* syslog support
|
||||
* FTP/IRC proxying
|
||||
|
||||
= Known Issues =
|
||||
|
||||
* This module can't use the same listening port with the HTTP module.
|
||||
|
||||
= Changelogs =
|
||||
|
||||
== v0.2.0 ==
|
||||
|
||||
* add ssl proxy module
|
||||
* add websocket proxy module
|
||||
* add upstream busyness module
|
||||
* add tcp access log module
|
||||
|
||||
== v0.19 ==
|
||||
|
||||
* add many check methods
|
||||
|
||||
== v0.1 ==
|
||||
|
||||
* first release
|
||||
|
||||
= Authors =
|
||||
|
||||
Weibin Yao(姚伟斌) ''yaoweibin at gmail dot com''
|
||||
|
||||
= Copyright & License =
|
||||
|
||||
This README template copy from [http://github.com/agentzh agentzh].
|
||||
|
||||
I borrowed a lot of code from upstream and mail module from the nginx 0.7.* core. This part of code is copyrighted by Igor Sysoev. And the health check part is borrowed the design of Jack Lindamood's healthcheck module [http://github.com/cep21/healthcheck_nginx_upstreams healthcheck_nginx_upstreams];
|
||||
|
||||
This module is licensed under the BSD license.
|
||||
|
||||
Copyright (C) 2013 by Weibin Yao <yaoweibin@gmail.com>.
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
99
samples/Metal/ITMVisualisationEngine.metal
Normal file
99
samples/Metal/ITMVisualisationEngine.metal
Normal file
@@ -0,0 +1,99 @@
|
||||
// Copyright 2014 Isis Innovation Limited and the authors of InfiniTAM
|
||||
|
||||
#include <metal_stdlib>
|
||||
|
||||
#include "../../DeviceAgnostic/ITMSceneReconstructionEngine.h"
|
||||
#include "../../DeviceAgnostic/ITMVisualisationEngine.h"
|
||||
#include "ITMVisualisationEngine_Metal.h"
|
||||
|
||||
using namespace metal;
|
||||
|
||||
kernel void genericRaycastVH_device(DEVICEPTR(Vector4f) *pointsRay [[ buffer(0) ]],
|
||||
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(1) ]],
|
||||
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(2) ]],
|
||||
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(3) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
int locId = x + y * params->imgSize.x;
|
||||
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
|
||||
|
||||
castRay<ITMVoxel, ITMVoxelIndex>(pointsRay[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
|
||||
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
|
||||
}
|
||||
|
||||
kernel void genericRaycastVGMissingPoints_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
|
||||
const CONSTPTR(int) *fwdProjMissingPoints [[ buffer(1) ]],
|
||||
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(2) ]],
|
||||
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(3) ]],
|
||||
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(4) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(5) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int pointId = threadIdx.x + blockIdx.x * blockDim.x;
|
||||
|
||||
if (pointId >= params->imgSize.z) return;
|
||||
|
||||
int locId = fwdProjMissingPoints[pointId];
|
||||
int y = locId / params->imgSize.x, x = locId - y * params->imgSize.x;
|
||||
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
|
||||
|
||||
castRay<ITMVoxel, ITMVoxelIndex>(forwardProjection[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
|
||||
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
|
||||
}
|
||||
|
||||
kernel void renderICP_device(const CONSTPTR(Vector4f) *pointsRay [[ buffer(0) ]],
|
||||
DEVICEPTR(Vector4f) *pointsMap [[ buffer(1) ]],
|
||||
DEVICEPTR(Vector4f) *normalsMap [[ buffer(2) ]],
|
||||
DEVICEPTR(Vector4u) *outRendering [[ buffer(3) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
processPixelICP<false>(outRendering, pointsMap, normalsMap, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
|
||||
}
|
||||
|
||||
kernel void renderForward_device(DEVICEPTR(Vector4u) *outRendering [[ buffer(0) ]],
|
||||
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
processPixelForwardRender<false>(outRendering, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
|
||||
}
|
||||
|
||||
kernel void forwardProject_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
|
||||
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
|
||||
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
|
||||
uint2 threadIdx [[ thread_position_in_threadgroup ]],
|
||||
uint2 blockIdx [[ threadgroup_position_in_grid ]],
|
||||
uint2 blockDim [[ threads_per_threadgroup ]])
|
||||
{
|
||||
int x = (threadIdx.x + blockIdx.x * blockDim.x), y = (threadIdx.y + blockIdx.y * blockDim.y);
|
||||
|
||||
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
|
||||
|
||||
int locId = x + y * params->imgSize.x;
|
||||
Vector4f pixel = pointsRay[locId];
|
||||
|
||||
int locId_new = forwardProjectPixel(pixel * params->voxelSizes.x, params->M, params->projParams, params->imgSize.xy);
|
||||
if (locId_new >= 0) forwardProjection[locId_new] = pixel;
|
||||
}
|
||||
16
samples/Objective-C/Siesta.h
Normal file
16
samples/Objective-C/Siesta.h
Normal file
@@ -0,0 +1,16 @@
|
||||
//
|
||||
// Siesta.h
|
||||
// Siesta
|
||||
//
|
||||
// Created by Paul on 2015/6/14.
|
||||
// Copyright © 2015 Bust Out Solutions. MIT license.
|
||||
//
|
||||
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
//! Project version number for Siesta.
|
||||
FOUNDATION_EXPORT double SiestaVersionNumber;
|
||||
|
||||
//! Project version string for Siesta.
|
||||
FOUNDATION_EXPORT const unsigned char SiestaVersionString[];
|
||||
|
||||
31
samples/PHP/mail.phps
Normal file
31
samples/PHP/mail.phps
Normal file
@@ -0,0 +1,31 @@
|
||||
<?php
|
||||
/**
|
||||
* This example shows sending a message using PHP's mail() function.
|
||||
*/
|
||||
|
||||
require '../PHPMailerAutoload.php';
|
||||
|
||||
//Create a new PHPMailer instance
|
||||
$mail = new PHPMailer;
|
||||
//Set who the message is to be sent from
|
||||
$mail->setFrom('from@example.com', 'First Last');
|
||||
//Set an alternative reply-to address
|
||||
$mail->addReplyTo('replyto@example.com', 'First Last');
|
||||
//Set who the message is to be sent to
|
||||
$mail->addAddress('whoto@example.com', 'John Doe');
|
||||
//Set the subject line
|
||||
$mail->Subject = 'PHPMailer mail() test';
|
||||
//Read an HTML message body from an external file, convert referenced images to embedded,
|
||||
//convert HTML into a basic plain-text alternative body
|
||||
$mail->msgHTML(file_get_contents('contents.html'), dirname(__FILE__));
|
||||
//Replace the plain text body with one created manually
|
||||
$mail->AltBody = 'This is a plain-text message body';
|
||||
//Attach an image file
|
||||
$mail->addAttachment('images/phpmailer_mini.png');
|
||||
|
||||
//send the message, check for errors
|
||||
if (!$mail->send()) {
|
||||
echo "Mailer Error: " . $mail->ErrorInfo;
|
||||
} else {
|
||||
echo "Message sent!";
|
||||
}
|
||||
90
samples/PLSQL/plsqlguide.pck
Normal file
90
samples/PLSQL/plsqlguide.pck
Normal file
@@ -0,0 +1,90 @@
|
||||
create or replace package plsqlguide is
|
||||
|
||||
-- Author : Jared Petersen
|
||||
-- Created : 9/22/2015 12:26:22 AM
|
||||
-- Purpose : Basic PLSQL template/guide
|
||||
|
||||
/* Procedures */
|
||||
procedure p_main;
|
||||
|
||||
end plsqlguide;
|
||||
/
|
||||
create or replace package body plsqlguide is
|
||||
|
||||
/* Main entry point (homepage) */
|
||||
procedure p_main
|
||||
is
|
||||
begin
|
||||
|
||||
htp.prn('
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
|
||||
<title>PL/SQL Sample Application</title>
|
||||
|
||||
<!-- Bootstrap -->
|
||||
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
</head>
|
||||
<body>
|
||||
<!-- Static navbar -->
|
||||
<nav class="navbar navbar-default navbar-static-top">
|
||||
<div class="container">
|
||||
<div class="navbar-header">
|
||||
<a class="navbar-brand" href="#">PL/SQL Sample Application</a>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container">
|
||||
<table class="table table-bordered">
|
||||
<tr>
|
||||
<th>#</th>
|
||||
<th>Name</th>
|
||||
<th>Description</th>
|
||||
<th>Quantity</th>
|
||||
<th>Price</th>
|
||||
</tr>
|
||||
');
|
||||
|
||||
-- Fill out the parts table
|
||||
for row in (select * from parts) loop
|
||||
htp.prn('
|
||||
<tr>
|
||||
<td>'||row.pid||'</td>
|
||||
<td>'||row.name||'</td>
|
||||
<td>'||row.description||'</td>
|
||||
<td>'||row.quantity||'</td>
|
||||
<td>'||row.price||'</td>
|
||||
</tr>
|
||||
');
|
||||
end loop;
|
||||
|
||||
htp.prn('
|
||||
</table>
|
||||
</div> <!-- /container -->
|
||||
|
||||
<!-- jQuery (necessary for Bootstrap''s JavaScript plugins) -->
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
|
||||
<!-- Include all compiled plugins (below), or include individual files as needed -->
|
||||
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
');
|
||||
|
||||
end p_main;
|
||||
|
||||
begin
|
||||
-- Initialization
|
||||
null;
|
||||
end plsqlguide;
|
||||
/
|
||||
@@ -12,7 +12,6 @@ unless EVAL 'EVAL("1", :lang<perl5>)' {
|
||||
die unless
|
||||
EVAL(q/
|
||||
package My::Hash;
|
||||
use strict;
|
||||
|
||||
sub new {
|
||||
my ($class, $ref) = @_;
|
||||
|
||||
24
samples/Pickle/data.pkl
Normal file
24
samples/Pickle/data.pkl
Normal file
@@ -0,0 +1,24 @@
|
||||
(dp0
|
||||
S'a'
|
||||
p1
|
||||
(lp2
|
||||
I1
|
||||
aF2.0
|
||||
aI3
|
||||
ac__builtin__
|
||||
complex
|
||||
p3
|
||||
(F4.0
|
||||
F6.0
|
||||
tp4
|
||||
Rp5
|
||||
asS'c'
|
||||
p6
|
||||
NsS'b'
|
||||
p7
|
||||
(S'string'
|
||||
p8
|
||||
VUnicode string
|
||||
p9
|
||||
tp10
|
||||
s.
|
||||
60
samples/Pickle/neural-network-ce-l2reg-784-10-30.pkl
Normal file
60
samples/Pickle/neural-network-ce-l2reg-784-10-30.pkl
Normal file
File diff suppressed because one or more lines are too long
36
samples/Pickle/random.pkl
Normal file
36
samples/Pickle/random.pkl
Normal file
@@ -0,0 +1,36 @@
|
||||
cnumpy.core.multiarray
|
||||
_reconstruct
|
||||
p0
|
||||
(cnumpy
|
||||
ndarray
|
||||
p1
|
||||
(I0
|
||||
tp2
|
||||
S'b'
|
||||
p3
|
||||
tp4
|
||||
Rp5
|
||||
(I1
|
||||
(I100
|
||||
tp6
|
||||
cnumpy
|
||||
dtype
|
||||
p7
|
||||
(S'f8'
|
||||
p8
|
||||
I0
|
||||
I1
|
||||
tp9
|
||||
Rp10
|
||||
(I3
|
||||
S'<'
|
||||
p11
|
||||
NNNI-1
|
||||
I-1
|
||||
I0
|
||||
tp12
|
||||
bI00
|
||||
S'\x1cc~\xc3\xa7r\xed?\xe5${\xec\xd6\xcd\xed?\x809-\x02%\xa9\xa2?F\x0f\x1d\xe8\xef\xa3\xdb?\xfe\xd1\x0c\xb7\x83\x13\xef?\xe0<o\xa1\xa9^\xdf?CE\x96\x88/o\xe2?<\xd8\xa1\x96\xa2T\xce?\x152\x8e\xe5\xa8\x7f\xe8?\xe4\xb7\x9a\xe0$\x0f\xdc?\x90\xe4\xe2\xd4=\xce\xc3?Ix\xe3P\xc4C\xe1?\x16\xd17\xc1Y\xfc\xed?5\xd7\xae@4\xfa\xe8?\x0f\x87\x8d>\xfcO\xe5?Y\x97\xcb"\xa7%\xe7?\x9b\x8d\x16\xda\x97\xe1\xeb?T\x14\xbd\xfe|\xf4\xd0?\x18\xdfH\xc56A\xba?\x90\xc5\xfb\xc63:\xe5?\xbf%\xad\xe5.\x86\xe9?\xc6\x0c\xa9\x8c\xd7\xd5\xe9?\xf8\xafc:\x84g\xd7?\xf8\x98\x879\x9a\x16\xee?\xba\xdf\x88\x8az\x06\xe2?~g-\xeb\xc8\xed\xee?\x08A\xcc\x8c\xe7>\xef?\xceD\xc4ar\n\xdc?\x92w\xbb\xa34\xb1\xd9?\x88\xb9\xc0{u\xa3\xdc?d\x1a\xad\xe8\xf3\x14\xdd?\x9c\x95\x13\x96o?\xe5?\x9cT[\xb8r\xa9\xe5?0\xf1\x01+(\x0f\xdf?W\xbdjqD&\xed?c\xcf1-W\xe6\xe1?\xce\xbc\xe1{zW\xd9?"d\xcf\xd7\x13\x93\xde?\xf2P\xf6\xc3\xd6\x87\xd5?\xc2\x0e\x92q\x89\xda\xd5?\xc0:B\x1bb\x00\x9e?Y\xafHmr\x80\xe3?\x1co\xa7\xba\xa5/\xe4?\xa2\xbc \x9c\xddB\xd0?\xd2L\x935\x17\'\xee?|\x8cM\xeb\x97=\xe8?\x0f0xN*V\xea?\x81p\xe3,!\xf2\xee?\xf5w\xed\x10\x9eu\xe0?\xc5\x16\\LR\xb5\xe1?\xbeh\x04\xa4g\xe5\xd6?\xea\xc0\xb9\xf0\xb2\xd8\xd9?\xac\x9c\xeep\x1a\xa9\xd8?@W9hp\x16\xb1?\xc4\xedS\xd6V\xa1\xed?\x93,!\xdc\xa1\x8b\xe9?\x80)\xb1\xa6[T\xc9?\xac\xbc\x8a\xd21\xdd\xc5?\x80\x9c.g\xf1\xf2\xc6?\tLu\xc3\xf7U\xe9?n\'\x9f?\xbe\xf9\xe9?\xa3\xe7K\x1c\xb3\xa9\xea?X\x98\x1a\xcb\xa0\xcd\xd3? \xb6O\x9c\x1bQ\xc2?"\x89[\xad1\x8e\xea?\xdd\x8f\xa0P\xc7\x0e\xe2?c\xa4j\xa3\r\xac\xef?\xba\xb6\x0f\x8emo\xef?\xe0\xed\xa0\xc5R9\xab?U\xf1\xcd\xcf\xbf\xcb\xea?\x89*#\x06\xb0|\xe8?d\xa3\xad\xcd\xe0]\xcc?\xb5\xe78\xa7w\x13\xe3?\xce\x99\x98\xefS%\xd7?\xb1\xf8\xd8\x8eI\x13\xef?\x91`]\x93\xd4 \xec?\xc0\rPz\xee\xbd\xe7?7\x92\xd4\x0fP\x8f\xe1?L\x0f\xaf\xa9\xc3\x19\xdd?\\}\x15X\x870\xc7? ~ t\xcat\xb1?@?\xec\x97u\x05\xe9?F\x8d:\xac4D\xdb?qY\xe1Qk|\xe2? 
\xaf\xeaj\xa5\x04\xab?J[\x1al;\x00\xd5?\x00^{n\xc2\xf1S?\xb0\x82dN\xda\xb5\xc7?\xe0 \x07\xe1?R\x92?\xc4\r\x08+\x99J\xe1?I|&U\x19\xc4\xe1?|*\xf9\xebq\x7f\xed?\xbc*\x93\x89k\xab\xe9?oiL\x90;\xe0\xef?\x96\xcd\x9b\xff\x18g\xdc?pt\xb4\xa5\x9c\xa2\xbc?Nu]w*\xb7\xd2?\x88k\xac\xd0\xfd\xbf\xd5?Q\x02$b\xfeH\xea?5\xf6\t\xb6K\x1a\xee?'
|
||||
p13
|
||||
tp14
|
||||
b.
|
||||
10
samples/Pickle/save.pkl
Normal file
10
samples/Pickle/save.pkl
Normal file
@@ -0,0 +1,10 @@
|
||||
(dp0
|
||||
S'lion'
|
||||
p1
|
||||
S'yellow'
|
||||
p2
|
||||
sS'kitty'
|
||||
p3
|
||||
S'red'
|
||||
p4
|
||||
s.
|
||||
30
samples/Pony/circle.pony
Normal file
30
samples/Pony/circle.pony
Normal file
@@ -0,0 +1,30 @@
|
||||
use "collections"
|
||||
|
||||
class Circle
|
||||
var _radius: F32
|
||||
|
||||
new create(radius': F32) =>
|
||||
_radius = radius'
|
||||
|
||||
fun ref get_radius(): F32 =>
|
||||
_radius
|
||||
|
||||
fun ref get_area(): F32 =>
|
||||
F32.pi() * _radius.pow(2)
|
||||
|
||||
fun ref get_circumference(): F32 =>
|
||||
2 * _radius * F32.pi()
|
||||
|
||||
actor Main
|
||||
new create(env: Env) =>
|
||||
|
||||
for i in Range[F32](1.0, 101.0) do
|
||||
let c = Circle(i)
|
||||
|
||||
var str =
|
||||
"Radius: " + c.get_radius().string() + "\n" +
|
||||
"Circumference: " + c.get_circumference().string() + "\n" +
|
||||
"Area: " + c.get_area().string() + "\n"
|
||||
|
||||
env.out.print(str)
|
||||
end
|
||||
32
samples/Pony/counter.pony
Normal file
32
samples/Pony/counter.pony
Normal file
@@ -0,0 +1,32 @@
|
||||
use "collections"
|
||||
|
||||
actor Counter
|
||||
var _count: U32
|
||||
|
||||
new create() =>
|
||||
_count = 0
|
||||
|
||||
be increment() =>
|
||||
_count = _count + 1
|
||||
|
||||
be get_and_reset(main: Main) =>
|
||||
main.display(_count)
|
||||
_count = 0
|
||||
|
||||
actor Main
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
var count: U32 = try env.args(1).u32() else 10 end
|
||||
var counter = Counter
|
||||
|
||||
for i in Range[U32](0, count) do
|
||||
counter.increment()
|
||||
end
|
||||
|
||||
counter.get_and_reset(this)
|
||||
|
||||
be display(result: U32) =>
|
||||
_env.out.print(result.string())
|
||||
261
samples/Pony/gups-opt.pony
Normal file
261
samples/Pony/gups-opt.pony
Normal file
@@ -0,0 +1,261 @@
|
||||
use "options"
|
||||
use "time"
|
||||
use "collections"
|
||||
|
||||
class Config
|
||||
var logtable: U64 = 20
|
||||
var iterate: U64 = 10000
|
||||
var logchunk: U64 = 10
|
||||
var logactors: U64 = 2
|
||||
|
||||
fun ref apply(env: Env): Bool =>
|
||||
var options = Options(env)
|
||||
|
||||
options
|
||||
.add("logtable", "l", I64Argument)
|
||||
.add("iterate", "i", I64Argument)
|
||||
.add("chunk", "c", I64Argument)
|
||||
.add("actors", "a", I64Argument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("table", var arg: I64) => logtable = arg.u64()
|
||||
| ("iterate", var arg: I64) => iterate = arg.u64()
|
||||
| ("chunk", var arg: I64) => logchunk = arg.u64()
|
||||
| ("actors", var arg: I64) => logactors = arg.u64()
|
||||
| let err: ParseError =>
|
||||
err.report(env.out)
|
||||
env.out.print(
|
||||
"""
|
||||
gups_opt [OPTIONS]
|
||||
--table N log2 of the total table size. Defaults to 20.
|
||||
--iterate N number of iterations. Defaults to 10000.
|
||||
--chunk N log2 of the chunk size. Defaults to 10.
|
||||
--actors N log2 of the actor count. Defaults to 2.
|
||||
"""
|
||||
)
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
env.out.print(
|
||||
"logtable: " + logtable.string() +
|
||||
"\niterate: " + iterate.string() +
|
||||
"\nlogchunk: " + logchunk.string() +
|
||||
"\nlogactors: " + logactors.string()
|
||||
)
|
||||
true
|
||||
|
||||
actor Main
|
||||
let _env: Env
|
||||
let _config: Config = Config
|
||||
|
||||
var _updates: U64 = 0
|
||||
var _confirm: U64 = 0
|
||||
let _start: U64
|
||||
var _actors: Array[Updater] val
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
if _config(env) then
|
||||
let actor_count = 1 << _config.logactors
|
||||
let loglocal = _config.logtable - _config.logactors
|
||||
let chunk_size = 1 << _config.logchunk
|
||||
let chunk_iterate = chunk_size * _config.iterate
|
||||
|
||||
_updates = chunk_iterate * actor_count
|
||||
_confirm = actor_count
|
||||
|
||||
var updaters = recover Array[Updater](actor_count) end
|
||||
|
||||
for i in Range[U64](0, actor_count) do
|
||||
updaters.push(Updater(this, actor_count, i, loglocal, chunk_size,
|
||||
chunk_iterate * i))
|
||||
end
|
||||
|
||||
_actors = consume updaters
|
||||
_start = Time.nanos()
|
||||
|
||||
for a in _actors.values() do
|
||||
a.start(_actors, _config.iterate)
|
||||
end
|
||||
else
|
||||
_start = 0
|
||||
_actors = recover Array[Updater] end
|
||||
end
|
||||
|
||||
be done() =>
|
||||
if (_confirm = _confirm - 1) == 1 then
|
||||
for a in _actors.values() do
|
||||
a.done()
|
||||
end
|
||||
end
|
||||
|
||||
be confirm() =>
|
||||
_confirm = _confirm + 1
|
||||
|
||||
if _confirm == _actors.size() then
|
||||
let elapsed = (Time.nanos() - _start).f64()
|
||||
let gups = _updates.f64() / elapsed
|
||||
|
||||
_env.out.print(
|
||||
"Time: " + (elapsed / 1e9).string() +
|
||||
"\nGUPS: " + gups.string()
|
||||
)
|
||||
end
|
||||
|
||||
actor Updater
|
||||
let _main: Main
|
||||
let _index: U64
|
||||
let _updaters: U64
|
||||
let _chunk: U64
|
||||
let _mask: U64
|
||||
let _loglocal: U64
|
||||
|
||||
let _output: Array[Array[U64] iso]
|
||||
let _reuse: List[Array[U64] iso] = List[Array[U64] iso]
|
||||
var _others: (Array[Updater] val | None) = None
|
||||
var _table: Array[U64]
|
||||
var _rand: U64
|
||||
|
||||
new create(main:Main, updaters: U64, index: U64, loglocal: U64, chunk: U64,
|
||||
seed: U64)
|
||||
=>
|
||||
_main = main
|
||||
_index = index
|
||||
_updaters = updaters
|
||||
_chunk = chunk
|
||||
_mask = updaters - 1
|
||||
_loglocal = loglocal
|
||||
|
||||
_rand = PolyRand.seed(seed)
|
||||
_output = _output.create(updaters)
|
||||
|
||||
let size = 1 << loglocal
|
||||
_table = Array[U64].undefined(size)
|
||||
|
||||
var offset = index * size
|
||||
|
||||
try
|
||||
for i in Range[U64](0, size) do
|
||||
_table(i) = i + offset
|
||||
end
|
||||
end
|
||||
|
||||
be start(others: Array[Updater] val, iterate: U64) =>
|
||||
_others = others
|
||||
iteration(iterate)
|
||||
|
||||
be apply(iterate: U64) =>
|
||||
iteration(iterate)
|
||||
|
||||
fun ref iteration(iterate: U64) =>
|
||||
let chk = _chunk
|
||||
|
||||
for i in Range(0, _updaters) do
|
||||
_output.push(
|
||||
try
|
||||
_reuse.pop()
|
||||
else
|
||||
recover Array[U64](chk) end
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
for i in Range(0, _chunk) do
|
||||
var datum = _rand = PolyRand(_rand)
|
||||
var updater = (datum >> _loglocal) and _mask
|
||||
|
||||
try
|
||||
if updater == _index then
|
||||
_table(i) = _table(i) xor datum
|
||||
else
|
||||
_output(updater).push(datum)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
try
|
||||
let to = _others as Array[Updater] val
|
||||
|
||||
repeat
|
||||
let data = _output.pop()
|
||||
|
||||
if data.size() > 0 then
|
||||
to(_output.size()).receive(consume data)
|
||||
else
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
until _output.size() == 0 end
|
||||
end
|
||||
|
||||
if iterate > 1 then
|
||||
apply(iterate - 1)
|
||||
else
|
||||
_main.done()
|
||||
end
|
||||
|
||||
be receive(data: Array[U64] iso) =>
|
||||
try
|
||||
for i in Range(0, data.size()) do
|
||||
let datum = data(i)
|
||||
var j = (datum >> _loglocal) and _mask
|
||||
_table(j) = _table(j) xor datum
|
||||
end
|
||||
|
||||
data.clear()
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
|
||||
be done() =>
|
||||
_main.confirm()
|
||||
|
||||
primitive PolyRand
|
||||
fun apply(prev: U64): U64 =>
|
||||
(prev << 1) xor if prev.i64() < 0 then _poly() else 0 end
|
||||
|
||||
fun seed(from: U64): U64 =>
|
||||
var n = from % _period()
|
||||
|
||||
if n == 0 then
|
||||
return 1
|
||||
end
|
||||
|
||||
var m2 = Array[U64].undefined(64)
|
||||
var temp = U64(1)
|
||||
|
||||
try
|
||||
for i in Range(0, 64) do
|
||||
m2(i) = temp
|
||||
temp = this(temp)
|
||||
temp = this(temp)
|
||||
end
|
||||
end
|
||||
|
||||
var i: U64 = 64 - n.clz()
|
||||
var r = U64(2)
|
||||
|
||||
try
|
||||
while i > 0 do
|
||||
temp = 0
|
||||
|
||||
for j in Range(0, 64) do
|
||||
if ((r >> j) and 1) != 0 then
|
||||
temp = temp xor m2(j)
|
||||
end
|
||||
end
|
||||
|
||||
r = temp
|
||||
i = i - 1
|
||||
|
||||
if ((n >> i) and 1) != 0 then
|
||||
r = this(r)
|
||||
end
|
||||
end
|
||||
end
|
||||
r
|
||||
|
||||
fun _poly(): U64 => 7
|
||||
|
||||
fun _period(): U64 => 1317624576693539401
|
||||
3
samples/Pony/hello-world.pony
Normal file
3
samples/Pony/hello-world.pony
Normal file
@@ -0,0 +1,3 @@
|
||||
actor Main
|
||||
new create(env: Env) =>
|
||||
env.out.print("Hello, world.")
|
||||
188
samples/Pony/mandelbrot.pony
Normal file
188
samples/Pony/mandelbrot.pony
Normal file
@@ -0,0 +1,188 @@
|
||||
use "files"
|
||||
use "options"
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
new mandelbrot(main: Main, x: U64, y: U64, width: U64, iterations: U64,
|
||||
limit: F32, real: Array[F32] val, imaginary: Array[F32] val)
|
||||
=>
|
||||
var view: Array[U8] iso =
|
||||
recover
|
||||
Array[U8]((y - x) * (width >> 3))
|
||||
end
|
||||
|
||||
let group_r = Array[F32].undefined(8)
|
||||
let group_i = Array[F32].undefined(8)
|
||||
|
||||
var row = x
|
||||
|
||||
try
|
||||
while row < y do
|
||||
let prefetch_i = imaginary(row)
|
||||
|
||||
var col: U64 = 0
|
||||
|
||||
while col < width do
|
||||
var j: U64 = 0
|
||||
|
||||
while j < 8 do
|
||||
group_r.update(j, real(col + j))
|
||||
group_i.update(j, prefetch_i)
|
||||
j = j + 1
|
||||
end
|
||||
|
||||
var bitmap: U8 = 0xFF
|
||||
var n = iterations
|
||||
|
||||
repeat
|
||||
var mask: U8 = 0x80
|
||||
var k: U64 = 0
|
||||
|
||||
while k < 8 do
|
||||
let r = group_r(k)
|
||||
let i = group_i(k)
|
||||
|
||||
group_r.update(k, ((r * r) - (i * i)) + real(col + k))
|
||||
group_i.update(k, (2.0 * r * i) + prefetch_i)
|
||||
|
||||
if ((r * r) + (i * i)) > limit then
|
||||
bitmap = bitmap and not mask
|
||||
end
|
||||
|
||||
mask = mask >> 1
|
||||
k = k + 1
|
||||
end
|
||||
until (bitmap == 0) or ((n = n - 1) == 1) end
|
||||
|
||||
view.push(bitmap)
|
||||
|
||||
col = col + 8
|
||||
end
|
||||
row = row + 1
|
||||
end
|
||||
|
||||
main.draw(x * (width >> 3), consume view)
|
||||
end
|
||||
|
||||
actor Main
|
||||
var iterations: U64 = 50
|
||||
var limit: F32 = 4.0
|
||||
var chunks: U64 = 16
|
||||
var width: U64 = 16000
|
||||
var actors: U64 = 0
|
||||
var header: U64 = 0
|
||||
var real: Array[F32] val = recover Array[F32] end
|
||||
var imaginary: Array[F32] val = recover Array[F32] end
|
||||
var outfile: (File | None) = None
|
||||
|
||||
new create(env: Env) =>
|
||||
try
|
||||
arguments(env)
|
||||
|
||||
let length = width
|
||||
let recip_width = 2.0 / width.f32()
|
||||
|
||||
var r = recover Array[F32](length) end
|
||||
var i = recover Array[F32](length) end
|
||||
|
||||
for j in Range(0, width) do
|
||||
r.push((recip_width * j.f32()) - 1.5)
|
||||
i.push((recip_width * j.f32()) - 1.0)
|
||||
end
|
||||
|
||||
real = consume r
|
||||
imaginary = consume i
|
||||
|
||||
spawn_actors()
|
||||
create_outfile()
|
||||
end
|
||||
|
||||
be draw(offset: U64, pixels: Array[U8] val) =>
|
||||
match outfile
|
||||
| var out: File =>
|
||||
out.seek_start(header + offset)
|
||||
out.write(pixels)
|
||||
if (actors = actors - 1) == 1 then
|
||||
out.dispose()
|
||||
end
|
||||
end
|
||||
|
||||
fun ref create_outfile() =>
|
||||
match outfile
|
||||
| var f: File =>
|
||||
f.print("P4\n " + width.string() + " " + width.string() + "\n")
|
||||
header = f.size()
|
||||
f.set_length((width * (width >> 3)) + header)
|
||||
end
|
||||
|
||||
fun ref spawn_actors() =>
|
||||
actors = ((width + (chunks - 1)) / chunks)
|
||||
|
||||
var rest = width % chunks
|
||||
|
||||
if rest == 0 then rest = chunks end
|
||||
|
||||
var x: U64 = 0
|
||||
var y: U64 = 0
|
||||
|
||||
for i in Range(0, actors - 1) do
|
||||
x = i * chunks
|
||||
y = x + chunks
|
||||
Worker.mandelbrot(this, x, y, width, iterations, limit, real, imaginary)
|
||||
end
|
||||
|
||||
Worker.mandelbrot(this, y, y + rest, width, iterations, limit, real,
|
||||
imaginary)
|
||||
|
||||
fun ref arguments(env: Env) ? =>
|
||||
let options = Options(env)
|
||||
|
||||
options
|
||||
.add("iterations", "i", I64Argument)
|
||||
.add("limit", "l", F64Argument)
|
||||
.add("chunks", "c", I64Argument)
|
||||
.add("width", "w", I64Argument)
|
||||
.add("output", "o", StringArgument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("iterations", var arg: I64) => iterations = arg.u64()
|
||||
| ("limit", var arg: F64) => limit = arg.f32()
|
||||
| ("chunks", var arg: I64) => chunks = arg.u64()
|
||||
| ("width", var arg: I64) => width = arg.u64()
|
||||
| ("output", var arg: String) =>
|
||||
outfile = try File(FilePath(env.root, arg)) end
|
||||
| let err: ParseError => err.report(env.out) ; usage(env) ; error
|
||||
end
|
||||
end
|
||||
|
||||
fun tag usage(env: Env) =>
|
||||
env.out.print(
|
||||
"""
|
||||
mandelbrot [OPTIONS]
|
||||
|
||||
The binary output can be converted to a BMP with the following command
|
||||
(ImageMagick Tools required):
|
||||
|
||||
convert <output> JPEG:<output>.jpg
|
||||
|
||||
Available options:
|
||||
|
||||
--iterations, -i Maximum amount of iterations to be done for each pixel.
|
||||
Defaults to 50.
|
||||
|
||||
--limit, -l Square of the limit that pixels need to exceed in order
|
||||
to escape from the Mandelbrot set.
|
||||
Defaults to 4.0.
|
||||
|
||||
--chunks, -c Maximum line count of chunks the image should be
|
||||
divided into for divide & conquer processing.
|
||||
Defaults to 16.
|
||||
|
||||
--width, -w Lateral length of the resulting mandelbrot image.
|
||||
Defaults to 16000.
|
||||
|
||||
--output, -o File to write the output to.
|
||||
|
||||
"""
|
||||
)
|
||||
130
samples/Pony/mixed.pony
Normal file
130
samples/Pony/mixed.pony
Normal file
@@ -0,0 +1,130 @@
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
var a: U64 = 86028157
|
||||
var b: U64 = 329545133
|
||||
|
||||
var result = factorize(a*b)
|
||||
|
||||
var correct =
|
||||
try
|
||||
(result.size() == 2) and
|
||||
(result(0) == 86028157) and
|
||||
(result(1) == 329545133)
|
||||
else
|
||||
false
|
||||
end
|
||||
|
||||
fun ref factorize(bigint: U64) : Array[U64] =>
|
||||
var factors = Array[U64](2)
|
||||
|
||||
if bigint <= 3 then
|
||||
factors.push(bigint)
|
||||
else
|
||||
var d: U64 = 2
|
||||
var i: U64 = 0
|
||||
var n = bigint
|
||||
|
||||
while d < n do
|
||||
if (n % d) == 0 then
|
||||
i = i + 1
|
||||
factors.push(d)
|
||||
n = n / d
|
||||
else
|
||||
d = if d == 2 then 3 else (d + 2) end
|
||||
end
|
||||
end
|
||||
|
||||
factors.push(d)
|
||||
end
|
||||
|
||||
factors
|
||||
|
||||
actor Ring
|
||||
var _env: Env
|
||||
var _size: U32
|
||||
var _pass: U32
|
||||
var _repetitions: U32
|
||||
var _next: Ring
|
||||
|
||||
new create(env: Env, size: U32, pass: U32, repetitions: U32) =>
|
||||
_env = env
|
||||
_size = size
|
||||
_pass = pass
|
||||
_repetitions = repetitions
|
||||
_next = spawn_ring(_env, _size, _pass)
|
||||
run()
|
||||
|
||||
new neighbor(env: Env, next: Ring) =>
|
||||
_env = env
|
||||
_next = next
|
||||
_size = 0
|
||||
_pass = 0
|
||||
_repetitions = 0
|
||||
|
||||
be apply(i: U32) =>
|
||||
if i > 0 then
|
||||
_next(i - 1)
|
||||
else
|
||||
run()
|
||||
end
|
||||
|
||||
fun ref run() =>
|
||||
if _repetitions > 0 then
|
||||
_repetitions = _repetitions - 1
|
||||
_next(_pass * _size)
|
||||
Worker(_env)
|
||||
end
|
||||
|
||||
fun tag spawn_ring(env: Env, size: U32, pass': U32) : Ring =>
|
||||
var next: Ring = this
|
||||
|
||||
for i in Range[U32](0, size) do
|
||||
next = Ring.neighbor(env, next)
|
||||
end
|
||||
|
||||
next
|
||||
|
||||
actor Main
|
||||
var _size: U32 = 50
|
||||
var _count: U32 = 20
|
||||
var _pass: U32 = 10000
|
||||
var _repetitions: U32 = 5
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
try
|
||||
arguments()
|
||||
start_benchmark()
|
||||
else
|
||||
usage()
|
||||
end
|
||||
|
||||
fun ref arguments() ? =>
|
||||
_count = _env.args(1).u32()
|
||||
_size = _env.args(2).u32()
|
||||
_pass = _env.args(3).u32()
|
||||
_repetitions = _env.args(4).u32()
|
||||
|
||||
fun ref start_benchmark() =>
|
||||
for i in Range[U32](0, _count) do
|
||||
Ring(_env, _size, _pass, _repetitions)
|
||||
end
|
||||
|
||||
fun ref usage() =>
|
||||
_env.out.print(
|
||||
"""
|
||||
mixed OPTIONS
|
||||
N number of actors in each ring"
|
||||
N number of rings"
|
||||
N number of messages to pass around each ring"
|
||||
N number of times to repeat"
|
||||
"""
|
||||
)
|
||||
1
samples/Puppet/hiera_include.pp
Normal file
1
samples/Puppet/hiera_include.pp
Normal file
@@ -0,0 +1 @@
|
||||
hiera_include('classes')
|
||||
1427
samples/Ren'Py/example.rpy
Normal file
1427
samples/Ren'Py/example.rpy
Normal file
File diff suppressed because it is too large
Load Diff
9
samples/Ruby/filenames/Brewfile
Normal file
9
samples/Ruby/filenames/Brewfile
Normal file
@@ -0,0 +1,9 @@
|
||||
tap 'caskroom/cask'
|
||||
tap 'telemachus/brew', 'https://telemachus@bitbucket.org/telemachus/brew.git'
|
||||
brew 'emacs', args: ['with-cocoa', 'with-gnutls']
|
||||
brew 'redis', restart_service: true
|
||||
brew 'mongodb'
|
||||
brew 'sphinx'
|
||||
brew 'imagemagick'
|
||||
brew 'mysql'
|
||||
cask 'google-chrome'
|
||||
267
samples/Ruby/racc.rb
Normal file
267
samples/Ruby/racc.rb
Normal file
@@ -0,0 +1,267 @@
|
||||
#
|
||||
# DO NOT MODIFY!!!!
|
||||
# This file is automatically generated by Racc 1.4.7
|
||||
# from Racc grammer file "".
|
||||
#
|
||||
|
||||
require 'racc/parser.rb'
|
||||
module RJSON
|
||||
class Parser < Racc::Parser
|
||||
|
||||
|
||||
require 'rjson/handler'
|
||||
|
||||
attr_reader :handler
|
||||
|
||||
def initialize tokenizer, handler = Handler.new
|
||||
@tokenizer = tokenizer
|
||||
@handler = handler
|
||||
super()
|
||||
end
|
||||
|
||||
def next_token
|
||||
@tokenizer.next_token
|
||||
end
|
||||
|
||||
def parse
|
||||
do_parse
|
||||
handler
|
||||
end
|
||||
##### State transition tables begin ###
|
||||
|
||||
racc_action_table = [
|
||||
9, 33, 9, 11, 13, 16, 19, 22, 9, 7,
|
||||
23, 1, 9, 11, 13, 16, 19, 29, 30, 7,
|
||||
21, 1, 9, 11, 13, 16, 19, 31, nil, 7,
|
||||
21, 1, 23, 7, nil, 1 ]
|
||||
|
||||
racc_action_check = [
|
||||
6, 27, 33, 33, 33, 33, 33, 3, 31, 33,
|
||||
6, 33, 29, 29, 29, 29, 29, 12, 22, 29,
|
||||
12, 29, 2, 2, 2, 2, 2, 25, nil, 2,
|
||||
2, 2, 25, 0, nil, 0 ]
|
||||
|
||||
racc_action_pointer = [
|
||||
24, nil, 20, 7, nil, nil, -2, nil, nil, nil,
|
||||
nil, nil, 10, nil, nil, nil, nil, nil, nil, nil,
|
||||
nil, nil, 18, nil, nil, 20, nil, -7, nil, 10,
|
||||
nil, 6, nil, 0, nil, nil, nil ]
|
||||
|
||||
racc_action_default = [
|
||||
-27, -12, -21, -27, -1, -2, -27, -10, -15, -26,
|
||||
-8, -22, -27, -23, -17, -16, -24, -20, -18, -25,
|
||||
-19, -11, -27, -13, -3, -27, -6, -27, -9, -21,
|
||||
37, -27, -4, -21, -14, -5, -7 ]
|
||||
|
||||
racc_goto_table = [
|
||||
8, 26, 24, 27, 10, 3, 25, 5, 4, 12,
|
||||
nil, nil, nil, nil, 28, nil, nil, nil, nil, nil,
|
||||
nil, 32, nil, nil, nil, nil, 35, 34, 27, nil,
|
||||
nil, 36 ]
|
||||
|
||||
racc_goto_check = [
|
||||
9, 7, 5, 8, 11, 1, 6, 3, 2, 12,
|
||||
nil, nil, nil, nil, 11, nil, nil, nil, nil, nil,
|
||||
nil, 5, nil, nil, nil, nil, 7, 9, 8, nil,
|
||||
nil, 9 ]
|
||||
|
||||
racc_goto_pointer = [
|
||||
nil, 5, 8, 7, nil, -4, 0, -5, -3, -2,
|
||||
nil, 2, 7, nil, nil ]
|
||||
|
||||
racc_goto_default = [
|
||||
nil, nil, 14, 18, 6, nil, nil, nil, 20, nil,
|
||||
2, nil, nil, 15, 17 ]
|
||||
|
||||
racc_reduce_table = [
|
||||
0, 0, :racc_error,
|
||||
1, 14, :_reduce_none,
|
||||
1, 14, :_reduce_none,
|
||||
2, 15, :_reduce_none,
|
||||
3, 15, :_reduce_none,
|
||||
3, 19, :_reduce_none,
|
||||
1, 19, :_reduce_none,
|
||||
3, 20, :_reduce_none,
|
||||
2, 16, :_reduce_none,
|
||||
3, 16, :_reduce_none,
|
||||
1, 23, :_reduce_10,
|
||||
1, 24, :_reduce_11,
|
||||
1, 17, :_reduce_12,
|
||||
1, 18, :_reduce_13,
|
||||
3, 25, :_reduce_none,
|
||||
1, 25, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 22, :_reduce_none,
|
||||
1, 26, :_reduce_none,
|
||||
1, 26, :_reduce_20,
|
||||
0, 27, :_reduce_none,
|
||||
1, 27, :_reduce_22,
|
||||
1, 27, :_reduce_23,
|
||||
1, 27, :_reduce_24,
|
||||
1, 27, :_reduce_25,
|
||||
1, 21, :_reduce_26 ]
|
||||
|
||||
racc_reduce_n = 27
|
||||
|
||||
racc_shift_n = 37
|
||||
|
||||
racc_token_table = {
|
||||
false => 0,
|
||||
:error => 1,
|
||||
:STRING => 2,
|
||||
:NUMBER => 3,
|
||||
:TRUE => 4,
|
||||
:FALSE => 5,
|
||||
:NULL => 6,
|
||||
"," => 7,
|
||||
":" => 8,
|
||||
"[" => 9,
|
||||
"]" => 10,
|
||||
"{" => 11,
|
||||
"}" => 12 }
|
||||
|
||||
racc_nt_base = 13
|
||||
|
||||
racc_use_result_var = true
|
||||
|
||||
Racc_arg = [
|
||||
racc_action_table,
|
||||
racc_action_check,
|
||||
racc_action_default,
|
||||
racc_action_pointer,
|
||||
racc_goto_table,
|
||||
racc_goto_check,
|
||||
racc_goto_default,
|
||||
racc_goto_pointer,
|
||||
racc_nt_base,
|
||||
racc_reduce_table,
|
||||
racc_token_table,
|
||||
racc_shift_n,
|
||||
racc_reduce_n,
|
||||
racc_use_result_var ]
|
||||
|
||||
Racc_token_to_s_table = [
|
||||
"$end",
|
||||
"error",
|
||||
"STRING",
|
||||
"NUMBER",
|
||||
"TRUE",
|
||||
"FALSE",
|
||||
"NULL",
|
||||
"\",\"",
|
||||
"\":\"",
|
||||
"\"[\"",
|
||||
"\"]\"",
|
||||
"\"{\"",
|
||||
"\"}\"",
|
||||
"$start",
|
||||
"document",
|
||||
"object",
|
||||
"array",
|
||||
"start_object",
|
||||
"end_object",
|
||||
"pairs",
|
||||
"pair",
|
||||
"string",
|
||||
"value",
|
||||
"start_array",
|
||||
"end_array",
|
||||
"values",
|
||||
"scalar",
|
||||
"literal" ]
|
||||
|
||||
Racc_debug_parser = false
|
||||
|
||||
##### State transition tables end #####
|
||||
|
||||
# reduce 0 omitted
|
||||
|
||||
# reduce 1 omitted
|
||||
|
||||
# reduce 2 omitted
|
||||
|
||||
# reduce 3 omitted
|
||||
|
||||
# reduce 4 omitted
|
||||
|
||||
# reduce 5 omitted
|
||||
|
||||
# reduce 6 omitted
|
||||
|
||||
# reduce 7 omitted
|
||||
|
||||
# reduce 8 omitted
|
||||
|
||||
# reduce 9 omitted
|
||||
|
||||
def _reduce_10(val, _values, result)
|
||||
@handler.start_array
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_11(val, _values, result)
|
||||
@handler.end_array
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_12(val, _values, result)
|
||||
@handler.start_object
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_13(val, _values, result)
|
||||
@handler.end_object
|
||||
result
|
||||
end
|
||||
|
||||
# reduce 14 omitted
|
||||
|
||||
# reduce 15 omitted
|
||||
|
||||
# reduce 16 omitted
|
||||
|
||||
# reduce 17 omitted
|
||||
|
||||
# reduce 18 omitted
|
||||
|
||||
# reduce 19 omitted
|
||||
|
||||
def _reduce_20(val, _values, result)
|
||||
@handler.scalar val[0]
|
||||
result
|
||||
end
|
||||
|
||||
# reduce 21 omitted
|
||||
|
||||
def _reduce_22(val, _values, result)
|
||||
n = val[0]; result = n.count('.') > 0 ? n.to_f : n.to_i
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_23(val, _values, result)
|
||||
result = true
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_24(val, _values, result)
|
||||
result = false
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_25(val, _values, result)
|
||||
result = nil
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_26(val, _values, result)
|
||||
@handler.scalar val[0].gsub(/^"|"$/, '')
|
||||
result
|
||||
end
|
||||
|
||||
def _reduce_none(val, _values, result)
|
||||
val[0]
|
||||
end
|
||||
|
||||
end # class Parser
|
||||
end # module RJSON
|
||||
2324
samples/Rust/hashmap.rs
Normal file
2324
samples/Rust/hashmap.rs
Normal file
File diff suppressed because it is too large
Load Diff
12
samples/Rust/main.rs
Normal file
12
samples/Rust/main.rs
Normal file
@@ -0,0 +1,12 @@
|
||||
extern crate foo;
|
||||
extern crate bar;
|
||||
|
||||
use foo::{self, quix};
|
||||
use bar::car::*;
|
||||
use bar;
|
||||
|
||||
fn main() {
|
||||
println!("Hello {}", "World");
|
||||
|
||||
panic!("Goodbye")
|
||||
}
|
||||
14
samples/Stan/congress.stan
Normal file
14
samples/Stan/congress.stan
Normal file
@@ -0,0 +1,14 @@
|
||||
data {
|
||||
int<lower=0> N;
|
||||
vector[N] incumbency_88;
|
||||
vector[N] vote_86;
|
||||
vector[N] vote_88;
|
||||
}
|
||||
parameters {
|
||||
vector[3] beta;
|
||||
real<lower=0> sigma;
|
||||
}
|
||||
model {
|
||||
vote_88 ~ normal(beta[1] + beta[2] * vote_86
|
||||
+ beta[3] * incumbency_88,sigma);
|
||||
}
|
||||
31
samples/Stan/dogs.stan
Normal file
31
samples/Stan/dogs.stan
Normal file
@@ -0,0 +1,31 @@
|
||||
data {
|
||||
int<lower=0> n_dogs;
|
||||
int<lower=0> n_trials;
|
||||
int<lower=0,upper=1> y[n_dogs,n_trials];
|
||||
}
|
||||
parameters {
|
||||
vector[3] beta;
|
||||
}
|
||||
transformed parameters {
|
||||
matrix[n_dogs,n_trials] n_avoid;
|
||||
matrix[n_dogs,n_trials] n_shock;
|
||||
matrix[n_dogs,n_trials] p;
|
||||
|
||||
for (j in 1:n_dogs) {
|
||||
n_avoid[j,1] <- 0;
|
||||
n_shock[j,1] <- 0;
|
||||
for (t in 2:n_trials) {
|
||||
n_avoid[j,t] <- n_avoid[j,t-1] + 1 - y[j,t-1];
|
||||
n_shock[j,t] <- n_shock[j,t-1] + y[j,t-1];
|
||||
}
|
||||
for (t in 1:n_trials)
|
||||
p[j,t] <- beta[1] + beta[2] * n_avoid[j,t] + beta[3] * n_shock[j,t];
|
||||
}
|
||||
}
|
||||
model {
|
||||
beta ~ normal(0, 100);
|
||||
for (i in 1:n_dogs) {
|
||||
for (j in 1:n_trials)
|
||||
y[i,j] ~ bernoulli_logit(p[i,j]);
|
||||
}
|
||||
}
|
||||
26
samples/Stan/schools.stan
Normal file
26
samples/Stan/schools.stan
Normal file
@@ -0,0 +1,26 @@
|
||||
data {
|
||||
int<lower=0> N;
|
||||
vector[N] y;
|
||||
vector[N] sigma_y;
|
||||
}
|
||||
parameters {
|
||||
vector[N] eta;
|
||||
real mu_theta;
|
||||
real<lower=0,upper=100> sigma_eta;
|
||||
real xi;
|
||||
}
|
||||
transformed parameters {
|
||||
real<lower=0> sigma_theta;
|
||||
vector[N] theta;
|
||||
|
||||
theta <- mu_theta + xi * eta;
|
||||
sigma_theta <- fabs(xi) / sigma_eta;
|
||||
}
|
||||
model {
|
||||
mu_theta ~ normal(0, 100);
|
||||
sigma_eta ~ inv_gamma(1, 1); //prior distribution can be changed to uniform
|
||||
|
||||
eta ~ normal(0, sigma_eta);
|
||||
xi ~ normal(0, 5);
|
||||
y ~ normal(theta,sigma_y);
|
||||
}
|
||||
27
samples/TypeScript/tsxAttributeResolution9.tsx
Normal file
27
samples/TypeScript/tsxAttributeResolution9.tsx
Normal file
@@ -0,0 +1,27 @@
|
||||
//@jsx: preserve
|
||||
//@module: amd
|
||||
|
||||
//@filename: react.d.ts
|
||||
declare module JSX {
|
||||
interface Element { }
|
||||
interface IntrinsicElements {
|
||||
}
|
||||
interface ElementAttributesProperty {
|
||||
props;
|
||||
}
|
||||
}
|
||||
|
||||
interface Props {
|
||||
foo: string;
|
||||
}
|
||||
|
||||
//@filename: file.tsx
|
||||
export class MyComponent {
|
||||
render() {
|
||||
}
|
||||
|
||||
props: { foo: string; }
|
||||
}
|
||||
|
||||
<MyComponent foo="bar" />; // ok
|
||||
<MyComponent foo={0} />; // should be an error
|
||||
79
samples/UrWeb/iso8601.ur
Normal file
79
samples/UrWeb/iso8601.ur
Normal file
@@ -0,0 +1,79 @@
|
||||
open Parse.String
|
||||
|
||||
val digit = satisfy isdigit
|
||||
|
||||
val decimal_of_len n =
|
||||
ds <- count n digit;
|
||||
return (List.foldl (fn d acc => 10*acc + ((ord d)-(ord #"0"))) 0 ds)
|
||||
|
||||
val date =
|
||||
y <- decimal_of_len 4;
|
||||
char' #"-";
|
||||
m <- decimal_of_len 2;
|
||||
char' #"-";
|
||||
d <- decimal_of_len 2;
|
||||
if m > 0 && m <= 12 then
|
||||
return {Year=y, Month=(Datetime.intToMonth (m-1)), Day=d}
|
||||
else
|
||||
fail
|
||||
|
||||
(* We parse fractions of a second, but ignore them since Datetime
|
||||
doesn't permit representing them. *)
|
||||
val time =
|
||||
h <- decimal_of_len 2;
|
||||
char' #":";
|
||||
m <- decimal_of_len 2;
|
||||
s <- maybe (char' #":";
|
||||
s <- decimal_of_len 2;
|
||||
maybe' (char' #"."; skipWhile isdigit);
|
||||
return s);
|
||||
return {Hour=h, Minute=m, Second=Option.get 0 s}
|
||||
|
||||
val timezone_offset =
|
||||
let val zulu = char' #"Z"; return 0
|
||||
val digits = decimal_of_len 2
|
||||
val sign = or (char' #"+"; return 1)
|
||||
(char' #"-"; return (-1))
|
||||
in
|
||||
zulu `or` (s <- sign;
|
||||
h <- digits;
|
||||
m <- (maybe' (char' #":"); or digits (return 0));
|
||||
return (s*(h*60+m)))
|
||||
end
|
||||
|
||||
val datetime_with_tz =
|
||||
d <- date; char' #"T"; t <- time;
|
||||
tz <- timezone_offset;
|
||||
return (d ++ t ++ {TZOffsetMinutes=tz})
|
||||
|
||||
val datetime =
|
||||
d <- datetime_with_tz;
|
||||
return (d -- #TZOffsetMinutes)
|
||||
|
||||
fun process v =
|
||||
case parse (d <- datetime_with_tz; eof; return d) v of
|
||||
Some r =>
|
||||
let
|
||||
val {Year=year,Month=month,Day=day,
|
||||
Hour=hour,Minute=minute,Second=second} =
|
||||
Datetime.addMinutes (r.TZOffsetMinutes) (r -- #TZOffsetMinutes)
|
||||
fun pad x =
|
||||
if x < 10 then "0" `strcat` show x else show x
|
||||
in
|
||||
<xml>{[pad hour]}:{[pad minute]}:{[pad second]} {[month]} {[day]}, {[year]}</xml>
|
||||
end
|
||||
| None => <xml>none</xml>
|
||||
|
||||
fun main () : transaction page =
|
||||
input <- source "2012-01-01T01:10:42Z";
|
||||
return <xml>
|
||||
<body>
|
||||
<label>
|
||||
Enter an
|
||||
<a href="https://en.wikipedia.org/wiki/ISO_8601">ISO 8601</a>
|
||||
datetime here:
|
||||
<ctextbox source={input} />
|
||||
</label>
|
||||
<ul><dyn signal={v <- signal input; return (process v)} /></ul>
|
||||
</body>
|
||||
</xml>
|
||||
85
samples/UrWeb/parse.urs
Normal file
85
samples/UrWeb/parse.urs
Normal file
@@ -0,0 +1,85 @@
|
||||
functor Make(Stream : sig type t end) : sig
|
||||
con t :: Type -> Type
|
||||
|
||||
val mreturn : a ::: Type -> a -> t a
|
||||
val mbind : a ::: Type -> b ::: Type ->
|
||||
(t a) -> (a -> t b) -> (t b)
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> Stream.t -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
end
|
||||
|
||||
structure String : sig
|
||||
con t :: Type -> Type
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> string -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
|
||||
val eof : t unit
|
||||
(* We provide alternative versions of some of these predicates
|
||||
* that return t unit as a monadic syntactical convenience. *)
|
||||
val string : string -> t string
|
||||
val string' : string -> t unit
|
||||
val stringCI : string -> t string
|
||||
val stringCI' : string -> t unit
|
||||
val char : char -> t char
|
||||
val char' : char -> t unit
|
||||
val take : int -> t (string*int)
|
||||
val drop : int -> t unit
|
||||
val satisfy : (char -> bool) -> t char
|
||||
val skip : (char -> bool) -> t unit
|
||||
val skipWhile : (char -> bool) -> t unit
|
||||
val takeWhile : (char -> bool) -> t (string*int)
|
||||
val takeWhile' : (char -> bool) -> t string (* conses *)
|
||||
(* Well, "till" is the correct form; but "til" is in common enough
|
||||
* usage that I'll prefer it for terseness. *)
|
||||
val takeTil : (char -> bool) -> t (string*int)
|
||||
val takeTil' : (char -> bool) -> t string (* conses *)
|
||||
val takeRest : t string
|
||||
|
||||
(** Convenience functions *)
|
||||
val skipSpace : t unit
|
||||
val endOfLine : t unit
|
||||
val unsigned_int_of_radix : int -> t int
|
||||
(*
|
||||
* val signed_int_of_radix : int -> t int
|
||||
* val double : t float
|
||||
*)
|
||||
end
|
||||
|
||||
structure Blob : sig
|
||||
con t :: Type -> Type
|
||||
val monad_parse : monad t
|
||||
|
||||
val parse : a ::: Type -> t a -> blob -> option a
|
||||
|
||||
(** Combinators *)
|
||||
val fail : a ::: Type -> t a
|
||||
val or : a ::: Type -> t a -> t a -> t a
|
||||
val maybe : a ::: Type -> t a -> t (option a)
|
||||
val maybe' : a ::: Type -> t a -> t unit
|
||||
val many : a ::: Type -> t a -> t (list a)
|
||||
val count : a ::: Type -> int -> t a -> t (list a)
|
||||
val skipMany : a ::: Type -> t a -> t unit
|
||||
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
|
||||
end
|
||||
72
samples/X10/ArraySum.x10
Normal file
72
samples/X10/ArraySum.x10
Normal file
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
|
||||
/**
|
||||
* A simple illustration of loop parallelization within a single place.
|
||||
*/
|
||||
public class ArraySum {
|
||||
|
||||
var sum:Long;
|
||||
val data:Rail[Long];
|
||||
|
||||
public def this(n:Long) {
|
||||
// Create a Rail with n elements (0..(n-1)), all initialized to 1.
|
||||
data = new Rail[Long](n, 1);
|
||||
sum = 0;
|
||||
}
|
||||
|
||||
def sum(a:Rail[Long], start:Long, last:Long) {
|
||||
var mySum: Long = 0;
|
||||
for (i in start..(last-1)) {
|
||||
mySum += a(i);
|
||||
}
|
||||
return mySum;
|
||||
}
|
||||
|
||||
def sum(numThreads:Long) {
|
||||
val mySize = data.size/numThreads;
|
||||
finish for (p in 0..(numThreads-1)) async {
|
||||
val mySum = sum(data, p*mySize, (p+1)*mySize);
|
||||
// Multiple activities will simultaneously update
|
||||
// this location -- so use an atomic operation.
|
||||
atomic sum += mySum;
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
var size:Long = 5*1000*1000;
|
||||
if (args.size >=1)
|
||||
size = Long.parse(args(0));
|
||||
|
||||
Console.OUT.println("Initializing.");
|
||||
val a = new ArraySum(size);
|
||||
val P = [1,2,4];
|
||||
|
||||
//warmup loop
|
||||
Console.OUT.println("Warming up.");
|
||||
for (numThreads in P)
|
||||
a.sum(numThreads);
|
||||
|
||||
for (numThreads in P) {
|
||||
Console.OUT.println("Starting with " + numThreads + " threads.");
|
||||
a.sum=0;
|
||||
var time: long = - System.nanoTime();
|
||||
a.sum(numThreads);
|
||||
time += System.nanoTime();
|
||||
Console.OUT.println("For p=" + numThreads
|
||||
+ " result: " + a.sum
|
||||
+ ((size==a.sum)? " ok" : " bad")
|
||||
+ " (time=" + (time/(1000*1000)) + " ms)");
|
||||
}
|
||||
}
|
||||
}
|
||||
50
samples/X10/Cancellation.x10
Normal file
50
samples/X10/Cancellation.x10
Normal file
@@ -0,0 +1,50 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.xrx.Runtime;
|
||||
|
||||
/**
|
||||
* Demonstrate how to instantiate the X10 runtime as an executor service
|
||||
* submit jobs to the runtime, wait jobs to complete and cancel all jobs
|
||||
*
|
||||
* Compile with: x10c -O -EXECUTOR_MODE=true Cancellation.x10
|
||||
* Run with: X10_CANCELLABLE=true X10_NPLACES=4 x10 -DX10RT_IMPL=JavaSockets Cancellation
|
||||
*/
|
||||
class Cancellation {
|
||||
static def job(id:Long, iterations:Long) = ()=>{
|
||||
at (Place.places().next(here)) async {
|
||||
for (i in 1..iterations) {
|
||||
finish for (p in Place.places()) {
|
||||
at (p) async Console.OUT.println(here+" says hello (job " + id + ", iteration " + i + ")");
|
||||
}
|
||||
Console.ERR.println();
|
||||
System.sleep(200);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public static def main(args:Rail[String]):void {
|
||||
val w1 = Runtime.submit(job(1, 5));
|
||||
w1.await(); Console.ERR.println("Job 1 completed\n");
|
||||
val w2 = Runtime.submit(job(2, 1000));
|
||||
System.threadSleep(1000);
|
||||
val c1 = Runtime.cancelAll();
|
||||
try { w2.await(); } catch (e:Exception) { Console.ERR.println("Job 2 aborted with exception " + e +"\n"); }
|
||||
c1.await(); // waiting for cancellation to be processed
|
||||
System.threadSleep(1000);
|
||||
Runtime.submit(job(3, 1000));
|
||||
Runtime.submit(job(4, 1000));
|
||||
System.threadSleep(1000);
|
||||
val c2 = Runtime.cancelAll();
|
||||
c2.await();
|
||||
Console.ERR.println("Goodbye\n");
|
||||
}
|
||||
}
|
||||
52
samples/X10/Fibonacci.x10
Normal file
52
samples/X10/Fibonacci.x10
Normal file
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
|
||||
/**
|
||||
* This is a small program to illustrate the use of
|
||||
* <code>async</code> and <code>finish</code> in a
|
||||
* prototypical recursive divide-and-conquer algorithm.
|
||||
* It is obviously not intended to show a efficient way to
|
||||
* compute Fibonacci numbers in X10.<p>
|
||||
*
|
||||
* The heart of the example is the <code>run</code> method,
|
||||
* which directly embodies the recursive definition of
|
||||
* <pre>
|
||||
* fib(n) = fib(n-1)+fib(n-2);
|
||||
* </pre>
|
||||
* by using an <code>async</code> to compute <code>fib(n-1)</code> while
|
||||
* the current activity computes <code>fib(n-2)</code>. A <code>finish</code>
|
||||
* is used to ensure that both computations are complete before
|
||||
* their results are added together to compute <code>fib(n)</code>
|
||||
*/
|
||||
public class Fibonacci {
|
||||
|
||||
public static def fib(n:long) {
|
||||
if (n<=2) return 1;
|
||||
|
||||
val f1:long;
|
||||
val f2:long;
|
||||
finish {
|
||||
async { f1 = fib(n-1); }
|
||||
f2 = fib(n-2);
|
||||
}
|
||||
return f1 + f2;
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = (args.size > 0) ? Long.parse(args(0)) : 10;
|
||||
Console.OUT.println("Computing fib("+n+")");
|
||||
val f = fib(n);
|
||||
Console.OUT.println("fib("+n+") = "+f);
|
||||
}
|
||||
}
|
||||
|
||||
86
samples/X10/HeatTransfer_v0.x10
Normal file
86
samples/X10/HeatTransfer_v0.x10
Normal file
@@ -0,0 +1,86 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.*;
|
||||
import x10.compiler.Foreach;
|
||||
import x10.compiler.Inline;
|
||||
|
||||
|
||||
/**
|
||||
* This is a sample program illustrating how to use
|
||||
* X10's array classes. It also illustrates the use
|
||||
* of foreach to acheive intra-place parallelism.
|
||||
*
|
||||
* The program solves a set of 2D partial differential
|
||||
* equations by iteratively applying a 5-point stencil
|
||||
* operation until convergence is reached.
|
||||
*/
|
||||
public class HeatTransfer_v0 {
|
||||
static val EPSILON = 1.0e-5;
|
||||
|
||||
val N:Long;
|
||||
val A:Array_2[Double]{self!=null};
|
||||
val Tmp:Array_2[Double]{self!=null};
|
||||
|
||||
public def this(size:Long) {
|
||||
N = size;
|
||||
A = new Array_2[Double](N+2, N+2); // zero-initialized N+2 * N+2 array of doubles
|
||||
for (j in 1..N) A(0, j) = 1; // set one border row to 1
|
||||
Tmp = new Array_2[Double](A);
|
||||
}
|
||||
|
||||
final @Inline def stencil(x:Long, y:Long):Double {
|
||||
return (A(x-1,y) + A(x+1,y) + A(x,y-1) + A(x,y+1)) / 4;
|
||||
}
|
||||
|
||||
def run() {
|
||||
val is = new DenseIterationSpace_2(1,1,N,N);
|
||||
var delta:Double;
|
||||
do {
|
||||
// Compute new values, storing in tmp
|
||||
delta = Foreach.blockReduce(is,
|
||||
(i:Long, j:Long)=>{
|
||||
Tmp(i,j) = stencil(i,j);
|
||||
// Reduce max element-wise delta (A now holds previous values)
|
||||
return Math.abs(Tmp(i,j) - A(i,j));
|
||||
},
|
||||
(a:Double, b:Double)=>Math.max(a,b), 0.0
|
||||
);
|
||||
|
||||
// swap backing data of A and Tmp
|
||||
Array.swap(A, Tmp);
|
||||
} while (delta > EPSILON);
|
||||
}
|
||||
|
||||
def prettyPrintResult() {
|
||||
for (i in 1..N) {
|
||||
for (j in 1..N) {
|
||||
Console.OUT.printf("%1.4f ",A(i,j));
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Long.parse(args(0)) : 8;
|
||||
Console.OUT.println("HeatTransfer example with N="+n+" and epsilon="+EPSILON);
|
||||
Console.OUT.println("Initializing data structures");
|
||||
val ht = new HeatTransfer_v0(n);
|
||||
Console.OUT.println("Beginning computation...");
|
||||
val start = System.nanoTime();
|
||||
ht.run();
|
||||
val stop = System.nanoTime();
|
||||
Console.OUT.printf("...completed in %1.3f seconds.\n", ((stop-start) as double)/1e9);
|
||||
if (n <= 10) {
|
||||
ht.prettyPrintResult();
|
||||
}
|
||||
}
|
||||
}
|
||||
114
samples/X10/HeatTransfer_v1.x10
Normal file
114
samples/X10/HeatTransfer_v1.x10
Normal file
@@ -0,0 +1,114 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.*;
|
||||
import x10.compiler.Foreach;
|
||||
import x10.util.Team;
|
||||
|
||||
/**
|
||||
* This is a sample program illustrating how to use
|
||||
* X10's distributed array classes. It also illustrates the use
|
||||
* of foreach to achieve intra-place parallelism and the mixture
|
||||
* of APGAS finish/async/at with Team collective operations.
|
||||
*
|
||||
* This version of the program uses a vanilla DistArray without
|
||||
* ghost regions. As a result, the stencil function does
|
||||
* inefficient fine-grained neighbor communication to get individual values.
|
||||
* Compare this to HeatTransfer_v2 which utilizes ghost regions and
|
||||
* bulk ghost-region exchange functions.
|
||||
*
|
||||
* The program solves a set of 2D partial differential
|
||||
* equations by iteratively applying a 5-point stencil
|
||||
* operation until convergence is reached.
|
||||
*/
|
||||
public class HeatTransfer_v1 {
|
||||
static val EPSILON = 1.0e-5;
|
||||
|
||||
val N:Long;
|
||||
val A:DistArray_BlockBlock_2[Double]{self!=null};
|
||||
val Tmp:DistArray_BlockBlock_2[Double]{self!=null};
|
||||
|
||||
public def this(size:Long) {
|
||||
N = size;
|
||||
val init = (i:Long, j:Long)=>i==0 ? 1.0 : 0.0;
|
||||
A = new DistArray_BlockBlock_2[Double](N+2, N+2, init);
|
||||
Tmp = new DistArray_BlockBlock_2[Double](N+2, N+2, init);
|
||||
}
|
||||
|
||||
final def stencil(x:Long, y:Long):Double {
|
||||
val cls = (dx:Long, dy:Long)=>{
|
||||
val p = A.place(x+dx, y+dy);
|
||||
p == here ? A(x+dx,y+dy) : at (p) A(x+dx,y+dy)
|
||||
};
|
||||
val tmp = cls(-1,0) + cls(1,0) + cls(0,-1) + cls(0,1);
|
||||
return tmp / 4;
|
||||
}
|
||||
|
||||
def run() {
|
||||
val myTeam = new Team(A.placeGroup());
|
||||
finish for (p in A.placeGroup()) at (p) async {
|
||||
// Compute the subset of the local indices on which
|
||||
// we want to apply the stencil (the interior points of the N+2 x N+2 grid)
|
||||
val li = A.localIndices();
|
||||
val interior = new DenseIterationSpace_2(li.min(0) == 0 ? 1 : li.min(0),
|
||||
li.min(1) == 0 ? 1 : li.min(1),
|
||||
li.max(0) == N+1 ? N : li.max(0),
|
||||
li.max(1) == N+1 ? N : li.max(1));
|
||||
var delta:Double;
|
||||
do {
|
||||
// Compute new values, storing in tmp
|
||||
val myDelta = Foreach.blockReduce(interior,
|
||||
(i:Long, j:Long)=>{
|
||||
Tmp(i,j) = stencil(i,j);
|
||||
// Reduce max element-wise delta (A now holds previous values)
|
||||
return Math.abs(Tmp(i,j) - A(i,j));
|
||||
},
|
||||
(a:Double, b:Double)=>Math.max(a,b), 0.0
|
||||
);
|
||||
|
||||
myTeam.barrier();
|
||||
|
||||
// Unlike Array, DistArray doesn't provide an optimized swap.
|
||||
// So, until it does, we have to copy the data elements.
|
||||
Foreach.block(interior, (i:Long, j:Long)=>{
|
||||
A(i,j) = Tmp(i,j);
|
||||
});
|
||||
|
||||
delta = myTeam.allreduce(myDelta, Team.MAX);
|
||||
} while (delta > EPSILON);
|
||||
}
|
||||
}
|
||||
|
||||
def prettyPrintResult() {
|
||||
for (i in 1..N) {
|
||||
for (j in 1..N) {
|
||||
val x = at (A.place(i,j)) A(i,j);
|
||||
Console.OUT.printf("%1.4f ", x);
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Long.parse(args(0)) : 8;
|
||||
Console.OUT.println("HeatTransfer example with N="+n+" and epsilon="+EPSILON);
|
||||
Console.OUT.println("Initializing data structures");
|
||||
val ht = new HeatTransfer_v1(n);
|
||||
Console.OUT.println("Beginning computation...");
|
||||
val start = System.nanoTime();
|
||||
ht.run();
|
||||
val stop = System.nanoTime();
|
||||
Console.OUT.printf("...completed in %1.3f seconds.\n", ((stop-start) as double)/1e9);
|
||||
if (n <= 10) {
|
||||
ht.prettyPrintResult();
|
||||
}
|
||||
}
|
||||
}
|
||||
44
samples/X10/HelloWholeWorld.x10
Normal file
44
samples/X10/HelloWholeWorld.x10
Normal file
@@ -0,0 +1,44 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
|
||||
/**
|
||||
* The classic hello world program, with a twist - prints a message
|
||||
* from the command line at every Place.
|
||||
* The messages from each Place may appear in any order, but the
|
||||
* finish ensures that the last message printed will be "Goodbye"
|
||||
* <pre>
|
||||
* Typical output:
|
||||
* [dgrove@linchen samples]$ ./HelloWholeWorld 'best wishes'
|
||||
* Place(1) says hello and best wishes
|
||||
* Place(2) says hello and best wishes
|
||||
* Place(3) says hello and best wishes
|
||||
* Place(0) says hello and best wishes
|
||||
* Goodbye
|
||||
* [dgrove@linchen samples]$
|
||||
* </pre>
|
||||
*/
|
||||
class HelloWholeWorld {
|
||||
public static def main(args:Rail[String]):void {
|
||||
if (args.size < 1) {
|
||||
Console.OUT.println("Usage: HelloWholeWorld message");
|
||||
return;
|
||||
}
|
||||
|
||||
finish for (p in Place.places()) {
|
||||
at (p) async Console.OUT.println(here+" says hello and "+args(0));
|
||||
}
|
||||
Console.OUT.println("Goodbye");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
23
samples/X10/HelloWorld.x10
Normal file
23
samples/X10/HelloWorld.x10
Normal file
@@ -0,0 +1,23 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
|
||||
/**
|
||||
* The classic hello world program, shows how to output to the console.
|
||||
*/
|
||||
class HelloWorld {
|
||||
public static def main(Rail[String]) {
|
||||
Console.OUT.println("Hello World!" );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
45
samples/X10/Histogram.x10
Normal file
45
samples/X10/Histogram.x10
Normal file
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
public class Histogram {
|
||||
public static def compute(data:Rail[Int], numBins:Int) {
|
||||
val bins = new Rail[Int](numBins);
|
||||
finish for (i in data.range) async {
|
||||
val b = data(i) % numBins;
|
||||
atomic bins(b)++;
|
||||
}
|
||||
return bins;
|
||||
}
|
||||
|
||||
public static def run(N:Int, S:Int):Boolean {
|
||||
val a = new Rail[Int](N, (i:long)=> i as int);
|
||||
val b = compute(a, S);
|
||||
val v = b(0);
|
||||
var ok:Boolean = true;
|
||||
for (x in b.range) ok &= (b(x)==v);
|
||||
return ok;
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
if (args.size != 2L) {
|
||||
Console.OUT.println("Usage: Histogram SizeOfArray NumberOfBins");
|
||||
return;
|
||||
}
|
||||
val N = Int.parse(args(0));
|
||||
val S = Int.parse(args(1));
|
||||
val ok = run(N,S);
|
||||
if (ok) {
|
||||
Console.OUT.println("Test ok.");
|
||||
} else {
|
||||
Console.OUT.println("Test failed.");
|
||||
}
|
||||
}
|
||||
}
|
||||
55
samples/X10/Integrate.x10
Normal file
55
samples/X10/Integrate.x10
Normal file
@@ -0,0 +1,55 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This is a slightly more realistic example of the
|
||||
* basic computational pattern of using async/finish
|
||||
* to express recursive divide-and-conquer algorithms.
|
||||
* The program does integration via Guassian Quadrature.
|
||||
* <p>
|
||||
* It also can serve as an example of using a closure.
|
||||
*/
|
||||
public class Integrate {
|
||||
static val epsilon = 1.0e-9;
|
||||
|
||||
val fun:(double)=>double;
|
||||
|
||||
public def this(f:(double)=>double) { fun = f; }
|
||||
|
||||
public def computeArea(left:double, right:double) {
|
||||
return recEval(left, fun(left), right, fun(right), 0);
|
||||
}
|
||||
|
||||
private def recEval(l:double, fl:double, r:double, fr:double, a:double) {
|
||||
val h = (r - l) / 2;
|
||||
val hh = h / 2;
|
||||
val c = l + h;
|
||||
val fc = fun(c);
|
||||
val al = (fl + fc) * hh;
|
||||
val ar = (fr + fc) * hh;
|
||||
val alr = al + ar;
|
||||
if (Math.abs(alr - a) < epsilon) return alr;
|
||||
val expr1:double;
|
||||
val expr2:double;
|
||||
finish {
|
||||
async { expr1 = recEval(c, fc, r, fr, ar); };
|
||||
expr2 = recEval(l, fl, c, fc, al);
|
||||
}
|
||||
return expr1 + expr2;
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val obj = new Integrate((x:double)=>(x*x + 1.0) * x);
|
||||
val xMax = args.size > 0 ? Long.parse(args(0)) : 10;
|
||||
val area = obj.computeArea(0, xMax);
|
||||
Console.OUT.println("The area of (x*x +1) * x from 0 to "+xMax+" is "+area);
|
||||
}
|
||||
}
|
||||
151
samples/X10/KMeans.x10
Normal file
151
samples/X10/KMeans.x10
Normal file
@@ -0,0 +1,151 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* A KMeans object o can compute K means of a given set of
|
||||
* points of dimension o.myDim.
|
||||
* <p>
|
||||
* This class implements a sequential program, that is readily parallelizable.
|
||||
*
|
||||
* For a scalable, high-performance version of this benchmark see
|
||||
* KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
|
||||
*/
|
||||
public class KMeans(myDim:Long) {
|
||||
|
||||
static val DIM=2;
|
||||
static val K=4;
|
||||
static val POINTS=2000;
|
||||
static val ITERATIONS=50;
|
||||
static val EPS=0.01F;
|
||||
|
||||
static type ValVector(k:Long) = Rail[Float]{self.size==k};
|
||||
static type ValVector = ValVector(DIM);
|
||||
|
||||
static type Vector(k:Long) = Rail[Float]{self.size==k};
|
||||
static type Vector = Vector(DIM);
|
||||
|
||||
static type SumVector(d:Long) = V{self.dim==d};
|
||||
static type SumVector = SumVector(DIM);
|
||||
|
||||
/**
|
||||
* V represents the sum of 'count' number of vectors of dimension 'dim'.
|
||||
*/
|
||||
static class V(dim:Long) implements (Long)=>Float {
|
||||
var vec: Vector(dim);
|
||||
var count:Int;
|
||||
def this(dim:Long, init:(Long)=>Float): SumVector(dim) {
|
||||
property(dim);
|
||||
vec = new Rail[Float](this.dim, init);
|
||||
count = 0n;
|
||||
}
|
||||
public operator this(i:Long) = vec(i);
|
||||
def makeZero() {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) =0.0F;
|
||||
count=0n;
|
||||
}
|
||||
def addIn(a:ValVector(dim)) {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) += a(i);
|
||||
count++;
|
||||
}
|
||||
def div(f:Int) {
|
||||
for (i in 0..(dim-1))
|
||||
vec(i) /= f;
|
||||
}
|
||||
def dist(a:ValVector(dim)):Float {
|
||||
var dist:Float=0.0F;
|
||||
for (i in 0..(dim-1)) {
|
||||
val tmp = vec(i)-a(i);
|
||||
dist += tmp*tmp;
|
||||
}
|
||||
return dist;
|
||||
}
|
||||
def dist(a:SumVector(dim)):Float {
|
||||
var dist:Float=0.0F;
|
||||
for (i in 0..(dim-1)) {
|
||||
val tmp = vec(i)-a(i);
|
||||
dist += tmp*tmp;
|
||||
}
|
||||
return dist;
|
||||
}
|
||||
def print() {
|
||||
Console.OUT.println();
|
||||
for (i in 0..(dim-1)) {
|
||||
Console.OUT.print((i>0? " " : "") + vec(i));
|
||||
}
|
||||
}
|
||||
def normalize() { div(count);}
|
||||
def count() = count;
|
||||
}
|
||||
|
||||
|
||||
def this(myDim:Long):KMeans{self.myDim==myDim} {
|
||||
property(myDim);
|
||||
}
|
||||
static type KMeansData(myK:Long, myDim:Long)= Rail[SumVector(myDim)]{self.size==myK};
|
||||
|
||||
/**
|
||||
* Compute myK means for the given set of points of dimension myDim.
|
||||
*/
|
||||
def computeMeans(myK:Long, points:Rail[ValVector(myDim)]):KMeansData(myK, myDim) {
|
||||
var redCluster : KMeansData(myK, myDim) =
|
||||
new Rail[SumVector(myDim)](myK, (i:long)=> new V(myDim, (j:long)=>points(i)(j)));
|
||||
var blackCluster: KMeansData(myK, myDim) =
|
||||
new Rail[SumVector(myDim)](myK, (i:long)=> new V(myDim, (j:long)=>0.0F));
|
||||
for (i in 1..ITERATIONS) {
|
||||
val tmp = redCluster;
|
||||
redCluster = blackCluster;
|
||||
blackCluster=tmp;
|
||||
for (p in 0..(POINTS-1)) {
|
||||
var closest:Long = -1;
|
||||
var closestDist:Float = Float.MAX_VALUE;
|
||||
val point = points(p);
|
||||
for (k in 0..(myK-1)) { // compute closest mean in cluster.
|
||||
val dist = blackCluster(k).dist(point);
|
||||
if (dist < closestDist) {
|
||||
closestDist = dist;
|
||||
closest = k;
|
||||
}
|
||||
}
|
||||
redCluster(closest).addIn(point);
|
||||
}
|
||||
for (k in 0..(myK-1))
|
||||
redCluster(k).normalize();
|
||||
|
||||
var b:Boolean = true;
|
||||
for (k in 0..(myK-1)) {
|
||||
if (redCluster(k).dist(blackCluster(k)) > EPS) {
|
||||
b=false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (b)
|
||||
break;
|
||||
for (k in 0..(myK-1))
|
||||
blackCluster(k).makeZero();
|
||||
}
|
||||
return redCluster;
|
||||
}
|
||||
|
||||
public static def main (Rail[String]) {
|
||||
val rnd = new Random(0);
|
||||
val points = new Rail[ValVector](POINTS,
|
||||
(long)=>new Rail[Float](DIM, (long)=>rnd.nextFloat()));
|
||||
val result = new KMeans(DIM).computeMeans(K, points);
|
||||
for (k in 0..(K-1)) result(k).print();
|
||||
}
|
||||
}
|
||||
|
||||
// vim: shiftwidth=4:tabstop=4:expandtab
|
||||
147
samples/X10/KMeansDist.x10
Normal file
147
samples/X10/KMeansDist.x10
Normal file
@@ -0,0 +1,147 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.*;
|
||||
import x10.io.Console;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* A low performance formulation of distributed KMeans using fine-grained asyncs.
|
||||
*
|
||||
* For a highly optimized and scalable, version of this benchmark see
|
||||
* KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
|
||||
*/
|
||||
public class KMeansDist {
|
||||
|
||||
static val DIM=2;
|
||||
static val CLUSTERS=4;
|
||||
static val POINTS=2000;
|
||||
static val ITERATIONS=50;
|
||||
|
||||
public static def main (Rail[String]) {
|
||||
val world = Place.places();
|
||||
val local_curr_clusters =
|
||||
PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
|
||||
val local_new_clusters =
|
||||
PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
|
||||
val local_cluster_counts =
|
||||
PlaceLocalHandle.make[Rail[Int]](world, ()=> new Rail[Int](CLUSTERS));
|
||||
|
||||
val rnd = PlaceLocalHandle.make[Random](world, () => new Random(0));
|
||||
val points = new DistArray_Block_2[Float](POINTS, DIM, world, (Long,Long)=>rnd().nextFloat());
|
||||
|
||||
val central_clusters = new Array_2[Float](CLUSTERS, DIM, (i:Long, j:Long) => {
|
||||
at (points.place(i,j)) points(i,j)
|
||||
});
|
||||
|
||||
val old_central_clusters = new Array_2[Float](CLUSTERS, DIM);
|
||||
|
||||
val central_cluster_counts = new Rail[Int](CLUSTERS);
|
||||
|
||||
for (iter in 1..ITERATIONS) {
|
||||
|
||||
Console.OUT.println("Iteration: "+iter);
|
||||
|
||||
finish {
|
||||
// reset state
|
||||
for (d in world) at (d) async {
|
||||
for ([i,j] in central_clusters.indices()) {
|
||||
local_curr_clusters()(i, j) = central_clusters(i, j);
|
||||
local_new_clusters()(i, j) = 0f;
|
||||
}
|
||||
|
||||
local_cluster_counts().clear();
|
||||
}
|
||||
}
|
||||
|
||||
finish {
|
||||
// compute new clusters and counters
|
||||
for (p in 0..(POINTS-1)) {
|
||||
at (points.place(p,0)) async {
|
||||
var closest:Long = -1;
|
||||
var closest_dist:Float = Float.MAX_VALUE;
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
var dist : Float = 0;
|
||||
for (d in 0..(DIM-1)) {
|
||||
val tmp = points(p,d) - local_curr_clusters()(k, d);
|
||||
dist += tmp * tmp;
|
||||
}
|
||||
if (dist < closest_dist) {
|
||||
closest_dist = dist;
|
||||
closest = k;
|
||||
}
|
||||
}
|
||||
atomic {
|
||||
for (d in 0..(DIM-1)) {
|
||||
local_new_clusters()(closest,d) += points(p,d);
|
||||
}
|
||||
local_cluster_counts()(closest)++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ([i,j] in old_central_clusters.indices()) {
|
||||
old_central_clusters(i, j) = central_clusters(i, j);
|
||||
central_clusters(i, j) = 0f;
|
||||
}
|
||||
|
||||
central_cluster_counts.clear();
|
||||
|
||||
finish {
|
||||
val central_clusters_gr = GlobalRef(central_clusters);
|
||||
val central_cluster_counts_gr = GlobalRef(central_cluster_counts);
|
||||
val there = here;
|
||||
for (d in world) at (d) async {
|
||||
// access PlaceLocalHandles 'here' and then data will be captured by at and transfered to 'there' for accumulation
|
||||
val tmp_new_clusters = local_new_clusters();
|
||||
val tmp_cluster_counts = local_cluster_counts();
|
||||
at (there) atomic {
|
||||
for ([i,j] in tmp_new_clusters.indices()) {
|
||||
central_clusters_gr()(i,j) += tmp_new_clusters(i,j);
|
||||
}
|
||||
for (j in 0..(CLUSTERS-1)) {
|
||||
central_cluster_counts_gr()(j) += tmp_cluster_counts(j);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
for (d in 0..(DIM-1)) {
|
||||
central_clusters(k, d) /= central_cluster_counts(k);
|
||||
}
|
||||
}
|
||||
|
||||
// TEST FOR CONVERGENCE
|
||||
var b:Boolean = true;
|
||||
for ([i,j] in old_central_clusters.indices()) {
|
||||
if (Math.abs(old_central_clusters(i, j)-central_clusters(i, j))>0.0001) {
|
||||
b = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (b) break;
|
||||
|
||||
}
|
||||
|
||||
for (d in 0..(DIM-1)) {
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
if (k>0)
|
||||
Console.OUT.print(" ");
|
||||
Console.OUT.print(central_clusters(k,d));
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vim: shiftwidth=4:tabstop=4:expandtab
|
||||
144
samples/X10/KMeansDistPlh.x10
Normal file
144
samples/X10/KMeansDistPlh.x10
Normal file
@@ -0,0 +1,144 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2015.
|
||||
*/
|
||||
|
||||
import x10.array.Array;
|
||||
import x10.array.Array_2;
|
||||
import x10.compiler.Foreach;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* A better formulation of distributed KMeans using coarse-grained asyncs to
|
||||
* implement an allreduce pattern for cluster centers and counts.
|
||||
*
|
||||
* For a highly optimized and scalable, version of this benchmark see
|
||||
* KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
|
||||
*/
|
||||
public class KMeansDistPlh {
|
||||
|
||||
static val DIM=2;
|
||||
static val CLUSTERS=4;
|
||||
|
||||
static class ClusterState {
|
||||
val clusters = new Array_2[Float](CLUSTERS, DIM);
|
||||
val clusterCounts = new Rail[Int](CLUSTERS);
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val numPoints = args.size > 0 ? Long.parse(args(0)) : 2000;
|
||||
val iterations = args.size > 1 ? Long.parse(args(1)) : 50;
|
||||
val world = Place.places();
|
||||
|
||||
val clusterStatePlh = PlaceLocalHandle.make[ClusterState](world, () => new ClusterState());
|
||||
val currentClustersPlh = PlaceLocalHandle.make[Array_2[Float]](world, () => new Array_2[Float](CLUSTERS, DIM));
|
||||
val pointsPlh = PlaceLocalHandle.make[Array_2[Float]](world, () => {
|
||||
val rand = new Random(here.id);
|
||||
return new Array_2[Float](numPoints/world.size(), DIM, (Long,Long)=>rand.nextFloat());
|
||||
});
|
||||
|
||||
val centralCurrentClusters = new Array_2[Float](CLUSTERS, DIM);
|
||||
val centralNewClusters = new Array_2[Float](CLUSTERS, DIM);
|
||||
val centralClusterCounts = new Rail[Int](CLUSTERS);
|
||||
|
||||
// arbitrarily initialize central clusters to first few points
|
||||
for ([i,j] in centralCurrentClusters.indices()) {
|
||||
centralCurrentClusters(i,j) = pointsPlh()(i,j);
|
||||
}
|
||||
|
||||
for (iter in 1..iterations) {
|
||||
Console.OUT.println("Iteration: "+iter);
|
||||
|
||||
finish {
|
||||
for (place in world) async {
|
||||
val placeClusters = at(place) {
|
||||
val currentClusters = currentClustersPlh();
|
||||
Array.copy(centralCurrentClusters, currentClusters);
|
||||
|
||||
val clusterState = clusterStatePlh();
|
||||
val newClusters = clusterState.clusters;
|
||||
newClusters.clear();
|
||||
val clusterCounts = clusterState.clusterCounts;
|
||||
clusterCounts.clear();
|
||||
|
||||
// compute new clusters and counters
|
||||
val points = pointsPlh();
|
||||
|
||||
for (p in 0..(points.numElems_1-1)) {
|
||||
var closest:Long = -1;
|
||||
var closestDist:Float = Float.MAX_VALUE;
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
var dist : Float = 0;
|
||||
for (d in 0..(DIM-1)) {
|
||||
val tmp = points(p,d) - currentClusters(k, d);
|
||||
dist += tmp * tmp;
|
||||
}
|
||||
if (dist < closestDist) {
|
||||
closestDist = dist;
|
||||
closest = k;
|
||||
}
|
||||
}
|
||||
|
||||
atomic {
|
||||
for (d in 0..(DIM-1)) {
|
||||
newClusters(closest,d) += points(p,d);
|
||||
}
|
||||
clusterCounts(closest)++;
|
||||
}
|
||||
}
|
||||
clusterState
|
||||
};
|
||||
|
||||
// combine place clusters to central
|
||||
atomic {
|
||||
for ([i,j] in centralNewClusters.indices()) {
|
||||
centralNewClusters(i,j) += placeClusters.clusters(i,j);
|
||||
}
|
||||
for (j in 0..(CLUSTERS-1)) {
|
||||
centralClusterCounts(j) += placeClusters.clusterCounts(j);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
for (d in 0..(DIM-1)) {
|
||||
centralNewClusters(k, d) /= centralClusterCounts(k);
|
||||
}
|
||||
}
|
||||
|
||||
// TEST FOR CONVERGENCE
|
||||
var b:Boolean = true;
|
||||
for ([i,j] in centralCurrentClusters.indices()) {
|
||||
if (Math.abs(centralCurrentClusters(i, j)-centralNewClusters(i, j)) > 0.0001) {
|
||||
b = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Array.copy(centralNewClusters, centralCurrentClusters);
|
||||
|
||||
if (b) break;
|
||||
|
||||
centralNewClusters.clear();
|
||||
centralClusterCounts.clear();
|
||||
}
|
||||
|
||||
for (d in 0..(DIM-1)) {
|
||||
for (k in 0..(CLUSTERS-1)) {
|
||||
if (k > 0)
|
||||
Console.OUT.print(" ");
|
||||
Console.OUT.print(centralCurrentClusters(k,d));
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// vim: shiftwidth=4:tabstop=4:expandtab
|
||||
192
samples/X10/KMeansSPMD.x10
Normal file
192
samples/X10/KMeansSPMD.x10
Normal file
@@ -0,0 +1,192 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
import x10.io.File;
|
||||
import x10.io.Marshal;
|
||||
import x10.io.IOException;
|
||||
import x10.util.OptionsParser;
|
||||
import x10.util.Option;
|
||||
import x10.util.Team;
|
||||
|
||||
/**
|
||||
* An SPMD formulation of KMeans.
|
||||
*
|
||||
* For a highly optimized and scalable version of this benchmark see
|
||||
* KMeans.x10 in the X10 Benchmarks (separate download from x10-lang.org)
|
||||
*/
|
||||
public class KMeansSPMD {
|
||||
|
||||
public static def printClusters (clusters:Rail[Float], dims:long) {
|
||||
for (d in 0..(dims-1)) {
|
||||
for (k in 0..(clusters.size/dims-1)) {
|
||||
if (k>0)
|
||||
Console.OUT.print(" ");
|
||||
Console.OUT.print(clusters(k*dims+d).toString());
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
public static def main (args:Rail[String]) {here == Place.FIRST_PLACE } {
|
||||
|
||||
val opts = new OptionsParser(args, [
|
||||
Option("q","quiet","just print time taken"),
|
||||
Option("v","verbose","print out each iteration"),
|
||||
Option("h","help","this information")
|
||||
], [
|
||||
Option("p","points","location of data file"),
|
||||
Option("i","iterations","quit after this many iterations"),
|
||||
Option("c","clusters","number of clusters to find"),
|
||||
Option("d","dim","number of dimensions"),
|
||||
Option("s","slices","factor by which to oversubscribe computational resources"),
|
||||
Option("n","num","quantity of points")
|
||||
]);
|
||||
if (opts.filteredArgs().size!=0L) {
|
||||
Console.ERR.println("Unexpected arguments: "+opts.filteredArgs());
|
||||
Console.ERR.println("Use -h or --help.");
|
||||
System.setExitCode(1n);
|
||||
return;
|
||||
}
|
||||
if (opts("-h")) {
|
||||
Console.OUT.println(opts.usage(""));
|
||||
return;
|
||||
}
|
||||
|
||||
val fname = opts("-p", "points.dat");
|
||||
val num_clusters=opts("-c",4);
|
||||
val num_slices=opts("-s",1);
|
||||
val num_global_points=opts("-n", 2000);
|
||||
val iterations=opts("-i",50);
|
||||
val dim=opts("-d", 4);
|
||||
val verbose = opts("-v");
|
||||
val quiet = opts("-q");
|
||||
|
||||
if (!quiet)
|
||||
Console.OUT.println("points: "+num_global_points+" clusters: "+num_clusters+" dim: "+dim);
|
||||
|
||||
// file is dimension-major
|
||||
val file = new File(fname);
|
||||
val fr = file.openRead();
|
||||
val init_points = (long) => Float.fromIntBits(Marshal.INT.read(fr).reverseBytes());
|
||||
val num_file_points = (file.size() / dim / 4) as Int;
|
||||
val file_points = new Rail[Float](num_file_points*dim, init_points);
|
||||
|
||||
val team = Team.WORLD;
|
||||
|
||||
val num_slice_points = num_global_points / num_slices / Place.numPlaces();
|
||||
|
||||
finish {
|
||||
for (h in Place.places()) at(h) async {
|
||||
var compute_time:Long = 0;
|
||||
var comm_time:Long = 0;
|
||||
var barrier_time:Long = 0;
|
||||
|
||||
val host_clusters = new Rail[Float](num_clusters*dim, (i:long)=>file_points(i));
|
||||
val host_cluster_counts = new Rail[Int](num_clusters);
|
||||
|
||||
for (slice in 0..(num_slices-1)) {
|
||||
// carve out local portion of points (point-major)
|
||||
val offset = (slice*Place.numPlaces() + here.id) * num_slice_points;
|
||||
if (verbose)
|
||||
Console.OUT.println(h.toString()+" gets "+offset+" len "+num_slice_points);
|
||||
val init = (i:long) => {
|
||||
val p=i%num_slice_points;
|
||||
val d=i/num_slice_points;
|
||||
return file_points(offset+p+d*num_file_points);
|
||||
};
|
||||
|
||||
// these are pretty big so allocate up front
|
||||
val host_points = new Rail[Float](num_slice_points*dim, init);
|
||||
val host_nearest = new Rail[Float](num_slice_points);
|
||||
|
||||
val start_time = System.currentTimeMillis();
|
||||
|
||||
barrier_time -= System.nanoTime();
|
||||
team.barrier();
|
||||
barrier_time += System.nanoTime();
|
||||
|
||||
main_loop: for (iter in 0..(iterations-1)) {
|
||||
|
||||
//if (offset==0) Console.OUT.println("Iteration: "+iter);
|
||||
|
||||
val old_clusters = new Rail[Float](host_clusters.size);
|
||||
Rail.copy(host_clusters, 0L, old_clusters, 0L, host_clusters.size);
|
||||
|
||||
host_clusters.clear();
|
||||
host_cluster_counts.clear();
|
||||
|
||||
compute_time -= System.nanoTime();
|
||||
for (p in 0..(num_slice_points-1)) {
|
||||
var closest:Long = -1;
|
||||
var closest_dist:Float = Float.MAX_VALUE;
|
||||
for (k in 0..(num_clusters-1)) {
|
||||
var dist : Float = 0;
|
||||
for (d in 0..(dim-1)) {
|
||||
val tmp = host_points(p+d*num_slice_points) - old_clusters(k*dim+d);
|
||||
dist += tmp * tmp;
|
||||
}
|
||||
if (dist < closest_dist) {
|
||||
closest_dist = dist;
|
||||
closest = k;
|
||||
}
|
||||
}
|
||||
for (d in 0..(dim-1)) {
|
||||
host_clusters(closest*dim+d) += host_points(p+d*num_slice_points);
|
||||
}
|
||||
host_cluster_counts(closest)++;
|
||||
}
|
||||
compute_time += System.nanoTime();
|
||||
|
||||
comm_time -= System.nanoTime();
|
||||
team.allreduce(host_clusters, 0L, host_clusters, 0L, host_clusters.size, Team.ADD);
|
||||
team.allreduce(host_cluster_counts, 0L, host_cluster_counts, 0L, host_cluster_counts.size, Team.ADD);
|
||||
comm_time += System.nanoTime();
|
||||
|
||||
for (k in 0..(num_clusters-1)) {
|
||||
for (d in 0..(dim-1)) host_clusters(k*dim+d) /= host_cluster_counts(k);
|
||||
}
|
||||
|
||||
if (offset==0 && verbose) {
|
||||
Console.OUT.println("Iteration: "+iter);
|
||||
printClusters(host_clusters,dim);
|
||||
}
|
||||
|
||||
// TEST FOR CONVERGENCE
|
||||
for (j in 0..(num_clusters*dim-1)) {
|
||||
if (true/*||Math.abs(clusters_old(j)-host_clusters(j))>0.0001*/) continue main_loop;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
} // main_loop
|
||||
|
||||
} // slice
|
||||
|
||||
Console.OUT.printf("%d: computation %.3f s communication %.3f s (barrier %.3f s)\n",
|
||||
here.id, compute_time/1E9, comm_time/1E9, barrier_time/1E9);
|
||||
|
||||
team.barrier();
|
||||
|
||||
if (here.id == 0) {
|
||||
Console.OUT.println("\nFinal results:");
|
||||
printClusters(host_clusters,dim);
|
||||
}
|
||||
|
||||
} // async
|
||||
|
||||
} // finish
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// vim: shiftwidth=4:tabstop=4:expandtab
|
||||
42
samples/X10/MontyPi.x10
Normal file
42
samples/X10/MontyPi.x10
Normal file
@@ -0,0 +1,42 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.array.DistArray_Unique;
|
||||
import x10.io.Console;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* Calculation of an approximation to pi by using a Monte Carlo simulation
|
||||
* (throwing darts into the unit square and determining the fraction that land
|
||||
* in the unit circle).
|
||||
*/
|
||||
public class MontyPi {
|
||||
public static def main(args:Rail[String]) {
|
||||
if (args.size != 1L) {
|
||||
Console.OUT.println("Usage: MontyPi <number of points>");
|
||||
return;
|
||||
}
|
||||
val N = Long.parse(args(0));
|
||||
val initializer = () => {
|
||||
val r = new Random();
|
||||
var result:Long = 0;
|
||||
for(c in 1..N) {
|
||||
val x = r.nextDouble();
|
||||
val y = r.nextDouble();
|
||||
if (x*x +y*y <= 1.0) result++;
|
||||
}
|
||||
result
|
||||
};
|
||||
val result = new DistArray_Unique[Long](Place.places(), initializer);
|
||||
val pi = (4.0*result.reduce((x:Long,y:Long) => x+y, 0) as Double)/(N*Place.numPlaces());
|
||||
Console.OUT.println("The value of pi is " + pi);
|
||||
}
|
||||
}
|
||||
123
samples/X10/NQueensDist.x10
Normal file
123
samples/X10/NQueensDist.x10
Normal file
@@ -0,0 +1,123 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
* (C) Copyright Australian National University 2011.
|
||||
*/
|
||||
|
||||
import x10.array.DistArray_Unique;
|
||||
|
||||
/**
|
||||
* A distributed version of NQueens. Runs over NUM_PLACES.
|
||||
* Identical to NQueensPar, except that work is distributed
|
||||
* over multiple places rather than shared between threads.
|
||||
*/
|
||||
public class NQueensDist {
|
||||
public static val EXPECTED_SOLUTIONS =
|
||||
[0, 1, 0, 0, 2, 10, 4, 40, 92, 352, 724, 2680, 14200, 73712, 365596, 2279184, 14772512];
|
||||
|
||||
val N:Long;
|
||||
val P:Long;
|
||||
val results:DistArray_Unique[Long];
|
||||
val R:LongRange;
|
||||
|
||||
def this(N:Long, P:Long) {
|
||||
this.N=N;
|
||||
this.P=P;
|
||||
this.results = new DistArray_Unique[Long]();
|
||||
this.R = 0..(N-1);
|
||||
}
|
||||
def start() {
|
||||
new Board().distSearch();
|
||||
}
|
||||
def run():Long {
|
||||
finish start();
|
||||
val result = results.reduce(((x:Long,y:Long) => x+y),0);
|
||||
return result;
|
||||
}
|
||||
|
||||
class Board {
|
||||
val q: Rail[Long];
|
||||
/** The number of low-rank positions that are fixed in this board for the purposes of search. */
|
||||
var fixed:Long;
|
||||
def this() {
|
||||
q = new Rail[Long](N);
|
||||
fixed = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if it is safe to put a queen in file <code>j</code>
|
||||
* on the next rank after the last fixed position.
|
||||
*/
|
||||
def safe(j:Long) {
|
||||
for (k in 0..(fixed-1)) {
|
||||
if (j == q(k) || Math.abs(fixed-k) == Math.abs(j-q(k)))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Search all positions for the current board. */
|
||||
def search() {
|
||||
for (k in R) searchOne(k);
|
||||
}
|
||||
|
||||
/**
|
||||
* Modify the current board by adding a new queen
|
||||
* in file <code>k</code> on rank <code>fixed</code>,
|
||||
* and search for all safe positions with this prefix.
|
||||
*/
|
||||
def searchOne(k:Long) {
|
||||
if (safe(k)) {
|
||||
if (fixed==(N-1)) {
|
||||
// all ranks safely filled
|
||||
atomic NQueensDist.this.results(here.id)++;
|
||||
} else {
|
||||
q(fixed++) = k;
|
||||
search();
|
||||
fixed--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search this board, dividing the work between all places
|
||||
* using a block distribution of the current free rank.
|
||||
*/
|
||||
def distSearch() {
|
||||
val work = R.split(Place.numPlaces());
|
||||
finish for (p in Place.places()) {
|
||||
val myPiece = work(p.id);
|
||||
at (p) async {
|
||||
// implicit copy of 'this' made across the at divide
|
||||
for (k in myPiece) {
|
||||
searchOne(k);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Long.parse(args(0)) : 8;
|
||||
Console.OUT.println("N=" + n);
|
||||
//warmup
|
||||
//finish new NQueensPar(12, 1).start();
|
||||
val P = Place.numPlaces();
|
||||
val nq = new NQueensDist(n,P);
|
||||
var start:Long = -System.nanoTime();
|
||||
val answer = nq.run();
|
||||
val result = answer==EXPECTED_SOLUTIONS(n);
|
||||
start += System.nanoTime();
|
||||
start /= 1000000;
|
||||
Console.OUT.println("NQueensDist " + nq.N + "(P=" + P +
|
||||
") has " + answer + " solutions" +
|
||||
(result? " (ok)." : " (wrong).") +
|
||||
"time=" + start + "ms");
|
||||
}
|
||||
}
|
||||
117
samples/X10/NQueensPar.x10
Normal file
117
samples/X10/NQueensPar.x10
Normal file
@@ -0,0 +1,117 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
* (C) Copyright Australian National University 2011.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Compute the number of solutions to the N queens problem.
|
||||
*/
|
||||
public class NQueensPar {
|
||||
public static val EXPECTED_SOLUTIONS =
|
||||
[0, 1, 0, 0, 2, 10, 4, 40, 92, 352, 724, 2680, 14200, 73712, 365596, 2279184, 14772512];
|
||||
|
||||
val N:Int;
|
||||
val P:Int;
|
||||
var nSolutions:Int = 0n;
|
||||
val R:IntRange;
|
||||
|
||||
def this(N:Int, P:Int) {
|
||||
this.N=N;
|
||||
this.P=P;
|
||||
this.R = 0n..(N-1n);
|
||||
}
|
||||
|
||||
def start() {
|
||||
new Board().parSearch();
|
||||
}
|
||||
|
||||
class Board {
|
||||
val q: Rail[Int];
|
||||
/** The number of low-rank positions that are fixed in this board for the purposes of search. */
|
||||
var fixed:Int;
|
||||
def this() {
|
||||
q = new Rail[Int](N);
|
||||
fixed = 0n;
|
||||
}
|
||||
|
||||
def this(b:Board) {
|
||||
this.q = new Rail[Int](b.q);
|
||||
this.fixed = b.fixed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if it is safe to put a queen in file <code>j</code>
|
||||
* on the next rank after the last fixed position.
|
||||
*/
|
||||
def safe(j:Int) {
|
||||
for (k in 0n..(fixed-1n)) {
|
||||
if (j == q(k) || Math.abs(fixed-k) == Math.abs(j-q(k)))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/** Search all positions for the current board. */
|
||||
def search() {
|
||||
for (k in R) searchOne(k);
|
||||
}
|
||||
|
||||
/**
|
||||
* Modify the current board by adding a new queen
|
||||
* in file <code>k</code> on rank <code>fixed</code>,
|
||||
* and search for all safe positions with this prefix.
|
||||
*/
|
||||
def searchOne(k:Int) {
|
||||
if (safe(k)) {
|
||||
if (fixed==(N-1n)) {
|
||||
// all ranks safely filled
|
||||
atomic NQueensPar.this.nSolutions++;
|
||||
} else {
|
||||
q(fixed++) = k;
|
||||
search();
|
||||
fixed--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search this board, dividing the work between threads
|
||||
* using a block distribution of the current free rank.
|
||||
*/
|
||||
def parSearch() {
|
||||
for (work in R.split(P)) async {
|
||||
val board = new Board(this);
|
||||
for (w in work) {
|
||||
board.searchOne(w);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val n = args.size > 0 ? Int.parse(args(0)) : 8n;
|
||||
Console.OUT.println("N=" + n);
|
||||
//warmup
|
||||
//finish new NQueensPar(12, 1).start();
|
||||
val ps = [1n,2n,4n];
|
||||
for (numTasks in ps) {
|
||||
Console.OUT.println("starting " + numTasks + " tasks");
|
||||
val nq = new NQueensPar(n,numTasks);
|
||||
var start:Long = -System.nanoTime();
|
||||
finish nq.start();
|
||||
val result = (nq.nSolutions as Long)==EXPECTED_SOLUTIONS(nq.N);
|
||||
start += System.nanoTime();
|
||||
start /= 1000000;
|
||||
Console.OUT.println("NQueensPar " + nq.N + "(P=" + numTasks +
|
||||
") has " + nq.nSolutions + " solutions" +
|
||||
(result? " (ok)." : " (wrong).") + "time=" + start + "ms");
|
||||
}
|
||||
}
|
||||
}
|
||||
73
samples/X10/QSort.x10
Normal file
73
samples/X10/QSort.x10
Normal file
@@ -0,0 +1,73 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* Straightforward quicksort implementation using
|
||||
* naive partition-in-the-middle and not bothering with
|
||||
* well-known optimizations such as using insertion sort
|
||||
* once the partitions get small. This is only intended
|
||||
* as a simple example of an array-based program that
|
||||
* combines a recirsive divide and conquer algorithm
|
||||
* with async and finish, not as a highly efficient
|
||||
* sorting procedure..
|
||||
*/
|
||||
public class QSort {
|
||||
|
||||
private static def partition(data:Rail[int], left:long, right:long) {
|
||||
var i:long = left;
|
||||
var j:long = right;
|
||||
var tmp:int;
|
||||
var pivot:long = data((left + right) / 2);
|
||||
|
||||
while (i <= j) {
|
||||
while (data(i) < pivot) i++;
|
||||
while (data(j) > pivot) j--;
|
||||
if (i <= j) {
|
||||
tmp = data(i);
|
||||
data(i) = data(j);
|
||||
data(j) = tmp;
|
||||
i++;
|
||||
j--;
|
||||
}
|
||||
}
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
public static def qsort(data:Rail[int], left:long, right:long) {
|
||||
index:long = partition(data, left, right);
|
||||
finish {
|
||||
if (left < index - 1)
|
||||
async qsort(data, left, index - 1);
|
||||
|
||||
if (index < right)
|
||||
qsort(data, index, right);
|
||||
}
|
||||
}
|
||||
|
||||
public static def main(args:Rail[String]) {
|
||||
val N = args.size>0 ? Long.parse(args(0)) : 100;
|
||||
val r = new x10.util.Random();
|
||||
val data = new Rail[int](N, (long)=>r.nextInt(9999n));
|
||||
qsort(data, 0, N-1);
|
||||
for (i in 0..(N-1)) {
|
||||
Console.OUT.print(data(i));
|
||||
if (i%10 == 9) {
|
||||
Console.OUT.println();
|
||||
} else {
|
||||
Console.OUT.print(", ");
|
||||
}
|
||||
}
|
||||
Console.OUT.println();
|
||||
}
|
||||
}
|
||||
|
||||
123
samples/X10/StructSpheres.x10
Normal file
123
samples/X10/StructSpheres.x10
Normal file
@@ -0,0 +1,123 @@
|
||||
/*
|
||||
* This file is part of the X10 project (http://x10-lang.org).
|
||||
*
|
||||
* This file is licensed to You under the Eclipse Public License (EPL);
|
||||
* You may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* http://www.opensource.org/licenses/eclipse-1.0.php
|
||||
*
|
||||
* (C) Copyright IBM Corporation 2006-2014.
|
||||
*/
|
||||
|
||||
import x10.io.Console;
|
||||
import x10.util.Random;
|
||||
|
||||
/**
|
||||
* This class represents a real-world problem in graphics engines --
|
||||
* determining which objects in a large sprawling world are close enough to the
|
||||
* camera to be considered for rendering.
|
||||
*
|
||||
* It illustrates the usage of X10 structs to define new primitive types.
|
||||
* In Native X10, structs are allocated within their containing object/stack frame
|
||||
* and thus using structs instead of classes for Vector3 and WorldObject greatly
|
||||
* improves the memory efficiency of the computation.
|
||||
*
|
||||
* @Author Dave Cunningham
|
||||
* @Author Vijay Saraswat
|
||||
*/
|
||||
class StructSpheres {
|
||||
static type Real = Float;
|
||||
|
||||
static struct Vector3(x:Real, y:Real, z:Real) {
|
||||
public def getX () = x;
|
||||
public def getY () = y;
|
||||
public def getZ () = z;
|
||||
|
||||
public def add (other:Vector3)
|
||||
= Vector3(this.x+other.x, this.y+other.y, this.z+other.z);
|
||||
|
||||
public def neg () = Vector3(-this.x, -this.y, -this.z);
|
||||
|
||||
public def sub (other:Vector3) = add(other.neg());
|
||||
|
||||
public def length () = Math.sqrtf(length2());
|
||||
|
||||
public def length2 () = x*x + y*y + z*z;
|
||||
}
|
||||
|
||||
|
||||
static struct WorldObject {
|
||||
|
||||
def this (x:Real, y:Real, z:Real, r:Real) {
|
||||
pos = Vector3(x,y,z);
|
||||
renderingDistance = r;
|
||||
}
|
||||
|
||||
public def intersects (home:Vector3)
|
||||
= home.sub(pos).length2() < renderingDistance*renderingDistance;
|
||||
|
||||
protected val pos:Vector3;
|
||||
protected val renderingDistance:Real;
|
||||
}
|
||||
|
||||
|
||||
public static def compute():boolean {
|
||||
|
||||
val reps = 7500;
|
||||
|
||||
// The following correspond to a modern out-door computer game:
|
||||
val num_objects = 50000;
|
||||
val world_size = 6000;
|
||||
val obj_max_size = 400;
|
||||
|
||||
val ran = new Random(0);
|
||||
|
||||
// the array can go on the heap
|
||||
// but the elements ought to be /*inlined*/ in the array
|
||||
val spheres =
|
||||
new Rail[WorldObject](num_objects, (i:long) => {
|
||||
val x = (ran.nextDouble()*world_size) as Real;
|
||||
val y = (ran.nextDouble()*world_size) as Real;
|
||||
val z = (ran.nextDouble()*world_size) as Real;
|
||||
val r = (ran.nextDouble()*obj_max_size) as Real;
|
||||
return WorldObject(x,y,z,r);
|
||||
});
|
||||
|
||||
val time_start = System.nanoTime();
|
||||
|
||||
var counter : Long = 0;
|
||||
|
||||
// HOT LOOP BEGINS
|
||||
for (c in 1..reps) {
|
||||
|
||||
val x = (ran.nextDouble()*world_size) as Real;
|
||||
val y = (ran.nextDouble()*world_size) as Real;
|
||||
val z = (ran.nextDouble()*world_size) as Real;
|
||||
|
||||
val pos = Vector3(x,y,z);
|
||||
|
||||
for (i in spheres.range()) {
|
||||
if (spheres(i).intersects(pos)) {
|
||||
counter++;
|
||||
}
|
||||
}
|
||||
}
|
||||
// HOT LOOP ENDS
|
||||
|
||||
val time_taken = System.nanoTime() - time_start;
|
||||
Console.OUT.println("Total time: "+time_taken/1E9);
|
||||
|
||||
val expected = 109702;
|
||||
val ok = counter == expected;
|
||||
if (!ok) {
|
||||
Console.ERR.println("number of intersections: "+counter
|
||||
+" (expected "+expected+")");
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
public static def main (Rail[String]) {
|
||||
compute();
|
||||
}
|
||||
|
||||
}
|
||||
96
samples/XML/JSBrowser.jsproj
Normal file
96
samples/XML/JSBrowser.jsproj
Normal file
@@ -0,0 +1,96 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<!-- MSBuild project for a JavaScript UWP app (TargetPlatformIdentifier is UAP;
     build logic comes from the Microsoft.VisualStudio.$(WMSJSProject) props/targets).
     Debug/Release configurations are declared for AnyCPU, ARM, x64 and x86;
     Release ARM/x64/x86 additionally enable the .NET Native toolchain. -->
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|AnyCPU">
      <Configuration>Debug</Configuration>
      <Platform>AnyCPU</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|ARM">
      <Configuration>Debug</Configuration>
      <Platform>ARM</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x86">
      <Configuration>Debug</Configuration>
      <Platform>x86</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|AnyCPU">
      <Configuration>Release</Configuration>
      <Platform>AnyCPU</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|ARM">
      <Configuration>Release</Configuration>
      <Platform>ARM</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x86">
      <Configuration>Release</Configuration>
      <Platform>x86</Platform>
      <UseDotNetNativeToolchain>true</UseDotNetNativeToolchain>
    </ProjectConfiguration>
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <ProjectGuid>42fc11d8-64c6-4967-a15a-dfd787f68766</ProjectGuid>
  </PropertyGroup>
  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
  <!-- Default to VS 2015 tooling when no VisualStudioVersion is supplied. -->
  <PropertyGroup Condition="'$(VisualStudioVersion)' == '' or '$(VisualStudioVersion)' &lt; '14.0'">
    <VisualStudioVersion>14.0</VisualStudioVersion>
  </PropertyGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).Default.props" />
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).props" />
  <PropertyGroup>
    <EnableDotNetNativeCompatibleProfile>true</EnableDotNetNativeCompatibleProfile>
    <TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
    <TargetPlatformVersion>10.0.10240.0</TargetPlatformVersion>
    <TargetPlatformMinVersion>10.0.10240.0</TargetPlatformMinVersion>
    <MinimumVisualStudioVersion>$(VersionNumberMajor).$(VersionNumberMinor)</MinimumVisualStudioVersion>
    <DefaultLanguage>en-US</DefaultLanguage>
  </PropertyGroup>
  <ItemGroup>
    <AppxManifest Include="package.appxmanifest">
      <SubType>Designer</SubType>
    </AppxManifest>
    <Content Include="css\browser.css" />
    <Content Include="default.html" />
    <Content Include="images\icons.png" />
    <Content Include="images\logo_150x150.png" />
    <Content Include="images\logo_310x150.png" />
    <Content Include="images\logo_310x310.png" />
    <Content Include="images\logo_44x44.png" />
    <Content Include="images\logo_71x71.png" />
    <Content Include="images\logo_badge.png" />
    <Content Include="images\logo_bg.png" />
    <Content Include="images\logo_splash.png" />
    <Content Include="images\logo_store.png" />
    <Content Include="js\components\address-bar.js" />
    <Content Include="js\browser.js" />
    <Content Include="js\components\favorites.js" />
    <Content Include="js\components\navigation.js" />
    <Content Include="js\components\settings.js" />
    <Content Include="js\components\title-bar.js" />
    <Content Include="js\components\webview.js" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\NativeListener\NativeListener.vcxproj" />
  </ItemGroup>
  <Import Project="$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\$(WMSJSProjectDirectory)\Microsoft.VisualStudio.$(WMSJSProject).targets" />
  <!-- To modify your build process, add your task inside one of the targets below then uncomment
       that target and the DisableFastUpToDateCheck PropertyGroup.
       Other similar extension points exist, see Microsoft.Common.targets.
  <Target Name="BeforeBuild">
  </Target>
  <Target Name="AfterBuild">
  </Target>
  <PropertyGroup>
    <DisableFastUpToDateCheck>true</DisableFastUpToDateCheck>
  </PropertyGroup>
  -->
</Project>
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user