mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Compare commits
378 Commits
v4.8.14
...
revert-384
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0698b0f36e | ||
|
|
0f4955e5d5 | ||
|
|
d968b0e9ee | ||
|
|
1f5ed3b3fe | ||
|
|
297be948d1 | ||
|
|
b4492e7205 | ||
|
|
c05bc99004 | ||
|
|
99eaf5faf9 | ||
|
|
21babbceb1 | ||
|
|
15885701cd | ||
|
|
9b942086f7 | ||
|
|
93cd47822f | ||
|
|
ea3e79a631 | ||
|
|
0af9a35ff1 | ||
|
|
44048c9ba8 | ||
|
|
e51b5ec9b7 | ||
|
|
a47008ea00 | ||
|
|
a0b38e8207 | ||
|
|
10dfe9f296 | ||
|
|
0b9c05f989 | ||
|
|
95dca67e2b | ||
|
|
e98728595b | ||
|
|
4cd558c374 | ||
|
|
adf6206ef5 | ||
|
|
c2d558b71d | ||
|
|
78c58f956e | ||
|
|
fc1404985a | ||
|
|
5d48ccd757 | ||
|
|
3530a18e46 | ||
|
|
ae8f4f9228 | ||
|
|
7c34d38786 | ||
|
|
38bc5fd336 | ||
|
|
6b06e47c67 | ||
|
|
061712ff78 | ||
|
|
7707585d5e | ||
|
|
fa7d433886 | ||
|
|
998e24cf36 | ||
|
|
63ff51e2ed | ||
|
|
b541b53b78 | ||
|
|
a878620a8e | ||
|
|
5633fd3668 | ||
|
|
9d0af0da40 | ||
|
|
edabdc75a5 | ||
|
|
6db7736860 | ||
|
|
dd3d8586c5 | ||
|
|
f1daf6192c | ||
|
|
2c41df376f | ||
|
|
3391dcce6a | ||
|
|
f75c5707a6 | ||
|
|
25de4e0ae2 | ||
|
|
7451424f12 | ||
|
|
b94e018c3a | ||
|
|
4dcf223c8e | ||
|
|
329f80d245 | ||
|
|
085604948e | ||
|
|
e60384b018 | ||
|
|
470a82d9f5 | ||
|
|
37979b26b0 | ||
|
|
1a6df12902 | ||
|
|
24e196df4e | ||
|
|
8d178bfaed | ||
|
|
e9ec699931 | ||
|
|
9a6c3f2c4d | ||
|
|
648720301d | ||
|
|
c552e25bd7 | ||
|
|
d5c8db3fb9 | ||
|
|
632bcdc1ad | ||
|
|
6b221172c0 | ||
|
|
6f0d801375 | ||
|
|
128abe3533 | ||
|
|
9312353d20 | ||
|
|
b6460f8ed6 | ||
|
|
60f864a138 | ||
|
|
ca6121e3ea | ||
|
|
7c17b1f10c | ||
|
|
d490fc303f | ||
|
|
20fdac95f6 | ||
|
|
234ee8b6d2 | ||
|
|
58ab593a64 | ||
|
|
ec1f6a4cd6 | ||
|
|
3eea8212f4 | ||
|
|
a1e09ae3e6 | ||
|
|
c1f76c26e5 | ||
|
|
0983f62e02 | ||
|
|
190e54c020 | ||
|
|
ded651159d | ||
|
|
acbab53198 | ||
|
|
fba4babdcd | ||
|
|
eb6a213921 | ||
|
|
5e2c79e950 | ||
|
|
e93f41f097 | ||
|
|
994bc1f135 | ||
|
|
44f03e64c1 | ||
|
|
4166f2e89d | ||
|
|
1a8f19c6f2 | ||
|
|
c0e242358a | ||
|
|
eb38c8dcf8 | ||
|
|
f146b4afbd | ||
|
|
db15d0f5d2 | ||
|
|
e6d57c771d | ||
|
|
eef0335c5f | ||
|
|
461c27c066 | ||
|
|
59d67d6743 | ||
|
|
7aeeb82d3d | ||
|
|
c98ca20076 | ||
|
|
4e0b5f02aa | ||
|
|
8da7cb805e | ||
|
|
e5e81a8560 | ||
|
|
dd53fa1585 | ||
|
|
354a8f079a | ||
|
|
f38d6bd124 | ||
|
|
e80b92e407 | ||
|
|
fa6ae1116f | ||
|
|
b7e27a9f58 | ||
|
|
69ba4c5586 | ||
|
|
c39d7fd6e8 | ||
|
|
44ed47cea1 | ||
|
|
de51cb08d2 | ||
|
|
3dd2d08190 | ||
|
|
3b625e1954 | ||
|
|
5c6f690b97 | ||
|
|
3bbfc907f3 | ||
|
|
053b8bca97 | ||
|
|
7fb3db6203 | ||
|
|
ba09394f85 | ||
|
|
c59c88f16e | ||
|
|
8a6e74799a | ||
|
|
4268769d2e | ||
|
|
6601864084 | ||
|
|
d57aa37fb7 | ||
|
|
e72347fd98 | ||
|
|
1b429ea46b | ||
|
|
9468ad4947 | ||
|
|
733ef63193 | ||
|
|
9ca6a5841e | ||
|
|
41ace5fba0 | ||
|
|
cc4295b3b3 | ||
|
|
1e4ce80fd9 | ||
|
|
74a71fd90d | ||
|
|
9b08318456 | ||
|
|
fa5b6b03dc | ||
|
|
cb59296fe0 | ||
|
|
f1be771611 | ||
|
|
b66fcb2529 | ||
|
|
f7fe1fee66 | ||
|
|
94367cc460 | ||
|
|
72bec1fddc | ||
|
|
4e2eba4ef8 | ||
|
|
10457b6639 | ||
|
|
d58cbc68a6 | ||
|
|
01de40faaa | ||
|
|
62d285fce6 | ||
|
|
0056095e8c | ||
|
|
d6dc3a3991 | ||
|
|
b524461b7c | ||
|
|
76d41697aa | ||
|
|
32147b629e | ||
|
|
e7b5e25bf8 | ||
|
|
d761658f8b | ||
|
|
3719214aba | ||
|
|
47b109be36 | ||
|
|
1ec4db97c2 | ||
|
|
9fe5fe0de2 | ||
|
|
b36ea7ac9d | ||
|
|
625b06c30d | ||
|
|
28bce533b2 | ||
|
|
93ec1922cb | ||
|
|
5d09fb67dd | ||
|
|
93dcb61742 | ||
|
|
3a03594685 | ||
|
|
5ce2c254f9 | ||
|
|
d7814c4899 | ||
|
|
50c08bf29e | ||
|
|
34928baee6 | ||
|
|
27bb41aa4d | ||
|
|
1415f4b52d | ||
|
|
ae8ffcad22 | ||
|
|
f43633bf10 | ||
|
|
a604de9846 | ||
|
|
3e224e0039 | ||
|
|
15b04f86c3 | ||
|
|
42af436c20 | ||
|
|
2b08c66f0b | ||
|
|
f98ab593fb | ||
|
|
f951ec07de | ||
|
|
e9ac71590f | ||
|
|
210cd19876 | ||
|
|
f473c555ac | ||
|
|
48e4394d87 | ||
|
|
e1ce88920d | ||
|
|
675cee1d72 | ||
|
|
1c4baf6dc2 | ||
|
|
8f2820e9cc | ||
|
|
04c268e535 | ||
|
|
ec749b3f8d | ||
|
|
08b63e7033 | ||
|
|
7867b946b9 | ||
|
|
a4d12cc8e4 | ||
|
|
a1165b74b1 | ||
|
|
0fa1fa5581 | ||
|
|
d8b91bd5c4 | ||
|
|
9b941a34f0 | ||
|
|
9d8392dab8 | ||
|
|
2c78dd2c66 | ||
|
|
3988f3e7a7 | ||
|
|
d9a4e831b4 | ||
|
|
45c27f26a2 | ||
|
|
0fbc29bf68 | ||
|
|
5569d2056d | ||
|
|
be262d0b4f | ||
|
|
33ce2d7264 | ||
|
|
c486f56204 | ||
|
|
9f3b7d0ba5 | ||
|
|
79f20e8057 | ||
|
|
cd30c7613c | ||
|
|
5aa53c0711 | ||
|
|
c17cdca896 | ||
|
|
ecdae83364 | ||
|
|
31aafa2c78 | ||
|
|
8a911b8ff3 | ||
|
|
9233f1d17f | ||
|
|
77eb36a982 | ||
|
|
4e6e58a099 | ||
|
|
c87976330f | ||
|
|
0e9109c3fc | ||
|
|
12f9295dd7 | ||
|
|
581723748b | ||
|
|
0980e304b1 | ||
|
|
d46a529b6a | ||
|
|
1d2ec4dbc3 | ||
|
|
829eea0139 | ||
|
|
78b2853d70 | ||
|
|
202f3c08cd | ||
|
|
b958779e3d | ||
|
|
00dc775daf | ||
|
|
009a4e67b6 | ||
|
|
faaa4470af | ||
|
|
2a320cb988 | ||
|
|
74931d1bd5 | ||
|
|
3ca93a84b9 | ||
|
|
aa27f18ea6 | ||
|
|
d3e2ea3f71 | ||
|
|
53aa1209ab | ||
|
|
b2a486fed2 | ||
|
|
4f1e5c34b1 | ||
|
|
85c9833081 | ||
|
|
33899b9d6b | ||
|
|
417239004a | ||
|
|
6a1423d28f | ||
|
|
96a23ce388 | ||
|
|
e8d7eed3aa | ||
|
|
9d419c4ab9 | ||
|
|
4eefc1f58e | ||
|
|
0b94b9cda7 | ||
|
|
c736038d94 | ||
|
|
ec562138f8 | ||
|
|
50013e8dd7 | ||
|
|
416c5d1185 | ||
|
|
8869912d31 | ||
|
|
43fa563b77 | ||
|
|
41c6aee8c3 | ||
|
|
8cf575c37d | ||
|
|
4e20928e04 | ||
|
|
3e37bd2680 | ||
|
|
a29f5b2d46 | ||
|
|
4efc6f8c95 | ||
|
|
359699c454 | ||
|
|
346aa99fcf | ||
|
|
d147778677 | ||
|
|
e520209e49 | ||
|
|
338cc16239 | ||
|
|
67ea35094b | ||
|
|
6f0393fcbd | ||
|
|
2923d50d7e | ||
|
|
4e26f609ef | ||
|
|
e86d6e8dd2 | ||
|
|
5fa02ad1fb | ||
|
|
5a06240f69 | ||
|
|
d6e0f74c80 | ||
|
|
a5c08bb203 | ||
|
|
c6dc29abb1 | ||
|
|
ffd984bb7e | ||
|
|
dc5473559b | ||
|
|
8e9c224952 | ||
|
|
d43f111723 | ||
|
|
de9ff713a4 | ||
|
|
98783560ec | ||
|
|
8f31fbbd55 | ||
|
|
e4cdbd2b2b | ||
|
|
ba52e48ceb | ||
|
|
a44ebe493b | ||
|
|
eb0e75e11e | ||
|
|
22c2cf4967 | ||
|
|
39e3688fb8 | ||
|
|
6b83e5fb7b | ||
|
|
dd2e5ffe07 | ||
|
|
f6b6c4e165 | ||
|
|
608ed60b5c | ||
|
|
2ce2945058 | ||
|
|
c8d376754e | ||
|
|
ecaef91fa1 | ||
|
|
d265b78e7e | ||
|
|
5a5bf7d5e5 | ||
|
|
e46781b903 | ||
|
|
9543a8c8e9 | ||
|
|
6ac1ac9232 | ||
|
|
1bbb919fef | ||
|
|
71dfed0e45 | ||
|
|
a2db058ce4 | ||
|
|
12695fee2f | ||
|
|
4a775dca37 | ||
|
|
d7c689fd6b | ||
|
|
20b8188384 | ||
|
|
26310d9515 | ||
|
|
e38cc75da5 | ||
|
|
8d55fc1bd5 | ||
|
|
7e63399196 | ||
|
|
520e5a5cfe | ||
|
|
5d85692c24 | ||
|
|
676861fff3 | ||
|
|
6589bd9dc7 | ||
|
|
e32a4f13ef | ||
|
|
4e4d851f71 | ||
|
|
a3628f86da | ||
|
|
fe70965906 | ||
|
|
c863435c84 | ||
|
|
eeec48198a | ||
|
|
82167063da | ||
|
|
3ae89b48ba | ||
|
|
cd9401c424 | ||
|
|
e7e8a7d835 | ||
|
|
7654032d2e | ||
|
|
05b536fc61 | ||
|
|
ebe85788ab | ||
|
|
524337d07b | ||
|
|
f8ce42e169 | ||
|
|
71032cd252 | ||
|
|
41593b3ea7 | ||
|
|
bed8add2f5 | ||
|
|
e424e8e88c | ||
|
|
07d4f218a3 | ||
|
|
67ed060d37 | ||
|
|
3abe081560 | ||
|
|
d3f3c0345c | ||
|
|
855f1a1f86 | ||
|
|
0406a5b326 | ||
|
|
0108ef4386 | ||
|
|
daefff86ff | ||
|
|
fdb962518f | ||
|
|
6564078061 | ||
|
|
39ea9be5f8 | ||
|
|
152b5ade5e | ||
|
|
c525e3fbef | ||
|
|
88c74fa9c2 | ||
|
|
6a54ee767f | ||
|
|
82af10e3fd | ||
|
|
63c8d2284c | ||
|
|
b61fe90d12 | ||
|
|
e6c849d92c | ||
|
|
3247d46e81 | ||
|
|
be316c2943 | ||
|
|
68c45be47d | ||
|
|
4584963dd2 | ||
|
|
f382abc2f3 | ||
|
|
9d57e1e1b5 | ||
|
|
2a4150b104 | ||
|
|
09612ae42e | ||
|
|
49e9ee48d0 | ||
|
|
a8719f3e82 | ||
|
|
00647be113 | ||
|
|
48b64c2d31 | ||
|
|
f95365946c | ||
|
|
b056df06f4 | ||
|
|
6bf223e641 | ||
|
|
fa817b6a1d | ||
|
|
789607d9bc | ||
|
|
d46530989c | ||
|
|
3c5bcb434c |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,3 +1,4 @@
|
||||
*.gem
|
||||
/Gemfile.lock
|
||||
.bundle/
|
||||
.idea
|
||||
|
||||
174
.gitmodules
vendored
174
.gitmodules
vendored
@@ -67,9 +67,6 @@
|
||||
[submodule "vendor/grammars/language-javascript"]
|
||||
path = vendor/grammars/language-javascript
|
||||
url = https://github.com/atom/language-javascript
|
||||
[submodule "vendor/grammars/language-python"]
|
||||
path = vendor/grammars/language-python
|
||||
url = https://github.com/atom/language-python
|
||||
[submodule "vendor/grammars/language-shellscript"]
|
||||
path = vendor/grammars/language-shellscript
|
||||
url = https://github.com/atom/language-shellscript
|
||||
@@ -115,9 +112,6 @@
|
||||
[submodule "vendor/grammars/fancy-tmbundle"]
|
||||
path = vendor/grammars/fancy-tmbundle
|
||||
url = https://github.com/fancy-lang/fancy-tmbundle
|
||||
[submodule "vendor/grammars/dart-sublime-bundle"]
|
||||
path = vendor/grammars/dart-sublime-bundle
|
||||
url = https://github.com/guillermooo/dart-sublime-bundle
|
||||
[submodule "vendor/grammars/sublimetext-cuda-cpp"]
|
||||
path = vendor/grammars/sublimetext-cuda-cpp
|
||||
url = https://github.com/harrism/sublimetext-cuda-cpp
|
||||
@@ -130,9 +124,6 @@
|
||||
[submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"]
|
||||
path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL
|
||||
url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL
|
||||
[submodule "vendor/grammars/sublime-rust"]
|
||||
path = vendor/grammars/sublime-rust
|
||||
url = https://github.com/jhasse/sublime-rust
|
||||
[submodule "vendor/grammars/sublime-befunge"]
|
||||
path = vendor/grammars/sublime-befunge
|
||||
url = https://github.com/johanasplund/sublime-befunge
|
||||
@@ -178,9 +169,6 @@
|
||||
[submodule "vendor/grammars/Agda.tmbundle"]
|
||||
path = vendor/grammars/Agda.tmbundle
|
||||
url = https://github.com/mokus0/Agda.tmbundle
|
||||
[submodule "vendor/grammars/Julia.tmbundle"]
|
||||
path = vendor/grammars/Julia.tmbundle
|
||||
url = https://github.com/nanoant/Julia.tmbundle
|
||||
[submodule "vendor/grammars/ooc.tmbundle"]
|
||||
path = vendor/grammars/ooc.tmbundle
|
||||
url = https://github.com/nilium/ooc.tmbundle
|
||||
@@ -202,9 +190,6 @@
|
||||
[submodule "vendor/grammars/sublime-robot-plugin"]
|
||||
path = vendor/grammars/sublime-robot-plugin
|
||||
url = https://github.com/shellderp/sublime-robot-plugin
|
||||
[submodule "vendor/grammars/actionscript3-tmbundle"]
|
||||
path = vendor/grammars/actionscript3-tmbundle
|
||||
url = https://github.com/honzabrecka/actionscript3-tmbundle
|
||||
[submodule "vendor/grammars/Sublime-QML"]
|
||||
path = vendor/grammars/Sublime-QML
|
||||
url = https://github.com/skozlovf/Sublime-QML
|
||||
@@ -250,9 +235,6 @@
|
||||
[submodule "vendor/grammars/cpp-qt.tmbundle"]
|
||||
path = vendor/grammars/cpp-qt.tmbundle
|
||||
url = https://github.com/textmate/cpp-qt.tmbundle
|
||||
[submodule "vendor/grammars/css.tmbundle"]
|
||||
path = vendor/grammars/css.tmbundle
|
||||
url = https://github.com/textmate/css.tmbundle
|
||||
[submodule "vendor/grammars/d.tmbundle"]
|
||||
path = vendor/grammars/d.tmbundle
|
||||
url = https://github.com/textmate/d.tmbundle
|
||||
@@ -328,9 +310,6 @@
|
||||
[submodule "vendor/grammars/nemerle.tmbundle"]
|
||||
path = vendor/grammars/nemerle.tmbundle
|
||||
url = https://github.com/textmate/nemerle.tmbundle
|
||||
[submodule "vendor/grammars/ninja.tmbundle"]
|
||||
path = vendor/grammars/ninja.tmbundle
|
||||
url = https://github.com/textmate/ninja.tmbundle
|
||||
[submodule "vendor/grammars/objective-c.tmbundle"]
|
||||
path = vendor/grammars/objective-c.tmbundle
|
||||
url = https://github.com/textmate/objective-c.tmbundle
|
||||
@@ -358,9 +337,6 @@
|
||||
[submodule "vendor/grammars/r.tmbundle"]
|
||||
path = vendor/grammars/r.tmbundle
|
||||
url = https://github.com/textmate/r.tmbundle
|
||||
[submodule "vendor/grammars/ruby-haml.tmbundle"]
|
||||
path = vendor/grammars/ruby-haml.tmbundle
|
||||
url = https://github.com/textmate/ruby-haml.tmbundle
|
||||
[submodule "vendor/grammars/scheme.tmbundle"]
|
||||
path = vendor/grammars/scheme.tmbundle
|
||||
url = https://github.com/textmate/scheme.tmbundle
|
||||
@@ -405,7 +381,7 @@
|
||||
url = https://github.com/textmate/c.tmbundle
|
||||
[submodule "vendor/grammars/zephir-sublime"]
|
||||
path = vendor/grammars/zephir-sublime
|
||||
url = https://github.com/vmg/zephir-sublime
|
||||
url = https://github.com/phalcon/zephir-sublime
|
||||
[submodule "vendor/grammars/llvm.tmbundle"]
|
||||
path = vendor/grammars/llvm.tmbundle
|
||||
url = https://github.com/whitequark/llvm.tmbundle
|
||||
@@ -421,10 +397,6 @@
|
||||
[submodule "vendor/grammars/sublime_cobol"]
|
||||
path = vendor/grammars/sublime_cobol
|
||||
url = https://bitbucket.org/bitlang/sublime_cobol
|
||||
[submodule "vendor/grammars/ruby.tmbundle"]
|
||||
path = vendor/grammars/ruby.tmbundle
|
||||
url = https://github.com/aroben/ruby.tmbundle
|
||||
branch = pl
|
||||
[submodule "vendor/grammars/IDL-Syntax"]
|
||||
path = vendor/grammars/IDL-Syntax
|
||||
url = https://github.com/andik/IDL-Syntax
|
||||
@@ -452,9 +424,6 @@
|
||||
[submodule "vendor/grammars/Sublime-Nit"]
|
||||
path = vendor/grammars/Sublime-Nit
|
||||
url = https://github.com/R4PaSs/Sublime-Nit
|
||||
[submodule "vendor/grammars/language-hy"]
|
||||
path = vendor/grammars/language-hy
|
||||
url = https://github.com/rwtolbert/language-hy
|
||||
[submodule "vendor/grammars/Racket"]
|
||||
path = vendor/grammars/Racket
|
||||
url = https://github.com/soegaard/racket-highlight-for-github
|
||||
@@ -593,9 +562,6 @@
|
||||
[submodule "vendor/grammars/sublime-aspectj"]
|
||||
path = vendor/grammars/sublime-aspectj
|
||||
url = https://github.com/pchaigno/sublime-aspectj
|
||||
[submodule "vendor/grammars/sublime-typescript"]
|
||||
path = vendor/grammars/sublime-typescript
|
||||
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
|
||||
[submodule "vendor/grammars/sublime-pony"]
|
||||
path = vendor/grammars/sublime-pony
|
||||
url = https://github.com/CausalityLtd/sublime-pony
|
||||
@@ -632,9 +598,6 @@
|
||||
[submodule "vendor/grammars/language-yang"]
|
||||
path = vendor/grammars/language-yang
|
||||
url = https://github.com/DzonyKalafut/language-yang.git
|
||||
[submodule "vendor/grammars/perl6fe"]
|
||||
path = vendor/grammars/perl6fe
|
||||
url = https://github.com/MadcapJake/language-perl6fe.git
|
||||
[submodule "vendor/grammars/language-less"]
|
||||
path = vendor/grammars/language-less
|
||||
url = https://github.com/atom/language-less.git
|
||||
@@ -779,9 +742,6 @@
|
||||
[submodule "vendor/grammars/vhdl"]
|
||||
path = vendor/grammars/vhdl
|
||||
url = https://github.com/textmate/vhdl.tmbundle
|
||||
[submodule "vendor/grammars/xquery"]
|
||||
path = vendor/grammars/xquery
|
||||
url = https://github.com/textmate/xquery.tmbundle
|
||||
[submodule "vendor/grammars/language-rpm-spec"]
|
||||
path = vendor/grammars/language-rpm-spec
|
||||
url = https://github.com/waveclaw/language-rpm-spec
|
||||
@@ -791,3 +751,135 @@
|
||||
[submodule "vendor/grammars/language-babel"]
|
||||
path = vendor/grammars/language-babel
|
||||
url = https://github.com/github-linguist/language-babel
|
||||
[submodule "vendor/CodeMirror"]
|
||||
path = vendor/CodeMirror
|
||||
url = https://github.com/codemirror/CodeMirror
|
||||
[submodule "vendor/grammars/MQL5-sublime"]
|
||||
path = vendor/grammars/MQL5-sublime
|
||||
url = https://github.com/mqsoft/MQL5-sublime
|
||||
[submodule "vendor/grammars/actionscript3-tmbundle"]
|
||||
path = vendor/grammars/actionscript3-tmbundle
|
||||
url = https://github.com/simongregory/actionscript3-tmbundle
|
||||
[submodule "vendor/grammars/ABNF.tmbundle"]
|
||||
path = vendor/grammars/ABNF.tmbundle
|
||||
url = https://github.com/sanssecours/ABNF.tmbundle
|
||||
[submodule "vendor/grammars/EBNF.tmbundle"]
|
||||
path = vendor/grammars/EBNF.tmbundle
|
||||
url = https://github.com/sanssecours/EBNF.tmbundle
|
||||
[submodule "vendor/grammars/language-haml"]
|
||||
path = vendor/grammars/language-haml
|
||||
url = https://github.com/ezekg/language-haml
|
||||
[submodule "vendor/grammars/language-ninja"]
|
||||
path = vendor/grammars/language-ninja
|
||||
url = https://github.com/khyo/language-ninja
|
||||
[submodule "vendor/grammars/language-fontforge"]
|
||||
path = vendor/grammars/language-fontforge
|
||||
url = https://github.com/Alhadis/language-fontforge
|
||||
[submodule "vendor/grammars/language-gn"]
|
||||
path = vendor/grammars/language-gn
|
||||
url = https://github.com/devoncarew/language-gn
|
||||
[submodule "vendor/grammars/rascal-syntax-highlighting"]
|
||||
path = vendor/grammars/rascal-syntax-highlighting
|
||||
url = https://github.com/usethesource/rascal-syntax-highlighting
|
||||
[submodule "vendor/grammars/atom-language-perl6"]
|
||||
path = vendor/grammars/atom-language-perl6
|
||||
url = https://github.com/perl6/atom-language-perl6
|
||||
[submodule "vendor/grammars/language-xcompose"]
|
||||
path = vendor/grammars/language-xcompose
|
||||
url = https://github.com/samcv/language-xcompose
|
||||
[submodule "vendor/grammars/SublimeEthereum"]
|
||||
path = vendor/grammars/SublimeEthereum
|
||||
url = https://github.com/davidhq/SublimeEthereum.git
|
||||
[submodule "vendor/grammars/atom-language-rust"]
|
||||
path = vendor/grammars/atom-language-rust
|
||||
url = https://github.com/zargony/atom-language-rust
|
||||
[submodule "vendor/grammars/language-css"]
|
||||
path = vendor/grammars/language-css
|
||||
url = https://github.com/atom/language-css
|
||||
[submodule "vendor/grammars/language-regexp"]
|
||||
path = vendor/grammars/language-regexp
|
||||
url = https://github.com/Alhadis/language-regexp
|
||||
[submodule "vendor/grammars/Terraform.tmLanguage"]
|
||||
path = vendor/grammars/Terraform.tmLanguage
|
||||
url = https://github.com/alexlouden/Terraform.tmLanguage
|
||||
[submodule "vendor/grammars/shaders-tmLanguage"]
|
||||
path = vendor/grammars/shaders-tmLanguage
|
||||
url = https://github.com/tgjones/shaders-tmLanguage
|
||||
[submodule "vendor/grammars/language-meson"]
|
||||
path = vendor/grammars/language-meson
|
||||
url = https://github.com/TingPing/language-meson
|
||||
[submodule "vendor/grammars/atom-language-p4"]
|
||||
path = vendor/grammars/atom-language-p4
|
||||
url = https://github.com/TakeshiTseng/atom-language-p4
|
||||
[submodule "vendor/grammars/language-jison"]
|
||||
path = vendor/grammars/language-jison
|
||||
url = https://github.com/cdibbs/language-jison
|
||||
[submodule "vendor/grammars/openscad.tmbundle"]
|
||||
path = vendor/grammars/openscad.tmbundle
|
||||
url = https://github.com/tbuser/openscad.tmbundle
|
||||
[submodule "vendor/grammars/marko-tmbundle"]
|
||||
path = vendor/grammars/marko-tmbundle
|
||||
url = https://github.com/marko-js/marko-tmbundle
|
||||
[submodule "vendor/grammars/language-jolie"]
|
||||
path = vendor/grammars/language-jolie
|
||||
url = https://github.com/fmontesi/language-jolie
|
||||
[submodule "vendor/grammars/language-typelanguage"]
|
||||
path = vendor/grammars/language-typelanguage
|
||||
url = https://github.com/goodmind/language-typelanguage
|
||||
[submodule "vendor/grammars/sublime-shen"]
|
||||
path = vendor/grammars/sublime-shen
|
||||
url = https://github.com/rkoeninger/sublime-shen
|
||||
[submodule "vendor/grammars/Sublime-Pep8"]
|
||||
path = vendor/grammars/Sublime-Pep8
|
||||
url = https://github.com/R4PaSs/Sublime-Pep8
|
||||
[submodule "vendor/grammars/dartlang"]
|
||||
path = vendor/grammars/dartlang
|
||||
url = https://github.com/dart-atom/dartlang
|
||||
[submodule "vendor/grammars/language-closure-templates"]
|
||||
path = vendor/grammars/language-closure-templates
|
||||
url = https://github.com/mthadley/language-closure-templates
|
||||
[submodule "vendor/grammars/language-webassembly"]
|
||||
path = vendor/grammars/language-webassembly
|
||||
url = https://github.com/Alhadis/language-webassembly
|
||||
[submodule "vendor/grammars/language-ring"]
|
||||
path = vendor/grammars/language-ring
|
||||
url = https://github.com/MahmoudFayed/atom-language-ring
|
||||
[submodule "vendor/grammars/sublime-fantom"]
|
||||
path = vendor/grammars/sublime-fantom
|
||||
url = https://github.com/rkoeninger/sublime-fantom
|
||||
[submodule "vendor/grammars/language-pan"]
|
||||
path = vendor/grammars/language-pan
|
||||
url = https://github.com/quattor/language-pan
|
||||
[submodule "vendor/grammars/language-pcb"]
|
||||
path = vendor/grammars/language-pcb
|
||||
url = https://github.com/Alhadis/language-pcb
|
||||
[submodule "vendor/grammars/language-reason"]
|
||||
path = vendor/grammars/language-reason
|
||||
url = https://github.com/reasonml-editor/language-reason
|
||||
[submodule "vendor/grammars/sublime-nearley"]
|
||||
path = vendor/grammars/sublime-nearley
|
||||
url = https://github.com/Hardmath123/sublime-nearley
|
||||
[submodule "vendor/grammars/data-weave-tmLanguage"]
|
||||
path = vendor/grammars/data-weave-tmLanguage
|
||||
url = https://github.com/mulesoft-labs/data-weave-tmLanguage
|
||||
[submodule "vendor/grammars/squirrel-language"]
|
||||
path = vendor/grammars/squirrel-language
|
||||
url = https://github.com/mathewmariani/squirrel-language
|
||||
[submodule "vendor/grammars/language-ballerina"]
|
||||
path = vendor/grammars/language-ballerina
|
||||
url = https://github.com/ballerinalang/plugin-vscode
|
||||
[submodule "vendor/grammars/language-ruby"]
|
||||
path = vendor/grammars/language-ruby
|
||||
url = https://github.com/atom/language-ruby
|
||||
[submodule "vendor/grammars/sublime-angelscript"]
|
||||
path = vendor/grammars/sublime-angelscript
|
||||
url = https://github.com/wronex/sublime-angelscript
|
||||
[submodule "vendor/grammars/TypeScript-TmLanguage"]
|
||||
path = vendor/grammars/TypeScript-TmLanguage
|
||||
url = https://github.com/Microsoft/TypeScript-TmLanguage
|
||||
[submodule "vendor/grammars/wdl-sublime-syntax-highlighter"]
|
||||
path = vendor/grammars/wdl-sublime-syntax-highlighter
|
||||
url = https://github.com/broadinstitute/wdl-sublime-syntax-highlighter
|
||||
[submodule "vendor/grammars/atom-language-julia"]
|
||||
path = vendor/grammars/atom-language-julia
|
||||
url = https://github.com/JuliaEditorSupport/atom-language-julia
|
||||
|
||||
14
.travis.yml
14
.travis.yml
@@ -1,20 +1,32 @@
|
||||
language: ruby
|
||||
sudo: false
|
||||
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libicu-dev
|
||||
- libicu48
|
||||
|
||||
before_install: script/travis/before_install
|
||||
|
||||
script:
|
||||
- bundle exec rake
|
||||
- script/licensed verify
|
||||
|
||||
rvm:
|
||||
- 2.0.0
|
||||
- 2.1
|
||||
- 2.2
|
||||
- 2.3.3
|
||||
- 2.4.0
|
||||
|
||||
notifications:
|
||||
disabled: true
|
||||
|
||||
git:
|
||||
submodules: false
|
||||
depth: 3
|
||||
|
||||
cache: bundler
|
||||
dist: precise
|
||||
|
||||
bundler_args: --without debug
|
||||
|
||||
@@ -10,15 +10,15 @@ We try only to add new extensions once they have some usage on GitHub. In most c
|
||||
|
||||
To add support for a new extension:
|
||||
|
||||
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
|
||||
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
|
||||
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
1. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
|
||||
1. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
|
||||
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
|
||||
In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:
|
||||
|
||||
0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
|
||||
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
|
||||
0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
|
||||
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
|
||||
1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
|
||||
|
||||
## Adding a language
|
||||
@@ -27,20 +27,17 @@ We try only to add languages once they have some usage on GitHub. In most cases
|
||||
|
||||
To add support for a new language:
|
||||
|
||||
0. Add an entry for your language to [`languages.yml`][languages].
|
||||
0. Add a grammar for your language. Please only add grammars that have [one of these licenses](https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11).
|
||||
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
|
||||
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
|
||||
0. Download the license for the grammar: `script/licensed`. Be careful to only commit the file for the new grammar, as this script may update licenses for other grammars as well.
|
||||
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
|
||||
0. Add a `language_id` for your language. See `script/set-language-ids` for more information. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
|
||||
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
1. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now.
|
||||
1. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses].
|
||||
1. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
|
||||
1. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
|
||||
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
|
||||
In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:
|
||||
|
||||
0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
|
||||
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
|
||||
0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
|
||||
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
|
||||
1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
|
||||
Remember, the goal here is to try and avoid false positives!
|
||||
|
||||
@@ -70,6 +67,16 @@ For development you are going to want to checkout out the source. To get it, clo
|
||||
cd linguist/
|
||||
script/bootstrap
|
||||
|
||||
To run Linguist from the cloned repository, you will need to generate the code samples first:
|
||||
|
||||
bundle exec rake samples
|
||||
|
||||
Run this command each time a [sample][samples] has been modified.
|
||||
|
||||
To run Linguist from the cloned repository:
|
||||
|
||||
bundle exec bin/linguist --breakdown
|
||||
|
||||
To run the tests:
|
||||
|
||||
bundle exec rake test
|
||||
@@ -82,9 +89,15 @@ Here's our current build status: [
|
||||
- @larsbrinkhoff
|
||||
- @pchaigno
|
||||
- **@Alhadis**
|
||||
- **@BenEddy** (GitHub staff)
|
||||
- **@Caged** (GitHub staff)
|
||||
- **@grantr** (GitHub staff)
|
||||
- **@larsbrinkhoff**
|
||||
- **@lildude** (GitHub staff)
|
||||
- **@pchaigno**
|
||||
- **@rafer** (GitHub staff)
|
||||
- **@shreyasjoshis** (GitHub staff)
|
||||
|
||||
As Linguist is a production dependency for GitHub we have a couple of workflow restrictions:
|
||||
|
||||
@@ -95,23 +108,24 @@ As Linguist is a production dependency for GitHub we have a couple of workflow r
|
||||
|
||||
If you are the current maintainer of this gem:
|
||||
|
||||
0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
|
||||
0. Make sure your local dependencies are up to date: `script/bootstrap`
|
||||
0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
|
||||
0. Ensure that samples are updated: `bundle exec rake samples`
|
||||
0. Ensure that tests are green: `bundle exec rake test`
|
||||
0. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
|
||||
0. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
|
||||
0. Build a local gem: `bundle exec rake build_gem`
|
||||
0. Test the gem:
|
||||
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
||||
0. Install the new gem locally
|
||||
0. Test behavior locally, branch deploy, whatever needs to happen
|
||||
0. Merge github/linguist PR
|
||||
0. Tag and push: `git tag vx.xx.xx; git push --tags`
|
||||
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`
|
||||
1. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
|
||||
1. Make sure your local dependencies are up to date: `script/bootstrap`
|
||||
1. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
|
||||
1. Ensure that samples are updated: `bundle exec rake samples`
|
||||
1. Ensure that tests are green: `bundle exec rake test`
|
||||
1. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
|
||||
1. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
|
||||
1. Build a local gem: `bundle exec rake build_gem`
|
||||
1. Test the gem:
|
||||
1. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
||||
1. Install the new gem locally
|
||||
1. Test behavior locally, branch deploy, whatever needs to happen
|
||||
1. Merge github/linguist PR
|
||||
1. Tag and push: `git tag vx.xx.xx; git push --tags`
|
||||
1. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`
|
||||
|
||||
[grammars]: /grammars.yml
|
||||
[languages]: /lib/linguist/languages.yml
|
||||
[licenses]: https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11
|
||||
[samples]: /samples
|
||||
[new-issue]: https://github.com/github/linguist/issues/new
|
||||
|
||||
5
Gemfile
5
Gemfile
@@ -1,3 +1,6 @@
|
||||
source 'https://rubygems.org'
|
||||
gemspec :name => "github-linguist"
|
||||
gem 'byebug' if RUBY_VERSION >= '2.0'
|
||||
|
||||
group :debug do
|
||||
gem 'byebug' if RUBY_VERSION >= '2.2'
|
||||
end
|
||||
|
||||
2
LICENSE
2
LICENSE
@@ -1,4 +1,4 @@
|
||||
Copyright (c) 2011-2016 GitHub, Inc.
|
||||
Copyright (c) 2017 GitHub, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
|
||||
31
README.md
31
README.md
@@ -15,10 +15,16 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md
|
||||
|
||||
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:
|
||||
|
||||
0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
|
||||
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
|
||||
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
|
||||
1. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
|
||||
1. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
1. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
|
||||
1. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
|
||||
|
||||
### There's a problem with the syntax highlighting of a file
|
||||
|
||||
Linguist detects the language of a file but the actual syntax-highlighting is powered by a set of language grammars which are included in this project as a set of submodules [and may be found here](https://github.com/github/linguist/blob/master/vendor/README.md).
|
||||
|
||||
If you experience an issue with the syntax-highlighting on GitHub, **please report the issue to the upstream grammar repository, not here.** Grammars are updated every time we build the Linguist gem and so upstream bug fixes are automatically incorporated as they are fixed.
|
||||
|
||||
## Overrides
|
||||
|
||||
@@ -26,13 +32,15 @@ Linguist supports a number of different custom overrides strategies for language
|
||||
|
||||
### Using gitattributes
|
||||
|
||||
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-or-vim-modelines).
|
||||
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, `linguist-vendored`, and `linguist-generated`. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines).
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
```
|
||||
|
||||
#### Vendored code
|
||||
|
||||
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.
|
||||
|
||||
Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
@@ -43,6 +51,8 @@ special-vendored-path/* linguist-vendored
|
||||
jquery.js linguist-vendored=false
|
||||
```
|
||||
|
||||
#### Documentation
|
||||
|
||||
Just like vendored files, Linguist excludes documentation files from your project's language stats. [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.
|
||||
|
||||
Use the `linguist-documentation` attribute to mark or unmark paths as documentation.
|
||||
@@ -53,19 +63,18 @@ project-docs/* linguist-documentation
|
||||
docs/formatter.rb linguist-documentation=false
|
||||
```
|
||||
|
||||
#### Generated file detection
|
||||
#### Generated code
|
||||
|
||||
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an added bonus, unlike vendored and documentation files, these files are suppressed in diffs.
|
||||
|
||||
```ruby
|
||||
Linguist::FileBlob.new("underscore.min.js").generated? # => true
|
||||
```
|
||||
|
||||
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).
|
||||
$ cat .gitattributes
|
||||
Api.elm linguist-generated=true
|
||||
```
|
||||
|
||||
### Using Emacs or Vim modelines
|
||||
|
||||
Alternatively, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com
|
||||
If you do not want to use `.gitattributes` to override the syntax highlighting used on GitHub.com, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com
|
||||
|
||||
##### Vim
|
||||
```
|
||||
|
||||
1
Rakefile
1
Rakefile
@@ -4,6 +4,7 @@ require 'rake/testtask'
|
||||
require 'yaml'
|
||||
require 'yajl'
|
||||
require 'open-uri'
|
||||
require 'json'
|
||||
|
||||
task :default => :test
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'optparse'
|
||||
@@ -102,10 +104,16 @@ def git_linguist(args)
|
||||
commit = nil
|
||||
|
||||
parser = OptionParser.new do |opts|
|
||||
opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
opts.banner = <<-HELP
|
||||
Linguist v#{Linguist::VERSION}
|
||||
Detect language type and determine language breakdown for a given Git repository.
|
||||
|
||||
Usage:
|
||||
git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
HELP
|
||||
|
||||
opts.on("-f", "--force", "Force a full rescan") { incremental = false }
|
||||
opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
opts.on("-c", "--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
end
|
||||
|
||||
parser.parse!(args)
|
||||
|
||||
35
bin/linguist
35
bin/linguist
@@ -1,29 +1,37 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
# linguist — detect language type for a file, or, given a directory, determine language breakdown
|
||||
# usage: linguist <path> [<--breakdown>]
|
||||
#
|
||||
$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'json'
|
||||
require 'optparse'
|
||||
|
||||
path = ARGV[0] || Dir.pwd
|
||||
|
||||
# special case if not given a directory but still given the --breakdown option
|
||||
# special case if not given a directory
|
||||
# but still given the --breakdown or --json options/
|
||||
if path == "--breakdown"
|
||||
path = Dir.pwd
|
||||
breakdown = true
|
||||
elsif path == "--json"
|
||||
path = Dir.pwd
|
||||
json_breakdown = true
|
||||
end
|
||||
|
||||
ARGV.shift
|
||||
breakdown = true if ARGV[0] == "--breakdown"
|
||||
json_breakdown = true if ARGV[0] == "--json"
|
||||
|
||||
if File.directory?(path)
|
||||
rugged = Rugged::Repository.new(path)
|
||||
repo = Linguist::Repository.new(rugged, rugged.head.target_id)
|
||||
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
|
||||
percentage = ((size / repo.size.to_f) * 100)
|
||||
percentage = sprintf '%.2f' % percentage
|
||||
puts "%-7s %s" % ["#{percentage}%", language]
|
||||
if !json_breakdown
|
||||
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
|
||||
percentage = ((size / repo.size.to_f) * 100)
|
||||
percentage = sprintf '%.2f' % percentage
|
||||
puts "%-7s %s" % ["#{percentage}%", language]
|
||||
end
|
||||
end
|
||||
if breakdown
|
||||
puts
|
||||
@@ -35,6 +43,8 @@ if File.directory?(path)
|
||||
end
|
||||
puts
|
||||
end
|
||||
elsif json_breakdown
|
||||
puts JSON.dump(repo.breakdown_by_file)
|
||||
end
|
||||
elsif File.file?(path)
|
||||
blob = Linguist::FileBlob.new(path, Dir.pwd)
|
||||
@@ -63,5 +73,12 @@ elsif File.file?(path)
|
||||
puts " appears to be a vendored file"
|
||||
end
|
||||
else
|
||||
abort "usage: linguist <path>"
|
||||
abort <<-HELP
|
||||
Linguist v#{Linguist::VERSION}
|
||||
Detect language type for a file, or, given a repository, determine language breakdown.
|
||||
|
||||
Usage: linguist <path>
|
||||
linguist <path> [--breakdown] [--json]
|
||||
linguist [--breakdown] [--json]
|
||||
HELP
|
||||
end
|
||||
|
||||
@@ -13,10 +13,10 @@ Gem::Specification.new do |s|
|
||||
s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
|
||||
s.executables = ['linguist', 'git-linguist']
|
||||
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.3'
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.5'
|
||||
s.add_dependency 'escape_utils', '~> 1.1.0'
|
||||
s.add_dependency 'mime-types', '>= 1.19'
|
||||
s.add_dependency 'rugged', '>= 0.23.0b'
|
||||
s.add_dependency 'rugged', '>= 0.25.1'
|
||||
|
||||
s.add_development_dependency 'minitest', '>= 5.0'
|
||||
s.add_development_dependency 'mocha'
|
||||
@@ -26,6 +26,5 @@ Gem::Specification.new do |s|
|
||||
s.add_development_dependency 'yajl-ruby'
|
||||
s.add_development_dependency 'color-proximity', '~> 0.2.1'
|
||||
s.add_development_dependency 'licensed'
|
||||
s.add_development_dependency 'licensee', '>= 8.3.0'
|
||||
|
||||
s.add_development_dependency 'licensee', '~> 8.8.0'
|
||||
end
|
||||
|
||||
155
grammars.yml
155
grammars.yml
@@ -1,9 +1,11 @@
|
||||
---
|
||||
http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
|
||||
- text.xml.genshi
|
||||
https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
|
||||
- source.systemverilog
|
||||
- source.ucfconstraints
|
||||
https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
|
||||
- text.xml.genshi
|
||||
vendor/grammars/ABNF.tmbundle:
|
||||
- source.abnf
|
||||
vendor/grammars/Agda.tmbundle:
|
||||
- source.agda
|
||||
vendor/grammars/Alloy.tmbundle:
|
||||
@@ -20,6 +22,8 @@ vendor/grammars/ColdFusion:
|
||||
- text.html.cfm
|
||||
vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
vendor/grammars/EBNF.tmbundle:
|
||||
- source.ebnf
|
||||
vendor/grammars/Elm/Syntaxes:
|
||||
- source.elm
|
||||
- text.html.mediawiki.elm-build-output
|
||||
@@ -41,15 +45,17 @@ vendor/grammars/Isabelle.tmbundle:
|
||||
- source.isabelle.theory
|
||||
vendor/grammars/JSyntax:
|
||||
- source.j
|
||||
vendor/grammars/Julia.tmbundle:
|
||||
- source.julia
|
||||
vendor/grammars/Lean.tmbundle:
|
||||
- source.lean
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/MQL5-sublime:
|
||||
- source.mql5
|
||||
vendor/grammars/MagicPython:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/Modelica:
|
||||
- source.modelica
|
||||
vendor/grammars/NSIS:
|
||||
@@ -92,6 +98,8 @@ vendor/grammars/Sublime-Modula-2:
|
||||
- source.modula2
|
||||
vendor/grammars/Sublime-Nit:
|
||||
- source.nit
|
||||
vendor/grammars/Sublime-Pep8/:
|
||||
- source.pep8
|
||||
vendor/grammars/Sublime-QML:
|
||||
- source.qml
|
||||
vendor/grammars/Sublime-REBOL:
|
||||
@@ -107,7 +115,9 @@ vendor/grammars/SublimeBrainfuck:
|
||||
- source.bf
|
||||
vendor/grammars/SublimeClarion:
|
||||
- source.clarion
|
||||
vendor/grammars/SublimeGDB:
|
||||
vendor/grammars/SublimeEthereum:
|
||||
- source.solidity
|
||||
vendor/grammars/SublimeGDB/:
|
||||
- source.disasm
|
||||
- source.gdb
|
||||
- source.gdb.session
|
||||
@@ -122,8 +132,15 @@ vendor/grammars/TLA:
|
||||
- source.tla
|
||||
vendor/grammars/TXL:
|
||||
- source.txl
|
||||
vendor/grammars/Terraform.tmLanguage:
|
||||
- source.terraform
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/TypeScript-TmLanguage:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
- text.error-list
|
||||
- text.find-refs
|
||||
vendor/grammars/UrWeb-Language-Definition:
|
||||
- source.ur
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
@@ -134,7 +151,7 @@ vendor/grammars/X10:
|
||||
- source.x10
|
||||
vendor/grammars/abap.tmbundle:
|
||||
- source.abap
|
||||
vendor/grammars/actionscript3-tmbundle:
|
||||
vendor/grammars/actionscript3-tmbundle/:
|
||||
- source.actionscript.3
|
||||
- text.html.asdoc
|
||||
- text.xml.flex-config
|
||||
@@ -172,8 +189,21 @@ vendor/grammars/atom-language-1c-bsl:
|
||||
- source.sdbl
|
||||
vendor/grammars/atom-language-clean:
|
||||
- source.clean
|
||||
- text.restructuredtext.clean
|
||||
vendor/grammars/atom-language-julia:
|
||||
- source.julia
|
||||
- source.julia.console
|
||||
vendor/grammars/atom-language-p4:
|
||||
- source.p4
|
||||
vendor/grammars/atom-language-perl6:
|
||||
- source.meta-info
|
||||
- source.perl6fe
|
||||
- source.quoting.perl6fe
|
||||
- source.regexp.perl6fe
|
||||
vendor/grammars/atom-language-purescript:
|
||||
- source.purescript
|
||||
vendor/grammars/atom-language-rust:
|
||||
- source.rust
|
||||
vendor/grammars/atom-language-srt:
|
||||
- text.srt
|
||||
vendor/grammars/atom-language-stan:
|
||||
@@ -205,7 +235,6 @@ vendor/grammars/capnproto.tmbundle:
|
||||
vendor/grammars/carto-atom:
|
||||
- source.css.mss
|
||||
vendor/grammars/ceylon-sublimetext:
|
||||
- module.ceylon
|
||||
- source.ceylon
|
||||
vendor/grammars/chapel-tmbundle:
|
||||
- source.chapel
|
||||
@@ -219,8 +248,6 @@ vendor/grammars/cpp-qt.tmbundle:
|
||||
- source.qmake
|
||||
vendor/grammars/creole:
|
||||
- text.html.creole
|
||||
vendor/grammars/css.tmbundle:
|
||||
- source.css
|
||||
vendor/grammars/cucumber-tmbundle:
|
||||
- source.ruby.rspec.cucumber.steps
|
||||
- text.gherkin.feature
|
||||
@@ -228,11 +255,11 @@ vendor/grammars/cython:
|
||||
- source.cython
|
||||
vendor/grammars/d.tmbundle:
|
||||
- source.d
|
||||
vendor/grammars/dart-sublime-bundle:
|
||||
vendor/grammars/dartlang:
|
||||
- source.dart
|
||||
- source.pubspec
|
||||
- text.dart-analysis-output
|
||||
- text.dart-doccomments
|
||||
- source.yaml-ext
|
||||
vendor/grammars/data-weave-tmLanguage:
|
||||
- source.data-weave
|
||||
vendor/grammars/desktop.tmbundle:
|
||||
- source.desktop
|
||||
vendor/grammars/diff.tmbundle:
|
||||
@@ -331,6 +358,8 @@ vendor/grammars/language-asn1:
|
||||
vendor/grammars/language-babel:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-ballerina:
|
||||
- source.ballerina
|
||||
vendor/grammars/language-batchfile:
|
||||
- source.batchfile
|
||||
vendor/grammars/language-blade:
|
||||
@@ -339,6 +368,8 @@ vendor/grammars/language-click:
|
||||
- source.click
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
vendor/grammars/language-closure-templates:
|
||||
- text.html.soy
|
||||
vendor/grammars/language-coffee-script:
|
||||
- source.coffee
|
||||
- source.litcoffee
|
||||
@@ -354,13 +385,26 @@ vendor/grammars/language-csound:
|
||||
- source.csound
|
||||
- source.csound-document
|
||||
- source.csound-score
|
||||
vendor/grammars/language-css:
|
||||
- source.css
|
||||
vendor/grammars/language-emacs-lisp:
|
||||
- source.emacs.lisp
|
||||
vendor/grammars/language-fontforge:
|
||||
- source.afm
|
||||
- source.fontforge
|
||||
- source.opentype
|
||||
- text.sfd
|
||||
vendor/grammars/language-gfm:
|
||||
- source.gfm
|
||||
vendor/grammars/language-gn:
|
||||
- source.gn
|
||||
vendor/grammars/language-graphql:
|
||||
- source.graphql
|
||||
vendor/grammars/language-haml:
|
||||
- text.haml
|
||||
- text.hamlc
|
||||
vendor/grammars/language-haskell:
|
||||
- annotation.liquidhaskell.haskell
|
||||
- hint.haskell
|
||||
- hint.message.haskell
|
||||
- hint.type.haskell
|
||||
@@ -368,15 +412,21 @@ vendor/grammars/language-haskell:
|
||||
- source.cabal
|
||||
- source.haskell
|
||||
- source.hsc2hs
|
||||
- source.hsig
|
||||
- text.tex.latex.haskell
|
||||
vendor/grammars/language-hy:
|
||||
- source.hy
|
||||
vendor/grammars/language-inform7:
|
||||
- source.inform7
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
- source.js.regexp.replacement
|
||||
- source.jsdoc
|
||||
vendor/grammars/language-jison:
|
||||
- source.jison
|
||||
- source.jisonlex
|
||||
- source.jisonlex-injection
|
||||
vendor/grammars/language-jolie:
|
||||
- source.jolie
|
||||
vendor/grammars/language-jsoniq:
|
||||
- source.jq
|
||||
- source.xq
|
||||
@@ -384,18 +434,36 @@ vendor/grammars/language-less:
|
||||
- source.css.less
|
||||
vendor/grammars/language-maxscript:
|
||||
- source.maxscript
|
||||
vendor/grammars/language-meson:
|
||||
- source.meson
|
||||
vendor/grammars/language-ncl:
|
||||
- source.ncl
|
||||
vendor/grammars/language-ninja:
|
||||
- source.ninja
|
||||
vendor/grammars/language-pan:
|
||||
- source.pan
|
||||
vendor/grammars/language-pcb:
|
||||
- source.gerber
|
||||
- source.pcb.board
|
||||
- source.pcb.schematic
|
||||
- source.pcb.sexp
|
||||
vendor/grammars/language-povray:
|
||||
- source.pov-ray sdl
|
||||
vendor/grammars/language-python:
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-reason:
|
||||
- source.reason
|
||||
- source.reason.hover.type
|
||||
vendor/grammars/language-regexp:
|
||||
- source.regexp
|
||||
- source.regexp.extended
|
||||
vendor/grammars/language-renpy:
|
||||
- source.renpy
|
||||
vendor/grammars/language-restructuredtext:
|
||||
- text.restructuredtext
|
||||
vendor/grammars/language-ring:
|
||||
- source.ring
|
||||
vendor/grammars/language-roff:
|
||||
- source.ditroff
|
||||
- source.ditroff.desc
|
||||
- source.ideal
|
||||
- source.pic
|
||||
- text.roff
|
||||
@@ -403,6 +471,10 @@ vendor/grammars/language-roff:
|
||||
vendor/grammars/language-rpm-spec:
|
||||
- source.changelogs.rpm-spec
|
||||
- source.rpm-spec
|
||||
vendor/grammars/language-ruby:
|
||||
- source.ruby
|
||||
- source.ruby.gemfile
|
||||
- text.html.erb
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
@@ -412,13 +484,19 @@ vendor/grammars/language-toc-wow:
|
||||
- source.toc
|
||||
vendor/grammars/language-turing:
|
||||
- source.turing
|
||||
vendor/grammars/language-typelanguage:
|
||||
- source.tl
|
||||
vendor/grammars/language-viml:
|
||||
- source.viml
|
||||
vendor/grammars/language-wavefront:
|
||||
- source.wavefront.mtl
|
||||
- source.wavefront.obj
|
||||
vendor/grammars/language-webassembly:
|
||||
- source.webassembly
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-xcompose:
|
||||
- config.xcompose
|
||||
vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/language-yang:
|
||||
@@ -448,6 +526,8 @@ vendor/grammars/make.tmbundle:
|
||||
- source.makefile
|
||||
vendor/grammars/mako-tmbundle:
|
||||
- text.html.mako
|
||||
vendor/grammars/marko-tmbundle:
- text.marko
vendor/grammars/mathematica-tmbundle:
- source.mathematica
vendor/grammars/matlab.tmbundle:
@@ -467,8 +547,6 @@ vendor/grammars/nemerle.tmbundle:
- source.nemerle
vendor/grammars/nesC:
- source.nesc
vendor/grammars/ninja.tmbundle:
- source.ninja
vendor/grammars/nix:
- source.nix
vendor/grammars/nu.tmbundle:
@@ -487,6 +565,8 @@ vendor/grammars/ooc.tmbundle:
- source.ooc
vendor/grammars/opa.tmbundle:
- source.opa
vendor/grammars/openscad.tmbundle:
- source.scad
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
- source.oz
vendor/grammars/parrot:
@@ -498,10 +578,6 @@ vendor/grammars/pawn-sublime-language:
vendor/grammars/perl.tmbundle:
- source.perl
- source.perl.6
vendor/grammars/perl6fe:
- source.meta-info
- source.perl6fe
- source.regexp.perl6fe
vendor/grammars/php-smarty.tmbundle:
- text.html.smarty
vendor/grammars/php.tmbundle:
@@ -524,13 +600,10 @@ vendor/grammars/python-django.tmbundle:
vendor/grammars/r.tmbundle:
- source.r
- text.tex.latex.rd
vendor/grammars/ruby-haml.tmbundle:
- text.haml
vendor/grammars/rascal-syntax-highlighting:
- source.rascal
vendor/grammars/ruby-slim.tmbundle:
- text.slim
vendor/grammars/ruby.tmbundle:
- source.ruby
- text.html.erb
vendor/grammars/sas.tmbundle:
- source.SASLog
- source.sas
@@ -545,6 +618,9 @@ vendor/grammars/scilab.tmbundle:
- source.scilab
vendor/grammars/secondlife-lsl:
- source.lsl
vendor/grammars/shaders-tmLanguage:
- source.hlsl
- source.shaderlab
vendor/grammars/smali-sublime:
- source.smali
vendor/grammars/smalltalk-tmbundle:
@@ -553,6 +629,8 @@ vendor/grammars/sourcepawn:
- source.sp
vendor/grammars/sql.tmbundle:
- source.sql
vendor/grammars/squirrel-language:
- source.nut
vendor/grammars/st2-zonefile:
- text.zone_file
vendor/grammars/standard-ml.tmbundle:
@@ -560,6 +638,8 @@ vendor/grammars/standard-ml.tmbundle:
- source.ml
vendor/grammars/sublime-MuPAD:
- source.mupad
vendor/grammars/sublime-angelscript:
- source.angelscript
vendor/grammars/sublime-aspectj:
- source.aspectj
vendor/grammars/sublime-autoit:
@@ -572,6 +652,8 @@ vendor/grammars/sublime-cirru:
- source.cirru
vendor/grammars/sublime-clips:
- source.clips
vendor/grammars/sublime-fantom:
- source.fan
vendor/grammars/sublime-glsl:
- source.essl
- source.glsl
@@ -579,6 +661,8 @@ vendor/grammars/sublime-golo:
- source.golo
vendor/grammars/sublime-mask:
- source.mask
vendor/grammars/sublime-nearley:
- source.ne
vendor/grammars/sublime-netlinx:
- source.netlinx
- source.netlinx.erb
@@ -593,8 +677,8 @@ vendor/grammars/sublime-rexx:
- source.rexx
vendor/grammars/sublime-robot-plugin:
- text.robot
vendor/grammars/sublime-rust:
- source.rust
vendor/grammars/sublime-shen:
- source.shen
vendor/grammars/sublime-spintools:
- source.regexp.spin
- source.spin
@@ -604,11 +688,6 @@ vendor/grammars/sublime-terra:
- source.terra
vendor/grammars/sublime-text-ox:
- source.ox
vendor/grammars/sublime-typescript:
- source.ts
- source.tsx
- text.error-list
- text.find-refs
vendor/grammars/sublime-varnish:
- source.varnish.vcl
vendor/grammars/sublime_cobol:
@@ -641,12 +720,12 @@ vendor/grammars/vhdl:
- source.vhdl
vendor/grammars/vue-syntax-highlight:
- text.html.vue
vendor/grammars/wdl-sublime-syntax-highlighter:
- source.wdl
vendor/grammars/xc.tmbundle:
- source.xc
vendor/grammars/xml.tmbundle:
- text.xml
- text.xml.xsl
vendor/grammars/xquery:
- source.xquery
vendor/grammars/zephir-sublime:
- source.php.zephir

@@ -15,9 +15,9 @@ class << Linguist
# see Linguist::LazyBlob and Linguist::FileBlob for examples
#
# Returns Language or nil.
def detect(blob)
def detect(blob, allow_empty: false)
# Bail early if the blob is binary or empty.
return nil if blob.likely_binary? || blob.binary? || blob.empty?
return nil if blob.likely_binary? || blob.binary? || (!allow_empty && blob.empty?)

Linguist.instrument("linguist.detection", :blob => blob) do
# Call each strategy until one candidate is returned.
@@ -59,8 +59,9 @@ class << Linguist
# Strategies are called in turn until a single Language is returned.
STRATEGIES = [
Linguist::Strategy::Modeline,
Linguist::Shebang,
Linguist::Strategy::Filename,
Linguist::Shebang,
Linguist::Strategy::Extension,
Linguist::Heuristics,
Linguist::Classifier
]
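An illustrative sketch of how the entry point and strategy chain above are exercised; the file paths are invented and the results depend on what is actually in the repository.

require 'linguist'

blob = Linguist::FileBlob.new("app/models/user.rb", Dir.pwd)
language = Linguist.detect(blob)            # walks STRATEGIES in the order listed above
puts language.name if language              # e.g. "Ruby"

# With the new keyword argument an empty file is no longer rejected up front,
# so the filename- and extension-based strategies still get a chance to answer.
empty_blob = Linguist::FileBlob.new("lib/empty.rb", Dir.pwd)
Linguist.detect(empty_blob, allow_empty: true)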
@@ -73,7 +74,7 @@ class << Linguist
# end
# end
#
# Linguist.instrumenter = CustomInstrumenter
# Linguist.instrumenter = CustomInstrumenter.new
#
# The instrumenter must conform to the `ActiveSupport::Notifications`
# interface, which defines `#instrument` and accepts:

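A minimal instrumenter sketch based only on the contract described above (a name, a payload Hash and a block); the TimingInstrumenter class and its output format are invented for illustration.

class TimingInstrumenter
  def instrument(name, payload = {})
    started = Time.now
    yield if block_given?
  ensure
    warn format("%s took %.3fs (%s)", name, Time.now - started, payload[:blob] && payload[:blob].name)
  end
end

Linguist.instrumenter = TimingInstrumenter.new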
@@ -63,7 +63,7 @@ module Linguist
#
# Returns an Array
def extensions
_, *segments = name.downcase.split(".")
_, *segments = name.downcase.split(".", -1)

segments.map.with_index do |segment, index|
"." + segments[index..-1].join(".")

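A worked example (filename invented) of what the extensions method above produces; the -1 split limit keeps empty trailing segments instead of silently dropping them.

name = "archive.tar.gz"
_, *segments = name.downcase.split(".", -1)
segments.map.with_index do |_, index|
  "." + segments[index..-1].join(".")
end
# => [".tar.gz", ".gz"]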
@@ -95,7 +95,7 @@ module Linguist
# Returns sorted Array of result pairs. Each pair contains the
# String language name and a Float score.
def classify(tokens, languages)
return [] if tokens.nil?
return [] if tokens.nil? || languages.empty?
tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
scores = {}

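Not linguist's classifier, just a toy token scorer to show why the added languages.empty? guard matters: with no candidates there is nothing to score, so an early [] avoids walking the token list at all. The counts table is invented.

def toy_classify(tokens, languages, counts)
  return [] if tokens.nil? || languages.empty?
  languages.map { |lang|
    score = tokens.sum { |tok| Math.log((counts.dig(lang, tok) || 0) + 1.0) }
    [lang, score]
  }.sort_by { |_, score| -score }
end

counts = { "Ruby" => { "def" => 120, "end" => 150 }, "Python" => { "def" => 90 } }
toy_classify(%w[def end], ["Ruby", "Python"], counts)  # => [["Ruby", ...], ["Python", ...]]
toy_classify(%w[def end], [], counts)                  # => []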
@@ -9,11 +9,12 @@

## Documentation directories ##

- ^docs?/
- ^[Dd]ocs?/
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/
- (^|/)[Jj]avadoc/
- ^[Mm]an/
- ^[Ee]xamples/
- ^[Dd]emos?/

## Documentation files ##

@@ -27,4 +28,4 @@
- (^|/)[Rr]eadme(\.|$)

# Samples folders
- ^[Ss]amples/
- ^[Ss]amples?/

@@ -3,7 +3,7 @@ module Linguist
# Public: Is the blob a generated file?
#
# name - String filename
# data - String blob data. A block also maybe passed in for lazy
# data - String blob data. A block also may be passed in for lazy
# loading. This behavior is deprecated and you should always
# pass in a String.
#
@@ -56,7 +56,8 @@ module Linguist
generated_net_specflow_feature_file? ||
composer_lock? ||
node_modules? ||
npm_shrinkwrap? ||
go_vendor? ||
npm_shrinkwrap_or_package_lock? ||
godeps? ||
generated_by_zephir? ||
minified_files? ||
@@ -69,6 +70,7 @@ module Linguist
compiled_cython_file? ||
generated_go? ||
generated_protocol_buffer? ||
generated_javascript_protocol_buffer? ||
generated_apache_thrift? ||
generated_jni_header? ||
vcr_cassette? ||
@@ -76,7 +78,11 @@ module Linguist
generated_unity3d_meta? ||
generated_racc? ||
generated_jflex? ||
generated_grammarkit?
generated_grammarkit? ||
generated_roxygen2? ||
generated_jison? ||
generated_yarn_lock? ||
generated_grpc_cpp?
end

# Internal: Is the blob an Xcode file?
@@ -238,7 +244,11 @@ module Linguist
#
# Returns true or false.
def generated_postscript?
return false unless ['.ps', '.eps'].include? extname
return false unless ['.ps', '.eps', '.pfa'].include? extname

# Type 1 and Type 42 fonts converted to PostScript are stored as hex-encoded byte streams; these
# streams are always preceded the `eexec` operator (if Type 1), or the `/sfnts` key (if Type 42).
return true if data =~ /(\n|\r\n|\r)\s*(?:currentfile eexec\s+|\/sfnts\s+\[\1<)\h{8,}\1/

# We analyze the "%%Creator:" comment, which contains the author/generator
# of the file. If there is one, it should be in one of the first few lines.
@@ -248,10 +258,12 @@ module Linguist
# Most generators write their version number, while human authors' or companies'
# names don't contain numbers. So look if the line contains digits. Also
# look for some special cases without version numbers.
return creator =~ /[0-9]/ ||
creator.include?("mpage") ||
creator.include?("draw") ||
creator.include?("ImageMagick")
return true if creator =~ /[0-9]|draw|mpage|ImageMagick|inkscape|MATLAB/ ||
creator =~ /PCBNEW|pnmtops|\(Unknown\)|Serif Affinity|Filterimage -tops/

# EAGLE doesn't include a version number when it generates PostScript.
# However, it does prepend its name to the document's "%%Title" field.
!!creator.include?("EAGLE") and lines[0..4].find {|line| line =~ /^%%Title: EAGLE Drawing /}
end

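A quick hand-rolled check of the widened %%Creator heuristic above; the PostScript header is a fabricated minimal example and the extraction of the creator line is simplified.

data = "%!PS-Adobe-3.0\n%%Creator: ImageMagick 6.9.7\n%%Title: example\n"
creator = data.lines[0..9].grep(/^%%Creator: /).first
creator =~ /[0-9]|draw|mpage|ImageMagick|inkscape|MATLAB/   # truthy, so the file is treated as generated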
def generated_go?
@@ -274,16 +286,25 @@ module Linguist
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
end

APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
# Internal: Is the blob a Javascript source file generated by the
# Protocol Buffer compiler?
#
# Returns true of false.
def generated_javascript_protocol_buffer?
return false unless extname == ".js"
return false unless lines.count > 6

return lines[5].include?("GENERATED CODE -- DO NOT EDIT!")
end

APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp', '.php']

# Internal: Is the blob generated by Apache Thrift compiler?
#
# Returns true or false
def generated_apache_thrift?
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
return false unless lines.count > 1

return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
return lines.first(6).any? { |l| l.include?("Autogenerated by Thrift Compiler") }
end

# Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
@@ -304,11 +325,19 @@ module Linguist
!!name.match(/node_modules\//)
end

# Internal: Is the blob a generated npm shrinkwrap file.
# Internal: Is the blob part of the Go vendor/ tree,
# not meant for humans in pull requests.
#
# Returns true or false.
def npm_shrinkwrap?
!!name.match(/npm-shrinkwrap\.json/)
def go_vendor?
!!name.match(/vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/)
end

# Internal: Is the blob a generated npm shrinkwrap or package lock file?
#
# Returns true or false.
def npm_shrinkwrap_or_package_lock?
name.match(/npm-shrinkwrap\.json/) || name.match(/package-lock\.json/)
end

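Two invented paths run against the go_vendor? pattern above, to show that it keys off a domain-like component (github.com, golang.org and the like) sitting directly under vendor/.

go_vendor = /vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/
!!"vendor/github.com/pkg/errors/errors.go".match(go_vendor)  # => true
!!"vendor/assets/javascripts/app.js".match(go_vendor)        # => false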
# Internal: Is the blob part of Godeps/,
@@ -326,7 +355,7 @@ module Linguist
!!name.match(/composer\.lock/)
end

# Internal: Is the blob a generated by Zephir
# Internal: Is the blob generated by Zephir?
#
# Returns true or false.
def generated_by_zephir?
@@ -426,5 +455,59 @@ module Linguist
return false unless lines.count > 1
return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
end

# Internal: Is this a roxygen2-generated file?
#
# A roxygen2-generated file typically contain:
# % Generated by roxygen2: do not edit by hand
# on the first line.
#
# Return true or false
def generated_roxygen2?
return false unless extname == '.Rd'
return false unless lines.count > 1

return lines[0].include?("% Generated by roxygen2: do not edit by hand")
end

# Internal: Is this a Jison-generated file?
#
# Jison-generated parsers typically contain:
# /* parser generated by jison
# on the first line.
#
# Jison-generated lexers typically contain:
# /* generated by jison-lex
# on the first line.
#
# Return true or false
def generated_jison?
return false unless extname == '.js'
return false unless lines.count > 1
return lines[0].start_with?("/* parser generated by jison ") ||
lines[0].start_with?("/* generated by jison-lex ")
end

# Internal: Is the blob a generated yarn lockfile?
#
# Returns true or false.
def generated_yarn_lock?
return false unless name.match(/yarn\.lock/)
return false unless lines.count > 0
return lines[0].include?("# THIS IS AN AUTOGENERATED FILE")
end

# Internal: Is this a protobuf/grpc-generated C++ file?
#
# A generated file contains:
# // Generated by the gRPC C++ plugin.
# on the first line.
#
# Return true or false
def generated_grpc_cpp?
return false unless %w{.cpp .hpp .h .cc}.include? extname
return false unless lines.count > 1
return lines[0].start_with?("// Generated by the gRPC")
end
end
end

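The new predicates above all share one shape: look at the first line (or first few lines) of the blob for a generator marker. A hypothetical condensed helper, not part of linguist, would look like this; the marker strings are copied from the methods shown.

FIRST_LINE_MARKERS = [
  "% Generated by roxygen2: do not edit by hand",
  "/* parser generated by jison ",
  "/* generated by jison-lex ",
  "// Generated by the gRPC"
].freeze

def generated_by_first_line_marker?(lines)
  return false if lines.empty?
  FIRST_LINE_MARKERS.any? { |marker| lines[0].start_with?(marker) }
end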
@@ -17,9 +17,8 @@ module Linguist
data = blob.data

@heuristics.each do |heuristic|
if heuristic.matches?(blob.name)
languages = Array(heuristic.call(data))
return languages if languages.any? || languages.all? { |l| candidates.include?(l) }
if heuristic.matches?(blob.name, candidates)
return Array(heuristic.call(data))
end
end

@@ -28,7 +27,8 @@ module Linguist

# Internal: Define a new heuristic.
#
# languages - String names of languages to disambiguate.
# exts_and_langs - String names of file extensions and languages to
# disambiguate.
# heuristic - Block which takes data as an argument and returns a Language or nil.
#
# Examples
@@ -41,23 +41,28 @@ module Linguist
# end
# end
#
def self.disambiguate(*extensions, &heuristic)
@heuristics << new(extensions, &heuristic)
def self.disambiguate(*exts_and_langs, &heuristic)
@heuristics << new(exts_and_langs, &heuristic)
end

# Internal: Array of defined heuristics
@heuristics = []

# Internal
def initialize(extensions, &heuristic)
@extensions = extensions
def initialize(exts_and_langs, &heuristic)
@exts_and_langs, @candidates = exts_and_langs.partition {|e| e =~ /\A\./}
@heuristic = heuristic
end

# Internal: Check if this heuristic matches the candidate languages.
def matches?(filename)
# Internal: Check if this heuristic matches the candidate filenames or
# languages.
def matches?(filename, candidates)
filename = filename.downcase
@extensions.any? { |ext| filename.end_with?(ext) }
candidates = candidates.compact.map(&:name)
@exts_and_langs.any? { |ext| filename.end_with?(ext) } ||
(candidates.any? &&
(@candidates - candidates == [] &&
candidates - @candidates == []))
end

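An illustration (values invented) of the constructor and matches? logic above: arguments that look like extensions are split off from candidate language names, and the language names must match the detected candidates exactly as a set.

exts_and_langs = [".pl", ".pm", "Prolog", "Perl"]
exts, langs = exts_and_langs.partition { |e| e =~ /\A\./ }
exts   # => [".pl", ".pm"]
langs  # => ["Prolog", "Perl"]

candidates = ["Perl", "Prolog"]
langs - candidates == [] && candidates - langs == []   # => true (same set, order ignored)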
# Internal: Perform the heuristic
@@ -68,6 +73,14 @@ module Linguist
# Common heuristics
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/

disambiguate ".as" do |data|
if /^\s*(package\s+[a-z0-9_\.]+|import\s+[a-zA-Z0-9_\.]+;|class\s+[A-Za-z0-9_]+\s+extends\s+[A-Za-z0-9_]+)/.match(data)
Language["ActionScript"]
else
Language["AngelScript"]
end
end

disambiguate ".asc" do |data|
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
Language["Public Key"]
@@ -110,6 +123,12 @@ module Linguist
end
end

disambiguate ".cls" do |data|
if /\\\w+{/.match(data)
Language["TeX"]
end
end

disambiguate ".cs" do |data|
if /![\w\s]+methodsFor: /.match(data)
Language["Smalltalk"]
@@ -119,11 +138,18 @@ module Linguist
end

disambiguate ".d" do |data|
if /^module /.match(data)
# see http://dlang.org/spec/grammar
# ModuleDeclaration | ImportDeclaration | FuncDeclaration | unittest
if /^module\s+[\w.]*\s*;|import\s+[\w\s,.:]*;|\w+\s+\w+\s*\(.*\)(?:\(.*\))?\s*{[^}]*}|unittest\s*(?:\(.*\))?\s*{[^}]*}/.match(data)
Language["D"]
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
# see http://dtrace.org/guide/chp-prog.html, http://dtrace.org/guide/chp-profile.html, http://dtrace.org/guide/chp-opt.html
elsif /^(\w+:\w*:\w*:\w*|BEGIN|END|provider\s+|(tick|profile)-\w+\s+{[^}]*}|#pragma\s+D\s+(option|attributes|depends_on)\s|#pragma\s+ident\s)/.match(data)
Language["DTrace"]
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
# path/target : dependency \
# target : \
# : dependency
# path/file.ext1 : some/path/../file.ext2
elsif /([\/\\].*:\s+.*\s\\$|: \\$|^ : |^[\w\s\/\\.]+\w+\.\w+\s*:\s+[\w\s\/\\.]+\w+\.\w+)/.match(data)
Language["Makefile"]
end
end
@@ -152,7 +178,7 @@ module Linguist
elsif data.include?("flowop")
Language["Filebench WML"]
elsif fortran_rx.match(data)
Language["FORTRAN"]
Language["Fortran"]
end
end

@@ -160,7 +186,7 @@ module Linguist
if /^: /.match(data)
Language["Forth"]
elsif fortran_rx.match(data)
Language["FORTRAN"]
Language["Fortran"]
end
end

@@ -213,7 +239,7 @@ module Linguist
elsif /^(%[%{}]xs|<.*>)/.match(data)
Language["Lex"]
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
Language["Groff"]
Language["Roff"]
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
Language["PicoLisp"]
end
@@ -244,7 +270,7 @@ module Linguist
Language["MUF"]
elsif /^\s*;/.match(data)
Language["M"]
elsif /^\s*\(\*/.match(data)
elsif /\*\)$/.match(data)
Language["Mathematica"]
elsif /^\s*%/.match(data)
Language["Matlab"]
@@ -254,10 +280,12 @@ module Linguist
end

disambiguate ".md" do |data|
if /^[-a-z0-9=#!\*\[|]/i.match(data)
if /(^[-a-z0-9=#!\*\[|>])|<\//i.match(data) || data.empty?
Language["Markdown"]
elsif /^(;;|\(define_)/.match(data)
Language["GCC machine description"]
Language["GCC Machine Description"]
else
Language["Markdown"]
end
end

@@ -272,7 +300,7 @@ module Linguist
disambiguate ".mod" do |data|
if data.include?('<!ENTITY ')
Language["XML"]
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
elsif /^\s*MODULE [\w\.]+;/i.match(data) || /^\s*END [\w\.]+;/i.match(data)
Language["Modula-2"]
else
[Language["Linux Kernel Module"], Language["AMPL"]]
@@ -281,9 +309,9 @@ module Linguist

disambiguate ".ms" do |data|
if /^[.'][a-z][a-z](\s|$)/i.match(data)
Language["Groff"]
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
Language["GAS"]
Language["Roff"]
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z][_A-Za-z0-9]*:/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
Language["Unix Assembly"]
else
Language["MAXScript"]
end
@@ -291,7 +319,7 @@ module Linguist

disambiguate ".n" do |data|
if /^[.']/.match(data)
Language["Groff"]
Language["Roff"]
elsif /^(module|namespace|using)\s/.match(data)
Language["Nemerle"]
end
@@ -320,28 +348,22 @@ module Linguist
end

disambiguate ".pl" do |data|
if /^[^#]+:-/.match(data)
if /^[^#]*:-/.match(data)
Language["Prolog"]
elsif /use strict|use\s+v?5\./.match(data)
Language["Perl"]
elsif /^(use v6|(my )?class|module)/.match(data)
Language["Perl6"]
Language["Perl 6"]
end
end

disambiguate ".pm", ".t" do |data|
if /use strict|use\s+v?5\./.match(data)
Language["Perl"]
elsif /^(use v6|(my )?class|module)/.match(data)
Language["Perl6"]
end
end

disambiguate ".pod" do |data|
if /^=\w+$/.match(data)
Language["Pod"]
else
disambiguate ".pm" do |data|
if /\buse\s+(?:strict\b|v?5\.)/.match(data)
Language["Perl"]
elsif /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
Language["Perl 6"]
elsif /^\s*\/\* XPM \*\//.match(data)
Language["XPM"]
end
end

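A worked example (snippets invented) of the reworked Perl versus Perl 6 rules above, applying the two regular expressions directly to sample data.

perl5 = "use strict;\nuse warnings;\n"
perl6 = "use v6;\n\nclass Point { has $.x }\n"

perl5 =~ /\buse\s+(?:strict\b|v?5\.)/                        # matches, so Perl wins
perl6 =~ /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/   # matches on the class line, so Perl 6 wins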
@@ -377,7 +399,7 @@ module Linguist
if /^\.!|^\.end lit(?:eral)?\b/i.match(data)
Language["RUNOFF"]
elsif /^\.\\" /.match(data)
Language["Groff"]
Language["Roff"]
end
end

@@ -428,10 +450,12 @@ module Linguist
end

disambiguate ".t" do |data|
if /^\s*%|^\s*var\s+\w+\s*:\s*\w+/.match(data)
if /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
Language["Turing"]
elsif /^\s*use\s+v6\s*;/.match(data)
Language["Perl6"]
elsif /^\s*(?:use\s+v6\s*;|\bmodule\b|\b(?:my\s+)?class\b)/.match(data)
Language["Perl 6"]
elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
Language["Perl"]
end
end

@@ -444,7 +468,7 @@ module Linguist
end

disambiguate ".ts" do |data|
if data.include?("<TS")
if /<TS\b/.match(data)
Language["XML"]
else
Language["TypeScript"]
@@ -459,5 +483,22 @@ module Linguist
Language["Scilab"]
end
end

disambiguate ".tsx" do |data|
if /^\s*(import.+(from\s+|require\()['"]react|\/\/\/\s*<reference\s)/.match(data)
Language["TypeScript"]
elsif /^\s*<\?xml\s+version/i.match(data)
Language["XML"]
end
end

disambiguate ".w" do |data|
if (data.include?("&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _DEFINITIONS"))
Language["OpenEdge ABL"]
elsif /^@(<|\w+\.)/.match(data)
Language["CWeb"]
end
end

end
end

@@ -11,6 +11,7 @@ require 'linguist/samples'
require 'linguist/file_blob'
require 'linguist/blob_helper'
require 'linguist/strategy/filename'
require 'linguist/strategy/extension'
require 'linguist/strategy/modeline'
require 'linguist/shebang'

@@ -90,17 +91,6 @@ module Linguist
language
end

# Public: Detects the Language of the blob.
#
# blob - an object that includes the Linguist `BlobHelper` interface;
# see Linguist::LazyBlob and Linguist::FileBlob for examples
#
# Returns Language or nil.
def self.detect(blob)
warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
Linguist.detect(blob)
end

# Public: Get all Languages
#
# Returns an Array of Languages
@@ -119,7 +109,7 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.find_by_name(name)
return nil if name.to_s.empty?
return nil if !name.is_a?(String) || name.to_s.empty?
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
end

@@ -134,52 +124,52 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.find_by_alias(name)
return nil if name.to_s.empty?
return nil if !name.is_a?(String) || name.to_s.empty?
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
end

# Public: Look up Languages by filename.
#
# The behaviour of this method recently changed.
# See the second example below.
#
# filename - The path String.
#
# Examples
#
# Language.find_by_filename('Cakefile')
# # => [#<Language name="CoffeeScript">]
# Language.find_by_filename('foo.rb')
# # => [#<Language name="Ruby">]
# # => []
#
# Returns all matching Languages or [] if none were found.
def self.find_by_filename(filename)
basename = File.basename(filename)

# find the first extension with language definitions
extname = FileBlob.new(filename).extensions.detect do |e|
!@extension_index[e].empty?
end

(@filename_index[basename] + @extension_index[extname]).compact.uniq
@filename_index[basename]
end

# Public: Look up Languages by file extension.
#
# extname - The extension String.
# The behaviour of this method recently changed.
# See the second example below.
#
# filename - The path String.
#
# Examples
#
# Language.find_by_extension('.rb')
# Language.find_by_extension('dummy.rb')
# # => [#<Language name="Ruby">]
#
# Language.find_by_extension('rb')
# # => [#<Language name="Ruby">]
# # => []
#
# Returns all matching Languages or [] if none were found.
def self.find_by_extension(extname)
extname = ".#{extname}" unless extname.start_with?(".")
@extension_index[extname.downcase]
end
def self.find_by_extension(filename)
# find the first extension with language definitions
extname = FileBlob.new(filename.downcase).extensions.detect do |e|
!@extension_index[e].empty?
end

# DEPRECATED
def self.find_by_shebang(data)
@interpreter_index[Shebang.interpreter(data)]
@extension_index[extname]
end

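A usage sketch of the reworked lookups above; the return values follow the examples given in the comments, and the inputs are invented.

Linguist::Language.find_by_filename("Cakefile")    # => [#<Language name="CoffeeScript">]
Linguist::Language.find_by_filename("foo.rb")      # => [] (filename matches only, no extension fallback)
Linguist::Language.find_by_extension("dummy.rb")   # => [#<Language name="Ruby">]
Linguist::Language.find_by_extension("rb")         # => [] (a full filename is expected now, not a bare extension)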
# Public: Look up Languages by interpreter.
@@ -224,8 +214,15 @@ module Linguist
#
# Returns the Language or nil if none was found.
def self.[](name)
return nil if !name.is_a?(String) || name.to_s.empty?

lang = @index[name.downcase]
return lang if lang

name = name.split(',').first
return nil if name.to_s.empty?
name && (@index[name.downcase] || @index[name.split(',').first.downcase])

@index[name.downcase]
end

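A sketch of the tightened Language.[] lookup above; the inputs are invented and the behaviour follows the code shown.

Linguist::Language["Ruby"]           # => #<Language name="Ruby">
Linguist::Language["ruby, python"]   # falls back to the part before the comma, so Ruby
Linguist::Language[nil]              # => nil (non-String input is now rejected up front)
Linguist::Language[:ruby]            # => nil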
# Public: A List of popular languages
@@ -259,17 +256,6 @@ module Linguist
@colors ||= all.select(&:color).sort_by { |lang| lang.name.downcase }
end

# Public: A List of languages compatible with Ace.
#
# TODO: Remove this method in a 5.x release. Every language now needs an ace_mode
# key, so this function isn't doing anything unique anymore.
#
# Returns an Array of Languages.
def self.ace_modes
warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set."
@ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase }
end

# Internal: Initialize a new Language
#
# attributes - A hash of attributes
@@ -286,7 +272,7 @@ module Linguist
@color = attributes[:color]

# Set aliases
@aliases = [default_alias_name] + (attributes[:aliases] || [])
@aliases = [default_alias] + (attributes[:aliases] || [])

# Load the TextMate scope name or try to guess one
@tm_scope = attributes[:tm_scope] || begin
@@ -301,12 +287,10 @@ module Linguist

@ace_mode = attributes[:ace_mode]
@codemirror_mode = attributes[:codemirror_mode]
@codemirror_mime_type = attributes[:codemirror_mime_type]
@wrap = attributes[:wrap] || false

# Set legacy search term
@search_term = attributes[:search_term] || default_alias_name

# Set the language_id
# Set the language_id
@language_id = attributes[:language_id]

# Set extensions or default to [].
@@ -360,17 +344,6 @@ module Linguist
# Returns an Array of String names
attr_reader :aliases

# Deprecated: Get code search term
#
# Examples
#
# # => "ruby"
# # => "python"
# # => "perl"
#
# Returns the name String
attr_reader :search_term

# Public: Get language_id (used in GitHub search)
#
# Examples
@@ -398,7 +371,10 @@ module Linguist
# Returns a String name or nil
attr_reader :ace_mode

# Public: Get Codemirror mode
# Public: Get CodeMirror mode
#
# Maps to a directory in the `mode/` source code.
# https://github.com/codemirror/CodeMirror/tree/master/mode
#
# Examples
#
@@ -409,6 +385,17 @@ module Linguist
# Returns a String name or nil
attr_reader :codemirror_mode

# Public: Get CodeMirror MIME type mode
#
# Examples
#
# # => "nil"
# # => "text/x-javascript"
# # => "text/x-csrc"
#
# Returns a String name or nil
attr_reader :codemirror_mime_type

# Public: Should language lines be wrapped
#
# Returns true or false
@@ -441,22 +428,6 @@ module Linguist
# Returns the extensions Array
attr_reader :filenames

# Deprecated: Get primary extension
#
# Defaults to the first extension but can be overridden
# in the languages.yml.
#
# The primary extension can not be nil. Tests should verify this.
#
# This method is only used by app/helpers/gists_helper.rb for creating
# the language dropdown. It really should be using `name` instead.
# Would like to drop primary extension.
#
# Returns the extension String.
def primary_extension
extensions.first
end

# Public: Get URL escaped name.
#
# Examples
@@ -470,12 +441,13 @@ module Linguist
EscapeUtils.escape_url(name).gsub('+', '%20')
end

# Internal: Get default alias name
# Public: Get default alias name
#
# Returns the alias name String
def default_alias_name
def default_alias
name.downcase.gsub(/\s/, '-')
end
alias_method :default_alias_name, :default_alias

# Public: Get Language group
#
@@ -586,10 +558,10 @@ module Linguist
:tm_scope => options['tm_scope'],
:ace_mode => options['ace_mode'],
:codemirror_mode => options['codemirror_mode'],
:codemirror_mime_type => options['codemirror_mime_type'],
:wrap => options['wrap'],
:group_name => options['group'],
:searchable => options.fetch('searchable', true),
:search_term => options['search_term'],
:language_id => options['language_id'],
:extensions => Array(options['extensions']),
:interpreters => options['interpreters'].sort,

File diff suppressed because it is too large
@@ -26,4 +26,4 @@
- Shell
- Swift
- TeX
- VimL
- Vim script

lib/linguist/strategy/extension.rb (new file, 10 lines)
@@ -0,0 +1,10 @@
module Linguist
module Strategy
# Detects language based on extension
class Extension
def self.call(blob, _)
Language.find_by_extension(blob.name.to_s)
end
end
end
end
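Sketch only: each strategy, including the new Extension class above, answers call(blob, candidates) with an Array of Languages, so a custom strategy shaped like this (entirely invented) could in principle be slotted into the STRATEGIES list shown earlier.

module Linguist
  module Strategy
    # Toy example: treat a bare README with no extension as Markdown.
    class BareReadme
      def self.call(blob, _candidates)
        blob.name.to_s == "README" ? [Language["Markdown"]] : []
      end
    end
  end
end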
@@ -1,9 +1,10 @@
module Linguist
module Strategy
# Detects language based on filename and/or extension
# Detects language based on filename
class Filename
def self.call(blob, _)
Language.find_by_filename(blob.name.to_s)
name = blob.name.to_s
Language.find_by_filename(name)
end
end
end

@@ -15,6 +15,9 @@
# Dependencies
- ^[Dd]ependencies/

# Distributions
- (^|/)dist/

# C deps
# https://github.com/joyent/node
- ^deps/
@@ -47,6 +50,9 @@
# Go dependencies
- Godeps/_workspace/

# GNU indent profiles
- .indent.pro

# Minified JavaScript and CSS
- (\.|-)min\.(js|css)$

@@ -66,12 +72,18 @@
# Normalize.css
- (^|/)normalize\.(css|less|scss|styl)$

# Skeleton.css
- (^|/)skeleton\.(css|less|scss|styl)$

# Bourbon css
- (^|/)[Bb]ourbon/.*\.(css|less|scss|styl)$

# Animate.css
- (^|/)animate\.(css|less|scss|styl)$

# Select2
- (^|/)select2/.*\.(css|scss|js)$

# Vendored dependencies
- third[-_]?party/
- 3rd[-_]?party/
@@ -110,6 +122,15 @@
# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# jQuery dataTables
- jquery.dataTables.js

# bootboxjs
- bootbox.js

# pdf-worker
- pdf.worker.js

# Slick
- (^|/)slick\.\w+.js$

@@ -126,6 +147,9 @@
- .sublime-project
- .sublime-workspace

# VS Code workspace files
- .vscode

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
@@ -165,7 +189,7 @@
# Chart.js
- (^|/)Chart\.js$

# Codemirror
# CodeMirror
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
@@ -182,6 +206,9 @@
# React
- (^|/)react(-[^.]*)?\.js$

# flow-typed
- (^|/)flow-typed/.*\.js$

# Modernizr
- (^|/)modernizr\-\d\.\d+(\.\d+)?\.js$
- (^|/)modernizr\.custom\.\d+\.js$
@@ -229,6 +256,15 @@
# Fabric
- Fabric.framework/

# BuddyBuild
- BuddyBuildSDK.framework/

# Realm
- Realm.framework

# RealmSwift
- RealmSwift.framework

# git config files
- gitattributes$
- gitignore$

@@ -1,3 +1,3 @@
module Linguist
VERSION = "4.8.14"
VERSION = "5.3.2"
end

@@ -1,7 +1,7 @@
{
"repository": "https://github.com/github/linguist",
"dependencies": {
"season": "~>5.0"
"season": "~>5.4"
},
"license": "MIT"
}

190
samples/ABNF/toml.abnf
Normal file
190
samples/ABNF/toml.abnf
Normal file
@@ -0,0 +1,190 @@
|
||||
; Source: https://github.com/toml-lang/toml
|
||||
; License: MIT
|
||||
|
||||
;; This is an attempt to define TOML in ABNF according to the grammar defined
|
||||
;; in RFC 4234 (http://www.ietf.org/rfc/rfc4234.txt).
|
||||
|
||||
;; TOML
|
||||
|
||||
toml = expression *( newline expression )
|
||||
expression = (
|
||||
ws /
|
||||
ws comment /
|
||||
ws keyval ws [ comment ] /
|
||||
ws table ws [ comment ]
|
||||
)
|
||||
|
||||
;; Newline
|
||||
|
||||
newline = (
|
||||
%x0A / ; LF
|
||||
%x0D.0A ; CRLF
|
||||
)
|
||||
|
||||
newlines = 1*newline
|
||||
|
||||
;; Whitespace
|
||||
|
||||
ws = *(
|
||||
%x20 / ; Space
|
||||
%x09 ; Horizontal tab
|
||||
)
|
||||
|
||||
;; Comment
|
||||
|
||||
comment-start-symbol = %x23 ; #
|
||||
non-eol = %x09 / %x20-10FFFF
|
||||
comment = comment-start-symbol *non-eol
|
||||
|
||||
;; Key-Value pairs
|
||||
|
||||
keyval-sep = ws %x3D ws ; =
|
||||
keyval = key keyval-sep val
|
||||
|
||||
key = unquoted-key / quoted-key
|
||||
unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
|
||||
quoted-key = quotation-mark 1*basic-char quotation-mark ; See Basic Strings
|
||||
|
||||
val = integer / float / string / boolean / date-time / array / inline-table
|
||||
|
||||
;; Table
|
||||
|
||||
table = std-table / array-table
|
||||
|
||||
;; Standard Table
|
||||
|
||||
std-table-open = %x5B ws ; [ Left square bracket
|
||||
std-table-close = ws %x5D ; ] Right square bracket
|
||||
table-key-sep = ws %x2E ws ; . Period
|
||||
|
||||
std-table = std-table-open key *( table-key-sep key) std-table-close
|
||||
|
||||
;; Array Table
|
||||
|
||||
array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
||||
array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
||||
|
||||
array-table = array-table-open key *( table-key-sep key) array-table-close
|
||||
|
||||
;; Integer
|
||||
|
||||
integer = [ minus / plus ] int
|
||||
minus = %x2D ; -
|
||||
plus = %x2B ; +
|
||||
digit1-9 = %x31-39 ; 1-9
|
||||
underscore = %x5F ; _
|
||||
int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT )
|
||||
|
||||
;; Float
|
||||
|
||||
float = integer ( frac / frac exp / exp )
|
||||
zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT )
|
||||
frac = decimal-point zero-prefixable-int
|
||||
decimal-point = %x2E ; .
|
||||
exp = e integer
|
||||
e = %x65 / %x45 ; e E
|
||||
|
||||
;; String
|
||||
|
||||
string = basic-string / ml-basic-string / literal-string / ml-literal-string
|
||||
|
||||
;; Basic String
|
||||
|
||||
basic-string = quotation-mark *basic-char quotation-mark
|
||||
|
||||
quotation-mark = %x22 ; "
|
||||
|
||||
basic-char = basic-unescaped / escaped
|
||||
escaped = escape ( %x22 / ; " quotation mark U+0022
|
||||
%x5C / ; \ reverse solidus U+005C
|
||||
%x2F / ; / solidus U+002F
|
||||
%x62 / ; b backspace U+0008
|
||||
%x66 / ; f form feed U+000C
|
||||
%x6E / ; n line feed U+000A
|
||||
%x72 / ; r carriage return U+000D
|
||||
%x74 / ; t tab U+0009
|
||||
%x75 4HEXDIG / ; uXXXX U+XXXX
|
||||
%x55 8HEXDIG ) ; UXXXXXXXX U+XXXXXXXX
|
||||
|
||||
basic-unescaped = %x20-21 / %x23-5B / %x5D-10FFFF
|
||||
|
||||
escape = %x5C ; \
|
||||
|
||||
;; Multiline Basic String
|
||||
|
||||
ml-basic-string-delim = quotation-mark quotation-mark quotation-mark
|
||||
ml-basic-string = ml-basic-string-delim ml-basic-body ml-basic-string-delim
|
||||
ml-basic-body = *( ml-basic-char / newline / ( escape newline ))
|
||||
|
||||
ml-basic-char = ml-basic-unescaped / escaped
|
||||
ml-basic-unescaped = %x20-5B / %x5D-10FFFF
|
||||
|
||||
;; Literal String
|
||||
|
||||
literal-string = apostraphe *literal-char apostraphe
|
||||
|
||||
apostraphe = %x27 ; ' Apostrophe
|
||||
|
||||
literal-char = %x09 / %x20-26 / %x28-10FFFF
|
||||
|
||||
;; Multiline Literal String
|
||||
|
||||
ml-literal-string-delim = apostraphe apostraphe apostraphe
|
||||
ml-literal-string = ml-literal-string-delim ml-literal-body ml-literal-string-delim
|
||||
|
||||
ml-literal-body = *( ml-literal-char / newline )
|
||||
ml-literal-char = %x09 / %x20-10FFFF
|
||||
|
||||
;; Boolean
|
||||
|
||||
boolean = true / false
|
||||
true = %x74.72.75.65 ; true
|
||||
false = %x66.61.6C.73.65 ; false
|
||||
|
||||
;; Datetime (as defined in RFC 3339)
|
||||
|
||||
date-fullyear = 4DIGIT
|
||||
date-month = 2DIGIT ; 01-12
|
||||
date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
|
||||
time-hour = 2DIGIT ; 00-23
|
||||
time-minute = 2DIGIT ; 00-59
|
||||
time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules
|
||||
time-secfrac = "." 1*DIGIT
|
||||
time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
|
||||
time-offset = "Z" / time-numoffset
|
||||
|
||||
partial-time = time-hour ":" time-minute ":" time-second [time-secfrac]
|
||||
full-date = date-fullyear "-" date-month "-" date-mday
|
||||
full-time = partial-time time-offset
|
||||
|
||||
date-time = full-date "T" full-time
|
||||
|
||||
;; Array
|
||||
|
||||
array-open = %x5B ws ; [
|
||||
array-close = ws %x5D ; ]
|
||||
|
||||
array = array-open array-values array-close
|
||||
|
||||
array-values = [ val [ array-sep ] [ ( comment newlines) / newlines ] /
|
||||
val array-sep [ ( comment newlines) / newlines ] array-values ]
|
||||
|
||||
array-sep = ws %x2C ws ; , Comma
|
||||
|
||||
;; Inline Table
|
||||
|
||||
inline-table-open = %x7B ws ; {
|
||||
inline-table-close = ws %x7D ; }
|
||||
inline-table-sep = ws %x2C ws ; , Comma
|
||||
|
||||
inline-table = inline-table-open inline-table-keyvals inline-table-close
|
||||
|
||||
inline-table-keyvals = [ inline-table-keyvals-non-empty ]
|
||||
inline-table-keyvals-non-empty = key keyval-sep val /
|
||||
key keyval-sep val inline-table-sep inline-table-keyvals-non-empty
|
||||
|
||||
;; Built-in ABNF terms, reproduced here for clarity
|
||||
|
||||
; ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
|
||||
; DIGIT = %x30-39 ; 0-9
|
||||
; HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F"
|
||||
35
samples/ActionScript/FooBar.as
Normal file
35
samples/ActionScript/FooBar.as
Normal file
@@ -0,0 +1,35 @@
|
||||
// A sample for Actionscript.
|
||||
|
||||
package foobar
|
||||
{
|
||||
import flash.display.MovieClip;
|
||||
|
||||
class Bar
|
||||
{
|
||||
public function getNumber():Number
|
||||
{
|
||||
return 10;
|
||||
}
|
||||
}
|
||||
|
||||
class Foo extends Bar
|
||||
{
|
||||
private var ourNumber:Number = 25;
|
||||
|
||||
override public function getNumber():Number
|
||||
{
|
||||
return ourNumber;
|
||||
}
|
||||
}
|
||||
|
||||
class Main extends MovieClip
|
||||
{
|
||||
public function Main()
|
||||
{
|
||||
var x:Bar = new Bar();
|
||||
var y:Foo = new Foo();
|
||||
trace(x.getNumber());
|
||||
trace(y.getNumber());
|
||||
}
|
||||
}
|
||||
}
|
||||
13
samples/ActionScript/HelloWorld.as
Normal file
13
samples/ActionScript/HelloWorld.as
Normal file
@@ -0,0 +1,13 @@
|
||||
package mypackage
|
||||
{
|
||||
public class Hello
|
||||
{
|
||||
/* Let's say hello!
|
||||
* This is just a test script for Linguist's Actionscript detection.
|
||||
*/
|
||||
public function sayHello():void
|
||||
{
|
||||
trace("Hello, world");
|
||||
}
|
||||
}
|
||||
}
|
||||
69
samples/Adobe Font Metrics/OpenSansCondensed-Bold.afm
Normal file
69
samples/Adobe Font Metrics/OpenSansCondensed-Bold.afm
Normal file
@@ -0,0 +1,69 @@
|
||||
StartFontMetrics 2.0
|
||||
Comment Generated by FontForge 20170719
|
||||
Comment Creation Date: Sun Jul 23 19:47:25 2017
|
||||
FontName OpenSansCondensed-Bold
|
||||
FullName Open Sans Condensed Bold
|
||||
FamilyName Open Sans Condensed
|
||||
Weight Bold
|
||||
Notice (Digitized data copyright (c) 2010-2011, Google Corporation.)
|
||||
ItalicAngle 0
|
||||
IsFixedPitch false
|
||||
UnderlinePosition -205
|
||||
UnderlineThickness 102
|
||||
Version 1.11
|
||||
EncodingScheme ISO10646-1
|
||||
FontBBox -667 -290 1046 1062
|
||||
CapHeight 714
|
||||
XHeight 544
|
||||
Ascender 760
|
||||
Descender -240
|
||||
StartCharMetrics 939
|
||||
C 32 ; WX 247 ; N space ; B 0 0 0 0 ;
|
||||
C 33 ; WX 270 ; N exclam ; B 54 -14 216 714 ;
|
||||
C 34 ; WX 445 ; N quotedbl ; B 59 456 388 714 ;
|
||||
C 35 ; WX 543 ; N numbersign ; B 20 0 525 714 ;
|
||||
C 36 ; WX 462 ; N dollar ; B 36 -59 427 760 ;
|
||||
C 37 ; WX 758 ; N percent ; B 30 -9 729 725 ;
|
||||
C 38 ; WX 581 ; N ampersand ; B 28 -10 572 725 ;
|
||||
C 39 ; WX 246 ; N quotesingle ; B 59 456 188 714 ;
|
||||
C -1 ; WX 462 ; N six.os ; B 36 -10 427 724 ;
|
||||
C -1 ; WX 420 ; N seven.os ; B 19 -170 402 544 ;
|
||||
C -1 ; WX 462 ; N eight.os ; B 35 -10 429 724 ;
|
||||
C -1 ; WX 461 ; N nine.os ; B 33 -182 424 564 ;
|
||||
C -1 ; WX 496 ; N g.alt ; B 36 -241 442 555 ;
|
||||
C -1 ; WX 496 ; N gcircumflex.alt ; B 36 -241 442 767 ;
|
||||
C -1 ; WX 496 ; N gbreve.alt ; B 36 -241 442 766 ;
|
||||
C -1 ; WX 496 ; N gdot.alt ; B 36 -241 442 756 ;
|
||||
C -1 ; WX 496 ; N gcommaaccent.alt ; B 36 -241 442 767 ;
|
||||
C -1 ; WX 0 ; N cyrotmarkcomb ; B -203 591 203 714 ;
|
||||
EndCharMetrics
|
||||
StartKernData
|
||||
StartKernPairs 15878
|
||||
KPX quotedbl uni1ECA 20
|
||||
KPX quotedbl uni1EC8 20
|
||||
KPX quotedbl Idotaccent 20
|
||||
KPX quotedbl Iogonek 20
|
||||
KPX quotedbl Imacron 20
|
||||
KPX quotedbl Idieresis 20
|
||||
KPX quotedbl Icircumflex 20
|
||||
KPX quotedbl Iacute 20
|
||||
KPX quotedbl Igrave 20
|
||||
KPX quotedbl I 20
|
||||
KPX quotedbl uni1EF9 20
|
||||
KPX quoteleft q -20
|
||||
KPX quoteleft o -20
|
||||
KPX quoteleft g -9
|
||||
KPX quoteleft e -20
|
||||
KPX quoteleft d -20
|
||||
KPX quoteleft c -20
|
||||
KPX quoteleft Z 20
|
||||
KPX Delta C -9
|
||||
KPX Delta A -20
|
||||
KPX Delta question 20
|
||||
KPX Delta period -41
|
||||
KPX Delta comma -41
|
||||
KPX Delta quotesingle 41
|
||||
KPX Delta quotedbl 41
|
||||
EndKernPairs
|
||||
EndKernData
|
||||
EndFontMetrics
|
||||
464
samples/Adobe Font Metrics/SpecialElite.afm
Normal file
464
samples/Adobe Font Metrics/SpecialElite.afm
Normal file
@@ -0,0 +1,464 @@
|
||||
StartFontMetrics 2.0
|
||||
Comment Generated by FontForge 20170719
|
||||
Comment Creation Date: Sun Jul 23 19:52:19 2017
|
||||
FontName SpecialElite-Regular
|
||||
FullName Special Elite
|
||||
FamilyName Special Elite
|
||||
Weight Book
|
||||
Notice (Copyright (c) 2010 by Brian J. Bonislawsky DBA Astigmatic (AOETI). All rights reserved. Available under the Apache 2.0 licence.http://www.apache.org/licenses/LICENSE-2.0.html)
|
||||
ItalicAngle 0
|
||||
IsFixedPitch false
|
||||
UnderlinePosition -133
|
||||
UnderlineThickness 20
|
||||
Version 1.000
|
||||
EncodingScheme ISO10646-1
|
||||
FontBBox -33 -322 1052 959
|
||||
CapHeight 714
|
||||
XHeight 487
|
||||
Ascender 688
|
||||
Descender -225
|
||||
StartCharMetrics 371
|
||||
C 32 ; WX 292 ; N space ; B 0 0 0 0 ;
|
||||
C 33 ; WX 276 ; N exclam ; B 73 0 207 702 ;
|
||||
C 34 ; WX 352 ; N quotedbl ; B 48 449 295 704 ;
|
||||
C 35 ; WX 554 ; N numbersign ; B 31 -2 524 713 ;
|
||||
C 36 ; WX 526 ; N dollar ; B 31 -201 498 919 ;
|
||||
C 37 ; WX 666 ; N percent ; B 32 -186 642 872 ;
|
||||
C 38 ; WX 676 ; N ampersand ; B 31 -5 645 705 ;
|
||||
C 39 ; WX 196 ; N quotesingle ; B 48 449 143 703 ;
|
||||
C 40 ; WX 279 ; N parenleft ; B 55 -71 243 757 ;
|
||||
C 41 ; WX 281 ; N parenright ; B 37 -59 229 770 ;
|
||||
C 42 ; WX 522 ; N asterisk ; B 32 276 493 707 ;
|
||||
C 43 ; WX 496 ; N plus ; B 29 131 465 560 ;
|
||||
C 44 ; WX 336 ; N comma ; B 39 -197 290 251 ;
|
||||
C 45 ; WX 636 ; N hyphen ; B 63 273 573 397 ;
|
||||
C 46 ; WX 349 ; N period ; B 52 -3 298 245 ;
|
||||
C 47 ; WX 557 ; N slash ; B 23 -41 536 760 ;
|
||||
C 48 ; WX 610 ; N zero ; B 55 0 560 720 ;
|
||||
C 49 ; WX 569 ; N one ; B 27 -12 572 712 ;
|
||||
C 50 ; WX 573 ; N two ; B 50 -25 541 680 ;
|
||||
C 51 ; WX 557 ; N three ; B 44 -25 514 694 ;
|
||||
C 52 ; WX 612 ; N four ; B 15 4 584 708 ;
|
||||
C 53 ; WX 537 ; N five ; B 47 0 505 690 ;
|
||||
C 54 ; WX 588 ; N six ; B 48 -10 548 707 ;
|
||||
C 55 ; WX 555 ; N seven ; B 15 -34 549 734 ;
|
||||
C 56 ; WX 598 ; N eight ; B 51 1 551 720 ;
|
||||
C 57 ; WX 584 ; N nine ; B 48 -2 539 715 ;
|
||||
C 58 ; WX 343 ; N colon ; B 51 -3 297 518 ;
|
||||
C 59 ; WX 328 ; N semicolon ; B 45 -197 297 518 ;
|
||||
C 60 ; WX 463 ; N less ; B 31 120 401 565 ;
|
||||
C 61 ; WX 636 ; N equal ; B 63 186 573 513 ;
|
||||
C 62 ; WX 463 ; N greater ; B 62 120 433 565 ;
|
||||
C 63 ; WX 470 ; N question ; B 34 2 442 729 ;
|
||||
C 64 ; WX 665 ; N at ; B 46 -4 618 680 ;
|
||||
C 65 ; WX 549 ; N A ; B -1 -16 550 703 ;
|
||||
C 66 ; WX 604 ; N B ; B 29 -6 557 704 ;
|
||||
C 67 ; WX 579 ; N C ; B 46 -13 531 700 ;
|
||||
C 68 ; WX 622 ; N D ; B 36 -17 579 713 ;
|
||||
C 69 ; WX 638 ; N E ; B 38 -16 587 691 ;
|
||||
C 70 ; WX 605 ; N F ; B 29 -9 595 709 ;
|
||||
C 71 ; WX 615 ; N G ; B 45 -3 586 710 ;
|
||||
C 72 ; WX 652 ; N H ; B 40 -20 622 690 ;
|
||||
C 73 ; WX 495 ; N I ; B 26 -24 469 710 ;
|
||||
C 74 ; WX 541 ; N J ; B 16 -3 539 703 ;
|
||||
C 75 ; WX 582 ; N K ; B 28 -5 584 711 ;
|
||||
C 76 ; WX 602 ; N L ; B 23 -14 583 718 ;
|
||||
C 77 ; WX 697 ; N M ; B 46 -10 655 704 ;
|
||||
C 78 ; WX 627 ; N N ; B 41 -15 595 700 ;
|
||||
C 79 ; WX 616 ; N O ; B 42 -30 574 702 ;
|
||||
C 80 ; WX 553 ; N P ; B 30 -12 527 689 ;
|
||||
C 81 ; WX 602 ; N Q ; B 42 -98 571 711 ;
|
||||
C 82 ; WX 636 ; N R ; B 14 -9 624 706 ;
|
||||
C 83 ; WX 588 ; N S ; B 51 -13 547 690 ;
|
||||
C 84 ; WX 594 ; N T ; B 25 1 564 707 ;
|
||||
C 85 ; WX 621 ; N U ; B 24 -6 611 710 ;
|
||||
C 86 ; WX 611 ; N V ; B -1 -15 614 726 ;
|
||||
C 87 ; WX 643 ; N W ; B 8 0 614 689 ;
|
||||
C 88 ; WX 582 ; N X ; B 3 -1 580 697 ;
|
||||
C 89 ; WX 561 ; N Y ; B -21 -2 562 719 ;
|
||||
C 90 ; WX 592 ; N Z ; B 49 -1 551 709 ;
|
||||
C 91 ; WX 312 ; N bracketleft ; B 85 -72 297 754 ;
|
||||
C 92 ; WX 557 ; N backslash ; B 21 -41 534 760 ;
|
||||
C 249 ; WX 639 ; N ugrave ; B 5 -28 624 679 ;
|
||||
C 250 ; WX 639 ; N uacute ; B 5 -28 624 682 ;
|
||||
C 251 ; WX 639 ; N ucircumflex ; B 5 -28 624 691 ;
|
||||
C 252 ; WX 639 ; N udieresis ; B 5 -28 624 649 ;
|
||||
C 253 ; WX 592 ; N yacute ; B 0 -232 596 666 ;
|
||||
C 254 ; WX 552 ; N thorn ; B -33 -221 512 699 ;
|
||||
C 255 ; WX 592 ; N ydieresis ; B 0 -232 596 643 ;
|
||||
C -1 ; WX 549 ; N Amacron ; B -1 -16 550 809 ;
|
||||
C -1 ; WX 565 ; N amacron ; B 38 -6 561 619 ;
|
||||
C -1 ; WX 549 ; N Abreve ; B -1 -16 550 890 ;
|
||||
C -1 ; WX 565 ; N abreve ; B 38 -6 561 686 ;
|
||||
C -1 ; WX 549 ; N Aogonek ; B -1 -138 589 703 ;
|
||||
C -1 ; WX 565 ; N aogonek ; B 38 -118 624 502 ;
|
||||
C -1 ; WX 579 ; N Cacute ; B 46 -13 531 900 ;
|
||||
C -1 ; WX 547 ; N cacute ; B 39 -22 506 693 ;
|
||||
C -1 ; WX 579 ; N Ccircumflex ; B 46 -13 531 890 ;
|
||||
C -1 ; WX 547 ; N ccircumflex ; B 39 -22 506 689 ;
|
||||
C -1 ; WX 579 ; N Cdotaccent ; B 46 -13 531 859 ;
|
||||
C -1 ; WX 547 ; N cdotaccent ; B 39 -22 506 657 ;
|
||||
C -1 ; WX 579 ; N Ccaron ; B 46 -13 531 918 ;
|
||||
C -1 ; WX 547 ; N ccaron ; B 39 -22 506 710 ;
|
||||
C -1 ; WX 622 ; N Dcaron ; B 36 -17 579 924 ;
|
||||
C -1 ; WX 750 ; N dcaron ; B 40 -26 716 704 ;
|
||||
C -1 ; WX 623 ; N Dcroat ; B 36 -17 580 713 ;
|
||||
C -1 ; WX 603 ; N dcroat ; B 40 -26 597 714 ;
|
||||
C -1 ; WX 638 ; N Emacron ; B 38 -16 587 798 ;
|
||||
C -1 ; WX 543 ; N emacron ; B 40 -23 501 616 ;
|
||||
C -1 ; WX 638 ; N Ebreve ; B 38 -16 587 876 ;
|
||||
C -1 ; WX 543 ; N ebreve ; B 40 -23 501 683 ;
|
||||
C -1 ; WX 638 ; N Edotaccent ; B 38 -16 587 848 ;
|
||||
C -1 ; WX 543 ; N edotaccent ; B 40 -23 501 659 ;
|
||||
C -1 ; WX 638 ; N Eogonek ; B 38 -113 610 691 ;
|
||||
C -1 ; WX 543 ; N eogonek ; B 40 -145 501 499 ;
|
||||
C -1 ; WX 638 ; N Ecaron ; B 38 -16 587 913 ;
|
||||
C -1 ; WX 543 ; N ecaron ; B 40 -23 501 714 ;
|
||||
C -1 ; WX 615 ; N Gcircumflex ; B 45 -3 586 906 ;
|
||||
C -1 ; WX 583 ; N gcircumflex ; B 42 -224 562 676 ;
|
||||
C -1 ; WX 615 ; N Gbreve ; B 45 -3 586 899 ;
|
||||
C -1 ; WX 583 ; N gbreve ; B 42 -224 562 667 ;
|
||||
C -1 ; WX 615 ; N Gdotaccent ; B 45 -3 586 871 ;
|
||||
C -1 ; WX 583 ; N gdotaccent ; B 42 -224 562 637 ;
|
||||
C -1 ; WX 615 ; N Gcommaaccent ; B 45 -253 586 710 ;
|
||||
C -1 ; WX 583 ; N gcommaaccent ; B 42 -224 562 734 ;
|
||||
C -1 ; WX 652 ; N Hcircumflex ; B 40 -20 622 897 ;
|
||||
C -1 ; WX 616 ; N hcircumflex ; B 5 -29 601 688 ;
|
||||
C -1 ; WX 652 ; N Hbar ; B 40 -20 622 690 ;
|
||||
C -1 ; WX 616 ; N hbar ; B 5 -29 601 683 ;
|
||||
C -1 ; WX 495 ; N Itilde ; B 26 -24 469 859 ;
|
||||
C -1 ; WX 568 ; N itilde ; B 36 -42 568 615 ;
|
||||
C -1 ; WX 495 ; N Imacron ; B 26 -24 469 819 ;
|
||||
C -1 ; WX 568 ; N imacron ; B 36 -42 568 585 ;
|
||||
C -1 ; WX 495 ; N Ibreve ; B 26 -24 469 901 ;
|
||||
C -1 ; WX 568 ; N ibreve ; B 36 -42 568 661 ;
|
||||
C -1 ; WX 495 ; N Iogonek ; B 26 -154 469 710 ;
|
||||
C -1 ; WX 568 ; N iogonek ; B 36 -149 568 674 ;
|
||||
C -1 ; WX 495 ; N Idotaccent ; B 26 -24 469 873 ;
|
||||
C -1 ; WX 568 ; N dotlessi ; B 36 -42 568 468 ;
|
||||
C -1 ; WX 1036 ; N IJ ; B 26 -24 1034 710 ;
|
||||
C -1 ; WX 983 ; N ij ; B 36 -236 913 683 ;
|
||||
C -1 ; WX 541 ; N Jcircumflex ; B 16 -3 539 913 ;
|
||||
C -1 ; WX 415 ; N jcircumflex ; B -12 -236 405 699 ;
|
||||
C -1 ; WX 582 ; N Kcommaaccent ; B 28 -253 584 711 ;
|
||||
C -1 ; WX 620 ; N kcommaaccent ; B 11 -253 600 683 ;
|
||||
C -1 ; WX 620 ; N kgreenlandic ; B 11 -28 600 482 ;
|
||||
C -1 ; WX 602 ; N Lacute ; B 23 -14 583 923 ;
|
||||
C -1 ; WX 540 ; N lacute ; B 4 -28 538 902 ;
|
||||
C -1 ; WX 602 ; N Lcommaaccent ; B 23 -267 583 718 ;
|
||||
C -1 ; WX 540 ; N lcommaaccent ; B 4 -267 538 682 ;
|
||||
C -1 ; WX 602 ; N Lcaron ; B 23 -14 583 794 ;
|
||||
C -1 ; WX 582 ; N lcaron ; B 4 -28 549 704 ;
|
||||
C -1 ; WX 781 ; N Ldot ; B 23 -14 748 718 ;
|
||||
C -1 ; WX 571 ; N ldotaccent ; B 4 -28 538 682 ;
|
||||
C -1 ; WX 603 ; N Lslash ; B 24 -14 584 718 ;
|
||||
C -1 ; WX 541 ; N lslash ; B 4 -28 538 682 ;
|
||||
C -1 ; WX 627 ; N Nacute ; B 41 -15 595 894 ;
|
||||
C -1 ; WX 632 ; N nacute ; B 32 -23 612 696 ;
|
||||
C -1 ; WX 627 ; N Ncommaaccent ; B 41 -268 595 700 ;
|
||||
C -1 ; WX 632 ; N ncommaaccent ; B 32 -268 612 491 ;
|
||||
C -1 ; WX 627 ; N Ncaron ; B 41 -15 595 900 ;
|
||||
C -1 ; WX 632 ; N ncaron ; B 32 -23 612 712 ;
|
||||
C -1 ; WX 815 ; N napostrophe ; B 34 -23 795 704 ;
|
||||
C -1 ; WX 627 ; N Eng ; B 41 -320 595 700 ;
|
||||
C -1 ; WX 605 ; N eng ; B 32 -322 534 491 ;
|
||||
C -1 ; WX 616 ; N Omacron ; B 42 -30 574 815 ;
|
||||
C -1 ; WX 583 ; N omacron ; B 40 -34 543 598 ;
|
||||
C -1 ; WX 616 ; N Obreve ; B 42 -30 574 891 ;
|
||||
C -1 ; WX 583 ; N obreve ; B 40 -34 543 675 ;
|
||||
C -1 ; WX 616 ; N Ohungarumlaut ; B 42 -30 574 907 ;
|
||||
C -1 ; WX 583 ; N ohungarumlaut ; B 40 -34 545 693 ;
|
||||
C -1 ; WX 1018 ; N OE ; B 42 -30 967 702 ;
|
||||
C -1 ; WX 958 ; N oe ; B 40 -34 916 499 ;
|
||||
C -1 ; WX 636 ; N Racute ; B 14 -9 624 910 ;
|
||||
C -1 ; WX 579 ; N racute ; B 28 -16 566 693 ;
|
||||
C -1 ; WX 636 ; N Rcommaaccent ; B 14 -268 624 706 ;
|
||||
C -1 ; WX 579 ; N rcommaaccent ; B 28 -272 566 495 ;
|
||||
C -1 ; WX 636 ; N Rcaron ; B 14 -9 624 927 ;
|
||||
C -1 ; WX 579 ; N rcaron ; B 28 -16 566 698 ;
|
||||
C -1 ; WX 588 ; N Sacute ; B 51 -13 547 900 ;
|
||||
C -1 ; WX 519 ; N sacute ; B 48 -31 481 713 ;
|
||||
C -1 ; WX 588 ; N Scircumflex ; B 51 -13 547 904 ;
|
||||
C -1 ; WX 519 ; N scircumflex ; B 48 -31 481 710 ;
|
||||
C -1 ; WX 588 ; N Scedilla ; B 51 -145 547 690 ;
|
||||
C -1 ; WX 519 ; N scedilla ; B 48 -145 481 496 ;
|
||||
C -1 ; WX 588 ; N Scaron ; B 51 -13 547 904 ;
|
||||
C -1 ; WX 519 ; N scaron ; B 48 -31 481 710 ;
|
||||
C -1 ; WX 594 ; N Tcommaaccent ; B 25 -263 564 707 ;
|
||||
C -1 ; WX 510 ; N tcommaaccent ; B 0 -282 488 694 ;
|
||||
C -1 ; WX 594 ; N Tcaron ; B 25 1 564 920 ;
|
||||
C -1 ; WX 713 ; N tcaron ; B 0 -34 680 704 ;
|
||||
C -1 ; WX 594 ; N Tbar ; B 25 1 564 707 ;
|
||||
C -1 ; WX 510 ; N tbar ; B 0 -34 488 694 ;
|
||||
C -1 ; WX 621 ; N Utilde ; B 24 -6 611 850 ;
|
||||
C -1 ; WX 638 ; N utilde ; B 5 -28 624 636 ;
|
||||
C -1 ; WX 621 ; N Umacron ; B 24 -6 611 811 ;
|
||||
C -1 ; WX 638 ; N umacron ; B 5 -28 624 587 ;
|
||||
C -1 ; WX 621 ; N Ubreve ; B 24 -6 611 888 ;
|
||||
C -1 ; WX 638 ; N ubreve ; B 5 -28 624 665 ;
|
||||
C -1 ; WX 621 ; N Uring ; B 24 -6 611 959 ;
|
||||
C -1 ; WX 638 ; N uring ; B 5 -28 624 738 ;
|
||||
C -1 ; WX 621 ; N Uhungarumlaut ; B 24 -6 611 918 ;
|
||||
C -1 ; WX 638 ; N uhungarumlaut ; B 5 -28 624 691 ;
|
||||
C -1 ; WX 621 ; N Uogonek ; B 24 -136 611 710 ;
|
||||
C -1 ; WX 638 ; N uogonek ; B 5 -147 671 487 ;
|
||||
C -1 ; WX 643 ; N Wcircumflex ; B 8 0 614 901 ;
|
||||
C -1 ; WX 678 ; N wcircumflex ; B 5 -10 674 685 ;
|
||||
C -1 ; WX 561 ; N Ycircumflex ; B -21 -2 562 934 ;
|
||||
C -1 ; WX 592 ; N ycircumflex ; B 0 -232 596 691 ;
|
||||
C -1 ; WX 561 ; N Ydieresis ; B -21 -2 562 885 ;
|
||||
C -1 ; WX 592 ; N Zacute ; B 49 -1 551 905 ;
|
||||
C -1 ; WX 528 ; N zacute ; B 45 -22 487 684 ;
|
||||
C -1 ; WX 592 ; N Zdotaccent ; B 49 -1 551 866 ;
|
||||
C -1 ; WX 528 ; N zdotaccent ; B 45 -22 487 632 ;
|
||||
C -1 ; WX 592 ; N Zcaron ; B 49 -1 551 917 ;
|
||||
C -1 ; WX 528 ; N zcaron ; B 45 -22 487 688 ;
|
||||
C -1 ; WX 915 ; N AEacute ; B -11 -16 864 904 ;
|
||||
C -1 ; WX 888 ; N aeacute ; B 38 -23 846 670 ;
|
||||
C -1 ; WX 617 ; N Oslashacute ; B 43 -41 574 912 ;
|
||||
C -1 ; WX 583 ; N oslashacute ; B 40 -73 543 697 ;
|
||||
C -1 ; WX 415 ; N dotlessj ; B -12 -236 344 478 ;
|
||||
C -1 ; WX 281 ; N circumflex ; B 0 558 282 746 ;
|
||||
C -1 ; WX 281 ; N caron ; B 0 558 282 746 ;
|
||||
C -1 ; WX 281 ; N breve ; B 0 585 282 746 ;
|
||||
C -1 ; WX 132 ; N dotaccent ; B 0 600 133 729 ;
|
||||
C -1 ; WX 214 ; N ring ; B 0 547 215 780 ;
|
||||
C -1 ; WX 211 ; N ogonek ; B 0 -145 212 13 ;
|
||||
C -1 ; WX 283 ; N tilde ; B 0 583 284 701 ;
|
||||
C -1 ; WX 352 ; N hungarumlaut ; B 0 591 353 763 ;
|
||||
C -1 ; WX 185 ; N uni0312 ; B 28 474 152 694 ;
|
||||
C -1 ; WX 185 ; N uni0315 ; B 38 470 162 690 ;
|
||||
C -1 ; WX 192 ; N uni0326 ; B 32 -253 156 -33 ;
|
||||
C -1 ; WX 666 ; N mu ; B 24 -219 643 487 ;
|
||||
C -1 ; WX 643 ; N Wgrave ; B 8 0 614 895 ;
|
||||
C -1 ; WX 678 ; N wgrave ; B 5 -10 674 688 ;
|
||||
C -1 ; WX 643 ; N Wacute ; B 8 0 614 898 ;
|
||||
C -1 ; WX 678 ; N wacute ; B 5 -10 674 682 ;
|
||||
C -1 ; WX 643 ; N Wdieresis ; B 8 0 614 868 ;
|
||||
C -1 ; WX 678 ; N wdieresis ; B 5 -10 674 649 ;
|
||||
C -1 ; WX 561 ; N Ygrave ; B -21 -2 562 900 ;
|
||||
C -1 ; WX 592 ; N ygrave ; B 0 -232 596 666 ;
|
||||
C -1 ; WX 611 ; N endash ; B 50 270 551 391 ;
|
||||
C -1 ; WX 1113 ; N emdash ; B 51 270 1052 391 ;
|
||||
C -1 ; WX 265 ; N quoteleft ; B 41 390 217 704 ;
|
||||
C -1 ; WX 264 ; N quoteright ; B 54 390 230 704 ;
|
||||
C -1 ; WX 274 ; N quotesinglbase ; B 46 -138 223 176 ;
|
||||
C -1 ; WX 470 ; N quotedblleft ; B 41 390 422 704 ;
|
||||
C -1 ; WX 469 ; N quotedblright ; B 54 390 436 704 ;
|
||||
C -1 ; WX 479 ; N quotedblbase ; B 46 -138 428 176 ;
|
||||
C -1 ; WX 389 ; N dagger ; B 30 -16 359 724 ;
|
||||
C -1 ; WX 396 ; N daggerdbl ; B 35 -16 364 728 ;
|
||||
C -1 ; WX 316 ; N bullet ; B 50 246 266 479 ;
|
||||
C -1 ; WX 1063 ; N ellipsis ; B 52 -3 1016 245 ;
|
||||
C -1 ; WX 897 ; N perthousand ; B 33 -230 873 828 ;
|
||||
C -1 ; WX 296 ; N guilsinglleft ; B 44 149 232 434 ;
|
||||
C -1 ; WX 295 ; N guilsinglright ; B 63 149 251 434 ;
|
||||
C -1 ; WX 486 ; N fraction ; B -11 -53 501 748 ;
|
||||
C -1 ; WX 732 ; N Euro ; B 31 71 683 590 ;
|
||||
C -1 ; WX 757 ; N trademark ; B 60 303 703 693 ;
|
||||
C -1 ; WX 585 ; N partialdiff ; B 36 -47 553 772 ;
|
||||
C -1 ; WX 564 ; N product ; B 26 -17 534 707 ;
|
||||
C -1 ; WX 577 ; N minus ; B 63 282 514 395 ;
|
||||
C -1 ; WX 565 ; N approxequal ; B 59 137 513 522 ;
|
||||
C -1 ; WX 593 ; N notequal ; B 44 71 554 644 ;
|
||||
C -1 ; WX 1041 ; N fi ; B 20 -42 1041 702 ;
|
||||
C -1 ; WX 1013 ; N fl ; B 20 -29 1011 702 ;
|
||||
C -1 ; WX 292 ; N .notdef ; B 0 0 0 0 ;
|
||||
C -1 ; WX 0 ; N .null ; B 0 0 0 0 ;
|
||||
C -1 ; WX 292 ; N nonmarkingreturn ; B 0 0 0 0 ;
|
||||
EndCharMetrics
|
||||
StartKernData
|
||||
StartKernPairs 6408
|
||||
KPX quotedbl period -104
|
||||
KPX quotedbl comma -103
|
||||
KPX quotedbl Jcircumflex -34
|
||||
KPX quotedbl Aogonek -31
|
||||
KPX quotedbl Abreve -31
|
||||
KPX quotedbl Amacron -31
|
||||
KPX quotedbl AEacute -31
|
||||
KPX quotedbl Aacute -31
|
||||
KPX quotedbl Acircumflex -31
|
||||
KPX quotedbl Atilde -31
|
||||
KPX quotedbl Agrave -31
|
||||
KPX quotedbl Aring -31
|
||||
KPX quotedbl Adieresis -31
|
||||
KPX quotedbl AE -31
|
||||
KPX quotedbl J -34
|
||||
KPX quotedbl A -31
|
||||
KPX quotedbl quotedblbase -117
|
||||
KPX quotedbl quotesinglbase -117
|
||||
KPX quotedbl ellipsis -104
|
||||
KPX quotedbl slash -73
|
||||
KPX quotedbl ampersand -22
|
||||
KPX quotedbl four -27
|
||||
KPX ampersand Ycircumflex -40
|
||||
KPX ampersand Ygrave -40
|
||||
KPX ampersand Ydieresis -40
|
||||
KPX ampersand Yacute -40
|
||||
KPX ampersand Y -40
|
||||
KPX ampersand V -36
|
||||
KPX quotesingle period -97
|
||||
KPX quotesingle comma -97
|
||||
KPX quotesingle Jcircumflex -34
|
||||
KPX quotesingle Aogonek -31
|
||||
KPX quotesingle Abreve -31
|
||||
KPX quotesingle Amacron -31
|
||||
KPX hyphen T -28
|
||||
KPX hyphen one -68
|
||||
KPX hyphen B -25
|
||||
KPX hyphen seven -56
|
||||
KPX slash rcommaaccent -27
|
||||
KPX slash ncommaaccent -29
|
||||
KPX slash gcommaaccent -61
|
||||
KPX slash Jcircumflex -29
|
||||
KPX slash iogonek -26
|
||||
KPX slash ibreve -26
|
||||
KPX slash imacron -26
|
||||
KPX slash itilde -26
|
||||
KPX slash oslashacute -54
|
||||
KPX slash nacute -29
|
||||
KPX slash eng -29
|
||||
KPX slash ncaron -29
|
||||
KPX slash racute -27
|
||||
KPX slash scedilla -43
|
||||
KPX slash scircumflex -43
|
||||
KPX slash sacute -43
|
||||
KPX slash rcaron -27
|
||||
KPX slash ohungarumlaut -54
|
||||
KPX slash obreve -54
|
||||
KPX slash omacron -54
|
||||
KPX slash wgrave -23
|
||||
KPX slash wcircumflex -23
|
||||
KPX slash wdieresis -23
|
||||
KPX slash wacute -23
|
||||
KPX slash zdotaccent -41
|
||||
KPX J ebreve -32
|
||||
KPX J emacron -32
|
||||
KPX J edieresis -32
|
||||
KPX J ecircumflex -32
|
||||
KPX J egrave -32
|
||||
KPX J eacute -32
|
||||
KPX J e -32
|
||||
KPX J Aogonek -34
|
||||
KPX J Abreve -34
|
||||
KPX J Amacron -34
|
||||
KPX J AEacute -34
|
||||
KPX J Aacute -34
|
||||
KPX J Acircumflex -34
|
||||
KPX J Atilde -34
|
||||
KPX J Agrave -34
|
||||
KPX J Aring -34
|
||||
KPX J Adieresis -34
|
||||
KPX J AE -34
|
||||
KPX J A -34
|
||||
KPX J comma -29
|
||||
KPX J period -30
|
||||
KPX J v -29
|
||||
KPX J hyphen -30
|
||||
KPX J quotedblbase -34
|
||||
KPX J quotesinglbase -34
|
||||
KPX J guilsinglright -25
|
||||
KPX J guilsinglleft -25
|
||||
KPX J emdash -30
|
||||
KPX J endash -30
|
||||
KPX J guillemotright -25
|
||||
KPX J guillemotleft -25
|
||||
KPX J germandbls -36
|
||||
KPX J ellipsis -30
|
||||
KPX J slash -34
|
||||
KPX J p -28
|
||||
KPX J m -35
|
||||
KPX J b 54
|
||||
KPX K ycircumflex -60
|
||||
KPX K ygrave -60
|
||||
KPX K ydieresis -60
|
||||
KPX K yacute -60
|
||||
KPX K y -60
|
||||
KPX K wgrave -36
|
||||
KPX K wcircumflex -36
|
||||
KPX K wdieresis -36
|
||||
KPX K wacute -36
|
||||
KPX K w -36
|
||||
KPX K uogonek -25
|
||||
KPX K uhungarumlaut -25
|
||||
KPX K uring -25
|
||||
KPX K ubreve -25
|
||||
KPX K umacron -25
|
||||
KPX K utilde -25
|
||||
KPX K udieresis -25
|
||||
KPX K ucircumflex -25
|
||||
KPX K ugrave -25
|
||||
KPX K uacute -25
|
||||
KPX K u -25
|
||||
KPX K q -23
|
||||
KPX K oslashacute -28
|
||||
KPX K ohungarumlaut -28
|
||||
KPX K obreve -28
|
||||
KPX K omacron -28
|
||||
KPX K otilde -28
|
||||
KPX K odieresis -28
|
||||
KPX K ocircumflex -28
|
||||
KPX K ograve -28
|
||||
KPX K oacute -28
|
||||
KPX K eth -28
|
||||
KPX K oe -28
|
||||
KPX K oslash -28
|
||||
KPX K o -28
|
||||
KPX K dcaron -24
|
||||
KPX K d -24
|
||||
KPX K ccaron -27
|
||||
KPX K cdotaccent -27
|
||||
KPX K ccircumflex -27
|
||||
KPX K cacute -27
|
||||
KPX K ccedilla -27
|
||||
KPX K c -27
|
||||
KPX K ecaron -27
|
||||
KPX K eogonek -27
|
||||
KPX K edotaccent -27
|
||||
KPX K ebreve -27
|
||||
KPX K emacron -27
|
||||
KPX K edieresis -27
|
||||
KPX K ecircumflex -27
|
||||
KPX K egrave -27
|
||||
KPX K eacute -27
|
||||
KPX K e -27
|
||||
KPX K v -49
|
||||
KPX K hyphen -38
|
||||
KPX K guilsinglleft -24
|
||||
KPX K emdash -38
|
||||
KPX K endash -38
|
||||
KPX K guillemotleft -24
|
||||
KPX K b 49
|
||||
KPX L ycircumflex -36
|
||||
KPX L ygrave -36
|
||||
KPX L ydieresis -36
|
||||
KPX L yacute -36
|
||||
KPX L y -36
|
||||
KPX L wgrave -23
|
||||
KPX L wcircumflex -23
|
||||
KPX L wdieresis -23
|
||||
KPX L wacute -23
|
||||
KPX L w -23
|
||||
KPX L V -43
|
||||
KPX L Tcommaaccent -36
|
||||
KPX L Tbar -36
|
||||
KPX L Tcaron -36
|
||||
KPX L T -36
|
||||
KPX L quoteright -49
|
||||
KPX L v -32
|
||||
KPX L quoteleft -54
|
||||
KPX L quotedblright -49
|
||||
KPX L quotedblleft -54
|
||||
KPX L trademark -29
|
||||
KPX L backslash -50
|
||||
KPX L asterisk -30
|
||||
KPX trademark Aring -24
|
||||
KPX trademark Adieresis -24
|
||||
KPX trademark Yacute 29
|
||||
KPX trademark AE -24
|
||||
KPX trademark Y 29
|
||||
KPX trademark A -24
|
||||
KPX trademark b 31
|
||||
EndKernPairs
|
||||
EndKernData
|
||||
EndFontMetrics
|
||||
23
samples/Adobe Font Metrics/lambda.afm
Normal file
@@ -0,0 +1,23 @@
StartFontMetrics 2.0
Comment Generated by FontForge 20170719
Comment Creation Date: Sun Jul 23 23:14:02 2017
FontName Greek_Lambda_Character-Regular
FullName Greek_Lambda_Character Regular
FamilyName Greek_Lambda_Character
Weight Regular
Notice (NONE. NADA. PUBLIC DOMAIN, BOI)
ItalicAngle 0
IsFixedPitch false
UnderlinePosition -175
UnderlineThickness 90
Version 020.017
EncodingScheme ISO10646-1
FontBBox 33 -177 566 760
StartCharMetrics 5
C 13 ; WX 602 ; N uni000D ; B 0 0 0 0 ;
C 32 ; WX 602 ; N space ; B 0 0 0 0 ;
C -1 ; WX 602 ; N lambda ; B 33 0 566 760 ;
C -1 ; WX 602 ; N .notdef ; B 50 -177 551 706 ;
C -1 ; WX 0 ; N NULL ; B 0 0 0 0 ;
EndCharMetrics
EndFontMetrics
77
samples/AngelScript/botmanager.as
Normal file
@@ -0,0 +1,77 @@
/*
 * This is a sample script.
 */

#include "BotManagerInterface.acs"

BotManager::BotManager g_BotManager( @CreateDumbBot );

CConCommand@ m_pAddBot;

void PluginInit()
{
    g_BotManager.PluginInit();

    @m_pAddBot = @CConCommand( "addbot", "Adds a new bot with the given name", @AddBotCallback );
}

void AddBotCallback( const CCommand@ args )
{
    if( args.ArgC() < 2 )
    {
        g_Game.AlertMessage( at_console, "Usage: addbot <name>" );
        return;
    }

    BotManager::BaseBot@ pBot = g_BotManager.CreateBot( args[ 1 ] );

    if( pBot !is null )
    {
        g_Game.AlertMessage( at_console, "Created bot " + args[ 1 ] + "\n" );
    }
    else
    {
        g_Game.AlertMessage( at_console, "Could not create bot\n" );
    }
}

final class DumbBot : BotManager::BaseBot
{
    DumbBot( CBasePlayer@ pPlayer )
    {
        super( pPlayer );
    }

    void Think()
    {
        BotManager::BaseBot::Think();

        // If the bot is dead and can be respawned, send a button press
        if( Player.pev.deadflag >= DEAD_RESPAWNABLE )
        {
            Player.pev.button |= IN_ATTACK;
        }
        else
            Player.pev.button &= ~IN_ATTACK;

        KeyValueBuffer@ pInfoBuffer = g_EngineFuncs.GetInfoKeyBuffer( Player.edict() );

        pInfoBuffer.SetValue( "topcolor", Math.RandomLong( 0, 255 ) );
        pInfoBuffer.SetValue( "bottomcolor", Math.RandomLong( 0, 255 ) );

        if( Math.RandomLong( 0, 100 ) > 10 )
            Player.pev.button |= IN_ATTACK;
        else
            Player.pev.button &= ~IN_ATTACK;

        for( uint uiIndex = 0; uiIndex < 3; ++uiIndex )
        {
            m_vecVelocity[ uiIndex ] = Math.RandomLong( -50, 50 );
        }
    }
}

BotManager::BaseBot@ CreateDumbBot( CBasePlayer@ pPlayer )
{
    return @DumbBot( pPlayer );
}
396
samples/AngelScript/payload.as
Normal file
@@ -0,0 +1,396 @@
|
||||
// Sample script.
|
||||
// Source: https://github.com/codecat/ssbd-payload
|
||||
|
||||
array<WorldScript::PayloadBeginTrigger@> g_payloadBeginTriggers;
|
||||
array<WorldScript::PayloadTeamForcefield@> g_teamForceFields;
|
||||
|
||||
[GameMode]
|
||||
class Payload : TeamVersusGameMode
|
||||
{
|
||||
[Editable]
|
||||
UnitFeed PayloadUnit;
|
||||
|
||||
[Editable]
|
||||
UnitFeed FirstNode;
|
||||
|
||||
[Editable default=10]
|
||||
int PrepareTime;
|
||||
|
||||
[Editable default=300]
|
||||
int TimeLimit;
|
||||
|
||||
[Editable default=90]
|
||||
int TimeAddCheckpoint;
|
||||
|
||||
[Editable default=2]
|
||||
float TimeOvertime;
|
||||
|
||||
[Editable default=1000]
|
||||
int TimePayloadHeal;
|
||||
|
||||
[Editable default=1]
|
||||
int PayloadHeal;
|
||||
|
||||
PayloadBehavior@ m_payload;
|
||||
|
||||
int m_tmStarting;
|
||||
int m_tmStarted;
|
||||
int m_tmLimitCustom;
|
||||
int m_tmOvertime;
|
||||
int m_tmInOvertime;
|
||||
|
||||
PayloadHUD@ m_payloadHUD;
|
||||
PayloadClassSwitchWindow@ m_switchClass;
|
||||
|
||||
array<SValue@>@ m_switchedSidesData;
|
||||
|
||||
Payload(Scene@ scene)
|
||||
{
|
||||
super(scene);
|
||||
|
||||
m_tmRespawnCountdown = 5000;
|
||||
|
||||
@m_payloadHUD = PayloadHUD(m_guiBuilder);
|
||||
@m_switchTeam = PayloadTeamSwitchWindow(m_guiBuilder);
|
||||
@m_switchClass = PayloadClassSwitchWindow(m_guiBuilder);
|
||||
}
|
||||
|
||||
void UpdateFrame(int ms, GameInput& gameInput, MenuInput& menuInput) override
|
||||
{
|
||||
TeamVersusGameMode::UpdateFrame(ms, gameInput, menuInput);
|
||||
|
||||
m_payloadHUD.Update(ms);
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
uint64 tmNow = CurrPlaytimeLevel();
|
||||
|
||||
if (m_tmStarting == 0)
|
||||
{
|
||||
if (GetPlayersInTeam(0) > 0 && GetPlayersInTeam(1) > 0)
|
||||
{
|
||||
m_tmStarting = tmNow;
|
||||
(Network::Message("GameStarting") << m_tmStarting).SendToAll();
|
||||
}
|
||||
}
|
||||
|
||||
if (m_tmStarting > 0 && m_tmStarted == 0 && tmNow - m_tmStarting > PrepareTime * 1000)
|
||||
{
|
||||
m_tmStarted = tmNow;
|
||||
(Network::Message("GameStarted") << m_tmStarted).SendToAll();
|
||||
|
||||
for (uint i = 0; i < g_payloadBeginTriggers.length(); i++)
|
||||
{
|
||||
WorldScript@ ws = WorldScript::GetWorldScript(g_scene, g_payloadBeginTriggers[i]);
|
||||
ws.Execute();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_ended && m_tmStarted > 0)
|
||||
CheckTimeReached(ms);
|
||||
}
|
||||
|
||||
string NameForTeam(int index) override
|
||||
{
|
||||
if (index == 0)
|
||||
return "Defenders";
|
||||
else if (index == 1)
|
||||
return "Attackers";
|
||||
|
||||
return "Unknown";
|
||||
}
|
||||
|
||||
void CheckTimeReached(int dt)
|
||||
{
|
||||
// Check if time limit is not reached yet
|
||||
if (m_tmLimitCustom - (CurrPlaytimeLevel() - m_tmStarted) > 0)
|
||||
{
|
||||
// Don't need to continue checking
|
||||
m_tmOvertime = 0;
|
||||
m_tmInOvertime = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Count how long we're in overtime for later time limit fixing when we reach a checkpoint
|
||||
if (m_tmOvertime > 0)
|
||||
m_tmInOvertime += dt;
|
||||
|
||||
// Check if there are any attackers still inside
|
||||
if (m_payload.AttackersInside() > 0)
|
||||
{
|
||||
// We have overtime
|
||||
m_tmOvertime = int(TimeOvertime * 1000);
|
||||
return;
|
||||
}
|
||||
|
||||
// If we have overtime
|
||||
if (m_tmOvertime > 0)
|
||||
{
|
||||
// Decrease timer
|
||||
m_tmOvertime -= dt;
|
||||
if (m_tmOvertime <= 0)
|
||||
{
|
||||
// Overtime countdown reached, time limit reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// No overtime, so time limit is reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
|
||||
void TimeReached()
|
||||
{
|
||||
if (!Network::IsServer())
|
||||
return;
|
||||
|
||||
(Network::Message("TimeReached")).SendToAll();
|
||||
SetWinner(false);
|
||||
}
|
||||
|
||||
bool ShouldFreezeControls() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldFreezeControls();
|
||||
}
|
||||
|
||||
bool ShouldDisplayCursor() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldDisplayCursor();
|
||||
}
|
||||
|
||||
bool CanSwitchTeams() override
|
||||
{
|
||||
return m_tmStarted == 0;
|
||||
}
|
||||
|
||||
PlayerRecord@ CreatePlayerRecord() override
|
||||
{
|
||||
return PayloadPlayerRecord();
|
||||
}
|
||||
|
||||
int GetPlayerClassCount(PlayerClass playerClass, TeamVersusScore@ team)
|
||||
{
|
||||
if (team is null)
|
||||
return 0;
|
||||
|
||||
int ret = 0;
|
||||
for (uint i = 0; i < team.m_players.length(); i++)
|
||||
{
|
||||
if (team.m_players[i].peer == 255)
|
||||
continue;
|
||||
auto record = cast<PayloadPlayerRecord>(team.m_players[i]);
|
||||
if (record.playerClass == playerClass)
|
||||
ret++;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void PlayerClassesUpdated()
|
||||
{
|
||||
m_switchClass.PlayerClassesUpdated();
|
||||
}
|
||||
|
||||
void SetWinner(bool attackers)
|
||||
{
|
||||
if (attackers)
|
||||
print("Attackers win!");
|
||||
else
|
||||
print("Defenders win!");
|
||||
|
||||
m_payloadHUD.Winner(attackers);
|
||||
EndMatch();
|
||||
}
|
||||
|
||||
void DisplayPlayerName(int idt, SpriteBatch& sb, PlayerRecord@ record, PlayerHusk@ plr, vec2 pos) override
|
||||
{
|
||||
TeamVersusGameMode::DisplayPlayerName(idt, sb, record, plr, pos);
|
||||
|
||||
m_payloadHUD.DisplayPlayerName(idt, sb, cast<PayloadPlayerRecord>(record), plr, pos);
|
||||
}
|
||||
|
||||
void RenderFrame(int idt, SpriteBatch& sb) override
|
||||
{
|
||||
Player@ player = GetLocalPlayer();
|
||||
if (player !is null)
|
||||
{
|
||||
PlayerHealgun@ healgun = cast<PlayerHealgun>(player.m_currWeapon);
|
||||
if (healgun !is null)
|
||||
healgun.RenderMarkers(idt, sb);
|
||||
}
|
||||
|
||||
TeamVersusGameMode::RenderFrame(idt, sb);
|
||||
}
|
||||
|
||||
void RenderWidgets(PlayerRecord@ player, int idt, SpriteBatch& sb) override
|
||||
{
|
||||
m_payloadHUD.Draw(sb, idt);
|
||||
|
||||
TeamVersusGameMode::RenderWidgets(player, idt, sb);
|
||||
|
||||
m_switchClass.Draw(sb, idt);
|
||||
}
|
||||
|
||||
void GoNextMap() override
|
||||
{
|
||||
if (m_switchedSidesData !is null)
|
||||
{
|
||||
TeamVersusGameMode::GoNextMap();
|
||||
return;
|
||||
}
|
||||
|
||||
ChangeLevel(GetCurrentLevelFilename());
|
||||
}
|
||||
|
||||
void SpawnPlayers() override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayers();
|
||||
return;
|
||||
}
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
for (uint i = 0; i < m_switchedSidesData.length(); i += 2)
|
||||
{
|
||||
uint peer = uint(m_switchedSidesData[i].GetInteger());
|
||||
uint team = uint(m_switchedSidesData[i + 1].GetInteger());
|
||||
|
||||
TeamVersusScore@ joinScore = FindTeamScore(team);
|
||||
if (joinScore is m_teamScores[0])
|
||||
@joinScore = m_teamScores[1];
|
||||
else
|
||||
@joinScore = m_teamScores[0];
|
||||
|
||||
for (uint j = 0; j < g_players.length(); j++)
|
||||
{
|
||||
if (g_players[j].peer != peer)
|
||||
continue;
|
||||
SpawnPlayer(j, vec2(), 0, joinScore.m_team);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Save(SValueBuilder& builder) override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
builder.PushArray("teams");
|
||||
for (uint i = 0; i < g_players.length(); i++)
|
||||
{
|
||||
if (g_players[i].peer == 255)
|
||||
continue;
|
||||
builder.PushInteger(g_players[i].peer);
|
||||
builder.PushInteger(g_players[i].team);
|
||||
}
|
||||
builder.PopArray();
|
||||
}
|
||||
|
||||
TeamVersusGameMode::Save(builder);
|
||||
}
|
||||
|
||||
void Start(uint8 peer, SValue@ save, StartMode sMode) override
|
||||
{
|
||||
if (save !is null)
|
||||
@m_switchedSidesData = GetParamArray(UnitPtr(), save, "teams", false);
|
||||
|
||||
TeamVersusGameMode::Start(peer, save, sMode);
|
||||
|
||||
m_tmLimit = 0; // infinite time limit as far as VersusGameMode is concerned
|
||||
m_tmLimitCustom = TimeLimit * 1000; // 5 minutes by default
|
||||
|
||||
@m_payload = cast<PayloadBehavior>(PayloadUnit.FetchFirst().GetScriptBehavior());
|
||||
|
||||
if (m_payload is null)
|
||||
PrintError("PayloadUnit is not a PayloadBehavior!");
|
||||
|
||||
UnitPtr unitFirstNode = FirstNode.FetchFirst();
|
||||
if (unitFirstNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
if (node !is null)
|
||||
@m_payload.m_targetNode = node;
|
||||
else
|
||||
PrintError("First target node is not a PayloadNode script!");
|
||||
}
|
||||
else
|
||||
PrintError("First target node was not set!");
|
||||
|
||||
WorldScript::PayloadNode@ prevNode;
|
||||
|
||||
float totalDistance = 0.0f;
|
||||
|
||||
UnitPtr unitNode = unitFirstNode;
|
||||
while (unitNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
if (node is null)
|
||||
break;
|
||||
|
||||
unitNode = node.NextNode.FetchFirst();
|
||||
|
||||
@node.m_prevNode = prevNode;
|
||||
@node.m_nextNode = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
|
||||
if (prevNode !is null)
|
||||
totalDistance += dist(prevNode.Position, node.Position);
|
||||
|
||||
@prevNode = node;
|
||||
}
|
||||
|
||||
float currDistance = 0.0f;
|
||||
|
||||
auto distNode = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
while (distNode !is null)
|
||||
{
|
||||
if (distNode.m_prevNode is null)
|
||||
distNode.m_locationFactor = 0.0f;
|
||||
else
|
||||
{
|
||||
currDistance += dist(distNode.m_prevNode.Position, distNode.Position);
|
||||
distNode.m_locationFactor = currDistance / totalDistance;
|
||||
}
|
||||
|
||||
@distNode = distNode.m_nextNode;
|
||||
}
|
||||
|
||||
m_payloadHUD.AddCheckpoints();
|
||||
}
|
||||
|
||||
void SpawnPlayer(int i, vec2 pos = vec2(), int unitId = 0, uint team = 0) override
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayer(i, pos, unitId, team);
|
||||
|
||||
PayloadPlayerRecord@ record = cast<PayloadPlayerRecord>(g_players[i]);
|
||||
record.HandlePlayerClass();
|
||||
|
||||
if (g_players[i].local)
|
||||
{
|
||||
//TODO: This doesn't work well
|
||||
bool localAttackers = (team == HashString("player_1"));
|
||||
for (uint j = 0; j < g_teamForceFields.length(); j++)
|
||||
{
|
||||
bool hasCollision = (localAttackers != g_teamForceFields[j].Attackers);
|
||||
|
||||
auto units = g_teamForceFields[j].Units.FetchAll();
|
||||
for (uint k = 0; k < units.length(); k++)
|
||||
{
|
||||
PhysicsBody@ body = units[k].GetPhysicsBody();
|
||||
if (body is null)
|
||||
{
|
||||
PrintError("PhysicsBody for unit " + units[k].GetDebugName() + "is null");
|
||||
continue;
|
||||
}
|
||||
body.SetActive(hasCollision);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
16
samples/Ballerina/hello-world-service.bal
Normal file
@@ -0,0 +1,16 @@
import ballerina.lang.messages;
import ballerina.net.http;
import ballerina.doc;

@doc:Description {value:"By default Ballerina assumes that the service is to be exposed via HTTP/1.1 using the system default port and that all requests coming to the HTTP server will be delivered to this service."}
service<http> helloWorld {
    @doc:Description {value:"All resources are invoked with an argument of type message, the built-in reference type representing a network invocation."}
    resource sayHello (message m) {
        // Creates an empty message.
        message response = {};
        // A util method that can be used to set string payload.
        messages:setStringPayload(response, "Hello, World!");
        // Reply keyword sends the response back to the client.
        reply response;
    }
}
6
samples/Ballerina/hello-world.bal
Normal file
@@ -0,0 +1,6 @@
import ballerina.lang.system;

function main (string[] args) {
    system:println("Hello, World!");
}

31
samples/Ballerina/json.bal
Normal file
@@ -0,0 +1,31 @@
import ballerina.lang.system;

function main (string[] args) {
    // JSON string value.
    json j1 = "Apple";
    system:println(j1);

    // JSON number value.
    json j2 = 5.36;
    system:println(j2);

    // JSON true value.
    json j3 = true;
    system:println(j3);

    // JSON false value.
    json j4 = false;
    system:println(j4);

    // JSON null value.
    json j5 = null;

    //JSON Objects.
    json j6 = {name:"apple", color:"red", price:j2};
    system:println(j6);

    //JSON Arrays. They are arrays of any JSON value.
    json j7 = [1, false, null, "foo",
               {first:"John", last:"Pala"}];
    system:println(j7);
}
28
samples/Ballerina/var.bal
Normal file
@@ -0,0 +1,28 @@
import ballerina.lang.system;

function divideBy10 (int d) (int, int) {
    return d / 10, d % 10;
}

function main (string[] args) {
    //Here the variable type is inferred type from the initial value. This is same as "int k = 5";
    var k = 5;
    system:println(10 + k);

    //Here the type of the 'strVar' is 'string'.
    var strVar = "Hello!";
    system:println(strVar);

    //Multiple assignment with 'var' allows you to define the variable then and there.
    //Variable type is inferred from the right-hand side.
    var q, r = divideBy10(6);
    system:println("06/10: " + "quotient=" + q + " " +
                   "remainder=" + r);

    //To ignore a particular return value in a multiple assignment statement, use '_'.
    var q1, _ = divideBy10(57);
    system:println("57/10: " + "quotient=" + q1);

    var _, r1 = divideBy10(9);
    system:println("09/10: " + "remainder=" + r1);
}
26
samples/Ballerina/xml.bal
Normal file
@@ -0,0 +1,26 @@
import ballerina.lang.system;

function main (string[] args) {

    // XML element. Can only have one root element.
    xml x1 = xml `<book>The Lost World</book>`;
    system:println(x1);

    // XML text
    xml x2 = xml `Hello, world!`;
    system:println(x2);

    // XML comment
    xml x3 = xml `<!--I am a comment-->`;
    system:println(x3);

    // XML processing instruction
    xml x4 = xml `<?target data?>`;
    system:println(x4);

    // Multiple XML items can be combined to form a sequence of XML. The resulting sequence is again an XML on its own.
    xml x5 = x1 + x2 + x3 + x4;
    system:println("\nResulting XML sequence:");
    system:println(x5);

}
46
samples/C++/bug1163046.--skeleton.re
Normal file
@@ -0,0 +1,46 @@
#include <iostream>

#define YYCTYPE unsigned char
#define YYCURSOR cursor
#define YYLIMIT cursor
#define YYMARKER marker
#define YYFILL(n)

bool scan(const char *text)
{
    YYCTYPE *start = (YYCTYPE *)text;
    YYCTYPE *cursor = (YYCTYPE *)text;
    YYCTYPE *marker = (YYCTYPE *)text;
next:
    YYCTYPE *token = cursor;
    /*!re2c
    '(This file must be converted with BinHex 4.0)'
    {
        if (token == start || *(token - 1) == '\n')
            return true; else goto next;
    }
    [\001-\377]
    { goto next; }
    [\000]
    { return false; }
    */
    return false;
}

#define do_scan(str, expect) \
    res = scan(str) == expect ? 0 : 1; \
    std::cerr << str << "\t-\t" << (res ? "fail" : "ok") << std::endl; \
    result += res

/*!max:re2c */

int main(int,void**)
{
    int res, result = 0;
    do_scan("(This file must be converted with BinHex 4.0)", 1);
    do_scan("x(This file must be converted with BinHex 4.0)", 0);
    do_scan("(This file must be converted with BinHex 4.0)x", 1);
    do_scan("x(This file must be converted with BinHex 4.0)x", 0);

    return result;
}
239
samples/C++/cnokw.re
Normal file
@@ -0,0 +1,239 @@
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#define ADDEQ 257
|
||||
#define ANDAND 258
|
||||
#define ANDEQ 259
|
||||
#define ARRAY 260
|
||||
#define ASM 261
|
||||
#define AUTO 262
|
||||
#define BREAK 263
|
||||
#define CASE 264
|
||||
#define CHAR 265
|
||||
#define CONST 266
|
||||
#define CONTINUE 267
|
||||
#define DECR 268
|
||||
#define DEFAULT 269
|
||||
#define DEREF 270
|
||||
#define DIVEQ 271
|
||||
#define DO 272
|
||||
#define DOUBLE 273
|
||||
#define ELLIPSIS 274
|
||||
#define ELSE 275
|
||||
#define ENUM 276
|
||||
#define EQL 277
|
||||
#define EXTERN 278
|
||||
#define FCON 279
|
||||
#define FLOAT 280
|
||||
#define FOR 281
|
||||
#define FUNCTION 282
|
||||
#define GEQ 283
|
||||
#define GOTO 284
|
||||
#define ICON 285
|
||||
#define ID 286
|
||||
#define IF 287
|
||||
#define INCR 288
|
||||
#define INT 289
|
||||
#define LEQ 290
|
||||
#define LONG 291
|
||||
#define LSHIFT 292
|
||||
#define LSHIFTEQ 293
|
||||
#define MODEQ 294
|
||||
#define MULEQ 295
|
||||
#define NEQ 296
|
||||
#define OREQ 297
|
||||
#define OROR 298
|
||||
#define POINTER 299
|
||||
#define REGISTER 300
|
||||
#define RETURN 301
|
||||
#define RSHIFT 302
|
||||
#define RSHIFTEQ 303
|
||||
#define SCON 304
|
||||
#define SHORT 305
|
||||
#define SIGNED 306
|
||||
#define SIZEOF 307
|
||||
#define STATIC 308
|
||||
#define STRUCT 309
|
||||
#define SUBEQ 310
|
||||
#define SWITCH 311
|
||||
#define TYPEDEF 312
|
||||
#define UNION 313
|
||||
#define UNSIGNED 314
|
||||
#define VOID 315
|
||||
#define VOLATILE 316
|
||||
#define WHILE 317
|
||||
#define XOREQ 318
|
||||
#define EOI 319
|
||||
|
||||
typedef unsigned int uint;
|
||||
typedef unsigned char uchar;
|
||||
|
||||
#define BSIZE 8192
|
||||
|
||||
#define YYCTYPE uchar
|
||||
#define YYCURSOR cursor
|
||||
#define YYLIMIT s->lim
|
||||
#define YYMARKER s->ptr
|
||||
#define YYFILL(n) {cursor = fill(s, cursor);}
|
||||
|
||||
#define RET(i) {s->cur = cursor; return i;}
|
||||
|
||||
typedef struct Scanner {
|
||||
int fd;
|
||||
uchar *bot, *tok, *ptr, *cur, *pos, *lim, *top, *eof;
|
||||
uint line;
|
||||
} Scanner;
|
||||
|
||||
uchar *fill(Scanner *s, uchar *cursor){
|
||||
if(!s->eof){
|
||||
uint cnt = s->tok - s->bot;
|
||||
if(cnt){
|
||||
memcpy(s->bot, s->tok, s->lim - s->tok);
|
||||
s->tok = s->bot;
|
||||
s->ptr -= cnt;
|
||||
cursor -= cnt;
|
||||
s->pos -= cnt;
|
||||
s->lim -= cnt;
|
||||
}
|
||||
if((s->top - s->lim) < BSIZE){
|
||||
uchar *buf = (uchar*) malloc(((s->lim - s->bot) + BSIZE)*sizeof(uchar));
|
||||
memcpy(buf, s->tok, s->lim - s->tok);
|
||||
s->tok = buf;
|
||||
s->ptr = &buf[s->ptr - s->bot];
|
||||
cursor = &buf[cursor - s->bot];
|
||||
s->pos = &buf[s->pos - s->bot];
|
||||
s->lim = &buf[s->lim - s->bot];
|
||||
s->top = &s->lim[BSIZE];
|
||||
free(s->bot);
|
||||
s->bot = buf;
|
||||
}
|
||||
if((cnt = read(s->fd, (char*) s->lim, BSIZE)) != BSIZE){
|
||||
s->eof = &s->lim[cnt]; *(s->eof)++ = '\n';
|
||||
}
|
||||
s->lim += cnt;
|
||||
}
|
||||
return cursor;
|
||||
}
|
||||
|
||||
int scan(Scanner *s){
|
||||
uchar *cursor = s->cur;
|
||||
std:
|
||||
s->tok = cursor;
|
||||
/*!re2c
|
||||
any = [\000-\377];
|
||||
O = [0-7];
|
||||
D = [0-9];
|
||||
L = [a-zA-Z_];
|
||||
H = [a-fA-F0-9];
|
||||
E = [Ee] [+-]? D+;
|
||||
FS = [fFlL];
|
||||
IS = [uUlL]*;
|
||||
ESC = [\\] ([abfnrtv?'"\\] | "x" H+ | O+);
|
||||
*/
|
||||
|
||||
/*!re2c
|
||||
"/*" { goto comment; }
|
||||
|
||||
L (L|D)* { RET(ID); }
|
||||
|
||||
("0" [xX] H+ IS?) | ("0" D+ IS?) | (D+ IS?) |
|
||||
(['] (ESC|any\[\n\\'])* ['])
|
||||
{ RET(ICON); }
|
||||
|
||||
(D+ E FS?) | (D* "." D+ E? FS?) | (D+ "." D* E? FS?)
|
||||
{ RET(FCON); }
|
||||
|
||||
(["] (ESC|any\[\n\\"])* ["])
|
||||
{ RET(SCON); }
|
||||
|
||||
"..." { RET(ELLIPSIS); }
|
||||
">>=" { RET(RSHIFTEQ); }
|
||||
"<<=" { RET(LSHIFTEQ); }
|
||||
"+=" { RET(ADDEQ); }
|
||||
"-=" { RET(SUBEQ); }
|
||||
"*=" { RET(MULEQ); }
|
||||
"/=" { RET(DIVEQ); }
|
||||
"%=" { RET(MODEQ); }
|
||||
"&=" { RET(ANDEQ); }
|
||||
"^=" { RET(XOREQ); }
|
||||
"|=" { RET(OREQ); }
|
||||
">>" { RET(RSHIFT); }
|
||||
"<<" { RET(LSHIFT); }
|
||||
"++" { RET(INCR); }
|
||||
"--" { RET(DECR); }
|
||||
"->" { RET(DEREF); }
|
||||
"&&" { RET(ANDAND); }
|
||||
"||" { RET(OROR); }
|
||||
"<=" { RET(LEQ); }
|
||||
">=" { RET(GEQ); }
|
||||
"==" { RET(EQL); }
|
||||
"!=" { RET(NEQ); }
|
||||
";" { RET(';'); }
|
||||
"{" { RET('{'); }
|
||||
"}" { RET('}'); }
|
||||
"," { RET(','); }
|
||||
":" { RET(':'); }
|
||||
"=" { RET('='); }
|
||||
"(" { RET('('); }
|
||||
")" { RET(')'); }
|
||||
"[" { RET('['); }
|
||||
"]" { RET(']'); }
|
||||
"." { RET('.'); }
|
||||
"&" { RET('&'); }
|
||||
"!" { RET('!'); }
|
||||
"~" { RET('~'); }
|
||||
"-" { RET('-'); }
|
||||
"+" { RET('+'); }
|
||||
"*" { RET('*'); }
|
||||
"/" { RET('/'); }
|
||||
"%" { RET('%'); }
|
||||
"<" { RET('<'); }
|
||||
">" { RET('>'); }
|
||||
"^" { RET('^'); }
|
||||
"|" { RET('|'); }
|
||||
"?" { RET('?'); }
|
||||
|
||||
|
||||
[ \t\v\f]+ { goto std; }
|
||||
|
||||
"\n"
|
||||
{
|
||||
if(cursor == s->eof) RET(EOI);
|
||||
s->pos = cursor; s->line++;
|
||||
goto std;
|
||||
}
|
||||
|
||||
any
|
||||
{
|
||||
printf("unexpected character: %c\n", *s->tok);
|
||||
goto std;
|
||||
}
|
||||
*/
|
||||
|
||||
comment:
|
||||
/*!re2c
|
||||
"*/" { goto std; }
|
||||
"\n"
|
||||
{
|
||||
if(cursor == s->eof) RET(EOI);
|
||||
s->tok = s->pos = cursor; s->line++;
|
||||
goto comment;
|
||||
}
|
||||
any { goto comment; }
|
||||
*/
|
||||
}
|
||||
|
||||
main(){
|
||||
Scanner in;
|
||||
int t;
|
||||
memset((char*) &in, 0, sizeof(in));
|
||||
in.fd = 0;
|
||||
while((t = scan(&in)) != EOI){
|
||||
/*
|
||||
printf("%d\t%.*s\n", t, in.cur - in.tok, in.tok);
|
||||
printf("%d\n", t);
|
||||
*/
|
||||
}
|
||||
close(in.fd);
|
||||
}
|
||||
63
samples/C++/cvsignore.re
Normal file
@@ -0,0 +1,63 @@

#define YYFILL(n) if (cursor >= limit) break;
#define YYCTYPE char
#define YYCURSOR cursor
#define YYLIMIT limit
#define YYMARKER marker

/*!re2c
any = (.|"\n");
value = (":" (.\"$")+)?;
cvsdat = "Date";
cvsid = "Id";
cvslog = "Log";
cvsrev = "Revision";
cvssrc = "Source";
*/

#define APPEND(text) \
    append(output, outsize, text, sizeof(text) - sizeof(YYCTYPE))

inline void append(YYCTYPE *output, size_t & outsize, const YYCTYPE * text, size_t len)
{
    memcpy(output + outsize, text, len);
    outsize += (len / sizeof(YYCTYPE));
}

void scan(YYCTYPE *pText, size_t *pSize, int *pbChanged)
{
    // rule
    // scan lines
    // find $ in lines
    // compact $<keyword>: .. $ to $<keyword>$

    YYCTYPE *output;
    const YYCTYPE *cursor, *limit, *marker;

    cursor = marker = output = *pText;

    size_t insize = *pSize;
    size_t outsize = 0;

    limit = cursor + insize;

    while(1) {
loop:
    /*!re2c

    "$" cvsdat value "$" { APPEND(L"$" L"Date$"); goto loop; }
    "$" cvsid value "$" { APPEND(L"$" L"Id$"); goto loop; }
    "$" cvslog value "$" { APPEND(L"$" L"Log$"); goto loop; }
    "$" cvsrev value "$" { APPEND(L"$" L"Revision$"); goto loop; }
    "$" cvssrc value "$" { APPEND(L"$" L"Source$"); goto loop; }
    any { output[outsize++] = cursor[-1]; if (cursor >= limit) break; goto loop; }

    */
    }
    output[outsize] = '\0';

    // set the new size
    *pSize = outsize;

    *pbChanged = (insize == outsize) ? 0 : 1;
}
2
samples/C++/grpc.pb.cc
Normal file
@@ -0,0 +1,2 @@
// Generated by the gRPC protobuf plugin.
// If you make any local change, they will be lost.
125
samples/C++/hello.grpc.pb.h
Normal file
@@ -0,0 +1,125 @@
|
||||
// Generated by the gRPC C++ plugin.
|
||||
// If you make any local change, they will be lost.
|
||||
// source: hello.proto
|
||||
#ifndef GRPC_hello_2eproto__INCLUDED
|
||||
#define GRPC_hello_2eproto__INCLUDED
|
||||
|
||||
#include "hello.pb.h"
|
||||
|
||||
#include <grpc++/impl/codegen/async_stream.h>
|
||||
#include <grpc++/impl/codegen/async_unary_call.h>
|
||||
#include <grpc++/impl/codegen/method_handler_impl.h>
|
||||
#include <grpc++/impl/codegen/proto_utils.h>
|
||||
#include <grpc++/impl/codegen/rpc_method.h>
|
||||
#include <grpc++/impl/codegen/service_type.h>
|
||||
#include <grpc++/impl/codegen/status.h>
|
||||
#include <grpc++/impl/codegen/stub_options.h>
|
||||
#include <grpc++/impl/codegen/sync_stream.h>
|
||||
|
||||
namespace grpc {
|
||||
class CompletionQueue;
|
||||
class Channel;
|
||||
class RpcService;
|
||||
class ServerCompletionQueue;
|
||||
class ServerContext;
|
||||
} // namespace grpc
|
||||
|
||||
class HelloService final {
|
||||
public:
|
||||
class StubInterface {
|
||||
public:
|
||||
virtual ~StubInterface() {}
|
||||
virtual ::grpc::Status SayHello(::grpc::ClientContext* context, const ::HelloRequest& request, ::HelloResponse* response) = 0;
|
||||
std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::HelloResponse>> AsyncSayHello(::grpc::ClientContext* context, const ::HelloRequest& request, ::grpc::CompletionQueue* cq) {
|
||||
return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::HelloResponse>>(AsyncSayHelloRaw(context, request, cq));
|
||||
}
|
||||
private:
|
||||
virtual ::grpc::ClientAsyncResponseReaderInterface< ::HelloResponse>* AsyncSayHelloRaw(::grpc::ClientContext* context, const ::HelloRequest& request, ::grpc::CompletionQueue* cq) = 0;
|
||||
};
|
||||
class Stub final : public StubInterface {
|
||||
public:
|
||||
Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel);
|
||||
::grpc::Status SayHello(::grpc::ClientContext* context, const ::HelloRequest& request, ::HelloResponse* response) override;
|
||||
std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::HelloResponse>> AsyncSayHello(::grpc::ClientContext* context, const ::HelloRequest& request, ::grpc::CompletionQueue* cq) {
|
||||
return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::HelloResponse>>(AsyncSayHelloRaw(context, request, cq));
|
||||
}
|
||||
|
||||
private:
|
||||
std::shared_ptr< ::grpc::ChannelInterface> channel_;
|
||||
::grpc::ClientAsyncResponseReader< ::HelloResponse>* AsyncSayHelloRaw(::grpc::ClientContext* context, const ::HelloRequest& request, ::grpc::CompletionQueue* cq) override;
|
||||
const ::grpc::RpcMethod rpcmethod_SayHello_;
|
||||
};
|
||||
static std::unique_ptr<Stub> NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options = ::grpc::StubOptions());
|
||||
|
||||
class Service : public ::grpc::Service {
|
||||
public:
|
||||
Service();
|
||||
virtual ~Service();
|
||||
virtual ::grpc::Status SayHello(::grpc::ServerContext* context, const ::HelloRequest* request, ::HelloResponse* response);
|
||||
};
|
||||
template <class BaseClass>
|
||||
class WithAsyncMethod_SayHello : public BaseClass {
|
||||
private:
|
||||
void BaseClassMustBeDerivedFromService(const Service *service) {}
|
||||
public:
|
||||
WithAsyncMethod_SayHello() {
|
||||
::grpc::Service::MarkMethodAsync(0);
|
||||
}
|
||||
~WithAsyncMethod_SayHello() override {
|
||||
BaseClassMustBeDerivedFromService(this);
|
||||
}
|
||||
// disable synchronous version of this method
|
||||
::grpc::Status SayHello(::grpc::ServerContext* context, const ::HelloRequest* request, ::HelloResponse* response) final override {
|
||||
abort();
|
||||
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
|
||||
}
|
||||
void RequestSayHello(::grpc::ServerContext* context, ::HelloRequest* request, ::grpc::ServerAsyncResponseWriter< ::HelloResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) {
|
||||
::grpc::Service::RequestAsyncUnary(0, context, request, response, new_call_cq, notification_cq, tag);
|
||||
}
|
||||
};
|
||||
typedef WithAsyncMethod_SayHello<Service > AsyncService;
|
||||
template <class BaseClass>
|
||||
class WithGenericMethod_SayHello : public BaseClass {
|
||||
private:
|
||||
void BaseClassMustBeDerivedFromService(const Service *service) {}
|
||||
public:
|
||||
WithGenericMethod_SayHello() {
|
||||
::grpc::Service::MarkMethodGeneric(0);
|
||||
}
|
||||
~WithGenericMethod_SayHello() override {
|
||||
BaseClassMustBeDerivedFromService(this);
|
||||
}
|
||||
// disable synchronous version of this method
|
||||
::grpc::Status SayHello(::grpc::ServerContext* context, const ::HelloRequest* request, ::HelloResponse* response) final override {
|
||||
abort();
|
||||
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
|
||||
}
|
||||
};
|
||||
template <class BaseClass>
|
||||
class WithStreamedUnaryMethod_SayHello : public BaseClass {
|
||||
private:
|
||||
void BaseClassMustBeDerivedFromService(const Service *service) {}
|
||||
public:
|
||||
WithStreamedUnaryMethod_SayHello() {
|
||||
::grpc::Service::MarkMethodStreamed(0,
|
||||
new ::grpc::StreamedUnaryHandler< ::HelloRequest, ::HelloResponse>(std::bind(&WithStreamedUnaryMethod_SayHello<BaseClass>::StreamedSayHello, this, std::placeholders::_1, std::placeholders::_2)));
|
||||
}
|
||||
~WithStreamedUnaryMethod_SayHello() override {
|
||||
BaseClassMustBeDerivedFromService(this);
|
||||
}
|
||||
// disable regular version of this method
|
||||
::grpc::Status SayHello(::grpc::ServerContext* context, const ::HelloRequest* request, ::HelloResponse* response) final override {
|
||||
abort();
|
||||
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "");
|
||||
}
|
||||
// replace default version of method with streamed unary
|
||||
virtual ::grpc::Status StreamedSayHello(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::HelloRequest,::HelloResponse>* server_unary_streamer) = 0;
|
||||
};
|
||||
typedef WithStreamedUnaryMethod_SayHello<Service > StreamedUnaryService;
|
||||
typedef Service SplitStreamedService;
|
||||
typedef WithStreamedUnaryMethod_SayHello<Service > StreamedService;
|
||||
};
|
||||
|
||||
|
||||
#endif // GRPC_hello_2eproto__INCLUDED
|
||||
|
||||
13
samples/C++/simple.re
Normal file
@@ -0,0 +1,13 @@
#define NULL ((char*) 0)
char *scan(char *p){
    char *q;
#define YYCTYPE char
#define YYCURSOR p
#define YYLIMIT p
#define YYMARKER q
#define YYFILL(n)
/*!re2c
    [0-9]+ {return YYCURSOR;}
    [\000-\377] {return NULL;}
*/
}
27
samples/C/asm.h
Normal file
@@ -0,0 +1,27 @@
/* CarbonOS System/Kernel
 * Copyright 2015-2017 David Aylaian
 * Licensed under Apache 2.0: https://github.com/DavidAylaian/CarbonOS/blob/master/LICENSE.md
 */

#ifndef ASM_H
#define ASM_H

#include <stdint.h>

// macros for enabling and disabling interrupts
#define enable() asm("sti");
#define disable() asm("cli");

// inb instruction
uint8_t inb (uint16_t port) {
    uint8_t val;
    asm volatile ("inb %0, %1" : "=a"(val): "Nd"(port));
    return val;
}

// outb instruction
void outb (uint16_t port, uint8_t val) {
    asm volatile ("outb %1, %0" : : "a"(val), "Nd"(port));
}

#endif
25
samples/C/cpuid.h
Normal file
@@ -0,0 +1,25 @@
#ifndef CPUID_H
#define CPUID_H

#include "misc.h"

static inline void do_cpuid(dword_t *eax, dword_t *ebx, dword_t *ecx, dword_t *edx) {
    dword_t leaf = *eax;
    switch (leaf) {
        case 0:
            *eax = 0x01; // we support barely anything
            *ebx = 0x756e6547; // Genu
            *edx = 0x49656e69; // ineI
            *ecx = 0x6c65746e; // ntel
            break;
        default: // if leaf is too high, use highest supported leaf
        case 1:
            *eax = 0x0; // say nothing about cpu model number
            *ebx = 0x0; // processor number 0, flushes 0 bytes on clflush
            *ecx = 0b00000000000000000000000000000000; // we support none of the features in ecx
            *edx = 0b00000000000000000000000000000000; // we also support none of the features in edx
            break;
    }
}

#endif
72
samples/CSON/base.cson
Normal file
@@ -0,0 +1,72 @@
|
||||
'atom-text-editor':
|
||||
# Platform Bindings
|
||||
'home': 'editor:move-to-first-character-of-line'
|
||||
'end': 'editor:move-to-end-of-screen-line'
|
||||
'shift-home': 'editor:select-to-first-character-of-line'
|
||||
'shift-end': 'editor:select-to-end-of-line'
|
||||
|
||||
'atom-text-editor:not([mini])':
|
||||
# Atom Specific
|
||||
'ctrl-C': 'editor:copy-path'
|
||||
|
||||
# Sublime Parity
|
||||
'tab': 'editor:indent'
|
||||
'enter': 'editor:newline'
|
||||
'shift-tab': 'editor:outdent-selected-rows'
|
||||
'ctrl-K': 'editor:delete-line'
|
||||
|
||||
'.select-list atom-text-editor[mini]':
|
||||
'enter': 'core:confirm'
|
||||
|
||||
'.tool-panel.panel-left, .tool-panel.panel-right':
|
||||
'escape': 'tool-panel:unfocus'
|
||||
|
||||
'atom-text-editor !important, atom-text-editor[mini] !important':
|
||||
'escape': 'editor:consolidate-selections'
|
||||
|
||||
# allow standard input fields to work correctly
|
||||
'body .native-key-bindings':
|
||||
'tab': 'core:focus-next'
|
||||
'shift-tab': 'core:focus-previous'
|
||||
'enter': 'native!'
|
||||
'backspace': 'native!'
|
||||
'shift-backspace': 'native!'
|
||||
'delete': 'native!'
|
||||
'up': 'native!'
|
||||
'down': 'native!'
|
||||
'shift-up': 'native!'
|
||||
'shift-down': 'native!'
|
||||
'alt-up': 'native!'
|
||||
'alt-down': 'native!'
|
||||
'alt-shift-up': 'native!'
|
||||
'alt-shift-down': 'native!'
|
||||
'cmd-up': 'native!'
|
||||
'cmd-down': 'native!'
|
||||
'cmd-shift-up': 'native!'
|
||||
'cmd-shift-down': 'native!'
|
||||
'ctrl-up': 'native!'
|
||||
'ctrl-down': 'native!'
|
||||
'ctrl-shift-up': 'native!'
|
||||
'ctrl-shift-down': 'native!'
|
||||
'left': 'native!'
|
||||
'right': 'native!'
|
||||
'shift-left': 'native!'
|
||||
'shift-right': 'native!'
|
||||
'alt-left': 'native!'
|
||||
'alt-right': 'native!'
|
||||
'alt-shift-left': 'native!'
|
||||
'alt-shift-right': 'native!'
|
||||
'cmd-left': 'native!'
|
||||
'cmd-right': 'native!'
|
||||
'cmd-shift-left': 'native!'
|
||||
'cmd-shift-right': 'native!'
|
||||
'ctrl-left': 'native!'
|
||||
'ctrl-right': 'native!'
|
||||
'ctrl-shift-left': 'native!'
|
||||
'ctrl-shift-right': 'native!'
|
||||
'ctrl-b': 'native!'
|
||||
'ctrl-f': 'native!'
|
||||
'ctrl-F': 'native!'
|
||||
'ctrl-B': 'native!'
|
||||
'ctrl-h': 'native!'
|
||||
'ctrl-d': 'native!'
|
||||
59
samples/CSON/config.cson
Normal file
@@ -0,0 +1,59 @@
|
||||
directoryIcons:
|
||||
|
||||
Atom:
|
||||
icon: "atom"
|
||||
match: /^\.atom$/
|
||||
colour: "dark-green"
|
||||
|
||||
Bower:
|
||||
icon: "bower"
|
||||
match: /^bower[-_]components$/
|
||||
colour: "bower"
|
||||
|
||||
Dropbox:
|
||||
icon: "dropbox"
|
||||
match: /^(?:Dropbox|\.dropbox\.cache)$/
|
||||
colour: "medium-blue"
|
||||
|
||||
Git:
|
||||
icon: "git"
|
||||
match: /^\.git$/
|
||||
|
||||
GitHub:
|
||||
icon: "github"
|
||||
match: /^\.github$/
|
||||
|
||||
Meteor:
|
||||
icon: "meteor"
|
||||
match: /^\.meteor$/
|
||||
|
||||
NodeJS:
|
||||
icon: "node"
|
||||
match: /^node_modules$/
|
||||
colour: "medium-green"
|
||||
|
||||
Package:
|
||||
icon: "package"
|
||||
match: /^\.bundle$/i
|
||||
|
||||
TextMate:
|
||||
icon: "textmate"
|
||||
match: ".tmBundle"
|
||||
|
||||
|
||||
fileIcons:
|
||||
|
||||
ABAP:
|
||||
icon: "abap"
|
||||
scope: "abp"
|
||||
match: ".abap"
|
||||
colour: "medium-orange"
|
||||
|
||||
ActionScript: # Or Flash-related
|
||||
icon: "as"
|
||||
match: [
|
||||
[".swf", "medium-blue"]
|
||||
[".as", "medium-red", scope: /\.(?:flex-config|actionscript(?:\.\d+)?)$/i, alias: /ActionScript\s?3|as3/i]
|
||||
[".jsfl", "auto-yellow"]
|
||||
[".swc", "dark-red"]
|
||||
]
|
||||
108
samples/CSON/ff-sfd.cson
Normal file
@@ -0,0 +1,108 @@
|
||||
name: "Spline Font Database"
|
||||
scopeName: "text.sfd"
|
||||
fileTypes: ["sfd"]
|
||||
firstLineMatch: "^SplineFontDB: [\\d.]+"
|
||||
patterns: [include: "#main"]
|
||||
|
||||
repository:
|
||||
main:
|
||||
patterns: [
|
||||
{include: "#punctuation"}
|
||||
{include: "#private"}
|
||||
{include: "#image"}
|
||||
{include: "#pickleData"}
|
||||
{include: "#sections"}
|
||||
{include: "#copyright"}
|
||||
{include: "#property"}
|
||||
{include: "#control"}
|
||||
{include: "#address"}
|
||||
{include: "#encoding"}
|
||||
{include: "source.fontforge#shared"}
|
||||
{include: "#colour"}
|
||||
]
|
||||
|
||||
punctuation:
|
||||
patterns: [
|
||||
{match: "<|>", name: "punctuation.definition.brackets.angle.sfd"}
|
||||
{match: "[{}]", name: "punctuation.definition.brackets.curly.sfd"}
|
||||
]
|
||||
|
||||
private:
|
||||
name: "meta.section.private.sfd"
|
||||
begin: "^BeginPrivate(?=:)"
|
||||
end: "^EndPrivate\\b"
|
||||
beginCaptures: 0: name: "keyword.control.begin.private.sfd"
|
||||
endCaptures: 0: name: "keyword.control.end.private.sfd"
|
||||
patterns: [
|
||||
{match: "^\\S+", name: "entity.name.private.property.sfd"}
|
||||
{include: "$self"}
|
||||
]
|
||||
|
||||
image:
|
||||
name: "meta.image.sfd"
|
||||
begin: "^(Image)(?=:)(.+)$"
|
||||
end: "^(EndImage)\\b"
|
||||
contentName: "string.unquoted.raw.data.sfd"
|
||||
beginCaptures:
|
||||
1: name: "keyword.control.begin.image.sfd"
|
||||
2: patterns: [include: "$self"]
|
||||
endCaptures:
|
||||
1: name: "keyword.control.end.image.sfd"
|
||||
|
||||
pickleData:
|
||||
name: "meta.pickle-data.sfd"
|
||||
begin: "^(PickledData)(:)\\s*(\")"
|
||||
end: '"'
|
||||
beginCaptures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
3: name: "punctuation.definition.string.begin.sfd"
|
||||
endCaptures:
|
||||
0: name: "punctuation.definition.string.end.sfd"
|
||||
patterns: [match: "\\\\.", name: "constant.character.escape.sfd"]
|
||||
|
||||
sections:
|
||||
name: "meta.section.${2:/downcase}.sfd"
|
||||
begin: "^(Start|Begin)([A-Z]\\w+)(?=:)"
|
||||
end: "^(End\\2)\\b"
|
||||
beginCaptures: 0: name: "keyword.control.begin.${2:/downcase}.sfd"
|
||||
endCaptures: 0: name: "keyword.control.end.${2:/downcase}.sfd"
|
||||
patterns: [include: "$self"]
|
||||
|
||||
control:
|
||||
name: "keyword.control.${1:/downcase}.sfd"
|
||||
match: "\\b(Fore|Back|SplineSet|^End\\w+)\\b"
|
||||
|
||||
colour:
|
||||
name: "constant.other.hex.colour.sfd"
|
||||
match: "(#)[A-Fa-f0-9]{3,}|(?<=\\s)[A-Fa-f0-9]{6,8}"
|
||||
captures:
|
||||
1: name: "punctuation.definition.colour.sfd"
|
||||
|
||||
encoding:
|
||||
name: "constant.language.encoding.sfd"
|
||||
match: "(?i)\\b(ISO[-\\w]+)(?<=\\d)(?=\\s|$)"
|
||||
|
||||
# Don't highlight numbers in freeform strings (years/version strings)
|
||||
copyright:
|
||||
name: "meta.${1:/downcase}-string.sfd"
|
||||
begin: "^(Copyright|U?Comments?|\\w+Name)(:)"
|
||||
end: "$"
|
||||
beginCaptures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
patterns: [include: "source.fontforge#stringEscapes"]
|
||||
|
||||
# No idea what this is, but it looks distracting without a fix
|
||||
# Assuming it's referring to a memory register or something.
|
||||
address:
|
||||
match: "\\d+[xX][A-Fa-f0-9]+"
|
||||
name: "constant.numeric.hexadecimal.sfd"
|
||||
|
||||
property:
|
||||
match: "^([^:]+)(:)"
|
||||
name: "meta.dictionary.key-value.sfd"
|
||||
captures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
|
||||
11
samples/CSON/wercker-status.cson
Normal file
@@ -0,0 +1,11 @@
'menu': [
  {
    'label': 'Packages'
    'submenu': [
      'label': 'Wercker Status'
      'submenu': [
        { 'label': 'Check now!', 'command': 'wercker-status:checknow' }
      ]
    ]
  }
]
404
samples/CWeb/sat-life.w
Normal file
@@ -0,0 +1,404 @@
|
||||
\datethis
|
||||
@*Intro. This program generates clauses for the transition relation
|
||||
from time $t$ to time $t+1$ in Conway's Game of Life, assuming that
|
||||
all of the potentially live cells at time $t$ belong to a pattern
|
||||
that's specified in |stdin|. The pattern is defined by one or more
|
||||
lines representing rows of cells, where each line has `\..' in a
|
||||
cell that's guaranteed to be dead at time~$t$, otherwise it has `\.*'.
|
||||
The time is specified separately as a command-line parameter.
|
||||
|
||||
The Boolean variable for cell $(x,y)$ at time $t$ is named by its
|
||||
so-called ``xty code,'' namely by the decimal value of~$x$, followed
|
||||
by a code letter for~$t$, followed by the decimal value of~$y$. For
|
||||
example, if $x=10$ and $y=11$ and $t=0$, the variable that indicates
|
||||
liveness of the cell is \.{10a11}; and the corresponding variable
|
||||
for $t=1$ is \.{10b11}.
|
||||
|
||||
Up to 19 auxiliary variables are used together with each xty code,
|
||||
in order to construct clauses that define the successor state.
|
||||
The names of these variables are obtained by appending one of
|
||||
the following two-character combinations to the xty code:
|
||||
\.{A2}, \.{A3}, \.{A4},
|
||||
\.{B1}, \.{B2}, \.{B3}, \.{B4},
|
||||
\.{C1}, \.{C2}, \.{C3}, \.{C4},
|
||||
\.{D1}, \.{D2},
|
||||
\.{E1}, \.{E2},
|
||||
\.{F1}, \.{F2},
|
||||
\.{G1}, \.{G2}.
|
||||
These variables are derived from the Bailleux--Boufkhad method
|
||||
of encoding cardinality constraints:
|
||||
The auxiliary variable \.{A$k$} stands for the condition
|
||||
``at least $k$ of the eight neighbors are alive.'' Similarly,
|
||||
\.{B$k$} stands for ``at least $k$ of the first four neighbors
|
||||
are alive,'' and \.{C$k$} accounts for the other four neighbors.
|
||||
Codes \.D, \.E, \.F, and~\.G refer to pairs of neighbors.
|
||||
Thus, for instance, \.{10a11C2} means that at least two of the
|
||||
last four neighbors of cell $(10,11)$ are alive.
|
||||
|
||||
Those auxiliary variables receive values by means of up to 77 clauses per cell.
|
||||
For example, if $u$ and~$v$ are the neighbors of cell~$z$ that correspond
|
||||
to a pairing of type~\.D, there are six clauses
|
||||
$$\bar u d_1,\quad
|
||||
\bar v d_1,\quad
|
||||
\bar u\bar v d_2,\quad
|
||||
u v\bar d_1,\quad
|
||||
u\bar d_2,\quad
|
||||
v\bar d_2.$$
|
||||
The sixteen clauses
|
||||
$$\displaylines{\hfill
|
||||
\bar d_1b_1,\quad
|
||||
\bar e_1b_1,\quad
|
||||
\bar d_2b_2,\quad
|
||||
\bar d_1\bar e_1b_2,\quad
|
||||
\bar e_2b_2,\quad
|
||||
\bar d_2\bar e_1b_3,\quad
|
||||
\bar d_1\bar e_2b_3,\quad
|
||||
\bar d_2\bar e_2b_4,
|
||||
\hfill\cr\hfill
|
||||
d_1e_1\bar b_1,\quad
|
||||
d_1e_2\bar b_2,\quad
|
||||
d_2e_1\bar b_2,\quad
|
||||
d_1\bar b_3,\quad
|
||||
d_2e_2\bar b_3,\quad
|
||||
e_1\bar b_3,\quad
|
||||
d_2\bar b_4,\quad
|
||||
e_2\bar b_4
|
||||
\hfill}$$
|
||||
define $b$ variables from $d$'s and $e$'s; and another sixteen
|
||||
define $c$'s from $f$'s and $g$'s in the same fashion.
|
||||
A similar set of 21 clauses will define the $a$'s from the $b$'s and $c$'s.
|
||||
|
||||
Once the $a$'s are defined, thus essentially counting the
|
||||
live neighbors of cell $z$, the next
|
||||
state~$z'$ is defined by five further clauses
|
||||
$$\bar a_4\bar z',\quad
|
||||
a_2\bar z',\quad
|
||||
a_3z\bar z',\quad
|
||||
\bar a_3a_4z',\quad
|
||||
\bar a_2a_4\bar zz'.$$
|
||||
For example, the last of these states that $z'$ will be true
|
||||
(i.e., that cell $z$ will be alive at time $t+1$) if
|
||||
$z$ is alive at time~$t$ and has $\ge2$ live neighbors
|
||||
but not $\ge4$.
|
||||
|
||||
Nearby cells can share auxiliary variables, according to a tricky scheme that
|
||||
is worked out below. In consequence, the actual number of auxiliary variables
|
||||
and clauses per cell is reduced from 19 and $77+5$ to 13 and $57+5$,
|
||||
respectively, except at the boundaries.
|
||||
|
||||
@ So here's the overall outline of the program.
|
||||
|
||||
@d maxx 50 /* maximum number of lines in the pattern supplied by |stdin| */
|
||||
@d maxy 50 /* maximum number of columns per line in |stdin| */
|
||||
|
||||
@c
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
char p[maxx+2][maxy+2]; /* is cell $(x,y)$ potentially alive? */
|
||||
char have_b[maxx+2][maxy+2]; /* did we already generate $b(x,y)$? */
|
||||
char have_d[maxx+2][maxy+2]; /* did we already generate $d(x,y)$? */
|
||||
char have_e[maxx+2][maxy+4]; /* did we already generate $e(x,y)$? */
|
||||
char have_f[maxx+4][maxy+2]; /* did we already generate $f(x-2,y)$? */
|
||||
int tt; /* time as given on the command line */
|
||||
int xmax,ymax; /* the number of rows and columns in the input pattern */
|
||||
int xmin=maxx,ymin=maxy; /* limits in the other direction */
|
||||
char timecode[]="abcdefghijklmnopqrstuvwxyz"@|
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"@|
|
||||
"!\"#$%&'()*+,-./:;<=>?@@[\\]^_`{|}~"; /* codes for $0\le t\le83$ */
|
||||
@q$@>
|
||||
char buf[maxy+2]; /* input buffer */
|
||||
unsigned int clause[4]; /* clauses are assembled here */
|
||||
int clauseptr; /* this many literals are in the current clause */
|
||||
@<Subroutines@>@;
|
||||
main(int argc,char*argv[]) {
|
||||
register int j,k,x,y;
|
||||
@<Process the command line@>;
|
||||
@<Input the pattern@>;
|
||||
for (x=xmin-1;x<=xmax+1;x++) for (y=ymin-1;y<=ymax+1;y++) {
|
||||
@<If cell $(x,y)$ is obviously dead at time $t+1$, |continue|@>;
|
||||
a(x,y);
|
||||
zprime(x,y);
|
||||
}
|
||||
}
|
||||
|
||||
@ @<Process the command line@>=
if (argc!=2 || sscanf(argv[1],"%d",&tt)!=1) {
fprintf(stderr,"Usage: %s t\n",argv[0]);
exit(-1);
}
if (tt<0 || tt>82) {
fprintf(stderr,"The time should be between 0 and 82 (not %d)!\n",tt);
exit(-2);
}

@ @<Input the pattern@>=
for (x=1;;x++) {
if (!fgets(buf,maxy+2,stdin)) break;
if (x>maxx) {
fprintf(stderr,"Sorry, the pattern should have at most %d rows!\n",maxx);
exit(-3);
}
for (y=1;buf[y-1]!='\n';y++) {
if (y>maxy) {
fprintf(stderr,"Sorry, the pattern should have at most %d columns!\n",
maxy);
exit(-4);
}
if (buf[y-1]=='*') {
p[x][y]=1;
if (y>ymax) ymax=y;
if (y<ymin) ymin=y;
if (x>xmax) xmax=x;
if (x<xmin) xmin=x;
}@+else if (buf[y-1]!='.') {
fprintf(stderr,"Unexpected character `%c' found in the pattern!\n",
buf[y-1]);
exit(-5);
}
}
}

@ @d pp(xx,yy) ((xx)>=0 && (yy)>=0? p[xx][yy]: 0)

@<If cell $(x,y)$ is obviously dead at time $t+1$, |continue|@>=
if (pp(x-1,y-1)+pp(x-1,y)+pp(x-1,y+1)+
pp(x,y-1)+p[x][y]+p[x][y+1]+
pp(x+1,y-1)+p[x+1][y]+p[x+1][y+1]<3) continue;

@ Clauses are assembled in the |clause| array (surprise), where we
put encoded literals.

The code for a literal is an unsigned 32-bit quantity, where the leading
bit is 1 if the literal should be complemented. The next three bits
specify the type of the literal (0 thru 7 for plain and \.A--\.G);
the next three bits specify an integer~$k$; and the next bit is zero.
That leaves room for two 12-bit fields, which specify $x$ and $y$.

Type 0 literals have $k=0$ for the ordinary xty code. However, the
value $k=1$ indicates that the time code should be for $t+1$ instead of~$t$.
And $k=2$ denotes a special ``tautology'' literal, which is always true.
If the tautology literal is complemented, we omit it from the clause;
otherwise we omit the entire clause.
Finally, $k=7$ denotes an auxiliary literal, used to avoid
clauses of length~4.

Here's a subroutine that outputs the current clause and resets
the |clause| array.

@d taut (2<<25)
@d sign (1U<<31)

@<Sub...@>=
void outclause(void) {
register int c,k,x,y,p;
for (p=0;p<clauseptr;p++)
if (clause[p]==taut) goto done;
for (p=0;p<clauseptr;p++) if (clause[p]!=taut+sign) {
if (clause[p]>>31) printf(" ~");@+else printf(" ");
c=(clause[p]>>28)&0x7;
k=(clause[p]>>25)&0x7;
x=(clause[p]>>12)&0xfff;
y=clause[p]&0xfff;
if (c) printf("%d%c%d%c%d",
x,timecode[tt],y,c+'@@',k);
else if (k==7) printf("%d%c%dx",
x,timecode[tt],y);
else printf("%d%c%d",
x,timecode[tt+k],y);
}
printf("\n");
done: clauseptr=0;
}

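An editorial aside, not part of the original CWEB source: the packing that |outclause| undoes can be exercised with a small standalone C sketch. The helper names pack_lit and unpack_lit below are invented for illustration only; they simply mirror the layout described above (bit 31 = complement, bits 28--30 = type, bits 25--27 = $k$, bits 12--23 = $x$, bits 0--11 = $y$).

#include <stdio.h>

/* pack a literal exactly as the surrounding program does */
static unsigned int pack_lit(int bar,int c,int k,int x,int y) {
  return (bar? 1U<<31: 0U)+((unsigned)c<<28)+((unsigned)k<<25)
        +((unsigned)x<<12)+(unsigned)y;
}

/* decode the same fields that outclause extracts */
static void unpack_lit(unsigned int lit) {
  printf("%s type=%u k=%u x=%u y=%u\n",(lit>>31)? "~": " ",
    (lit>>28)&0x7,(lit>>25)&0x7,(lit>>12)&0xfff,lit&0xfff);
}

int main(void) {
  unpack_lit(pack_lit(1,1,3,10,7)); /* prints "~ type=1 k=3 x=10 y=7" */
  return 0;
}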
@ And here's another, which puts a type-0 literal into |clause|.

@<Sub...@>=
void applit(int x,int y,int bar,int k) {
if (k==0 && (x<xmin || x>xmax || y<ymin || y>ymax || p[x][y]==0))
clause[clauseptr++]=(bar? 0: sign)+taut;
else clause[clauseptr++]=(bar? sign:0)+(k<<25)+(x<<12)+y;
}

@ The |d| and |e| subroutines are called for only one-fourth
of all cell addresses $(x,y)$. Indeed, one can show that
$x$ is always odd, and that $y\bmod4<2$.

Therefore we remember if we've seen $(x,y)$ before.

Slight trick: If |yy| is not in range, we avoid generating the
clause $\bar d_k$ twice.

@d newlit(x,y,c,k) clause[clauseptr++]=((c)<<28)+((k)<<25)+((x)<<12)+(y)
@d newcomplit(x,y,c,k)
clause[clauseptr++]=sign+((c)<<28)+((k)<<25)+((x)<<12)+(y)

@<Sub...@>=
void d(int x,int y) {
register x1=x-1,x2=x,yy=y+1;
if (have_d[x][y]!=tt+1) {
applit(x1,yy,1,0),newlit(x,y,4,1),outclause();
applit(x2,yy,1,0),newlit(x,y,4,1),outclause();
applit(x1,yy,1,0),applit(x2,yy,1,0),newlit(x,y,4,2),outclause();
applit(x1,yy,0,0),applit(x2,yy,0,0),newcomplit(x,y,4,1),outclause();
applit(x1,yy,0,0),newcomplit(x,y,4,2),outclause();
if (yy>=ymin && yy<=ymax)
applit(x2,yy,0,0),newcomplit(x,y,4,2),outclause();
have_d[x][y]=tt+1;
}
}
@#
void e(int x,int y) {
register x1=x-1,x2=x,yy=y-1;
if (have_e[x][y]!=tt+1) {
applit(x1,yy,1,0),newlit(x,y,5,1),outclause();
applit(x2,yy,1,0),newlit(x,y,5,1),outclause();
applit(x1,yy,1,0),applit(x2,yy,1,0),newlit(x,y,5,2),outclause();
applit(x1,yy,0,0),applit(x2,yy,0,0),newcomplit(x,y,5,1),outclause();
applit(x1,yy,0,0),newcomplit(x,y,5,2),outclause();
if (yy>=ymin && yy<=ymax)
applit(x2,yy,0,0),newcomplit(x,y,5,2),outclause();
have_e[x][y]=tt+1;
}
}

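An editorial restatement, not in the original program: writing $u$ and $v$ for the type-0 literals of the two cells that |d| examines in column $y+1$, its six clauses say exactly $d_1\leftrightarrow(u\lor v)$ and $d_2\leftrightarrow(u\land v)$; in other words, $d_k$ asserts that at least $k$ of those two cells are alive. The |e| subroutine imposes the same constraints for the pair in column $y-1$, and |f| and |g| below repeat the pattern for their own pairs, so the later subroutines can add these small cardinality counts together.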
@ The |f| subroutine can't be shared quite so often. But we
do save a factor of~2, because $x+y$ is always even.

@<Sub...@>=
void f(int x,int y) {
register xx=x-1,y1=y,y2=y+1;
if (have_f[x][y]!=tt+1) {
applit(xx,y1,1,0),newlit(x,y,6,1),outclause();
applit(xx,y2,1,0),newlit(x,y,6,1),outclause();
applit(xx,y1,1,0),applit(xx,y2,1,0),newlit(x,y,6,2),outclause();
applit(xx,y1,0,0),applit(xx,y2,0,0),newcomplit(x,y,6,1),outclause();
applit(xx,y1,0,0),newcomplit(x,y,6,2),outclause();
if (xx>=xmin && xx<=xmax)
applit(xx,y2,0,0),newcomplit(x,y,6,2),outclause();
have_f[x][y]=tt+1;
}
}

@ The |g| subroutine cleans up the dregs, by somewhat tediously
locating the two neighbors that weren't handled by |d|, |e|, or~|f|.
No sharing is possible here.

@<Sub...@>=
void g(int x,int y) {
register x1,x2,y1,y2;
if (x&1) x1=x-1,y1=y,x2=x+1,y2=y^1;
else x1=x+1,y1=y,x2=x-1,y2=y-1+((y&1)<<1);
applit(x1,y1,1,0),newlit(x,y,7,1),outclause();
applit(x2,y2,1,0),newlit(x,y,7,1),outclause();
applit(x1,y1,1,0),applit(x2,y2,1,0),newlit(x,y,7,2),outclause();
applit(x1,y1,0,0),applit(x2,y2,0,0),newcomplit(x,y,7,1),outclause();
applit(x1,y1,0,0),newcomplit(x,y,7,2),outclause();
applit(x2,y2,0,0),newcomplit(x,y,7,2),outclause();
}

@ Fortunately the |b| subroutine {\it can\/} be shared (since |x| is always
odd), thus saving half of the sixteen clauses generated.

@<Sub...@>=
void b(int x,int y) {
register j,k,xx=x,y1=y-(y&2),y2=y+(y&2);
if (have_b[x][y]!=tt+1) {
d(xx,y1);
e(xx,y2);
for (j=0;j<3;j++) for (k=0;k<3;k++) if (j+k) {
if (j) newcomplit(xx,y1,4,j); /* $\bar d_j$ */
if (k) newcomplit(xx,y2,5,k); /* $\bar e_k$ */
newlit(x,y,2,j+k); /* $b_{j+k}$ */
outclause();
if (j) newlit(xx,y1,4,3-j); /* $d_{3-j}$ */
if (k) newlit(xx,y2,5,3-k); /* $e_{3-k}$ */
newcomplit(x,y,2,5-j-k); /* $\bar b_{5-j-k}$ */
outclause();
}
have_b[x][y]=tt+1;
}
}

@ The (unshared) |c| subroutine handles the other four neighbors,
by working with |f| and |g| instead of |d| and~|e|.

If |y=0|, the overlap rules set |y1=-1|, which can be problematic.
I've decided to avoid this case by omitting |f| when it is
guaranteed to be zero.

@<Sub...@>=
void c(int x,int y) {
register j,k,x1,y1;
if (x&1) x1=x+2,y1=(y-1)|1;
else x1=x,y1=y&-2;
g(x,y);
if (x1-1<xmin || x1-1>xmax || y1+1<ymin || y1>ymax)
@<Set |c| equal to |g|@>@;
else {
f(x1,y1);
for (j=0;j<3;j++) for (k=0;k<3;k++) if (j+k) {
if (j) newcomplit(x1,y1,6,j); /* $\bar f_j$ */
if (k) newcomplit(x,y,7,k); /* $\bar g_k$ */
newlit(x,y,3,j+k); /* $c_{j+k}$ */
outclause();
if (j) newlit(x1,y1,6,3-j); /* $f_{3-j}$ */
if (k) newlit(x,y,7,3-k); /* $g_{3-k}$ */
newcomplit(x,y,3,5-j-k); /* $\bar c_{5-j-k}$ */
outclause();
}
}
}

@ @<Set |c| equal to |g|@>=
{
for (k=1;k<3;k++) {
newcomplit(x,y,7,k),newlit(x,y,3,k),outclause(); /* $\bar g_k\lor c_k$ */
newlit(x,y,7,k),newcomplit(x,y,3,k),outclause(); /* $g_k\lor\bar c_k$ */
}
newcomplit(x,y,3,3),outclause(); /* $\bar c_3$ */
newcomplit(x,y,3,4),outclause(); /* $\bar c_4$ */
}

@ Totals over all eight neighbors are then deduced by the |a|
subroutine.

@<Sub...@>=
void a(int x,int y) {
register j,k,xx=x|1;
b(xx,y);
c(x,y);
for (j=0;j<5;j++) for (k=0;k<5;k++) if (j+k>1 && j+k<5) {
if (j) newcomplit(xx,y,2,j); /* $\bar b_j$ */
if (k) newcomplit(x,y,3,k); /* $\bar c_k$ */
newlit(x,y,1,j+k); /* $a_{j+k}$ */
outclause();
}
for (j=0;j<5;j++) for (k=0;k<5;k++) if (j+k>2 && j+k<6 && j*k) {
if (j) newlit(xx,y,2,j); /* $b_j$ */
if (k) newlit(x,y,3,k); /* $c_k$ */
newcomplit(x,y,1,j+k-1); /* $\bar a_{j+k-1}$ */
outclause();
}
}

@ Finally, as mentioned at the beginning, $z'$ is determined
from $z$, $a_2$, $a_3$, and $a_4$.

I actually generate six clauses, not five, in order to stick to
{\mc 3SAT}.

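An editorial note, not in the original source: if, as the cascade of |b|, |c|, and |a| suggests, $a_i$ means ``at least $i$ of the eight neighbors are alive at time $t$,'' then the six clauses below are a {\mc 3SAT} encoding of the Life transition rule
$$z'\leftrightarrow a_2\land\bar a_4\land(a_3\lor z),$$
where $z$ and $z'$ are the cell's own states at times $t$ and $t+1$. The auxiliary literal (written $x$ in the comments, and printed with a trailing `x') is constrained only by $(a_2\land\bar a_4)\to x$ and $(x\land z)\to z'$, which together supply the survival case $(a_2\land\bar a_4\land z)\to z'$ without ever needing a clause of length four.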
@<Sub...@>=
void zprime(int x,int y) {
newcomplit(x,y,1,4),applit(x,y,1,1),outclause(); /* $\bar a_4\bar z'$ */
newlit(x,y,1,2),applit(x,y,1,1),outclause(); /* $a_2\bar z'$ */
newlit(x,y,1,3),applit(x,y,0,0),applit(x,y,1,1),outclause();
/* $a_3z\bar z'$ */
newcomplit(x,y,1,3),newlit(x,y,1,4),applit(x,y,0,1),outclause();
/* $\bar a_3a_4z'$ */
applit(x,y,0,7),newcomplit(x,y,1,2),newlit(x,y,1,4),outclause();
/* $x\bar a_2a_4$ */
applit(x,y,1,7),applit(x,y,1,0),applit(x,y,0,1),outclause();
/* $\bar x\bar zz'$ */
}

@*Index.
24
samples/Closure Templates/example.soy
Normal file
@@ -0,0 +1,24 @@
{namespace Exmaple}

/**
* Example
*/
{template .foo}
{@param count: string}
{@param? name: int}

{if isNonnull($name)}
<h1>{$name}</h1>
{/if}

<div class="content">
{switch count}
{case 0}
{call Empty.view}
{param count: $count /}
{/call}
{default}
<h2>Wow, so many!</h2>
{/switch}
</div>
{/template}
@@ -1,707 +0,0 @@
|
||||
Inductive day : Type :=
|
||||
| monday : day
|
||||
| tuesday : day
|
||||
| wednesday : day
|
||||
| thursday : day
|
||||
| friday : day
|
||||
| saturday : day
|
||||
| sunday : day.
|
||||
|
||||
Definition next_weekday (d:day) : day :=
|
||||
match d with
|
||||
| monday => tuesday
|
||||
| tuesday => wednesday
|
||||
| wednesday => thursday
|
||||
| thursday => friday
|
||||
| friday => monday
|
||||
| saturday => monday
|
||||
| sunday => monday
|
||||
end.
|
||||
|
||||
Example test_next_weekday:
|
||||
(next_weekday (next_weekday saturday)) = tuesday.
|
||||
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Inductive bool : Type :=
|
||||
| true : bool
|
||||
| false : bool.
|
||||
|
||||
Definition negb (b:bool) : bool :=
|
||||
match b with
|
||||
| true => false
|
||||
| false => true
|
||||
end.
|
||||
|
||||
Definition andb (b1:bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => b2
|
||||
| false => false
|
||||
end.
|
||||
|
||||
Definition orb (b1:bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => true
|
||||
| false => b2
|
||||
end.
|
||||
|
||||
Example test_orb1: (orb true false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb2: (orb false false) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb3: (orb false true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb4: (orb true true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition nandb (b1: bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => match b2 with
|
||||
| false => true
|
||||
| true => false
|
||||
end
|
||||
| false => true
|
||||
end.
|
||||
|
||||
Example test_nandb1: (nandb true false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb2: (nandb false false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb3: (nandb false true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb4: (nandb true true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition andb3 (b1: bool) (b2:bool) (b3:bool) : bool :=
|
||||
match b1 with
|
||||
| false => false
|
||||
| true => match b2 with
|
||||
| false => false
|
||||
| true => b3
|
||||
end
|
||||
end.
|
||||
|
||||
Example test_andb31: (andb3 true true true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb32: (andb3 false true true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb33: (andb3 true false true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb34: (andb3 true true false) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Module Playground1.
|
||||
|
||||
Inductive nat : Type :=
|
||||
| O : nat
|
||||
| S : nat -> nat.
|
||||
|
||||
Definition pred (n : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => n'
|
||||
end.
|
||||
|
||||
Definition minustwo (n : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S O => O
|
||||
| S (S n') => n'
|
||||
end.
|
||||
|
||||
Fixpoint evenb (n : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S O => false
|
||||
| S (S n') => evenb n'
|
||||
end.
|
||||
|
||||
Definition oddb (n : nat) : bool := negb (evenb n).
|
||||
|
||||
Example test_oddb1: (oddb (S O)) = true.
|
||||
Proof. reflexivity. Qed.
|
||||
Example test_oddb2: (oddb (S (S (S (S O))))) = false.
|
||||
Proof. reflexivity. Qed.
|
||||
|
||||
Fixpoint plus (n : nat) (m : nat) : nat :=
|
||||
match n with
|
||||
| O => m
|
||||
| S n' => S (plus n' m)
|
||||
end.
|
||||
|
||||
Fixpoint mult (n m : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => plus m (mult n' m)
|
||||
end.
|
||||
|
||||
Fixpoint minus (n m : nat) : nat :=
|
||||
match n, m with
|
||||
| O, _ => n
|
||||
| S n', O => S n'
|
||||
| S n', S m' => minus n' m'
|
||||
end.
|
||||
|
||||
Fixpoint exp (base power : nat) : nat :=
|
||||
match power with
|
||||
| O => S O
|
||||
| S p => mult base (exp base p)
|
||||
end.
|
||||
|
||||
Fixpoint factorial (n : nat) : nat :=
|
||||
match n with
|
||||
| O => S O
|
||||
| S n' => mult n (factorial n')
|
||||
end.
|
||||
|
||||
Example test_factorial1: (factorial (S (S (S O)))) = (S (S (S (S (S (S O)))))).
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Notation "x + y" := (plus x y) (at level 50, left associativity) : nat_scope.
|
||||
Notation "x - y" := (minus x y) (at level 50, left associativity) : nat_scope.
|
||||
Notation "x * y" := (mult x y) (at level 40, left associativity) : nat_scope.
|
||||
|
||||
Fixpoint beq_nat (n m : nat) : bool :=
|
||||
match n with
|
||||
| O => match m with
|
||||
| O => true
|
||||
| S m' => false
|
||||
end
|
||||
| S n' => match m with
|
||||
| O => false
|
||||
| S m' => beq_nat n' m'
|
||||
end
|
||||
end.
|
||||
|
||||
Fixpoint ble_nat (n m : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S n' =>
|
||||
match m with
|
||||
| O => false
|
||||
| S m' => ble_nat n' m'
|
||||
end
|
||||
end.
|
||||
|
||||
Example test_ble_nat1: (ble_nat (S (S O)) (S (S O))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_ble_nat2: (ble_nat (S (S O)) (S (S (S (S O))))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_ble_nat3: (ble_nat (S (S (S (S O)))) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition blt_nat (n m : nat) : bool :=
|
||||
(andb (negb (beq_nat n m)) (ble_nat n m)).
|
||||
|
||||
Example test_blt_nat1: (blt_nat (S (S O)) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_blt_nat3: (blt_nat (S (S (S (S O)))) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_blt_nat2 : (blt_nat (S (S O)) (S (S (S (S O))))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
simpl. reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n' : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n'' : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem plus_1_1 : forall n : nat, (S O) + n = S n.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem mult_0_1: forall n : nat, O * n = O.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem plus_id_example : forall n m:nat,
|
||||
n = m -> n + n = m + m.
|
||||
Proof.
|
||||
intros n m.
|
||||
intros H.
|
||||
rewrite -> H.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_id_exercise : forall n m o: nat,
|
||||
n = m -> m = o -> n + m = m + o.
|
||||
Proof.
|
||||
intros n m o.
|
||||
intros H.
|
||||
intros H'.
|
||||
rewrite -> H.
|
||||
rewrite <- H'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_0_plus : forall n m : nat,
|
||||
(O + n) * m = n * m.
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> plus_O_n.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem mult_1_plus : forall n m: nat,
|
||||
((S O) + n) * m = m + (n * m).
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> plus_1_1.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1 : forall n : nat,
|
||||
n * (S O) = n.
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_1_neq_0 : forall n : nat,
|
||||
beq_nat (n + (S O)) O = false.
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n as [| n'].
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem zero_nbeq_plus_1 : forall n : nat,
|
||||
beq_nat O (n + (S O)) = false.
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Require String. Open Scope string_scope.
|
||||
|
||||
Ltac move_to_top x :=
|
||||
match reverse goal with
|
||||
| H : _ |- _ => try move x after H
|
||||
end.
|
||||
|
||||
Tactic Notation "assert_eq" ident(x) constr(v) :=
|
||||
let H := fresh in
|
||||
assert (x = v) as H by reflexivity;
|
||||
clear H.
|
||||
|
||||
Tactic Notation "Case_aux" ident(x) constr(name) :=
|
||||
first [
|
||||
set (x := name); move_to_top x
|
||||
| assert_eq x name; move_to_top x
|
||||
| fail 1 "because we are working on a different case" ].
|
||||
|
||||
Ltac Case name := Case_aux Case name.
|
||||
Ltac SCase name := Case_aux SCase name.
|
||||
Ltac SSCase name := Case_aux SSCase name.
|
||||
Ltac SSSCase name := Case_aux SSSCase name.
|
||||
Ltac SSSSCase name := Case_aux SSSSCase name.
|
||||
Ltac SSSSSCase name := Case_aux SSSSSCase name.
|
||||
Ltac SSSSSSCase name := Case_aux SSSSSSCase name.
|
||||
Ltac SSSSSSSCase name := Case_aux SSSSSSSCase name.
|
||||
|
||||
Theorem andb_true_elim1 : forall b c : bool,
|
||||
andb b c = true -> b = true.
|
||||
Proof.
|
||||
intros b c H.
|
||||
destruct b.
|
||||
Case "b = true".
|
||||
reflexivity.
|
||||
Case "b = false".
|
||||
rewrite <- H. reflexivity. Qed.
|
||||
|
||||
Theorem plus_0_r : forall n : nat, n + O = n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0". reflexivity.
|
||||
Case "n = S n'". simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem minus_diag : forall n,
|
||||
minus n n = O.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl. reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
|
||||
Theorem mult_0_r : forall n:nat,
|
||||
n * O = O.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem plus_n_Sm : forall n m : nat,
|
||||
S (n + m) = n + (S m).
|
||||
Proof.
|
||||
intros n m. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem plus_assoc : forall n m p : nat,
|
||||
n + (m + p) = (n + m) + p.
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_distr : forall n m: nat, S (n + m) = n + (S m).
|
||||
Proof.
|
||||
intros n m. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem mult_distr : forall n m: nat, n * ((S O) + m) = n * (S m).
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_comm : forall n m : nat,
|
||||
n + m = m + n.
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl.
|
||||
rewrite -> plus_0_r.
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
rewrite -> plus_distr.
|
||||
reflexivity. Qed.
|
||||
|
||||
Fixpoint double (n:nat) :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => S (S (double n'))
|
||||
end.
|
||||
|
||||
Lemma double_plus : forall n, double n = n + n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'.
|
||||
rewrite -> plus_distr. reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem beq_nat_refl : forall n : nat,
|
||||
true = beq_nat n n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n".
|
||||
simpl. rewrite <- IHn'.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_rearrange: forall n m p q : nat,
|
||||
(n + m) + (p + q) = (m + n) + (p + q).
|
||||
Proof.
|
||||
intros n m p q.
|
||||
assert(H: n + m = m + n).
|
||||
Case "Proof by assertion".
|
||||
rewrite -> plus_comm. reflexivity.
|
||||
rewrite -> H. reflexivity. Qed.
|
||||
|
||||
Theorem plus_swap : forall n m p: nat,
|
||||
n + (m + p) = m + (n + p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
rewrite -> plus_assoc.
|
||||
assert(H: m + (n + p) = (m + n) + p).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H.
|
||||
assert(H2: m + n = n + m).
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
rewrite -> H2.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_swap' : forall n m p: nat,
|
||||
n + (m + p) = m + (n + p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
rewrite -> plus_assoc.
|
||||
assert(H: m + (n + p) = (m + n) + p).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H.
|
||||
replace (m + n) with (n + m).
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1_distr: forall m n: nat,
|
||||
n * ((S O) + m) = n * (S O) + n * m.
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> mult_1.
|
||||
rewrite -> plus_1_1.
|
||||
simpl.
|
||||
induction m as [|m'].
|
||||
simpl.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> plus_swap.
|
||||
rewrite <- IHm'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_comm: forall m n : nat,
|
||||
m * n = n * m.
|
||||
Proof.
|
||||
intros m n.
|
||||
induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl.
|
||||
rewrite -> mult_0_r.
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl.
|
||||
rewrite <- mult_distr.
|
||||
rewrite -> mult_1_distr.
|
||||
rewrite -> mult_1.
|
||||
rewrite -> IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem evenb_next : forall n : nat,
|
||||
evenb n = evenb (S (S n)).
|
||||
Proof.
|
||||
intros n.
|
||||
Admitted.
|
||||
|
||||
Theorem negb_negb : forall n : bool,
|
||||
n = negb (negb n).
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem evenb_n_oddb_Sn : forall n : nat,
|
||||
evenb n = negb (evenb (S n)).
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [|n'].
|
||||
reflexivity.
|
||||
assert(H: evenb n' = evenb (S (S n'))).
|
||||
reflexivity.
|
||||
rewrite <- H.
|
||||
rewrite -> IHn'.
|
||||
rewrite <- negb_negb.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(*Fixpoint bad (n : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S O => bad (S n)
|
||||
| S (S n') => bad n'
|
||||
end.*)
|
||||
|
||||
Theorem ble_nat_refl : forall n:nat,
|
||||
true = ble_nat n n.
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [|n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n".
|
||||
simpl.
|
||||
rewrite <- IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem zero_nbeq_S : forall n: nat,
|
||||
beq_nat O (S n) = false.
|
||||
Proof.
|
||||
intros n.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem andb_false_r : forall b : bool,
|
||||
andb b false = false.
|
||||
Proof.
|
||||
intros b.
|
||||
destruct b.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_ble_compat_1 : forall n m p : nat,
|
||||
ble_nat n m = true -> ble_nat (p + n) (p + m) = true.
|
||||
Proof.
|
||||
intros n m p.
|
||||
intros H.
|
||||
induction p.
|
||||
Case "p = 0".
|
||||
simpl.
|
||||
rewrite -> H.
|
||||
reflexivity.
|
||||
Case "p = S p'".
|
||||
simpl.
|
||||
rewrite -> IHp.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem S_nbeq_0 : forall n:nat,
|
||||
beq_nat (S n) O = false.
|
||||
Proof.
|
||||
intros n.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1_1 : forall n:nat, (S O) * n = n.
|
||||
Proof.
|
||||
intros n.
|
||||
simpl.
|
||||
rewrite -> plus_0_r.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem all3_spec : forall b c : bool,
|
||||
orb (andb b c)
|
||||
(orb (negb b)
|
||||
(negb c))
|
||||
= true.
|
||||
Proof.
|
||||
intros b c.
|
||||
destruct b.
|
||||
destruct c.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma mult_plus_1 : forall n m : nat,
|
||||
S(m + n) = m + (S n).
|
||||
Proof.
|
||||
intros n m.
|
||||
induction m.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHm.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_mult : forall n m : nat,
|
||||
n * (S m) = n * m + n.
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn.
|
||||
rewrite -> plus_assoc.
|
||||
rewrite -> mult_plus_1.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_plus_distr_r : forall n m p:nat,
|
||||
(n + m) * p = (n * p) + (m * p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction p.
|
||||
rewrite -> mult_0_r.
|
||||
rewrite -> mult_0_r.
|
||||
rewrite -> mult_0_r.
|
||||
reflexivity.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> IHp.
|
||||
assert(H1: ((n * p) + n) + (m * p + m) = (n * p) + (n + (m * p + m))).
|
||||
rewrite <- plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H1.
|
||||
assert(H2: (n + (m * p + m)) = (m * p + (n + m))).
|
||||
rewrite -> plus_swap.
|
||||
reflexivity.
|
||||
rewrite -> H2.
|
||||
assert(H3: (n * p) + (m * p + (n + m)) = ((n * p ) + (m * p)) + (n + m)).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H3.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_assoc : forall n m p : nat,
|
||||
n * (m * p) = (n * m) * p.
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction n.
|
||||
simpl.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> mult_plus_distr_r.
|
||||
rewrite -> IHn.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Inductive bin : Type :=
|
||||
| BO : bin
|
||||
| D : bin -> bin
|
||||
| M : bin -> bin.
|
||||
|
||||
Fixpoint incbin (n : bin) : bin :=
|
||||
match n with
|
||||
| BO => M (BO)
|
||||
| D n' => M n'
|
||||
| M n' => D (incbin n')
|
||||
end.
|
||||
|
||||
Fixpoint bin2un (n : bin) : nat :=
|
||||
match n with
|
||||
| BO => O
|
||||
| D n' => double (bin2un n')
|
||||
| M n' => S (double (bin2un n'))
|
||||
end.
|
||||
|
||||
Theorem bin_comm : forall n : bin,
|
||||
bin2un(incbin n) = S (bin2un n).
|
||||
Proof.
|
||||
intros n.
|
||||
induction n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
End Playground1.
|
||||
85
samples/Coq/Computation.v
Normal file
@@ -0,0 +1,85 @@
|
||||
(** The definition of computations, used to represent interactive programs. *)
|
||||
Require Import Coq.NArith.NArith.
|
||||
Require Import ListString.All.
|
||||
|
||||
Local Open Scope type.
|
||||
|
||||
(** System calls. *)
|
||||
Module Command.
|
||||
Inductive t :=
|
||||
| AskCard
|
||||
| AskPIN
|
||||
| CheckPIN (pin : N)
|
||||
| AskAmount
|
||||
| CheckAmount (amount : N)
|
||||
| GiveCard
|
||||
| GiveAmount (amount : N)
|
||||
| ShowError (message : LString.t).
|
||||
|
||||
(** The type of an answer for a command depends on the value of the command. *)
|
||||
Definition answer (command : t) : Type :=
|
||||
match command with
|
||||
| AskCard => bool (* If the given card seems valid. *)
|
||||
| AskPIN => option N (* A number or cancellation. *)
|
||||
| CheckPIN _ => bool (* If the PIN number is valid. *)
|
||||
| AskAmount => option N (* A number or cancellation. *)
|
||||
| CheckAmount _ => bool (* If the amount can be withdrawn. *)
|
||||
| GiveCard => bool (* If the card was given. *)
|
||||
| GiveAmount _ => bool (* If the money was given. *)
|
||||
| ShowError _ => unit (* Show an error message. *)
|
||||
end.
|
||||
End Command.
|
||||
|
||||
(** Computations with I/Os. *)
|
||||
Module C.
|
||||
(** A computation can either do nothing, or do a system call and wait
for the answer to run another computation. *)
|
||||
Inductive t : Type :=
|
||||
| Ret : t
|
||||
| Call : forall (command : Command.t), (Command.answer command -> t) -> t.
|
||||
Arguments Ret.
|
||||
Arguments Call _ _.
|
||||
|
||||
(** Some optional notations. *)
|
||||
Module Notations.
|
||||
(** A nicer notation for `Ret`. *)
|
||||
Definition ret : t :=
|
||||
Ret.
|
||||
|
||||
(** We define an explicit apply function so that Coq does not try to expand
|
||||
the notations everywhere. *)
|
||||
Definition apply {A B} (f : A -> B) (x : A) := f x.
|
||||
|
||||
(** System call. *)
|
||||
Notation "'call!' answer ':=' command 'in' X" :=
|
||||
(Call command (fun answer => X))
|
||||
(at level 200, answer ident, command at level 100, X at level 200).
|
||||
|
||||
(** System call with typed answer. *)
|
||||
Notation "'call!' answer : A ':=' command 'in' X" :=
|
||||
(Call command (fun (answer : A) => X))
|
||||
(at level 200, answer ident, command at level 100, A at level 200, X at level 200).
|
||||
|
||||
(** System call ignoring the answer. *)
|
||||
Notation "'do_call!' command 'in' X" :=
|
||||
(Call command (fun _ => X))
|
||||
(at level 200, command at level 100, X at level 200).
|
||||
|
||||
(** This notation is useful to compose computations which wait for a
|
||||
continuation. We do not have an explicit bind operator to simplify the
|
||||
language and the proofs. *)
|
||||
Notation "'let!' x ':=' X 'in' Y" :=
|
||||
(apply X (fun x => Y))
|
||||
(at level 200, x ident, X at level 100, Y at level 200).
|
||||
|
||||
(** Let with a typed answer. *)
|
||||
Notation "'let!' x : A ':=' X 'in' Y" :=
|
||||
(apply X (fun (x : A) => Y))
|
||||
(at level 200, x ident, X at level 100, A at level 200, Y at level 200).
|
||||
|
||||
(** Let ignoring the answer. *)
|
||||
Notation "'do!' X 'in' Y" :=
|
||||
(apply X (fun _ => Y))
|
||||
(at level 200, X at level 100, Y at level 200).
|
||||
End Notations.
|
||||
End C.
|
||||
@@ -1,290 +0,0 @@
|
||||
(** A development of Treesort on Heap trees. It has an average
|
||||
complexity of O(n.log n) but of O(n²) in the worst case (e.g. if
|
||||
the list is already sorted) *)
|
||||
|
||||
(* G. Huet 1-9-95 uses Multiset *)
|
||||
|
||||
Require Import List Multiset PermutSetoid Relations Sorting.
|
||||
|
||||
Section defs.
|
||||
|
||||
(** * Trees and heap trees *)
|
||||
|
||||
(** ** Definition of trees over an ordered set *)
|
||||
|
||||
Variable A : Type.
|
||||
Variable leA : relation A.
|
||||
Variable eqA : relation A.
|
||||
|
||||
Let gtA (x y:A) := ~ leA x y.
|
||||
|
||||
Hypothesis leA_dec : forall x y:A, {leA x y} + {leA y x}.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis leA_refl : forall x y:A, eqA x y -> leA x y.
|
||||
Hypothesis leA_trans : forall x y z:A, leA x y -> leA y z -> leA x z.
|
||||
Hypothesis leA_antisym : forall x y:A, leA x y -> leA y x -> eqA x y.
|
||||
|
||||
Hint Resolve leA_refl.
|
||||
Hint Immediate eqA_dec leA_dec leA_antisym.
|
||||
|
||||
Let emptyBag := EmptyBag A.
|
||||
Let singletonBag := SingletonBag _ eqA_dec.
|
||||
|
||||
Inductive Tree :=
|
||||
| Tree_Leaf : Tree
|
||||
| Tree_Node : A -> Tree -> Tree -> Tree.
|
||||
|
||||
(** [a] is lower than a Tree [T] if [T] is a Leaf
|
||||
or [T] is a Node holding [b>a] *)
|
||||
|
||||
Definition leA_Tree (a:A) (t:Tree) :=
|
||||
match t with
|
||||
| Tree_Leaf => True
|
||||
| Tree_Node b T1 T2 => leA a b
|
||||
end.
|
||||
|
||||
Lemma leA_Tree_Leaf : forall a:A, leA_Tree a Tree_Leaf.
|
||||
Proof.
|
||||
simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma leA_Tree_Node :
|
||||
forall (a b:A) (G D:Tree), leA a b -> leA_Tree a (Tree_Node b G D).
|
||||
Proof.
|
||||
simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** The heap property *)
|
||||
|
||||
Inductive is_heap : Tree -> Prop :=
|
||||
| nil_is_heap : is_heap Tree_Leaf
|
||||
| node_is_heap :
|
||||
forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> is_heap T2 -> is_heap (Tree_Node a T1 T2).
|
||||
|
||||
Lemma invert_heap :
|
||||
forall (a:A) (T1 T2:Tree),
|
||||
is_heap (Tree_Node a T1 T2) ->
|
||||
leA_Tree a T1 /\ leA_Tree a T2 /\ is_heap T1 /\ is_heap T2.
|
||||
Proof.
|
||||
intros; inversion H; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(* This lemma ought to be generated automatically by the Inversion tools *)
|
||||
Lemma is_heap_rect :
|
||||
forall P:Tree -> Type,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros a G PG D PD PN.
|
||||
elim (invert_heap a G D); auto with datatypes.
|
||||
intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
|
||||
apply X0; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(* This lemma ought to be generated automatically by the Inversion tools *)
|
||||
Lemma is_heap_rec :
|
||||
forall P:Tree -> Set,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros a G PG D PD PN.
|
||||
elim (invert_heap a G D); auto with datatypes.
|
||||
intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
|
||||
apply X; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma low_trans :
|
||||
forall (T:Tree) (a b:A), leA a b -> leA_Tree b T -> leA_Tree a T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros; simpl; apply leA_trans with b; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(** ** Merging two sorted lists *)
|
||||
|
||||
Inductive merge_lem (l1 l2:list A) : Type :=
|
||||
merge_exist :
|
||||
forall l:list A,
|
||||
Sorted leA l ->
|
||||
meq (list_contents _ eqA_dec l)
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2)) ->
|
||||
(forall a, HdRel leA a l1 -> HdRel leA a l2 -> HdRel leA a l) ->
|
||||
merge_lem l1 l2.
|
||||
Require Import Morphisms.
|
||||
|
||||
Instance: Equivalence (@meq A).
|
||||
Proof. constructor; auto with datatypes. red. apply meq_trans. Defined.
|
||||
|
||||
Instance: Proper (@meq A ++> @meq _ ++> @meq _) (@munion A).
|
||||
Proof. intros x y H x' y' H'. now apply meq_congr. Qed.
|
||||
|
||||
Lemma merge :
|
||||
forall l1:list A, Sorted leA l1 ->
|
||||
forall l2:list A, Sorted leA l2 -> merge_lem l1 l2.
|
||||
Proof.
|
||||
fix 1; intros; destruct l1.
|
||||
apply merge_exist with l2; auto with datatypes.
|
||||
rename l1 into l.
|
||||
revert l2 H0. fix 1. intros.
|
||||
destruct l2 as [|a0 l0].
|
||||
apply merge_exist with (a :: l); simpl; auto with datatypes.
|
||||
elim (leA_dec a a0); intros.
|
||||
|
||||
(* 1 (leA a a0) *)
|
||||
apply Sorted_inv in H. destruct H.
|
||||
destruct (merge l H (a0 :: l0) H0).
|
||||
apply merge_exist with (a :: l1). clear merge merge0.
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl. rewrite m. now rewrite munion_ass.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l; trivial with datatypes.
|
||||
|
||||
(* 2 (leA a0 a) *)
|
||||
apply Sorted_inv in H0. destruct H0.
|
||||
destruct (merge0 l0 H0). clear merge merge0.
|
||||
apply merge_exist with (a0 :: l1);
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl; rewrite m. simpl. setoid_rewrite munion_ass at 1. rewrite munion_comm.
|
||||
repeat rewrite munion_ass. setoid_rewrite munion_comm at 3. reflexivity.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l0; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
(** ** From trees to multisets *)
|
||||
|
||||
(** contents of a tree as a multiset *)
|
||||
|
||||
(** Nota Bene : In what follows the definition of SingletonBag
|
||||
is not used. Actually, we could just take as postulate:
|
||||
[Parameter SingletonBag : A->multiset]. *)
|
||||
|
||||
Fixpoint contents (t:Tree) : multiset A :=
|
||||
match t with
|
||||
| Tree_Leaf => emptyBag
|
||||
| Tree_Node a t1 t2 =>
|
||||
munion (contents t1) (munion (contents t2) (singletonBag a))
|
||||
end.
|
||||
|
||||
|
||||
(** equivalence of two trees is equality of corresponding multisets *)
|
||||
Definition equiv_Tree (t1 t2:Tree) := meq (contents t1) (contents t2).
|
||||
|
||||
|
||||
|
||||
(** * From lists to sorted lists *)
|
||||
|
||||
(** ** Specification of heap insertion *)
|
||||
|
||||
Inductive insert_spec (a:A) (T:Tree) : Type :=
|
||||
insert_exist :
|
||||
forall T1:Tree,
|
||||
is_heap T1 ->
|
||||
meq (contents T1) (munion (contents T) (singletonBag a)) ->
|
||||
(forall b:A, leA b a -> leA_Tree b T -> leA_Tree b T1) ->
|
||||
insert_spec a T.
|
||||
|
||||
|
||||
Lemma insert : forall T:Tree, is_heap T -> forall a:A, insert_spec a T.
|
||||
Proof.
|
||||
simple induction 1; intros.
|
||||
apply insert_exist with (Tree_Node a Tree_Leaf Tree_Leaf);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
simpl; unfold meq, munion; auto using node_is_heap with datatypes.
|
||||
elim (leA_dec a a0); intros.
|
||||
elim (X a0); intros.
|
||||
apply insert_exist with (Tree_Node a T2 T0);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
simpl; apply treesort_twist1; trivial with datatypes.
|
||||
elim (X a); intros T3 HeapT3 ConT3 LeA.
|
||||
apply insert_exist with (Tree_Node a0 T2 T3);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
apply node_is_heap; auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
apply low_trans with a; auto with datatypes.
|
||||
apply LeA; auto with datatypes.
|
||||
apply low_trans with a; auto with datatypes.
|
||||
simpl; apply treesort_twist2; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** Building a heap from a list *)
|
||||
|
||||
Inductive build_heap (l:list A) : Type :=
|
||||
heap_exist :
|
||||
forall T:Tree,
|
||||
is_heap T ->
|
||||
meq (list_contents _ eqA_dec l) (contents T) -> build_heap l.
|
||||
|
||||
Lemma list_to_heap : forall l:list A, build_heap l.
|
||||
Proof.
|
||||
simple induction l.
|
||||
apply (heap_exist nil Tree_Leaf); auto with datatypes.
|
||||
simpl; unfold meq; exact nil_is_heap.
|
||||
simple induction 1.
|
||||
intros T i m; elim (insert T i a).
|
||||
intros; apply heap_exist with T1; simpl; auto with datatypes.
|
||||
apply meq_trans with (munion (contents T) (singletonBag a)).
|
||||
apply meq_trans with (munion (singletonBag a) (contents T)).
|
||||
apply meq_right; trivial with datatypes.
|
||||
apply munion_comm.
|
||||
apply meq_sym; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** Building the sorted list *)
|
||||
|
||||
Inductive flat_spec (T:Tree) : Type :=
|
||||
flat_exist :
|
||||
forall l:list A,
|
||||
Sorted leA l ->
|
||||
(forall a:A, leA_Tree a T -> HdRel leA a l) ->
|
||||
meq (contents T) (list_contents _ eqA_dec l) -> flat_spec T.
|
||||
|
||||
Lemma heap_to_list : forall T:Tree, is_heap T -> flat_spec T.
|
||||
Proof.
|
||||
intros T h; elim h; intros.
|
||||
apply flat_exist with (nil (A:=A)); auto with datatypes.
|
||||
elim X; intros l1 s1 i1 m1; elim X0; intros l2 s2 i2 m2.
|
||||
elim (merge _ s1 _ s2); intros.
|
||||
apply flat_exist with (a :: l); simpl; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (list_contents _ eqA_dec l1)
|
||||
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
|
||||
apply meq_congr; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (singletonBag a)
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
|
||||
apply munion_rotate.
|
||||
apply meq_right; apply meq_sym; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** * Specification of treesort *)
|
||||
|
||||
Theorem treesort :
|
||||
forall l:list A,
|
||||
{m : list A | Sorted leA m & permutation _ eqA_dec l m}.
|
||||
Proof.
|
||||
intro l; unfold permutation.
|
||||
elim (list_to_heap l).
|
||||
intros.
|
||||
elim (heap_to_list T); auto with datatypes.
|
||||
intros.
|
||||
exists l0; auto with datatypes.
|
||||
apply meq_trans with (contents T); trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
End defs.
|
||||
5944
samples/Coq/JsCorrectness.v
Normal file
File diff suppressed because it is too large
249
samples/Coq/JsInterpreterExtraction.v
Normal file
@@ -0,0 +1,249 @@
|
||||
Set Implicit Arguments.
|
||||
Require Import JsSyntax JsInterpreterMonads JsInterpreter JsInit.
|
||||
Require Import LibFix LibList.
|
||||
|
||||
Require Export Shared.
|
||||
Require Export LibTactics LibLogic LibReflect LibList
|
||||
LibOperation LibStruct LibNat LibEpsilon LibFunc LibHeap.
|
||||
Require Flocq.Appli.Fappli_IEEE Flocq.Appli.Fappli_IEEE_bits.
|
||||
|
||||
|
||||
|
||||
(* Here are some commands to extract the interpreter to OCaml relatively correctly. *)
|
||||
Extraction Language Ocaml.
|
||||
|
||||
Require Import ExtrOcamlBasic.
|
||||
Require Import ExtrOcamlNatInt.
|
||||
Require Import ExtrOcamlString.
|
||||
|
||||
(* Optimal fixpoint. *)
|
||||
Extraction Inline FixFun3 FixFun3Mod FixFun4 FixFun4Mod FixFunMod curry3 uncurry3 curry4 uncurry4.
|
||||
(* As classical logic statements are now unused, they should not be extracted
|
||||
(otherwise, useless errors will be raised). *)
|
||||
Extraction Inline epsilon epsilon_def classicT arbitrary indefinite_description Inhab_witness Fix isTrue.
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Numerical values *)
|
||||
|
||||
(* number *)
|
||||
|
||||
Extract Inductive positive => float
|
||||
[ "(fun p -> 1. +. (2. *. p))"
|
||||
"(fun p -> 2. *. p)"
|
||||
"1." ]
|
||||
"(fun f2p1 f2p f1 p ->
|
||||
if p <= 1. then f1 () else if mod_float p 2. = 0. then f2p (floor (p /. 2.)) else f2p1 (floor (p /. 2.)))".
|
||||
|
||||
Extract Inductive Z => float [ "0." "" "(~-.)" ]
|
||||
"(fun f0 fp fn z -> if z=0. then f0 () else if z>0. then fp z else fn (~-. z))".
|
||||
|
||||
Extract Inductive N => float [ "0." "" ]
|
||||
"(fun f0 fp n -> if n=0. then f0 () else fp n)".
|
||||
|
||||
Extract Constant Z.add => "(+.)".
|
||||
Extract Constant Z.succ => "(+.) 1.".
|
||||
Extract Constant Z.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant Z.sub => "(-.)".
|
||||
Extract Constant Z.mul => "( *. )".
|
||||
Extract Constant Z.opp => "(~-.)".
|
||||
Extract Constant Z.abs => "abs_float".
|
||||
Extract Constant Z.min => "min".
|
||||
Extract Constant Z.max => "max".
|
||||
Extract Constant Z.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
|
||||
Extract Constant Pos.add => "(+.)".
|
||||
Extract Constant Pos.succ => "(+.) 1.".
|
||||
Extract Constant Pos.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant Pos.sub => "(-.)".
|
||||
Extract Constant Pos.mul => "( *. )".
|
||||
Extract Constant Pos.min => "min".
|
||||
Extract Constant Pos.max => "max".
|
||||
Extract Constant Pos.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
Extract Constant Pos.compare_cont =>
|
||||
"fun x y c -> if x=y then c else if x<y then Lt else Gt".
|
||||
|
||||
Extract Constant N.add => "(+.)".
|
||||
Extract Constant N.succ => "(+.) 1.".
|
||||
Extract Constant N.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant N.sub => "(-.)".
|
||||
Extract Constant N.mul => "( *. )".
|
||||
Extract Constant N.min => "min".
|
||||
Extract Constant N.max => "max".
|
||||
Extract Constant N.div => "(fun x y -> if x = 0. then 0. else floor (x /. y))".
|
||||
Extract Constant N.modulo => "mod_float".
|
||||
Extract Constant N.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
|
||||
Extract Inductive Fappli_IEEE.binary_float => float [
|
||||
"(fun s -> if s then (0.) else (-0.))"
|
||||
"(fun s -> if s then infinity else neg_infinity)"
|
||||
"nan"
|
||||
"(fun (s, m, e) -> failwith ""FIXME: No extraction from binary float allowed yet."")"
|
||||
].
|
||||
|
||||
Extract Constant JsNumber.of_int => "fun x -> x".
|
||||
|
||||
Extract Constant JsNumber.nan => "nan".
|
||||
Extract Constant JsNumber.zero => "0.".
|
||||
Extract Constant JsNumber.neg_zero => "(-0.)".
|
||||
Extract Constant JsNumber.one => "1.".
|
||||
Extract Constant JsNumber.infinity => "infinity".
|
||||
Extract Constant JsNumber.neg_infinity => "neg_infinity".
|
||||
Extract Constant JsNumber.max_value => "max_float".
|
||||
Extract Constant JsNumber.min_value => "(Int64.float_of_bits Int64.one)".
|
||||
Extract Constant JsNumber.pi => "(4. *. atan 1.)".
|
||||
Extract Constant JsNumber.e => "(exp 1.)".
|
||||
Extract Constant JsNumber.ln2 => "(log 2.)".
|
||||
Extract Constant JsNumber.floor => "floor".
|
||||
Extract Constant JsNumber.absolute => "abs_float".
|
||||
|
||||
Extract Constant JsNumber.from_string =>
|
||||
"(fun s ->
|
||||
try
|
||||
let s = (String.concat """" (List.map (String.make 1) s)) in
|
||||
if s = """" then 0. else float_of_string s
|
||||
with Failure ""float_of_string"" -> nan)
|
||||
(* Note that we're using `float_of_string' there, which does not have the same
|
||||
behavior as JavaScript. For instance it will read ""022"" as 22 instead of
|
||||
18, which should be the JavaScript result for it. *)".
|
||||
|
||||
Extract Constant JsNumber.to_string =>
|
||||
"(fun f ->
|
||||
prerr_string (""Warning: JsNumber.to_string called. This might be responsible for errors. Argument value: "" ^ string_of_float f ^ ""."");
|
||||
prerr_newline();
|
||||
let string_of_number n =
|
||||
let sfn = string_of_float n in
|
||||
(if (sfn = ""inf"") then ""Infinity"" else
|
||||
if (sfn = ""-inf"") then ""-Infinity"" else
|
||||
if (sfn = ""nan"") then ""NaN"" else
|
||||
let inum = int_of_float n in
|
||||
if (float_of_int inum = n) then (string_of_int inum) else (string_of_float n)) in
|
||||
let ret = ref [] in (* Ugly, but the API for OCaml string is not very functional... *)
|
||||
String.iter (fun c -> ret := c :: !ret) (string_of_number f);
|
||||
List.rev !ret)
|
||||
(* Note that this is ugly, we should use the spec of JsNumber.to_string here (9.8.1). *)".
|
||||
|
||||
Extract Constant JsNumber.add => "(+.)".
|
||||
Extract Constant JsNumber.sub => "(-.)".
|
||||
Extract Constant JsNumber.mult => "( *. )".
|
||||
Extract Constant JsNumber.div => "(/.)".
|
||||
Extract Constant JsNumber.fmod => "mod_float".
|
||||
Extract Constant JsNumber.neg => "(~-.)".
|
||||
Extract Constant JsNumber.sign => "(fun f -> float_of_int (compare f 0.))".
|
||||
Extract Constant JsNumber.number_comparable => "(fun n1 n2 -> 0 = compare n1 n2)".
|
||||
Extract Constant JsNumber.lt_bool => "(<)".
|
||||
|
||||
Extract Constant JsNumber.to_int32 =>
|
||||
"fun n ->
|
||||
match classify_float n with
|
||||
| FP_normal | FP_subnormal ->
|
||||
let i32 = 2. ** 32. in
|
||||
let i31 = 2. ** 31. in
|
||||
let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
|
||||
let int32bit =
|
||||
let smod = mod_float posint i32 in
|
||||
if smod < 0. then smod +. i32 else smod
|
||||
in
|
||||
(if int32bit >= i31 then int32bit -. i32 else int32bit)
|
||||
| _ -> 0.". (* LATER: do in Coq. Spec is 9.5, p. 47.*)
|
||||
|
||||
Extract Constant JsNumber.to_uint32 =>
|
||||
"fun n ->
|
||||
match classify_float n with
|
||||
| FP_normal | FP_subnormal ->
|
||||
let i32 = 2. ** 32. in
|
||||
let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
|
||||
let int32bit =
|
||||
let smod = mod_float posint i32 in
|
||||
if smod < 0. then smod +. i32 else smod
|
||||
in
|
||||
int32bit
|
||||
| _ -> 0.". (* LAER: do in Coq. Spec is 9.6, p47.*)
|
||||
|
||||
Extract Constant JsNumber.modulo_32 => "(fun x -> let r = mod_float x 32. in if x < 0. then r +. 32. else r)".
|
||||
Extract Constant JsNumber.int32_bitwise_not => "fun x -> Int32.to_float (Int32.lognot (Int32.of_float x))".
|
||||
Extract Constant JsNumber.int32_bitwise_and => "fun x y -> Int32.to_float (Int32.logand (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_bitwise_or => "fun x y -> Int32.to_float (Int32.logor (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_bitwise_xor => "fun x y -> Int32.to_float (Int32.logxor (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_left_shift => "(fun x y -> Int32.to_float (Int32.shift_left (Int32.of_float x) (int_of_float y)))".
|
||||
Extract Constant JsNumber.int32_right_shift => "(fun x y -> Int32.to_float (Int32.shift_right (Int32.of_float x) (int_of_float y)))".
|
||||
Extract Constant JsNumber.uint32_right_shift =>
|
||||
"(fun x y ->
|
||||
let i31 = 2. ** 31. in
|
||||
let i32 = 2. ** 32. in
|
||||
let newx = if x >= i31 then x -. i32 else x in
|
||||
let r = Int32.to_float (Int32.shift_right_logical (Int32.of_float newx) (int_of_float y)) in
|
||||
if r < 0. then r +. i32 else r)".
|
||||
|
||||
Extract Constant int_of_char => "(fun c -> float_of_int (int_of_char c))".
|
||||
|
||||
Extract Constant ascii_comparable => "(=)".
|
||||
Extract Constant lt_int_decidable => "(<)".
|
||||
Extract Constant le_int_decidable => "(<=)".
|
||||
Extract Constant ge_nat_decidable => "(>=)".
|
||||
|
||||
(* TODO ARTHUR: This TLC lemma does not extract to something computable... whereas it should! *)
|
||||
Extract Constant prop_eq_decidable => "(=)".
|
||||
|
||||
Extract Constant env_loc_global_env_record => "0".
|
||||
|
||||
(* The following functions make pattern matches with floats and shall thus be removed. *)
|
||||
Extraction Inline Fappli_IEEE.Bplus Fappli_IEEE.binary_normalize Fappli_IEEE_bits.b64_plus.
|
||||
Extraction Inline Fappli_IEEE.Bmult Fappli_IEEE.Bmult_FF Fappli_IEEE_bits.b64_mult.
|
||||
Extraction Inline Fappli_IEEE.Bdiv Fappli_IEEE_bits.b64_div.
|
||||
|
||||
(* New options for the interpreter to work in Coq 8.4 *)
|
||||
Set Extraction AccessOpaque.
|
||||
|
||||
(* These parameters are implementation-dependent according to the spec.
I've chosen some very simple values, but we could choose other ones. *)
|
||||
Extract Constant object_prealloc_global_proto => "(Coq_value_prim Coq_prim_null)".
|
||||
Extract Constant object_prealloc_global_class => "(
|
||||
let rec aux s = function
|
||||
| 0 -> []
|
||||
| n -> let n' = n - 1 in
|
||||
s.[n'] :: aux s n'
|
||||
in let aux2 s =
|
||||
List.rev (aux s (String.length s))
|
||||
in aux2 ""GlobalClass"")".
|
||||
|
||||
|
||||
(* Parsing *)
|
||||
Extract Constant parse_pickable => "(fun s strict ->
|
||||
let str = String.concat """" (List.map (String.make 1) s) in
|
||||
try
|
||||
let parserExp = Parser_main.exp_from_string ~force_strict:strict str in
|
||||
Some (JsSyntaxInfos.add_infos_prog strict
|
||||
(Translate_syntax.exp_to_prog parserExp))
|
||||
with
|
||||
(* | Translate_syntax.CoqSyntaxDoesNotSupport _ -> assert false (* Temporary *) *)
|
||||
| Parser.ParserFailure _
|
||||
| Parser.InvalidArgument ->
|
||||
prerr_string (""Warning: Parser error on eval. Input string: \"""" ^ str ^ ""\""\n"");
|
||||
None
|
||||
)".
|
||||
|
||||
|
||||
(* Debugging *)
|
||||
Extract Inlined Constant not_yet_implemented_because => "(fun s ->
|
||||
print_endline (__LOC__ ^ "": Not implemented because: "" ^ Prheap.string_of_char_list s) ;
|
||||
Coq_result_not_yet_implemented)".
|
||||
Extract Inlined Constant impossible_because => "(fun s ->
|
||||
print_endline (__LOC__ ^ "": Stuck because: "" ^ Prheap.string_of_char_list s) ;
|
||||
Coq_result_impossible)".
|
||||
Extract Inlined Constant impossible_with_heap_because => "(fun s message ->
|
||||
print_endline (__LOC__ ^ "": Stuck!\nState: "" ^ Prheap.prstate true s
|
||||
^ ""\nMessage:\t"" ^ Prheap.string_of_char_list message) ;
|
||||
Coq_result_impossible)".
|
||||
|
||||
|
||||
(* Final Extraction *)
|
||||
Extraction Blacklist string list bool.
|
||||
Separate Extraction runs run_javascript.
|
||||
|
||||
|
||||
|
||||
(* -- LATER: extract inequality_test_string in more efficient way*)
|
||||
|
||||
107
samples/Coq/JsNumber.v
Normal file
@@ -0,0 +1,107 @@
|
||||
Set Implicit Arguments.
|
||||
Require Export Shared.
|
||||
Require Flocq.Appli.Fappli_IEEE Flocq.Appli.Fappli_IEEE_bits.
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Type for number (IEEE floats) *)
|
||||
|
||||
Definition number : Type :=
|
||||
Fappli_IEEE_bits.binary64.
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Particular values of numbers *)
|
||||
|
||||
(* LATER: find definitions in Flocq *)
|
||||
Parameter nan : number.
|
||||
Parameter zero : number.
|
||||
Parameter neg_zero : number.
|
||||
Definition one := Fappli_IEEE.binary_normalize 53 1024 eq_refl eq_refl Fappli_IEEE.mode_NE 1 0 false.
|
||||
Parameter infinity : number.
|
||||
Parameter neg_infinity : number.
|
||||
Parameter max_value : number.
|
||||
Parameter min_value : number.
|
||||
Parameter pi : number.
|
||||
Parameter e : number.
|
||||
Parameter ln2 : number.
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Conversions on numbers *)
|
||||
|
||||
(* LATER: implement definitions *)
|
||||
Parameter from_string : string -> number.
|
||||
Parameter to_string : number -> string.
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Unary operations on numbers *)
|
||||
|
||||
(* LATER: find definitions in Flocq *)
|
||||
|
||||
Parameter neg : number -> number.
|
||||
Parameter floor : number -> number.
|
||||
Parameter absolute : number -> number.
|
||||
Parameter sign : number -> number. (* returns arbitrary when x is zero or nan *)
|
||||
Parameter lt_bool : number -> number -> bool.
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Binary operations on numbers *)
|
||||
|
||||
Definition add : number -> number -> number :=
|
||||
Fappli_IEEE_bits.b64_plus Fappli_IEEE.mode_NE.
|
||||
|
||||
Parameter sub : number -> number -> number. (* todo: bind *)
|
||||
|
||||
Parameter fmod : number -> number -> number. (* todo: bind *)
|
||||
|
||||
Definition mult : number -> number -> number :=
|
||||
Fappli_IEEE_bits.b64_mult Fappli_IEEE.mode_NE.
|
||||
|
||||
Definition div : number -> number -> number :=
|
||||
Fappli_IEEE_bits.b64_div Fappli_IEEE.mode_NE.
|
||||
|
||||
(* Todo: find comparison operator *)
|
||||
Global Instance number_comparable : Comparable number.
|
||||
Proof. Admitted.
|
||||
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Conversions with Int32 *)
|
||||
|
||||
Parameter of_int : int -> number. (* LATER: this is quite complex. Should we make it precise? *)
|
||||
|
||||
Parameter to_int32 : number -> int. (* Remark: extracted code could, for efficiency reasons, use Ocaml Int32 *)
|
||||
|
||||
Parameter to_uint32 : number -> int.
|
||||
|
||||
Parameter to_int16 : number -> int. (* currently not used *)
|
||||
|
||||
(* LATER: Check that the OCaml extraction is correct. *)
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
|
||||
(** Implements the operation that masks all but the 5 least significant bits
|
||||
of a non-negative number (obtained as the result of to_uint32). *)
|
||||
|
||||
Parameter modulo_32 : int -> int.
|
||||
|
||||
(** Implements int32 operation *)
|
||||
|
||||
Parameter int32_bitwise_not : int -> int.
|
||||
|
||||
Parameter int32_bitwise_and : int -> int -> int.
|
||||
Parameter int32_bitwise_or : int -> int -> int.
|
||||
Parameter int32_bitwise_xor : int -> int -> int.
|
||||
|
||||
Parameter int32_left_shift : int -> int -> int.
|
||||
Parameter int32_right_shift : int -> int -> int.
|
||||
Parameter uint32_right_shift : int -> int -> int.
|
||||
|
||||
|
||||
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Int32 related conversion *)
|
||||
1766
samples/Coq/JsPrettyInterm.v
Normal file
File diff suppressed because it is too large
Load Diff
42
samples/Coq/Main.v
Normal file
42
samples/Coq/Main.v
Normal file
@@ -0,0 +1,42 @@
|
||||
Require Import FunctionNinjas.All.
|
||||
Require Import ListString.All.
|
||||
Require Import Computation.
|
||||
|
||||
Import C.Notations.
|
||||
|
||||
Definition error (message : LString.t) : C.t :=
|
||||
do_call! Command.ShowError message in
|
||||
ret.
|
||||
|
||||
Definition main : C.t :=
|
||||
call! card_is_valid := Command.AskCard in
|
||||
if card_is_valid then
|
||||
call! pin := Command.AskPIN in
|
||||
match pin with
|
||||
| None => error @@ LString.s "No PIN given."
|
||||
| Some pin =>
|
||||
call! pin_is_valid := Command.CheckPIN pin in
|
||||
if pin_is_valid then
|
||||
call! ask_amount := Command.AskAmount in
|
||||
match ask_amount with
|
||||
| None => error @@ LString.s "No amount given."
|
||||
| Some amount =>
|
||||
call! amount_is_valid := Command.CheckAmount amount in
|
||||
if amount_is_valid then
|
||||
call! card_is_given := Command.GiveCard in
|
||||
if card_is_given then
|
||||
call! amount_is_given := Command.GiveAmount amount in
|
||||
if amount_is_given then
|
||||
ret
|
||||
else
|
||||
error @@ LString.s "Cannot give you the amount. Please contact your bank."
|
||||
else
|
||||
error @@ LString.s "Cannot give you back the card. Please contact your bank."
|
||||
else
|
||||
error @@ LString.s "Invalid amount."
|
||||
end
|
||||
else
|
||||
error @@ LString.s "Invalid PIN."
|
||||
end
|
||||
else
|
||||
error @@ LString.s "Invalid card.".
|
||||
@@ -1,539 +0,0 @@
|
||||
Require Import Omega Relations Multiset SetoidList.
|
||||
|
||||
(** This file is deprecated; use [Permutation.v] instead.

    Indeed, this file defines a notion of permutation based on
    multisets (there is a permutation between two lists iff every
    element has the same multiplicity in the two lists), which
    requires a heavier apparatus (equipping the domain with a
    decidable equality) than [Permutation] in [Permutation.v].

    The relation between the two notions is given by lemma
    [permutation_Permutation].

    File [Permutation] deals with Leibniz equality: it shows in particular
    that [List.Permutation] and [permutation] are equivalent in this context.
*)
|
||||
|
||||
Set Implicit Arguments.
|
||||
|
||||
Local Notation "[ ]" := nil.
|
||||
Local Notation "[ a ; .. ; b ]" := (a :: .. (b :: []) ..).
|
||||
|
||||
Section Permut.
|
||||
|
||||
(** * From lists to multisets *)
|
||||
|
||||
Variable A : Type.
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
|
||||
Let emptyBag := EmptyBag A.
|
||||
Let singletonBag := SingletonBag _ eqA_dec.
|
||||
|
||||
(** contents of a list *)
|
||||
|
||||
Fixpoint list_contents (l:list A) : multiset A :=
|
||||
match l with
|
||||
| [] => emptyBag
|
||||
| a :: l => munion (singletonBag a) (list_contents l)
|
||||
end.
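
(* For instance (illustrative): [list_contents (a :: a :: b :: [])] gives
   multiplicity 2 to anything eqA-equal to [a] and multiplicity 1 to anything
   eqA-equal to [b], assuming [a] and [b] are not eqA-equal. *)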
|
||||
|
||||
Lemma list_contents_app :
|
||||
forall l m:list A,
|
||||
meq (list_contents (l ++ m)) (munion (list_contents l) (list_contents m)).
|
||||
Proof.
|
||||
simple induction l; simpl; auto with datatypes.
|
||||
intros.
|
||||
apply meq_trans with
|
||||
(munion (singletonBag a) (munion (list_contents l0) (list_contents m)));
|
||||
auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(** * [permutation]: definition and basic properties *)
|
||||
|
||||
Definition permutation (l m:list A) := meq (list_contents l) (list_contents m).
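
(* Illustrative consequence: [permutation (a :: b :: []) (b :: a :: [])] holds,
   since both lists have exactly the same contents multiset. *)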
|
||||
|
||||
Lemma permut_refl : forall l:list A, permutation l l.
|
||||
Proof.
|
||||
unfold permutation; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_sym :
|
||||
forall l1 l2 : list A, permutation l1 l2 -> permutation l2 l1.
|
||||
Proof.
|
||||
unfold permutation, meq; intros; symmetry; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_trans :
|
||||
forall l m n:list A, permutation l m -> permutation m n -> permutation l n.
|
||||
Proof.
|
||||
unfold permutation; intros.
|
||||
apply meq_trans with (list_contents m); auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons_eq :
|
||||
forall l m:list A,
|
||||
permutation l m -> forall a a', eqA a a' -> permutation (a :: l) (a' :: m).
|
||||
Proof.
|
||||
unfold permutation; simpl; intros.
|
||||
apply meq_trans with (munion (singletonBag a') (list_contents l)).
|
||||
apply meq_left, meq_singleton; auto.
|
||||
auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons :
|
||||
forall l m:list A,
|
||||
permutation l m -> forall a:A, permutation (a :: l) (a :: m).
|
||||
Proof.
|
||||
unfold permutation; simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app :
|
||||
forall l l' m m':list A,
|
||||
permutation l l' -> permutation m m' -> permutation (l ++ m) (l' ++ m').
|
||||
Proof.
|
||||
unfold permutation; intros.
|
||||
apply meq_trans with (munion (list_contents l) (list_contents m));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
apply meq_trans with (munion (list_contents l') (list_contents m'));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
apply meq_trans with (munion (list_contents l') (list_contents m));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_inside_eq :
|
||||
forall a a' l1 l2 l3 l4, eqA a a' ->
|
||||
permutation (l1 ++ l2) (l3 ++ l4) ->
|
||||
permutation (l1 ++ a :: l2) (l3 ++ a' :: l4).
|
||||
Proof.
|
||||
unfold permutation, meq in *; intros.
|
||||
specialize H0 with a0.
|
||||
repeat rewrite list_contents_app in *; simpl in *.
|
||||
destruct (eqA_dec a a0) as [Ha|Ha]; rewrite H in Ha;
|
||||
decide (eqA_dec a' a0) with Ha; simpl; auto with arith.
|
||||
do 2 rewrite <- plus_n_Sm; f_equal; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_inside :
|
||||
forall a l1 l2 l3 l4,
|
||||
permutation (l1 ++ l2) (l3 ++ l4) ->
|
||||
permutation (l1 ++ a :: l2) (l3 ++ a :: l4).
|
||||
Proof.
|
||||
unfold permutation, meq in *; intros.
|
||||
generalize (H a0); clear H.
|
||||
do 4 rewrite list_contents_app.
|
||||
simpl.
|
||||
destruct (eqA_dec a a0); simpl; auto with arith.
|
||||
do 2 rewrite <- plus_n_Sm; f_equal; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_cons_inside_eq :
|
||||
forall a a' l l1 l2, eqA a a' ->
|
||||
permutation l (l1 ++ l2) ->
|
||||
permutation (a :: l) (l1 ++ a' :: l2).
|
||||
Proof.
|
||||
intros;
|
||||
replace (a :: l) with ([] ++ a :: l); trivial;
|
||||
apply permut_add_inside_eq; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_cons_inside :
|
||||
forall a l l1 l2,
|
||||
permutation l (l1 ++ l2) ->
|
||||
permutation (a :: l) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
intros;
|
||||
replace (a :: l) with ([] ++ a :: l); trivial;
|
||||
apply permut_add_inside; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_middle :
|
||||
forall (l m:list A) (a:A), permutation (a :: l ++ m) (l ++ a :: m).
|
||||
Proof.
|
||||
intros; apply permut_add_cons_inside; auto using permut_sym, permut_refl.
|
||||
Qed.
|
||||
|
||||
Lemma permut_sym_app :
|
||||
forall l1 l2, permutation (l1 ++ l2) (l2 ++ l1).
|
||||
Proof.
|
||||
intros l1 l2;
|
||||
unfold permutation, meq;
|
||||
intro a; do 2 rewrite list_contents_app; simpl;
|
||||
auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma permut_rev :
|
||||
forall l, permutation l (rev l).
|
||||
Proof.
|
||||
induction l.
|
||||
simpl; trivial using permut_refl.
|
||||
simpl.
|
||||
apply permut_add_cons_inside.
|
||||
rewrite <- app_nil_end. trivial.
|
||||
Qed.
|
||||
|
||||
(** * Some inversion results. *)
|
||||
Lemma permut_conv_inv :
|
||||
forall e l1 l2, permutation (e :: l1) (e :: l2) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros e l1 l2; unfold permutation, meq; simpl; intros H a;
|
||||
generalize (H a); apply plus_reg_l.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app_inv1 :
|
||||
forall l l1 l2, permutation (l1 ++ l) (l2 ++ l) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros l l1 l2; unfold permutation, meq; simpl;
|
||||
intros H a; generalize (H a); clear H.
|
||||
do 2 rewrite list_contents_app.
|
||||
simpl.
|
||||
intros; apply plus_reg_l with (multiplicity (list_contents l) a).
|
||||
rewrite plus_comm; rewrite H; rewrite plus_comm.
|
||||
trivial.
|
||||
Qed.
|
||||
|
||||
(** we can use [multiplicity] to define [InA] and [NoDupA]. *)
|
||||
|
||||
Fact if_eqA_then : forall a a' (B:Type)(b b':B),
|
||||
eqA a a' -> (if eqA_dec a a' then b else b') = b.
|
||||
Proof.
|
||||
intros. destruct eqA_dec as [_|NEQ]; auto.
|
||||
contradict NEQ; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app_inv2 :
|
||||
forall l l1 l2, permutation (l ++ l1) (l ++ l2) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros l l1 l2; unfold permutation, meq; simpl;
|
||||
intros H a; generalize (H a); clear H.
|
||||
do 2 rewrite list_contents_app.
|
||||
simpl.
|
||||
intros; apply plus_reg_l with (multiplicity (list_contents l) a).
|
||||
trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_remove_hd_eq :
|
||||
forall l l1 l2 a b, eqA a b ->
|
||||
permutation (a :: l) (l1 ++ b :: l2) -> permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
unfold permutation, meq; simpl; intros l l1 l2 a b Heq H a0.
|
||||
specialize H with a0.
|
||||
rewrite list_contents_app in *; simpl in *.
|
||||
apply plus_reg_l with (if eqA_dec a a0 then 1 else 0).
|
||||
rewrite H; clear H.
|
||||
symmetry; rewrite plus_comm, <- ! plus_assoc; f_equal.
|
||||
rewrite plus_comm.
|
||||
destruct (eqA_dec a a0) as [Ha|Ha]; rewrite Heq in Ha;
|
||||
decide (eqA_dec b a0) with Ha; reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma permut_remove_hd :
|
||||
forall l l1 l2 a,
|
||||
permutation (a :: l) (l1 ++ a :: l2) -> permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
eauto using permut_remove_hd_eq, Equivalence_Reflexive.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_else : forall a a' (B:Type)(b b':B),
|
||||
~eqA a a' -> (if eqA_dec a a' then b else b') = b'.
|
||||
Proof.
|
||||
intros. decide (eqA_dec a a') with H; auto.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_refl : forall a (B:Type)(b b':B),
|
||||
(if eqA_dec a a then b else b') = b.
|
||||
Proof.
|
||||
intros; apply (decide_left (eqA_dec a a)); auto with *.
|
||||
Qed.
|
||||
|
||||
(** PL: Unusable in a [rewrite] without a prior [change]. *)
|
||||
|
||||
Global Instance if_eqA (B:Type)(b b':B) :
|
||||
Proper (eqA==>eqA==>@eq _) (fun x y => if eqA_dec x y then b else b').
|
||||
Proof.
|
||||
intros x x' Hxx' y y' Hyy'.
|
||||
intros; destruct (eqA_dec x y) as [H|H];
|
||||
destruct (eqA_dec x' y') as [H'|H']; auto.
|
||||
contradict H'; transitivity x; auto with *; transitivity y; auto with *.
|
||||
contradict H; transitivity x'; auto with *; transitivity y'; auto with *.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_rewrite_l : forall a1 a1' a2 (B:Type)(b b':B),
|
||||
eqA a1 a1' -> (if eqA_dec a1 a2 then b else b') =
|
||||
(if eqA_dec a1' a2 then b else b').
|
||||
Proof.
|
||||
intros; destruct (eqA_dec a1 a2) as [A1|A1];
|
||||
destruct (eqA_dec a1' a2) as [A1'|A1']; auto.
|
||||
contradict A1'; transitivity a1; eauto with *.
|
||||
contradict A1; transitivity a1'; eauto with *.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_rewrite_r : forall a1 a2 a2' (B:Type)(b b':B),
|
||||
eqA a2 a2' -> (if eqA_dec a1 a2 then b else b') =
|
||||
(if eqA_dec a1 a2' then b else b').
|
||||
Proof.
|
||||
intros; destruct (eqA_dec a1 a2) as [A2|A2];
|
||||
destruct (eqA_dec a1 a2') as [A2'|A2']; auto.
|
||||
contradict A2'; transitivity a2; eauto with *.
|
||||
contradict A2; transitivity a2'; eauto with *.
|
||||
Qed.
|
||||
|
||||
|
||||
Global Instance multiplicity_eqA (l:list A) :
|
||||
Proper (eqA==>@eq _) (multiplicity (list_contents l)).
|
||||
Proof.
|
||||
intros x x' Hxx'.
|
||||
induction l as [|y l Hl]; simpl; auto.
|
||||
rewrite (@if_eqA_rewrite_r y x x'); auto.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA :
|
||||
forall l a, InA eqA a l <-> 0 < multiplicity (list_contents l) a.
|
||||
Proof.
|
||||
induction l.
|
||||
simpl.
|
||||
split; inversion 1.
|
||||
simpl.
|
||||
intros a'; split; intros H. inversion_clear H.
|
||||
apply (decide_left (eqA_dec a a')); auto with *.
|
||||
destruct (eqA_dec a a'); auto with *. simpl; rewrite <- IHl; auto.
|
||||
destruct (eqA_dec a a'); auto with *. right. rewrite IHl; auto.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA_O :
|
||||
forall l a, ~ InA eqA a l -> multiplicity (list_contents l) a = 0.
|
||||
Proof.
|
||||
intros l a; rewrite multiplicity_InA;
|
||||
destruct (multiplicity (list_contents l) a); auto with arith.
|
||||
destruct 1; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA_S :
|
||||
forall l a, InA eqA a l -> multiplicity (list_contents l) a >= 1.
|
||||
Proof.
|
||||
intros l a; rewrite multiplicity_InA; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_NoDupA : forall l,
|
||||
NoDupA eqA l <-> (forall a, multiplicity (list_contents l) a <= 1).
|
||||
Proof.
|
||||
induction l.
|
||||
simpl.
|
||||
split; auto with arith.
|
||||
split; simpl.
|
||||
inversion_clear 1.
|
||||
rewrite IHl in H1.
|
||||
intros; destruct (eqA_dec a a0) as [EQ|NEQ]; simpl; auto with *.
|
||||
rewrite <- EQ.
|
||||
rewrite multiplicity_InA_O; auto.
|
||||
intros; constructor.
|
||||
rewrite multiplicity_InA.
|
||||
specialize (H a).
|
||||
rewrite if_eqA_refl in H.
|
||||
clear IHl; omega.
|
||||
rewrite IHl; intros.
|
||||
specialize (H a0). omega.
|
||||
Qed.
|
||||
|
||||
(** Permutation is compatible with InA. *)
|
||||
Lemma permut_InA_InA :
|
||||
forall l1 l2 e, permutation l1 l2 -> InA eqA e l1 -> InA eqA e l2.
|
||||
Proof.
|
||||
intros l1 l2 e.
|
||||
do 2 rewrite multiplicity_InA.
|
||||
unfold permutation, meq.
|
||||
intros H;rewrite H; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons_InA :
|
||||
forall l1 l2 e, permutation (e :: l1) l2 -> InA eqA e l2.
|
||||
Proof.
|
||||
intros; apply (permut_InA_InA (e:=e) H); auto with *.
|
||||
Qed.
|
||||
|
||||
(** Permutation of an empty list. *)
|
||||
Lemma permut_nil :
|
||||
forall l, permutation l [] -> l = [].
|
||||
Proof.
|
||||
intro l; destruct l as [ | e l ]; trivial.
|
||||
assert (InA eqA e (e::l)) by (auto with *).
|
||||
intro Abs; generalize (permut_InA_InA Abs H).
|
||||
inversion 1.
|
||||
Qed.
|
||||
|
||||
(** Permutation for short lists. *)
|
||||
|
||||
Lemma permut_length_1:
|
||||
forall a b, permutation [a] [b] -> eqA a b.
|
||||
Proof.
|
||||
intros a b; unfold permutation, meq.
|
||||
intro P; specialize (P b); simpl in *.
|
||||
rewrite if_eqA_refl in *.
|
||||
destruct (eqA_dec a b); simpl; auto; discriminate.
|
||||
Qed.
|
||||
|
||||
Lemma permut_length_2 :
|
||||
forall a1 b1 a2 b2, permutation [a1; b1] [a2; b2] ->
|
||||
(eqA a1 a2) /\ (eqA b1 b2) \/ (eqA a1 b2) /\ (eqA a2 b1).
|
||||
Proof.
|
||||
intros a1 b1 a2 b2 P.
|
||||
assert (H:=permut_cons_InA P).
|
||||
inversion_clear H.
|
||||
left; split; auto.
|
||||
apply permut_length_1.
|
||||
red; red; intros.
|
||||
specialize (P a). simpl in *.
|
||||
rewrite (@if_eqA_rewrite_l a1 a2 a) in P by auto. omega.
|
||||
right.
|
||||
inversion_clear H0; [|inversion H].
|
||||
split; auto.
|
||||
apply permut_length_1.
|
||||
red; red; intros.
|
||||
specialize (P a); simpl in *.
|
||||
rewrite (@if_eqA_rewrite_l a1 b2 a) in P by auto. omega.
|
||||
Qed.
|
||||
|
||||
(** Permutation is compatible with length. *)
|
||||
Lemma permut_length :
|
||||
forall l1 l2, permutation l1 l2 -> length l1 = length l2.
|
||||
Proof.
|
||||
induction l1; intros l2 H.
|
||||
rewrite (permut_nil (permut_sym H)); auto.
|
||||
assert (H0:=permut_cons_InA H).
|
||||
destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
|
||||
subst l2.
|
||||
rewrite app_length.
|
||||
simpl; rewrite <- plus_n_Sm; f_equal.
|
||||
rewrite <- app_length.
|
||||
apply IHl1.
|
||||
apply permut_remove_hd with b.
|
||||
apply permut_trans with (a::l1); auto.
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
rewrite (@if_eqA_rewrite_l a b a0); auto.
|
||||
Qed.
|
||||
|
||||
Lemma NoDupA_equivlistA_permut :
|
||||
forall l l', NoDupA eqA l -> NoDupA eqA l' ->
|
||||
equivlistA eqA l l' -> permutation l l'.
|
||||
Proof.
|
||||
intros.
|
||||
red; unfold meq; intros.
|
||||
rewrite multiplicity_NoDupA in H, H0.
|
||||
generalize (H a) (H0 a) (H1 a); clear H H0 H1.
|
||||
do 2 rewrite multiplicity_InA.
|
||||
destruct 3; omega.
|
||||
Qed.
|
||||
|
||||
End Permut.
|
||||
|
||||
Section Permut_map.
|
||||
|
||||
Variables A B : Type.
|
||||
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
|
||||
Variable eqB : B->B->Prop.
|
||||
Hypothesis eqB_dec : forall x y:B, { eqB x y }+{ ~eqB x y }.
|
||||
Hypothesis eqB_trans : Transitive eqB.
|
||||
|
||||
(** Permutation is compatible with map. *)
|
||||
|
||||
Lemma permut_map :
|
||||
forall f,
|
||||
(Proper (eqA==>eqB) f) ->
|
||||
forall l1 l2, permutation _ eqA_dec l1 l2 ->
|
||||
permutation _ eqB_dec (map f l1) (map f l2).
|
||||
Proof.
|
||||
intros f; induction l1.
|
||||
intros l2 P; rewrite (permut_nil eqA_equiv (permut_sym P)); apply permut_refl.
|
||||
intros l2 P.
|
||||
simpl.
|
||||
assert (H0:=permut_cons_InA eqA_equiv P).
|
||||
destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
|
||||
subst l2.
|
||||
rewrite map_app.
|
||||
simpl.
|
||||
apply permut_trans with (f b :: map f l1).
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
destruct (eqB_dec (f b) a0) as [H2|H2];
|
||||
destruct (eqB_dec (f a) a0) as [H3|H3]; auto.
|
||||
destruct H3; transitivity (f b); auto with *.
|
||||
destruct H2; transitivity (f a); auto with *.
|
||||
apply permut_add_cons_inside.
|
||||
rewrite <- map_app.
|
||||
apply IHl1; auto.
|
||||
apply permut_remove_hd with b; trivial.
|
||||
apply permut_trans with (a::l1); auto.
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
rewrite (@if_eqA_rewrite_l _ _ eqA_equiv eqA_dec a b a0); auto.
|
||||
Qed.
|
||||
|
||||
End Permut_map.
|
||||
|
||||
Require Import Permutation.
|
||||
|
||||
Section Permut_permut.
|
||||
|
||||
Variable A : Type.
|
||||
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
|
||||
Lemma Permutation_impl_permutation : forall l l',
|
||||
Permutation l l' -> permutation _ eqA_dec l l'.
|
||||
Proof.
|
||||
induction 1.
|
||||
apply permut_refl.
|
||||
apply permut_cons; auto using Equivalence_Reflexive.
|
||||
change (x :: y :: l) with ([x] ++ y :: l);
|
||||
apply permut_add_cons_inside; simpl;
|
||||
apply permut_cons_eq; auto using Equivalence_Reflexive, permut_refl.
|
||||
apply permut_trans with l'; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_eqA : forall l l', Forall2 eqA l l' -> permutation _ eqA_dec l l'.
|
||||
Proof.
|
||||
induction 1.
|
||||
apply permut_refl.
|
||||
apply permut_cons_eq; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permutation_Permutation : forall l l',
|
||||
permutation _ eqA_dec l l' <->
|
||||
exists l'', Permutation l l'' /\ Forall2 eqA l'' l'.
|
||||
Proof.
|
||||
split; intro H.
|
||||
(* -> *)
|
||||
induction l in l', H |- *.
|
||||
exists []; apply permut_sym, permut_nil in H as ->; auto using Forall2.
|
||||
pose proof H as H'.
|
||||
apply permut_cons_InA, InA_split in H
|
||||
as (l1 & y & l2 & Heq & ->); trivial.
|
||||
apply permut_remove_hd_eq, IHl in H'
|
||||
as (l'' & IHP & IHA); clear IHl; trivial.
|
||||
apply Forall2_app_inv_r in IHA as (l1'' & l2'' & Hl1 & Hl2 & ->).
|
||||
exists (l1'' ++ a :: l2''); split.
|
||||
apply Permutation_cons_app; trivial.
|
||||
apply Forall2_app, Forall2_cons; trivial.
|
||||
(* <- *)
|
||||
destruct H as (l'' & H & Heq).
|
||||
apply permut_trans with l''.
|
||||
apply Permutation_impl_permutation; trivial.
|
||||
apply permut_eqA; trivial.
|
||||
Qed.
|
||||
|
||||
End Permut_permut.
|
||||
|
||||
(* begin hide *)
|
||||
(** For compatibility *)
|
||||
Notation permut_right := permut_cons (only parsing).
|
||||
Notation permut_tran := permut_trans (only parsing).
|
||||
(* end hide *)
|
||||
@@ -1,632 +0,0 @@
|
||||
(* Adapted in May 2006 by Jean-Marc Notin from initial contents by
|
||||
Laurent Thery (Huffmann contribution, October 2003) *)
|
||||
|
||||
Require Import List Setoid Compare_dec Morphisms.
|
||||
Import ListNotations. (* For notations [] and [a;b;c] *)
|
||||
Set Implicit Arguments.
|
||||
|
||||
Section Permutation.
|
||||
|
||||
Variable A:Type.
|
||||
|
||||
Inductive Permutation : list A -> list A -> Prop :=
|
||||
| perm_nil: Permutation [] []
|
||||
| perm_skip x l l' : Permutation l l' -> Permutation (x::l) (x::l')
|
||||
| perm_swap x y l : Permutation (y::x::l) (x::y::l)
|
||||
| perm_trans l l' l'' :
|
||||
Permutation l l' -> Permutation l' l'' -> Permutation l l''.
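
(* A minimal illustrative instance (added example, not in the original file):
   with [l := []], [perm_swap] immediately relates the two-element lists. *)
Example Permutation_swap_pair (x y : A) : Permutation [y; x] [x; y].
Proof. apply perm_swap. Qed.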
|
||||
|
||||
Local Hint Constructors Permutation.
|
||||
|
||||
(** Some facts about [Permutation] *)
|
||||
|
||||
Theorem Permutation_nil : forall (l : list A), Permutation [] l -> l = [].
|
||||
Proof.
|
||||
intros l HF.
|
||||
remember (@nil A) as m in HF.
|
||||
induction HF; discriminate || auto.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_nil_cons : forall (l : list A) (x : A),
|
||||
~ Permutation nil (x::l).
|
||||
Proof.
|
||||
intros l x HF.
|
||||
apply Permutation_nil in HF; discriminate.
|
||||
Qed.
|
||||
|
||||
(** Permutation over lists is an equivalence relation *)
|
||||
|
||||
Theorem Permutation_refl : forall l : list A, Permutation l l.
|
||||
Proof.
|
||||
induction l; constructor. exact IHl.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_sym : forall l l' : list A,
|
||||
Permutation l l' -> Permutation l' l.
|
||||
Proof.
|
||||
intros l l' Hperm; induction Hperm; auto.
|
||||
apply perm_trans with (l':=l'); assumption.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_trans : forall l l' l'' : list A,
|
||||
Permutation l l' -> Permutation l' l'' -> Permutation l l''.
|
||||
Proof.
|
||||
exact perm_trans.
|
||||
Qed.
|
||||
|
||||
End Permutation.
|
||||
|
||||
Hint Resolve Permutation_refl perm_nil perm_skip.
|
||||
|
||||
(* These hints do not reduce the size of the problem to solve, and they
   must be used with care to avoid combinatorial explosions *)
|
||||
|
||||
Local Hint Resolve perm_swap perm_trans.
|
||||
Local Hint Resolve Permutation_sym Permutation_trans.
|
||||
|
||||
(* This provides reflexivity, symmetry and transitivity, as well as rewriting
   on the morphisms to come *)
|
||||
|
||||
Instance Permutation_Equivalence A : Equivalence (@Permutation A) | 10 := {
|
||||
Equivalence_Reflexive := @Permutation_refl A ;
|
||||
Equivalence_Symmetric := @Permutation_sym A ;
|
||||
Equivalence_Transitive := @Permutation_trans A }.
|
||||
|
||||
Instance Permutation_cons A :
|
||||
Proper (Logic.eq ==> @Permutation A ==> @Permutation A) (@cons A) | 10.
|
||||
Proof.
|
||||
repeat intro; subst; auto using perm_skip.
|
||||
Qed.
|
||||
|
||||
Section Permutation_properties.
|
||||
|
||||
Variable A:Type.
|
||||
|
||||
Implicit Types a b : A.
|
||||
Implicit Types l m : list A.
|
||||
|
||||
(** Compatibility with other operations on lists *)
|
||||
|
||||
Theorem Permutation_in : forall (l l' : list A) (x : A),
|
||||
Permutation l l' -> In x l -> In x l'.
|
||||
Proof.
|
||||
intros l l' x Hperm; induction Hperm; simpl; tauto.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_in' :
|
||||
Proper (Logic.eq ==> @Permutation A ==> iff) (@In A) | 10.
|
||||
Proof.
|
||||
repeat red; intros; subst; eauto using Permutation_in.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_app_tail : forall (l l' tl : list A),
|
||||
Permutation l l' -> Permutation (l++tl) (l'++tl).
|
||||
Proof.
|
||||
intros l l' tl Hperm; induction Hperm as [|x l l'|x y l|l l' l'']; simpl; auto.
|
||||
eapply Permutation_trans with (l':=l'++tl); trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_app_head : forall (l tl tl' : list A),
|
||||
Permutation tl tl' -> Permutation (l++tl) (l++tl').
|
||||
Proof.
|
||||
intros l tl tl' Hperm; induction l;
|
||||
[trivial | repeat rewrite <- app_comm_cons; constructor; assumption].
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app : forall (l m l' m' : list A),
|
||||
Permutation l l' -> Permutation m m' -> Permutation (l++m) (l'++m').
|
||||
Proof.
|
||||
intros l m l' m' Hpermll' Hpermmm';
|
||||
induction Hpermll' as [|x l l'|x y l|l l' l''];
|
||||
repeat rewrite <- app_comm_cons; auto.
|
||||
apply Permutation_trans with (l' := (x :: y :: l ++ m));
|
||||
[idtac | repeat rewrite app_comm_cons; apply Permutation_app_head]; trivial.
|
||||
apply Permutation_trans with (l' := (l' ++ m')); try assumption.
|
||||
apply Permutation_app_tail; assumption.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_app' :
|
||||
Proper (@Permutation A ==> @Permutation A ==> @Permutation A) (@app A) | 10.
|
||||
Proof.
|
||||
repeat intro; now apply Permutation_app.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_add_inside : forall a (l l' tl tl' : list A),
|
||||
Permutation l l' -> Permutation tl tl' ->
|
||||
Permutation (l ++ a :: tl) (l' ++ a :: tl').
|
||||
Proof.
|
||||
intros; apply Permutation_app; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_cons_append : forall (l : list A) x,
|
||||
Permutation (x :: l) (l ++ x :: nil).
|
||||
Proof. induction l; intros; auto. simpl. rewrite <- IHl; auto. Qed.
|
||||
Local Hint Resolve Permutation_cons_append.
|
||||
|
||||
Theorem Permutation_app_comm : forall (l l' : list A),
|
||||
Permutation (l ++ l') (l' ++ l).
|
||||
Proof.
|
||||
induction l as [|x l]; simpl; intro l'.
|
||||
rewrite app_nil_r; trivial. rewrite IHl.
|
||||
rewrite app_comm_cons, Permutation_cons_append.
|
||||
now rewrite <- app_assoc.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_app_comm.
|
||||
|
||||
Theorem Permutation_cons_app : forall (l l1 l2:list A) a,
|
||||
Permutation l (l1 ++ l2) -> Permutation (a :: l) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
intros l l1 l2 a H. rewrite H.
|
||||
rewrite app_comm_cons, Permutation_cons_append.
|
||||
now rewrite <- app_assoc.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_cons_app.
|
||||
|
||||
Theorem Permutation_middle : forall (l1 l2:list A) a,
|
||||
Permutation (a :: l1 ++ l2) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
auto.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_middle.
|
||||
|
||||
Theorem Permutation_rev : forall (l : list A), Permutation l (rev l).
|
||||
Proof.
|
||||
induction l as [| x l]; simpl; trivial. now rewrite IHl at 1.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_rev' :
|
||||
Proper (@Permutation A ==> @Permutation A) (@rev A) | 10.
|
||||
Proof.
|
||||
repeat intro; now rewrite <- 2 Permutation_rev.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_length : forall (l l' : list A),
|
||||
Permutation l l' -> length l = length l'.
|
||||
Proof.
|
||||
intros l l' Hperm; induction Hperm; simpl; auto. now transitivity (length l').
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_length' :
|
||||
Proper (@Permutation A ==> Logic.eq) (@length A) | 10.
|
||||
Proof.
|
||||
exact Permutation_length.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_ind_bis :
|
||||
forall P : list A -> list A -> Prop,
|
||||
P [] [] ->
|
||||
(forall x l l', Permutation l l' -> P l l' -> P (x :: l) (x :: l')) ->
|
||||
(forall x y l l', Permutation l l' -> P l l' -> P (y :: x :: l) (x :: y :: l')) ->
|
||||
(forall l l' l'', Permutation l l' -> P l l' -> Permutation l' l'' -> P l' l'' -> P l l'') ->
|
||||
forall l l', Permutation l l' -> P l l'.
|
||||
Proof.
|
||||
intros P Hnil Hskip Hswap Htrans.
|
||||
induction 1; auto.
|
||||
apply Htrans with (x::y::l); auto.
|
||||
apply Hswap; auto.
|
||||
induction l; auto.
|
||||
apply Hskip; auto.
|
||||
apply Hskip; auto.
|
||||
induction l; auto.
|
||||
eauto.
|
||||
Qed.
|
||||
|
||||
Ltac break_list l x l' H :=
|
||||
destruct l as [|x l']; simpl in *;
|
||||
injection H; intros; subst; clear H.
|
||||
|
||||
Theorem Permutation_nil_app_cons : forall (l l' : list A) (x : A),
|
||||
~ Permutation nil (l++x::l').
|
||||
Proof.
|
||||
intros l l' x HF.
|
||||
apply Permutation_nil in HF. destruct l; discriminate.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv : forall (l1 l2 l3 l4:list A) a,
|
||||
Permutation (l1++a::l2) (l3++a::l4) -> Permutation (l1++l2) (l3 ++ l4).
|
||||
Proof.
|
||||
intros l1 l2 l3 l4 a; revert l1 l2 l3 l4.
|
||||
set (P l l' :=
|
||||
forall l1 l2 l3 l4, l=l1++a::l2 -> l'=l3++a::l4 ->
|
||||
Permutation (l1++l2) (l3++l4)).
|
||||
cut (forall l l', Permutation l l' -> P l l').
|
||||
intros H; intros; eapply H; eauto.
|
||||
apply (Permutation_ind_bis P); unfold P; clear P.
|
||||
- (* nil *)
|
||||
intros; now destruct l1.
|
||||
- (* skip *)
|
||||
intros x l l' H IH; intros.
|
||||
break_list l1 b l1' H0; break_list l3 c l3' H1.
|
||||
auto.
|
||||
now rewrite H.
|
||||
now rewrite <- H.
|
||||
now rewrite (IH _ _ _ _ eq_refl eq_refl).
|
||||
- (* swap *)
|
||||
intros x y l l' Hp IH; intros.
|
||||
break_list l1 b l1' H; break_list l3 c l3' H0.
|
||||
auto.
|
||||
break_list l3' b l3'' H.
|
||||
auto.
|
||||
constructor. now rewrite Permutation_middle.
|
||||
break_list l1' c l1'' H1.
|
||||
auto.
|
||||
constructor. now rewrite Permutation_middle.
|
||||
break_list l3' d l3'' H; break_list l1' e l1'' H1.
|
||||
auto.
|
||||
rewrite perm_swap. constructor. now rewrite Permutation_middle.
|
||||
rewrite perm_swap. constructor. now rewrite Permutation_middle.
|
||||
now rewrite perm_swap, (IH _ _ _ _ eq_refl eq_refl).
|
||||
- (*trans*)
|
||||
intros.
|
||||
destruct (In_split a l') as (l'1,(l'2,H6)).
|
||||
rewrite <- H.
|
||||
subst l.
|
||||
apply in_or_app; right; red; auto.
|
||||
apply perm_trans with (l'1++l'2).
|
||||
apply (H0 _ _ _ _ H3 H6).
|
||||
apply (H2 _ _ _ _ H6 H4).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_cons_inv l l' a :
|
||||
Permutation (a::l) (a::l') -> Permutation l l'.
|
||||
Proof.
|
||||
intro H; exact (Permutation_app_inv [] l [] l' a H).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_cons_app_inv l l1 l2 a :
|
||||
Permutation (a :: l) (l1 ++ a :: l2) -> Permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
intro H; exact (Permutation_app_inv [] l l1 l2 a H).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv_l : forall l l1 l2,
|
||||
Permutation (l ++ l1) (l ++ l2) -> Permutation l1 l2.
|
||||
Proof.
|
||||
induction l; simpl; auto.
|
||||
intros.
|
||||
apply IHl.
|
||||
apply Permutation_cons_inv with a; auto.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv_r : forall l l1 l2,
|
||||
Permutation (l1 ++ l) (l2 ++ l) -> Permutation l1 l2.
|
||||
Proof.
|
||||
induction l.
|
||||
intros l1 l2; do 2 rewrite app_nil_r; auto.
|
||||
intros.
|
||||
apply IHl.
|
||||
apply Permutation_app_inv with a; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_1_inv: forall a l, Permutation [a] l -> l = [a].
|
||||
Proof.
|
||||
intros a l H; remember [a] as m in H.
|
||||
induction H; try (injection Heqm as -> ->; clear Heqm);
|
||||
discriminate || auto.
|
||||
apply Permutation_nil in H as ->; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_1: forall a b, Permutation [a] [b] -> a = b.
|
||||
Proof.
|
||||
intros a b H.
|
||||
apply Permutation_length_1_inv in H; injection H as ->; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_2_inv :
|
||||
forall a1 a2 l, Permutation [a1;a2] l -> l = [a1;a2] \/ l = [a2;a1].
|
||||
Proof.
|
||||
intros a1 a2 l H; remember [a1;a2] as m in H.
|
||||
revert a1 a2 Heqm.
|
||||
induction H; intros; try (injection Heqm; intros; subst; clear Heqm);
|
||||
discriminate || (try tauto).
|
||||
apply Permutation_length_1_inv in H as ->; left; auto.
|
||||
apply IHPermutation1 in Heqm as [H1|H1]; apply IHPermutation2 in H1 as ();
|
||||
auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_2 :
|
||||
forall a1 a2 b1 b2, Permutation [a1;a2] [b1;b2] ->
|
||||
a1 = b1 /\ a2 = b2 \/ a1 = b2 /\ a2 = b1.
|
||||
Proof.
|
||||
intros a1 b1 a2 b2 H.
|
||||
apply Permutation_length_2_inv in H as [H|H]; injection H as -> ->; auto.
|
||||
Qed.
|
||||
|
||||
Let in_middle l l1 l2 (a:A) : l = l1 ++ a :: l2 ->
|
||||
forall x, In x l <-> a = x \/ In x (l1++l2).
|
||||
Proof.
|
||||
intros; subst; rewrite !in_app_iff; simpl. tauto.
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_cardinal_incl (l l' : list A) : NoDup l -> NoDup l' ->
|
||||
length l = length l' -> incl l l' -> incl l' l.
|
||||
Proof.
|
||||
intros N. revert l'. induction N as [|a l Hal Hl IH].
|
||||
- destruct l'; now auto.
|
||||
- intros l' Hl' E H x Hx.
|
||||
assert (Ha : In a l') by (apply H; simpl; auto).
|
||||
destruct (in_split _ _ Ha) as (l1 & l2 & H12). clear Ha.
|
||||
rewrite in_middle in Hx; eauto.
|
||||
destruct Hx as [Hx|Hx]; [left|right]; auto.
|
||||
apply (IH (l1++l2)); auto.
|
||||
* apply NoDup_remove_1 with a; rewrite <- H12; auto.
|
||||
* apply eq_add_S.
|
||||
simpl in E; rewrite E, H12, !app_length; simpl; auto with arith.
|
||||
* intros y Hy. assert (Hy' : In y l') by (apply H; simpl; auto).
|
||||
rewrite in_middle in Hy'; eauto.
|
||||
destruct Hy'; auto. subst y; intuition.
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_Permutation l l' : NoDup l -> NoDup l' ->
|
||||
(forall x:A, In x l <-> In x l') -> Permutation l l'.
|
||||
Proof.
|
||||
intros N. revert l'. induction N as [|a l Hal Hl IH].
|
||||
- destruct l'; simpl; auto.
|
||||
intros Hl' H. exfalso. rewrite (H a); auto.
|
||||
- intros l' Hl' H.
|
||||
assert (Ha : In a l') by (apply H; simpl; auto).
|
||||
destruct (In_split _ _ Ha) as (l1 & l2 & H12).
|
||||
rewrite H12.
|
||||
apply Permutation_cons_app.
|
||||
apply IH; auto.
|
||||
* apply NoDup_remove_1 with a; rewrite <- H12; auto.
|
||||
* intro x. split; intros Hx.
|
||||
+ assert (Hx' : In x l') by (apply H; simpl; auto).
|
||||
rewrite in_middle in Hx'; eauto.
|
||||
destruct Hx'; auto. subst; intuition.
|
||||
+ assert (Hx' : In x l') by (rewrite (in_middle l1 l2 a); eauto).
|
||||
rewrite <- H in Hx'. destruct Hx'; auto.
|
||||
subst. destruct (NoDup_remove_2 _ _ _ Hl' Hx).
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_Permutation_bis l l' : NoDup l -> NoDup l' ->
|
||||
length l = length l' -> incl l l' -> Permutation l l'.
|
||||
Proof.
|
||||
intros. apply NoDup_Permutation; auto.
|
||||
split; auto. apply NoDup_cardinal_incl; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_NoDup l l' : Permutation l l' -> NoDup l -> NoDup l'.
|
||||
Proof.
|
||||
induction 1; auto.
|
||||
* inversion_clear 1; constructor; eauto using Permutation_in.
|
||||
* inversion_clear 1 as [|? ? H1 H2]. inversion_clear H2; simpl in *.
|
||||
constructor. simpl; intuition. constructor; intuition.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_NoDup' :
|
||||
Proper (@Permutation A ==> iff) (@NoDup A) | 10.
|
||||
Proof.
|
||||
repeat red; eauto using Permutation_NoDup.
|
||||
Qed.
|
||||
|
||||
End Permutation_properties.
|
||||
|
||||
Section Permutation_map.
|
||||
|
||||
Variable A B : Type.
|
||||
Variable f : A -> B.
|
||||
|
||||
Lemma Permutation_map l l' :
|
||||
Permutation l l' -> Permutation (map f l) (map f l').
|
||||
Proof.
|
||||
induction 1; simpl; eauto.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_map' :
|
||||
Proper (@Permutation A ==> @Permutation B) (map f) | 10.
|
||||
Proof.
|
||||
exact Permutation_map.
|
||||
Qed.
|
||||
|
||||
End Permutation_map.
|
||||
|
||||
Section Injection.
|
||||
|
||||
Definition injective {A B} (f : A->B) :=
|
||||
forall x y, f x = f y -> x = y.
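
(* A small illustrative instance (added example, assuming only the definition
   above): the successor function on [nat] is injective. *)
Example injective_S : injective S.
Proof. intros x y H. apply eq_add_S. exact H. Qed.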
|
||||
|
||||
Lemma injective_map_NoDup {A B} (f:A->B) (l:list A) :
|
||||
injective f -> NoDup l -> NoDup (map f l).
|
||||
Proof.
|
||||
intros Hf. induction 1 as [|x l Hx Hl IH]; simpl; constructor; trivial.
|
||||
rewrite in_map_iff. intros (y & Hy & Hy'). apply Hf in Hy. now subst.
|
||||
Qed.
|
||||
|
||||
Lemma injective_bounded_surjective n f :
|
||||
injective f ->
|
||||
(forall x, x < n -> f x < n) ->
|
||||
(forall y, y < n -> exists x, x < n /\ f x = y).
|
||||
Proof.
|
||||
intros Hf H.
|
||||
set (l := seq 0 n).
|
||||
assert (P : incl (map f l) l).
|
||||
{ intros x. rewrite in_map_iff. intros (y & <- & Hy').
|
||||
unfold l in *. rewrite in_seq in *. simpl in *.
|
||||
destruct Hy' as (_,Hy'). auto with arith. }
|
||||
assert (P' : incl l (map f l)).
|
||||
{ unfold l.
|
||||
apply NoDup_cardinal_incl; auto using injective_map_NoDup, seq_NoDup.
|
||||
now rewrite map_length. }
|
||||
intros x Hx.
|
||||
assert (Hx' : In x l) by (unfold l; rewrite in_seq; auto with arith).
|
||||
apply P' in Hx'.
|
||||
rewrite in_map_iff in Hx'. destruct Hx' as (y & Hy & Hy').
|
||||
exists y; split; auto. unfold l in *; rewrite in_seq in Hy'.
|
||||
destruct Hy'; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma nat_bijection_Permutation n f :
|
||||
injective f -> (forall x, x < n -> f x < n) ->
|
||||
let l := seq 0 n in Permutation (map f l) l.
|
||||
Proof.
|
||||
intros Hf BD.
|
||||
apply NoDup_Permutation_bis; auto using injective_map_NoDup, seq_NoDup.
|
||||
* now rewrite map_length.
|
||||
* intros x. rewrite in_map_iff. intros (y & <- & Hy').
|
||||
rewrite in_seq in *. simpl in *.
|
||||
destruct Hy' as (_,Hy'). auto with arith.
|
||||
Qed.
|
||||
|
||||
End Injection.
|
||||
|
||||
Section Permutation_alt.
|
||||
Variable A:Type.
|
||||
Implicit Type a : A.
|
||||
Implicit Type l : list A.
|
||||
|
||||
(** Alternative characterization of permutation
|
||||
via [nth_error] and [nth] *)
|
||||
|
||||
Let adapt f n :=
|
||||
let m := f (S n) in if le_lt_dec m (f 0) then m else pred m.
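
(* Added explanatory note: [adapt f n] re-indexes [f (S n)] after the element
   sitting at position [f 0] has been removed from the target list; indices
   below [f 0] are kept unchanged and indices above it are decremented. *)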
|
||||
|
||||
Let adapt_injective f : injective f -> injective (adapt f).
|
||||
Proof.
|
||||
unfold adapt. intros Hf x y EQ.
|
||||
destruct le_lt_dec as [LE|LT]; destruct le_lt_dec as [LE'|LT'].
|
||||
- now apply eq_add_S, Hf.
|
||||
- apply Lt.le_lt_or_eq in LE.
|
||||
destruct LE as [LT|EQ']; [|now apply Hf in EQ'].
|
||||
unfold lt in LT. rewrite EQ in LT.
|
||||
rewrite <- (Lt.S_pred _ _ LT') in LT.
|
||||
elim (Lt.lt_not_le _ _ LT' LT).
|
||||
- apply Lt.le_lt_or_eq in LE'.
|
||||
destruct LE' as [LT'|EQ']; [|now apply Hf in EQ'].
|
||||
unfold lt in LT'. rewrite <- EQ in LT'.
|
||||
rewrite <- (Lt.S_pred _ _ LT) in LT'.
|
||||
elim (Lt.lt_not_le _ _ LT LT').
|
||||
- apply eq_add_S, Hf.
|
||||
now rewrite (Lt.S_pred _ _ LT), (Lt.S_pred _ _ LT'), EQ.
|
||||
Qed.
|
||||
|
||||
Let adapt_ok a l1 l2 f : injective f -> length l1 = f 0 ->
|
||||
forall n, nth_error (l1++a::l2) (f (S n)) = nth_error (l1++l2) (adapt f n).
|
||||
Proof.
|
||||
unfold adapt. intros Hf E n.
|
||||
destruct le_lt_dec as [LE|LT].
|
||||
- apply Lt.le_lt_or_eq in LE.
|
||||
destruct LE as [LT|EQ]; [|now apply Hf in EQ].
|
||||
rewrite <- E in LT.
|
||||
rewrite 2 nth_error_app1; auto.
|
||||
- rewrite (Lt.S_pred _ _ LT) at 1.
|
||||
rewrite <- E, (Lt.S_pred _ _ LT) in LT.
|
||||
rewrite 2 nth_error_app2; auto with arith.
|
||||
rewrite <- Minus.minus_Sn_m; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth_error l l' :
|
||||
Permutation l l' <->
|
||||
(length l = length l' /\
|
||||
exists f:nat->nat,
|
||||
injective f /\ forall n, nth_error l' n = nth_error l (f n)).
|
||||
Proof.
|
||||
split.
|
||||
{ intros P.
|
||||
split; [now apply Permutation_length|].
|
||||
induction P.
|
||||
- exists (fun n => n).
|
||||
split; try red; auto.
|
||||
- destruct IHP as (f & Hf & Hf').
|
||||
exists (fun n => match n with O => O | S n => S (f n) end).
|
||||
split; try red.
|
||||
* intros [|y] [|z]; simpl; now auto.
|
||||
* intros [|n]; simpl; auto.
|
||||
- exists (fun n => match n with 0 => 1 | 1 => 0 | n => n end).
|
||||
split; try red.
|
||||
* intros [|[|z]] [|[|t]]; simpl; now auto.
|
||||
* intros [|[|n]]; simpl; auto.
|
||||
- destruct IHP1 as (f & Hf & Hf').
|
||||
destruct IHP2 as (g & Hg & Hg').
|
||||
exists (fun n => f (g n)).
|
||||
split; try red.
|
||||
* auto.
|
||||
* intros n. rewrite <- Hf'; auto. }
|
||||
{ revert l. induction l'.
|
||||
- intros [|l] (E & _); now auto.
|
||||
- intros l (E & f & Hf & Hf').
|
||||
simpl in E.
|
||||
assert (Ha : nth_error l (f 0) = Some a)
|
||||
by (symmetry; apply (Hf' 0)).
|
||||
destruct (nth_error_split l (f 0) Ha) as (l1 & l2 & L12 & L1).
|
||||
rewrite L12. rewrite <- Permutation_middle. constructor.
|
||||
apply IHl'; split; [|exists (adapt f); split].
|
||||
* revert E. rewrite L12, !app_length. simpl.
|
||||
rewrite <- plus_n_Sm. now injection 1.
|
||||
* now apply adapt_injective.
|
||||
* intro n. rewrite <- (adapt_ok a), <- L12; auto.
|
||||
apply (Hf' (S n)). }
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth_error_bis l l' :
|
||||
Permutation l l' <->
|
||||
exists f:nat->nat,
|
||||
injective f /\
|
||||
(forall n, n < length l -> f n < length l) /\
|
||||
(forall n, nth_error l' n = nth_error l (f n)).
|
||||
Proof.
|
||||
rewrite Permutation_nth_error; split.
|
||||
- intros (E & f & Hf & Hf').
|
||||
exists f. do 2 (split; trivial).
|
||||
intros n Hn.
|
||||
destruct (Lt.le_or_lt (length l) (f n)) as [LE|LT]; trivial.
|
||||
rewrite <- nth_error_None, <- Hf', nth_error_None, <- E in LE.
|
||||
elim (Lt.lt_not_le _ _ Hn LE).
|
||||
- intros (f & Hf & Hf2 & Hf3); split; [|exists f; auto].
|
||||
assert (H : length l' <= length l') by auto with arith.
|
||||
rewrite <- nth_error_None, Hf3, nth_error_None in H.
|
||||
destruct (Lt.le_or_lt (length l) (length l')) as [LE|LT];
|
||||
[|apply Hf2 in LT; elim (Lt.lt_not_le _ _ LT H)].
|
||||
apply Lt.le_lt_or_eq in LE. destruct LE as [LT|EQ]; trivial.
|
||||
rewrite <- nth_error_Some, Hf3, nth_error_Some in LT.
|
||||
destruct (injective_bounded_surjective Hf Hf2 LT) as (y & Hy & Hy').
|
||||
apply Hf in Hy'. subst y. elim (Lt.lt_irrefl _ Hy).
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth l l' d :
|
||||
Permutation l l' <->
|
||||
(let n := length l in
|
||||
length l' = n /\
|
||||
exists f:nat->nat,
|
||||
(forall x, x < n -> f x < n) /\
|
||||
(forall x y, x < n -> y < n -> f x = f y -> x = y) /\
|
||||
(forall x, x < n -> nth x l' d = nth (f x) l d)).
|
||||
Proof.
|
||||
split.
|
||||
- intros H.
|
||||
assert (E := Permutation_length H).
|
||||
split; auto.
|
||||
apply Permutation_nth_error_bis in H.
|
||||
destruct H as (f & Hf & Hf2 & Hf3).
|
||||
exists f. split; [|split]; auto.
|
||||
intros n Hn. rewrite <- 2 nth_default_eq. unfold nth_default.
|
||||
now rewrite Hf3.
|
||||
- intros (E & f & Hf1 & Hf2 & Hf3).
|
||||
rewrite Permutation_nth_error.
|
||||
split; auto.
|
||||
exists (fun n => if le_lt_dec (length l) n then n else f n).
|
||||
split.
|
||||
* intros x y.
|
||||
destruct le_lt_dec as [LE|LT];
|
||||
destruct le_lt_dec as [LE'|LT']; auto.
|
||||
+ apply Hf1 in LT'. intros ->.
|
||||
elim (Lt.lt_irrefl (f y)). eapply Lt.lt_le_trans; eauto.
|
||||
+ apply Hf1 in LT. intros <-.
|
||||
elim (Lt.lt_irrefl (f x)). eapply Lt.lt_le_trans; eauto.
|
||||
* intros n.
|
||||
destruct le_lt_dec as [LE|LT].
|
||||
+ assert (LE' : length l' <= n) by (now rewrite E).
|
||||
rewrite <- nth_error_None in LE, LE'. congruence.
|
||||
+ assert (LT' : n < length l') by (now rewrite E).
|
||||
specialize (Hf3 n LT). rewrite <- 2 nth_default_eq in Hf3.
|
||||
unfold nth_default in Hf3.
|
||||
apply Hf1 in LT.
|
||||
rewrite <- nth_error_Some in LT, LT'.
|
||||
do 2 destruct nth_error; congruence.
|
||||
Qed.
|
||||
|
||||
End Permutation_alt.
|
||||
|
||||
(* begin hide *)
|
||||
Notation Permutation_app_swap := Permutation_app_comm (only parsing).
|
||||
(* end hide *)
|
||||
@@ -1,2 +0,0 @@
|
||||
Require Export Sorted.
|
||||
Require Export Mergesort.
|
||||
62
samples/Coq/Spec.v
Normal file
@@ -0,0 +1,62 @@
|
||||
(** Specifications. *)
|
||||
Require Import Coq.Lists.List.
|
||||
Require Import Coq.Strings.Ascii.
|
||||
Require Import FunctionNinjas.All.
|
||||
Require Import ListString.All.
|
||||
Require Import Computation.
|
||||
|
||||
Import ListNotations.
|
||||
Local Open Scope char.
|
||||
|
||||
(** A run is an execution of the program with explicit answers for the
|
||||
system calls. *)
|
||||
Module Run.
|
||||
(** We define a run by induction on the structure of a computation. *)
|
||||
Inductive t : C.t -> Type :=
|
||||
| Ret : t C.Ret
|
||||
| Call : forall (command : Command.t) (answer : Command.answer command)
|
||||
{handler : Command.answer command -> C.t}, t (handler answer) ->
|
||||
t (C.Call command handler).
|
||||
|
||||
(** The trace of a run. *)
|
||||
Fixpoint trace {x : C.t} (run : t x)
|
||||
: list {command : Command.t & Command.answer command} :=
|
||||
match run with
|
||||
| Ret => []
|
||||
| Call command answer _ run => existT _ command answer :: trace run
|
||||
end.
|
||||
End Run.
|
||||
|
||||
Module Temporal.
|
||||
Module All.
|
||||
Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
|
||||
| Ret : t P C.Ret
|
||||
| Call : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P c -> (forall a, t P (h a)) ->
|
||||
t P (C.Call c h).
|
||||
End All.
|
||||
|
||||
Module One.
|
||||
Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
|
||||
| CallThis : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P c ->
|
||||
t P (C.Call c h)
|
||||
| CallOther : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
(forall a, t P (h a)) ->
|
||||
t P (C.Call c h).
|
||||
End One.
|
||||
|
||||
Module Then.
|
||||
Inductive t (P1 P2 : Command.t -> Prop) : C.t -> Prop :=
|
||||
| Ret : t P1 P2 C.Ret
|
||||
| Call : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
(forall a, t P1 P2 (h a)) ->
|
||||
t P1 P2 (C.Call c h)
|
||||
| CallThen : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P1 c -> (forall a, One.t P2 (h a)) ->
|
||||
t P1 P2 (C.Call c h).
|
||||
End Then.
|
||||
End Temporal.
|
||||
|
||||
Module CardBeforeMoney.
|
||||
End CardBeforeMoney.
|
||||
@@ -1,419 +0,0 @@
|
||||
(** Sketch of the proof of {p:nat|p<=n} = {p:nat|p<=m} -> n=m
|
||||
|
||||
- preliminary results on the irrelevance of boundedness proofs
|
||||
- introduce the notion of finite cardinal |A|
|
||||
- prove that |{p:nat|p<=n}| = n
|
||||
- prove that |A| = n /\ |A| = m -> n = m if equality is decidable on A
|
||||
- prove that equality is decidable on A
|
||||
- conclude
|
||||
*)
|
||||
|
||||
(** * Preliminary results on [nat] and [le] *)
|
||||
|
||||
(** Proving axiom K on [nat] *)
|
||||
|
||||
Require Import Eqdep_dec.
|
||||
Require Import Arith.
|
||||
|
||||
Theorem eq_rect_eq_nat :
|
||||
forall (p:nat) (Q:nat->Type) (x:Q p) (h:p=p), x = eq_rect p Q x p h.
|
||||
Proof.
|
||||
intros.
|
||||
apply K_dec_set with (p := h).
|
||||
apply eq_nat_dec.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Proving unicity of proofs of [(n<=m)%nat] *)
|
||||
|
||||
Scheme le_ind' := Induction for le Sort Prop.
|
||||
|
||||
Theorem le_uniqueness_proof : forall (n m : nat) (p q : n <= m), p = q.
|
||||
Proof.
|
||||
induction p using le_ind'; intro q.
|
||||
replace (le_n n) with
|
||||
(eq_rect _ (fun n0 => n <= n0) (le_n n) _ (refl_equal n)).
|
||||
2:reflexivity.
|
||||
generalize (refl_equal n).
|
||||
pattern n at 2 4 6 10, q; case q; [intro | intros m l e].
|
||||
rewrite <- eq_rect_eq_nat; trivial.
|
||||
contradiction (le_Sn_n m); rewrite <- e; assumption.
|
||||
replace (le_S n m p) with
|
||||
(eq_rect _ (fun n0 => n <= n0) (le_S n m p) _ (refl_equal (S m))).
|
||||
2:reflexivity.
|
||||
generalize (refl_equal (S m)).
|
||||
pattern (S m) at 1 3 4 6, q; case q; [intro Heq | intros m0 l HeqS].
|
||||
contradiction (le_Sn_n m); rewrite Heq; assumption.
|
||||
injection HeqS; intro Heq; generalize l HeqS.
|
||||
rewrite <- Heq; intros; rewrite <- eq_rect_eq_nat.
|
||||
rewrite (IHp l0); reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Proving irrelevance of boundedness proofs while building
|
||||
elements of interval *)
|
||||
|
||||
Lemma dep_pair_intro :
|
||||
forall (n x y:nat) (Hx : x<=n) (Hy : y<=n), x=y ->
|
||||
exist (fun x => x <= n) x Hx = exist (fun x => x <= n) y Hy.
|
||||
Proof.
|
||||
intros n x y Hx Hy Heq.
|
||||
generalize Hy.
|
||||
rewrite <- Heq.
|
||||
intros.
|
||||
rewrite (le_uniqueness_proof x n Hx Hy0).
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** * Proving that {p:nat|p<=n} = {p:nat|p<=m} -> n=m *)
|
||||
|
||||
(** Definition of having finite cardinality [n+1] for a set [A] *)
|
||||
|
||||
Definition card (A:Set) n :=
|
||||
exists f,
|
||||
(forall x:A, f x <= n) /\
|
||||
(forall x y:A, f x = f y -> x = y) /\
|
||||
(forall m, m <= n -> exists x:A, f x = m).
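
(* Illustrative instance (not part of the original file): [card bool 1] holds,
   witnessed by [fun b : bool => if b then 1 else 0]; this map is bounded by 1,
   injective, and reaches both 0 and 1, so [bool] has cardinality 1+1 = 2. *)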
|
||||
|
||||
Require Import Arith.
|
||||
|
||||
(** Showing that the interval [0;n] has cardinality [n+1] *)
|
||||
|
||||
Theorem card_interval : forall n, card {x:nat|x<=n} n.
|
||||
Proof.
|
||||
intro n.
|
||||
exists (fun x:{x:nat|x<=n} => proj1_sig x).
|
||||
split.
|
||||
(* bounded *)
|
||||
intro x; apply (proj2_sig x).
|
||||
split.
|
||||
(* injectivity *)
|
||||
intros (p,Hp) (q,Hq).
|
||||
simpl.
|
||||
intro Hpq.
|
||||
apply dep_pair_intro; assumption.
|
||||
(* surjectivity *)
|
||||
intros m Hmn.
|
||||
exists (exist (fun x : nat => x <= n) m Hmn).
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Showing that equality on the interval [0;n] is decidable *)
|
||||
|
||||
Lemma interval_dec :
|
||||
forall n (x y : {m:nat|m<=n}), {x=y}+{x<>y}.
|
||||
Proof.
|
||||
intros n (p,Hp).
|
||||
induction p; intros ([|q],Hq).
|
||||
left.
|
||||
apply dep_pair_intro.
|
||||
reflexivity.
|
||||
right.
|
||||
intro H; discriminate H.
|
||||
right.
|
||||
intro H; discriminate H.
|
||||
assert (Hp' : p <= n).
|
||||
apply le_Sn_le; assumption.
|
||||
assert (Hq' : q <= n).
|
||||
apply le_Sn_le; assumption.
|
||||
destruct (IHp Hp' (exist (fun m => m <= n) q Hq'))
|
||||
as [Heq|Hneq].
|
||||
left.
|
||||
injection Heq; intro Heq'.
|
||||
apply dep_pair_intro.
|
||||
apply eq_S.
|
||||
assumption.
|
||||
right.
|
||||
intro HeqS.
|
||||
injection HeqS; intro Heq.
|
||||
apply Hneq.
|
||||
apply dep_pair_intro.
|
||||
assumption.
|
||||
Qed.
|
||||
|
||||
(** Showing that the cardinality relation is functional on decidable sets *)
|
||||
|
||||
Lemma card_inj_aux :
|
||||
forall (A:Type) f g n,
|
||||
(forall x:A, f x <= 0) ->
|
||||
(forall x y:A, f x = f y -> x = y) ->
|
||||
(forall m, m <= S n -> exists x:A, g x = m)
|
||||
-> False.
|
||||
Proof.
|
||||
intros A f g n Hfbound Hfinj Hgsurj.
|
||||
destruct (Hgsurj (S n) (le_n _)) as (x,Hx).
|
||||
destruct (Hgsurj n (le_S _ _ (le_n _))) as (x',Hx').
|
||||
assert (Hfx : 0 = f x).
|
||||
apply le_n_O_eq.
|
||||
apply Hfbound.
|
||||
assert (Hfx' : 0 = f x').
|
||||
apply le_n_O_eq.
|
||||
apply Hfbound.
|
||||
assert (x=x').
|
||||
apply Hfinj.
|
||||
rewrite <- Hfx.
|
||||
rewrite <- Hfx'.
|
||||
reflexivity.
|
||||
rewrite H in Hx.
|
||||
rewrite Hx' in Hx.
|
||||
apply (n_Sn _ Hx).
|
||||
Qed.
|
||||
|
||||
(** For [dec_restrict], we use a lemma on the negation of equality
|
||||
that requires proof-irrelevance. It should be possible to avoid this
|
||||
lemma by generalizing over a first-order definition of [x<>y], say
|
||||
[neq] such that [{x=y}+{neq x y}] and [~(x=y /\ neq x y)]; for such
|
||||
[neq], unicity of proofs could be proven *)
|
||||
|
||||
Require Import Classical.
|
||||
Lemma neq_dep_intro :
|
||||
forall (A:Set) (z x y:A) (p:x<>z) (q:y<>z), x=y ->
|
||||
exist (fun x => x <> z) x p = exist (fun x => x <> z) y q.
|
||||
Proof.
|
||||
intros A z x y p q Heq.
|
||||
generalize q; clear q; rewrite <- Heq; intro q.
|
||||
rewrite (proof_irrelevance _ p q); reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma dec_restrict :
|
||||
forall (A:Set),
|
||||
(forall x y :A, {x=y}+{x<>y}) ->
|
||||
forall z (x y :{a:A|a<>z}), {x=y}+{x<>y}.
|
||||
Proof.
|
||||
intros A Hdec z (x,Hx) (y,Hy).
|
||||
destruct (Hdec x y) as [Heq|Hneq].
|
||||
left; apply neq_dep_intro; assumption.
|
||||
right; intro Heq; injection Heq; exact Hneq.
|
||||
Qed.
|
||||
|
||||
Lemma pred_inj : forall n m,
|
||||
0 <> n -> 0 <> m -> pred m = pred n -> m = n.
|
||||
Proof.
|
||||
destruct n.
|
||||
intros m H; destruct H; reflexivity.
|
||||
destruct m.
|
||||
intros _ H; destruct H; reflexivity.
|
||||
simpl; intros _ _ H.
|
||||
rewrite H.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma le_neq_lt : forall n m, n <= m -> n<>m -> n < m.
|
||||
Proof.
|
||||
intros n m Hle Hneq.
|
||||
destruct (le_lt_eq_dec n m Hle).
|
||||
assumption.
|
||||
contradiction.
|
||||
Qed.
|
||||
|
||||
Lemma inj_restrict :
|
||||
forall (A:Set) (f:A->nat) x y z,
|
||||
(forall x y : A, f x = f y -> x = y)
|
||||
-> x <> z -> f y < f z -> f z <= f x
|
||||
-> pred (f x) = f y
|
||||
-> False.
|
||||
|
||||
(* Search error without the type of f !! *)
|
||||
Proof.
|
||||
intros A f x y z Hfinj Hneqx Hfy Hfx Heq.
|
||||
assert (f z <> f x).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqx.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (f x = S (f y)).
|
||||
assert (0 < f x).
|
||||
apply le_lt_trans with (f z).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
apply pred_inj.
|
||||
apply O_S.
|
||||
apply lt_O_neq; assumption.
|
||||
exact Heq.
|
||||
assert (f z <= f y).
|
||||
destruct (le_lt_or_eq _ _ Hfx).
|
||||
apply lt_n_Sm_le.
|
||||
rewrite <- H0.
|
||||
assumption.
|
||||
contradiction Hneqx.
|
||||
symmetry.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
contradiction (lt_not_le (f y) (f z)).
|
||||
Qed.
|
||||
|
||||
Theorem card_inj : forall m n (A:Set),
|
||||
(forall x y :A, {x=y}+{x<>y}) ->
|
||||
card A m -> card A n -> m = n.
|
||||
Proof.
|
||||
induction m; destruct n;
|
||||
intros A Hdec
|
||||
(f,(Hfbound,(Hfinj,Hfsurj)))
|
||||
(g,(Hgbound,(Hginj,Hgsurj))).
|
||||
(* 0/0 *)
|
||||
reflexivity.
|
||||
(* 0/Sm *)
|
||||
destruct (card_inj_aux _ _ _ _ Hfbound Hfinj Hgsurj).
|
||||
(* Sn/0 *)
|
||||
destruct (card_inj_aux _ _ _ _ Hgbound Hginj Hfsurj).
|
||||
(* Sn/Sm *)
|
||||
destruct (Hgsurj (S n) (le_n _)) as (xSn,HSnx).
|
||||
rewrite IHm with (n:=n) (A := {x:A|x<>xSn}).
|
||||
reflexivity.
|
||||
(* decidability of eq on {x:A|x<>xSm} *)
|
||||
apply dec_restrict.
|
||||
assumption.
|
||||
(* cardinality of {x:A|x<>xSn} is m *)
|
||||
pose (f' := fun x' : {x:A|x<>xSn} =>
|
||||
let (x,Hneq) := x' in
|
||||
if le_lt_dec (f xSn) (f x)
|
||||
then pred (f x)
|
||||
else f x).
|
||||
exists f'.
|
||||
split.
|
||||
(* f' is bounded *)
|
||||
unfold f'.
|
||||
intros (x,_).
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle|Hge].
|
||||
change m with (pred (S m)).
|
||||
apply le_pred.
|
||||
apply Hfbound.
|
||||
apply le_S_n.
|
||||
apply le_trans with (f xSn).
|
||||
exact Hge.
|
||||
apply Hfbound.
|
||||
split.
|
||||
(* f' is injective *)
|
||||
unfold f'.
|
||||
intros (x,Hneqx) (y,Hneqy) Heqf'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hlefx|Hgefx];
|
||||
destruct (le_lt_dec (f xSn) (f y)) as [Hlefy|Hgefy].
|
||||
(* f xSn <= f x and f xSn <= f y *)
|
||||
assert (Heq : x = y).
|
||||
apply Hfinj.
|
||||
assert (f xSn <> f y).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqy.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (0 < f y).
|
||||
apply le_lt_trans with (f xSn).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
assert (f xSn <> f x).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqx.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (0 < f x).
|
||||
apply le_lt_trans with (f xSn).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
apply pred_inj.
|
||||
apply lt_O_neq; assumption.
|
||||
apply lt_O_neq; assumption.
|
||||
assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* f y < f xSn <= f x *)
|
||||
destruct (inj_restrict A f x y xSn); assumption.
|
||||
(* f x < f xSn <= f y *)
|
||||
symmetry in Heqf'.
|
||||
destruct (inj_restrict A f y x xSn); assumption.
|
||||
(* f x < f xSn and f y < f xSn *)
|
||||
assert (Heq : x=y).
|
||||
apply Hfinj; assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* f' is surjective *)
|
||||
intros p Hlep.
|
||||
destruct (le_lt_dec (f xSn) p) as [Hle|Hlt].
|
||||
(* case f xSn <= p *)
|
||||
destruct (Hfsurj (S p) (le_n_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x <> xSn).
|
||||
intro Heqx.
|
||||
rewrite Heqx in Hx.
|
||||
rewrite Hx in Hle.
|
||||
apply le_Sn_n with p; assumption.
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
unfold f'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
|
||||
rewrite Hx; reflexivity.
|
||||
rewrite Hx in Hlt'.
|
||||
contradiction (le_not_lt (f xSn) p).
|
||||
apply lt_trans with (S p).
|
||||
apply lt_n_Sn.
|
||||
assumption.
|
||||
(* case p < f xSn *)
|
||||
destruct (Hfsurj p (le_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x <> xSn).
|
||||
intro Heqx.
|
||||
rewrite Heqx in Hx.
|
||||
rewrite Hx in Hlt.
|
||||
apply (lt_irrefl p).
|
||||
assumption.
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
unfold f'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
|
||||
rewrite Hx in Hle'.
|
||||
contradiction (lt_irrefl p).
|
||||
apply lt_le_trans with (f xSn); assumption.
|
||||
assumption.
|
||||
(* cardinality of {x:A|x<>xSn} is n *)
|
||||
pose (g' := fun x' : {x:A|x<>xSn} =>
|
||||
let (x,Hneq) := x' in
|
||||
if Hdec x xSn then 0 else g x).
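(* Every element of {x:A | x <> xSn} satisfies x <> xSn, so the Hdec test always
   falls through to g x and g' agrees with g there; the 0 branch only keeps the
   definition total without dependent pattern matching on the proof. *)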
|
||||
exists g'.
|
||||
split.
|
||||
(* g is bounded *)
|
||||
unfold g'.
|
||||
intros (x,_).
|
||||
destruct (Hdec x xSn) as [_|Hneq].
|
||||
apply le_O_n.
|
||||
assert (Hle_gx:=Hgbound x).
|
||||
destruct (le_lt_or_eq _ _ Hle_gx).
|
||||
apply lt_n_Sm_le.
|
||||
assumption.
|
||||
contradiction Hneq.
|
||||
apply Hginj.
|
||||
rewrite HSnx.
|
||||
assumption.
|
||||
split.
|
||||
(* g is injective *)
|
||||
unfold g'.
|
||||
intros (x,Hneqx) (y,Hneqy) Heqg'.
|
||||
destruct (Hdec x xSn) as [Heqx|_].
|
||||
contradiction Hneqx.
|
||||
destruct (Hdec y xSn) as [Heqy|_].
|
||||
contradiction Hneqy.
|
||||
assert (Heq : x=y).
|
||||
apply Hginj; assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* g is surjective *)
|
||||
intros p Hlep.
|
||||
destruct (Hgsurj p (le_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x<>xSn).
|
||||
intro Heq.
|
||||
rewrite Heq in Hx.
|
||||
rewrite Hx in HSnx.
|
||||
rewrite HSnx in Hlep.
|
||||
contradiction (le_Sn_n _ Hlep).
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
simpl.
|
||||
destruct (Hdec x xSn) as [Heqx|_].
|
||||
contradiction Hneq.
|
||||
assumption.
|
||||
Qed.
|
||||
|
||||
(** Conclusion *)
|
||||
|
||||
Theorem interval_discr :
|
||||
forall n m, {p:nat|p<=n} = {p:nat|p<=m} -> n=m.
|
||||
Proof.
|
||||
intros n m Heq.
|
||||
apply card_inj with (A := {p:nat|p<=n}).
|
||||
apply interval_dec.
|
||||
apply card_interval.
|
||||
rewrite Heq.
|
||||
apply card_interval.
|
||||
Qed.
|
||||
samples/D/aa.d (new file, 440 lines)
@@ -0,0 +1,440 @@
|
||||
/**
|
||||
* Implementation of associative arrays.
|
||||
*
|
||||
* Copyright: Martin Nowak 2015 -.
|
||||
* License: $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost License 1.0)
|
||||
* Authors: Martin Nowak
|
||||
*/
|
||||
module core.aa;
|
||||
|
||||
import core.memory : GC;
|
||||
|
||||
private
|
||||
{
|
||||
// grow threshold
|
||||
enum GROW_NUM = 4;
|
||||
enum GROW_DEN = 5;
|
||||
// shrink threshold
|
||||
enum SHRINK_NUM = 1;
|
||||
enum SHRINK_DEN = 8;
|
||||
// grow factor
|
||||
enum GROW_FAC = 4;
|
||||
// growing the AA multiplies its size by GROW_FAC, so the shrink threshold must be
|
||||
// smaller than the grow threshold divided by GROW_FAC to have a hysteresis
|
||||
static assert(GROW_FAC * SHRINK_NUM * GROW_DEN < GROW_NUM * SHRINK_DEN);
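// worked check of the hysteresis condition with the constants above: growing is
// triggered at load GROW_NUM/GROW_DEN = 4/5, and growing by GROW_FAC = 4 drops
// the load to 1/5, which must stay above the shrink threshold
// SHRINK_NUM/SHRINK_DEN = 1/8; hence 4*1*5 = 20 < 4*8 = 32.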
|
||||
// initial load factor (for literals), mean of both thresholds
|
||||
enum INIT_NUM = (GROW_DEN * SHRINK_NUM + GROW_NUM * SHRINK_DEN) / 2;
|
||||
enum INIT_DEN = SHRINK_DEN * GROW_DEN;
|
||||
|
||||
// magic hash constants to distinguish empty, deleted, and filled buckets
|
||||
enum HASH_EMPTY = 0;
|
||||
enum HASH_DELETED = 0x1;
|
||||
enum HASH_FILLED_MARK = size_t(1) << 8 * size_t.sizeof - 1;
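// calcHash below ORs this top bit into every stored hash, so a filled bucket's
// hash can never equal HASH_EMPTY (0) or HASH_DELETED (1); Bucket.filled simply
// tests that sign bit via the cast to ptrdiff_t.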
|
||||
}
|
||||
|
||||
enum INIT_NUM_BUCKETS = 8;
|
||||
|
||||
struct AA(Key, Val)
|
||||
{
|
||||
this(size_t sz)
|
||||
{
|
||||
impl = new Impl(nextpow2(sz));
|
||||
}
|
||||
|
||||
@property bool empty() const pure nothrow @safe @nogc
|
||||
{
|
||||
return !length;
|
||||
}
|
||||
|
||||
@property size_t length() const pure nothrow @safe @nogc
|
||||
{
|
||||
return impl is null ? 0 : impl.length;
|
||||
}
|
||||
|
||||
void opIndexAssign(Val val, in Key key)
|
||||
{
|
||||
// lazily alloc implementation
|
||||
if (impl is null)
|
||||
impl = new Impl(INIT_NUM_BUCKETS);
|
||||
|
||||
// get hash and bucket for key
|
||||
immutable hash = calcHash(key);
|
||||
|
||||
// found a value => assignment
|
||||
if (auto p = impl.findSlotLookup(hash, key))
|
||||
{
|
||||
p.entry.val = val;
|
||||
return;
|
||||
}
|
||||
|
||||
auto p = findSlotInsert(hash);
|
||||
if (p.deleted)
|
||||
--deleted;
|
||||
// check load factor and possibly grow
|
||||
else if (++used * GROW_DEN > dim * GROW_NUM)
|
||||
{
|
||||
grow();
|
||||
p = findSlotInsert(hash);
|
||||
assert(p.empty);
|
||||
}
|
||||
|
||||
// update search cache and allocate entry
|
||||
firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr));
|
||||
p.hash = hash;
|
||||
p.entry = new Impl.Entry(key, val); // TODO: move
|
||||
return;
|
||||
}
|
||||
|
||||
ref inout(Val) opIndex(in Key key) inout @trusted
|
||||
{
|
||||
auto p = opIn_r(key);
|
||||
assert(p !is null);
|
||||
return *p;
|
||||
}
|
||||
|
||||
inout(Val)* opIn_r(in Key key) inout @trusted
|
||||
{
|
||||
if (empty)
|
||||
return null;
|
||||
|
||||
immutable hash = calcHash(key);
|
||||
if (auto p = findSlotLookup(hash, key))
|
||||
return &p.entry.val;
|
||||
return null;
|
||||
}
|
||||
|
||||
bool remove(in Key key)
|
||||
{
|
||||
if (empty)
|
||||
return false;
|
||||
|
||||
immutable hash = calcHash(key);
|
||||
if (auto p = findSlotLookup(hash, key))
|
||||
{
|
||||
// clear entry
|
||||
p.hash = HASH_DELETED;
|
||||
p.entry = null;
|
||||
|
||||
++deleted;
|
||||
if (length * SHRINK_DEN < dim * SHRINK_NUM)
|
||||
shrink();
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
Val get(in Key key, lazy Val val)
|
||||
{
|
||||
auto p = opIn_r(key);
|
||||
return p is null ? val : *p;
|
||||
}
|
||||
|
||||
ref Val getOrSet(in Key key, lazy Val val)
|
||||
{
|
||||
// lazily alloc implementation
|
||||
if (impl is null)
|
||||
impl = new Impl(INIT_NUM_BUCKETS);
|
||||
|
||||
// get hash and bucket for key
|
||||
immutable hash = calcHash(key);
|
||||
|
||||
// found a value => assignment
|
||||
if (auto p = impl.findSlotLookup(hash, key))
|
||||
return p.entry.val;
|
||||
|
||||
auto p = findSlotInsert(hash);
|
||||
if (p.deleted)
|
||||
--deleted;
|
||||
// check load factor and possibly grow
|
||||
else if (++used * GROW_DEN > dim * GROW_NUM)
|
||||
{
|
||||
grow();
|
||||
p = findSlotInsert(hash);
|
||||
assert(p.empty);
|
||||
}
|
||||
|
||||
// update search cache and allocate entry
|
||||
firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr));
|
||||
p.hash = hash;
|
||||
p.entry = new Impl.Entry(key, val);
|
||||
return p.entry.val;
|
||||
}
|
||||
|
||||
/**
|
||||
Convert the AA to the type of the builtin language AA.
|
||||
*/
|
||||
Val[Key] toBuiltinAA() pure nothrow
|
||||
{
|
||||
return cast(Val[Key]) _aaFromCoreAA(impl, rtInterface);
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
private this(inout(Impl)* impl) inout
|
||||
{
|
||||
this.impl = impl;
|
||||
}
|
||||
|
||||
ref Val getLValue(in Key key)
|
||||
{
|
||||
// lazily alloc implementation
|
||||
if (impl is null)
|
||||
impl = new Impl(INIT_NUM_BUCKETS);
|
||||
|
||||
// get hash and bucket for key
|
||||
immutable hash = calcHash(key);
|
||||
|
||||
// found a value => assignment
|
||||
if (auto p = impl.findSlotLookup(hash, key))
|
||||
return p.entry.val;
|
||||
|
||||
auto p = findSlotInsert(hash);
|
||||
if (p.deleted)
|
||||
--deleted;
|
||||
// check load factor and possibly grow
|
||||
else if (++used * GROW_DEN > dim * GROW_NUM)
|
||||
{
|
||||
grow();
|
||||
p = findSlotInsert(hash);
|
||||
assert(p.empty);
|
||||
}
|
||||
|
||||
// update search cache and allocate entry
|
||||
firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr));
|
||||
p.hash = hash;
|
||||
p.entry = new Impl.Entry(key); // TODO: move
|
||||
return p.entry.val;
|
||||
}
|
||||
|
||||
static struct Impl
|
||||
{
|
||||
this(size_t sz)
|
||||
{
|
||||
buckets = allocBuckets(sz);
|
||||
}
|
||||
|
||||
@property size_t length() const pure nothrow @nogc
|
||||
{
|
||||
assert(used >= deleted);
|
||||
return used - deleted;
|
||||
}
|
||||
|
||||
@property size_t dim() const pure nothrow @nogc
|
||||
{
|
||||
return buckets.length;
|
||||
}
|
||||
|
||||
@property size_t mask() const pure nothrow @nogc
|
||||
{
|
||||
return dim - 1;
|
||||
}
|
||||
|
||||
// find the first slot to insert a value with hash
|
||||
inout(Bucket)* findSlotInsert(size_t hash) inout pure nothrow @nogc
|
||||
{
|
||||
for (size_t i = hash & mask, j = 1;; ++j)
|
||||
{
|
||||
if (!buckets[i].filled)
|
||||
return &buckets[i];
|
||||
i = (i + j) & mask;
|
||||
}
|
||||
}
|
||||
|
||||
// lookup a key
|
||||
inout(Bucket)* findSlotLookup(size_t hash, in Key key) inout
|
||||
{
|
||||
for (size_t i = hash & mask, j = 1;; ++j)
|
||||
{
|
||||
if (buckets[i].hash == hash && key == buckets[i].entry.key)
|
||||
return &buckets[i];
|
||||
else if (buckets[i].empty)
|
||||
return null;
|
||||
i = (i + j) & mask;
|
||||
}
|
||||
}
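// note on the probe above: a lookup only stops at an empty bucket; a deleted
// bucket (HASH_DELETED) fails the hash comparison but is not empty, so probing
// continues past tombstones and entries inserted behind them stay reachable.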
|
||||
|
||||
void grow()
|
||||
{
|
||||
// If there are so many deleted entries, that growing would push us
|
||||
// below the shrink threshold, we just purge deleted entries instead.
|
||||
if (length * SHRINK_DEN < GROW_FAC * dim * SHRINK_NUM)
|
||||
resize(dim);
|
||||
else
|
||||
resize(GROW_FAC * dim);
|
||||
}
|
||||
|
||||
void shrink()
|
||||
{
|
||||
if (dim > INIT_NUM_BUCKETS)
|
||||
resize(dim / GROW_FAC);
|
||||
}
|
||||
|
||||
void resize(size_t ndim) pure nothrow
|
||||
{
|
||||
auto obuckets = buckets;
|
||||
buckets = allocBuckets(ndim);
|
||||
|
||||
foreach (ref b; obuckets)
|
||||
if (b.filled)
|
||||
*findSlotInsert(b.hash) = b;
|
||||
|
||||
firstUsed = 0;
|
||||
used -= deleted;
|
||||
deleted = 0;
|
||||
GC.free(obuckets.ptr); // safe to free b/c impossible to reference
|
||||
}
|
||||
|
||||
static struct Entry
|
||||
{
|
||||
Key key;
|
||||
Val val;
|
||||
}
|
||||
|
||||
static struct Bucket
|
||||
{
|
||||
size_t hash;
|
||||
Entry* entry;
|
||||
|
||||
@property bool empty() const
|
||||
{
|
||||
return hash == HASH_EMPTY;
|
||||
}
|
||||
|
||||
@property bool deleted() const
|
||||
{
|
||||
return hash == HASH_DELETED;
|
||||
}
|
||||
|
||||
@property bool filled() const
|
||||
{
|
||||
return cast(ptrdiff_t) hash < 0;
|
||||
}
|
||||
}
|
||||
|
||||
Bucket[] allocBuckets(size_t dim) @trusted pure nothrow
|
||||
{
|
||||
enum attr = GC.BlkAttr.NO_INTERIOR;
|
||||
immutable sz = dim * Bucket.sizeof;
|
||||
return (cast(Bucket*) GC.calloc(sz, attr))[0 .. dim];
|
||||
}
|
||||
|
||||
Bucket[] buckets;
|
||||
uint used;
|
||||
uint deleted;
|
||||
uint firstUsed;
|
||||
}
|
||||
|
||||
RTInterface* rtInterface()() pure nothrow @nogc
|
||||
{
|
||||
static size_t aaLen(in void* pimpl) pure nothrow @nogc
|
||||
{
|
||||
auto aa = const(AA)(cast(const(Impl)*) pimpl);
|
||||
return aa.length;
|
||||
}
|
||||
|
||||
static void* aaGetY(void** pimpl, in void* pkey)
|
||||
{
|
||||
auto aa = AA(cast(Impl*)*pimpl);
|
||||
auto res = &aa.getLValue(*cast(const(Key*)) pkey);
|
||||
*pimpl = aa.impl; // might have changed
|
||||
return res;
|
||||
}
|
||||
|
||||
static inout(void)* aaInX(inout void* pimpl, in void* pkey)
|
||||
{
|
||||
auto aa = inout(AA)(cast(inout(Impl)*) pimpl);
|
||||
return aa.opIn_r(*cast(const(Key*)) pkey);
|
||||
}
|
||||
|
||||
static bool aaDelX(void* pimpl, in void* pkey)
|
||||
{
|
||||
auto aa = AA(cast(Impl*) pimpl);
|
||||
return aa.remove(*cast(const(Key*)) pkey);
|
||||
}
|
||||
|
||||
static immutable vtbl = RTInterface(&aaLen, &aaGetY, &aaInX, &aaDelX);
|
||||
return cast(RTInterface*)&vtbl;
|
||||
}
|
||||
|
||||
static size_t calcHash(in ref Key key)
|
||||
{
|
||||
return hashOf(key) | HASH_FILLED_MARK;
|
||||
}
|
||||
|
||||
Impl* impl;
|
||||
alias impl this;
|
||||
}
|
||||
|
||||
package extern (C) void* _aaFromCoreAA(void* impl, RTInterface* rtIntf) pure nothrow;
|
||||
|
||||
private:
|
||||
|
||||
struct RTInterface
|
||||
{
|
||||
alias AA = void*;
|
||||
|
||||
size_t function(in AA aa) pure nothrow @nogc len;
|
||||
void* function(AA* aa, in void* pkey) getY;
|
||||
inout(void)* function(inout AA aa, in void* pkey) inX;
|
||||
bool function(AA aa, in void* pkey) delX;
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
AA!(int, int) aa;
|
||||
assert(aa.length == 0);
|
||||
aa[0] = 1;
|
||||
assert(aa.length == 1 && aa[0] == 1);
|
||||
aa[1] = 2;
|
||||
assert(aa.length == 2 && aa[1] == 2);
|
||||
import core.stdc.stdio;
|
||||
|
||||
int[int] rtaa = aa.toBuiltinAA();
|
||||
assert(rtaa.length == 2);
|
||||
puts("length");
|
||||
assert(rtaa[0] == 1);
|
||||
assert(rtaa[1] == 2);
|
||||
rtaa[2] = 3;
|
||||
|
||||
assert(aa[2] == 3);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto aa = AA!(int, int)(3);
|
||||
aa[0] = 0;
|
||||
aa[1] = 1;
|
||||
aa[2] = 2;
|
||||
assert(aa.length == 3);
|
||||
}
|
||||
|
||||
//==============================================================================
|
||||
// Helper functions
|
||||
//------------------------------------------------------------------------------
|
||||
|
||||
size_t nextpow2(in size_t n) pure nothrow @nogc
|
||||
{
|
||||
import core.bitop : bsr;
|
||||
|
||||
if (n < 2)
|
||||
return 1;
|
||||
return size_t(1) << bsr(n - 1) + 1;
|
||||
}
|
||||
|
||||
pure nothrow @nogc unittest
|
||||
{
|
||||
// 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
|
||||
foreach (const n, const pow2; [1, 1, 2, 4, 4, 8, 8, 8, 8, 16])
|
||||
assert(nextpow2(n) == pow2);
|
||||
}
|
||||
|
||||
T min(T)(T a, T b) pure nothrow @nogc
|
||||
{
|
||||
return a < b ? a : b;
|
||||
}
|
||||
|
||||
T max(T)(T a, T b) pure nothrow @nogc
|
||||
{
|
||||
return b < a ? a : b;
|
||||
}
|
||||
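The probe loop in findSlotInsert and findSlotLookup above adds an increasing step j after every collision, and the bucket count is kept at a power of two (via nextpow2), so the triangular-number probe sequence visits every slot exactly once before repeating. A minimal standalone D sketch of that property, under those assumptions (dim, mask and seen are illustrative names, not part of the sample):

unittest
{
    enum dim = 8;           // bucket count, must be a power of two
    enum mask = dim - 1;
    bool[dim] seen;
    size_t i = 5 & mask;    // arbitrary starting hash
    foreach (j; 1 .. dim + 1)
    {
        seen[i] = true;     // slot probed on this step
        i = (i + j) & mask; // same update as findSlotInsert/findSlotLookup
    }
    foreach (hit; seen)
        assert(hit);        // every one of the dim slots was visited
}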
samples/D/arrayops.d (new file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
/**
|
||||
* Benchmark for array ops.
|
||||
*
|
||||
* Copyright: Copyright Martin Nowak 2016 -.
|
||||
* License: $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost License 1.0)
|
||||
* Authors: Martin Nowak
|
||||
*/
|
||||
import core.cpuid, std.algorithm, std.datetime, std.meta, std.stdio, std.string,
|
||||
std.range;
|
||||
|
||||
float[6] getLatencies(T, string op)()
|
||||
{
|
||||
enum N = (64 * (1 << 6) + 64) * T.sizeof;
|
||||
auto a = Array!T(N), b = Array!T(N), c = Array!T(N);
|
||||
float[6] latencies = float.max;
|
||||
foreach (i, ref latency; latencies)
|
||||
{
|
||||
auto len = 1 << i;
|
||||
foreach (_; 1 .. 32)
|
||||
{
|
||||
a[] = 24;
|
||||
b[] = 4;
|
||||
c[] = 2;
|
||||
auto sw = StopWatch(AutoStart.yes);
|
||||
foreach (off; size_t(0) .. size_t(64))
|
||||
{
|
||||
off = off * len + off;
|
||||
enum op = op.replace("const", "2").replace("a",
|
||||
"a[off .. off + len]").replace("b",
|
||||
"b[off .. off + len]").replace("c", "c[off .. off + len]");
|
||||
mixin(op ~ ";");
|
||||
}
|
||||
latency = min(latency, sw.peek.nsecs);
|
||||
}
|
||||
}
|
||||
float[6] res = latencies[] / 1024;
|
||||
return res;
|
||||
}
|
||||
|
||||
float[4] getThroughput(T, string op)()
|
||||
{
|
||||
enum N = (40 * 1024 * 1024 + 64 * T.sizeof) / T.sizeof;
|
||||
auto a = Array!T(N), b = Array!T(N), c = Array!T(N);
|
||||
float[4] latencies = float.max;
|
||||
size_t[4] lengths = [
|
||||
8 * 1024 / T.sizeof, 32 * 1024 / T.sizeof, 512 * 1024 / T.sizeof, 32 * 1024 * 1024 / T
|
||||
.sizeof
|
||||
];
|
||||
foreach (i, ref latency; latencies)
|
||||
{
|
||||
auto len = lengths[i] / 64;
|
||||
foreach (_; 1 .. 4)
|
||||
{
|
||||
a[] = 24;
|
||||
b[] = 4;
|
||||
c[] = 2;
|
||||
auto sw = StopWatch(AutoStart.yes);
|
||||
foreach (off; size_t(0) .. size_t(64))
|
||||
{
|
||||
off = off * len + off;
|
||||
enum op = op.replace("const", "2").replace("a",
|
||||
"a[off .. off + len]").replace("b",
|
||||
"b[off .. off + len]").replace("c", "c[off .. off + len]");
|
||||
mixin(op ~ ";");
|
||||
}
|
||||
immutable nsecs = sw.peek.nsecs;
|
||||
runMasked({latency = min(latency, nsecs);});
|
||||
}
|
||||
}
|
||||
float[4] throughputs = void;
|
||||
runMasked({throughputs = T.sizeof * lengths[] / latencies[];});
|
||||
return throughputs;
|
||||
}
|
||||
|
||||
string[] genOps()
|
||||
{
|
||||
string[] ops;
|
||||
foreach (op1; ["+", "-", "*", "/"])
|
||||
{
|
||||
ops ~= "a " ~ op1 ~ "= b";
|
||||
ops ~= "a " ~ op1 ~ "= const";
|
||||
foreach (op2; ["+", "-", "*", "/"])
|
||||
{
|
||||
ops ~= "a " ~ op1 ~ "= b " ~ op2 ~ " c";
|
||||
ops ~= "a " ~ op1 ~ "= b " ~ op2 ~ " const";
|
||||
}
|
||||
}
|
||||
return ops;
|
||||
}
|
||||
|
||||
void runOp(string op)()
|
||||
{
|
||||
foreach (T; AliasSeq!(ubyte, ushort, uint, ulong, byte, short, int, long, float,
|
||||
double))
|
||||
writefln("%s, %s, %(%.2f, %), %(%s, %)", T.stringof, op,
|
||||
getLatencies!(T, op), getThroughput!(T, op));
|
||||
}
|
||||
|
||||
struct Array(T)
|
||||
{
|
||||
import core.stdc.stdlib : free, malloc;
|
||||
|
||||
this(size_t n)
|
||||
{
|
||||
ary = (cast(T*) malloc(T.sizeof * n))[0 .. n];
|
||||
}
|
||||
|
||||
~this()
|
||||
{
|
||||
free(ary.ptr);
|
||||
}
|
||||
|
||||
T[] ary;
|
||||
alias ary this;
|
||||
}
|
||||
|
||||
version (X86)
|
||||
version = SSE;
|
||||
else version (X86_64)
|
||||
version = SSE;
|
||||
else
|
||||
static assert(0, "unimplemented");
|
||||
|
||||
version (SSE)
|
||||
{
|
||||
uint mxcsr()
|
||||
{
|
||||
uint ret = void;
|
||||
asm
|
||||
{
|
||||
stmxcsr ret;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void mxcsr(uint val)
|
||||
{
|
||||
asm
|
||||
{
|
||||
ldmxcsr val;
|
||||
}
|
||||
}
|
||||
|
||||
// http://softpixel.com/~cwright/programming/simd/sse.php
|
||||
enum FPU_EXCEPTION_MASKS = 1 << 12 | 1 << 11 | 1 << 10 | 1 << 9 | 1 << 8 | 1 << 7;
|
||||
enum FPU_EXCEPTION_FLAGS = 1 << 5 | 1 << 4 | 1 << 3 | 1 << 2 | 1 << 1 | 1 << 0;
|
||||
|
||||
void maskFPUExceptions()
|
||||
{
|
||||
mxcsr = mxcsr | FPU_EXCEPTION_MASKS;
|
||||
}
|
||||
|
||||
void unmaskFPUExceptions()
|
||||
{
|
||||
mxcsr = mxcsr & ~FPU_EXCEPTION_MASKS;
|
||||
}
|
||||
|
||||
uint FPUExceptionFlags()
|
||||
{
|
||||
return mxcsr & FPU_EXCEPTION_FLAGS;
|
||||
}
|
||||
|
||||
void clearFPUExceptionFlags()
|
||||
{
|
||||
mxcsr = mxcsr & ~FPU_EXCEPTION_FLAGS;
|
||||
}
|
||||
}
|
||||
|
||||
void runMasked(scope void delegate() dg)
|
||||
{
|
||||
assert(FPUExceptionFlags == 0);
|
||||
maskFPUExceptions;
|
||||
dg();
|
||||
clearFPUExceptionFlags;
|
||||
unmaskFPUExceptions;
|
||||
}
|
||||
|
||||
void main()
|
||||
{
|
||||
unmaskFPUExceptions;
|
||||
|
||||
writefln("type, op, %(latency%s, %), %-(throughput%s, %)", iota(6)
|
||||
.map!(i => 1 << i), ["8KB", "32KB", "512KB", "32MB"]);
|
||||
foreach (op; mixin("AliasSeq!(%(%s, %))".format(genOps)))
|
||||
runOp!op;
|
||||
maskFPUExceptions;
|
||||
}
|
||||
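The benchmark above assembles its operation strings in genOps ("a += b * c" and so on), rewrites the operand names into slice expressions, and compiles them with a string mixin inside getLatencies and getThroughput. A minimal sketch of that mechanism, with a hypothetical helper applyOp that is not part of the sample:

// applyOp is a hypothetical helper: op must be known at compile time,
// exactly like the enum op strings mixed in inside getLatencies/getThroughput.
void applyOp(string op, T)(T[] a, T[] b, T[] c)
{
    mixin(op ~ ";");        // e.g. expands to: a[] += b[] * c[];
}

unittest
{
    double[3] a = 1, b = 2, c = 3;
    applyOp!"a[] += b[] * c[]"(a[], b[], c[]);
    assert(a[] == [7.0, 7.0, 7.0]);   // 1 + 2 * 3 per element
}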
samples/D/function.d (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
void foo()
|
||||
{
|
||||
}
|
||||
samples/D/hello_world.d (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
import std.stdio;
|
||||
|
||||
void main()
|
||||
{
|
||||
writeln("Hello World");
|
||||
}
|
||||
samples/D/template.d (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
template Fib(size_t N)
|
||||
{
|
||||
static if (N < 2)
|
||||
enum Fib = size_t(1);
|
||||
else
|
||||
enum Fib = Fib!(N - 2) + Fib!(N - 1);
|
||||
}
|
||||
samples/D/template_function.d (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
void bar(T)(T t)
|
||||
{
|
||||
}
|
||||
samples/D/unittest1.d (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
unittest
|
||||
{
|
||||
}
|
||||
samples/D/unittest2.d (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
unittest("optional name")
|
||||
{
|
||||
}
|
||||
samples/DataWeave/customInterpolator.dwl (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
fun SQL(literals, parts) = ''
|
||||
---
|
||||
[
|
||||
SQL `SELECT * FROM table WHERE id = $(1) AND name = $('a')`,
|
||||
SQL `$('p')`,
|
||||
SQL `$('a')$('b')`,
|
||||
SQL `$('a')---$('b')`,
|
||||
SQL `---$('a')---$('b')---`,
|
||||
SQL `$('p')bbb`,
|
||||
SQL `aaa$('p')`,
|
||||
SQL `aaa$('p')bbb`
|
||||
]
|
||||
samples/DataWeave/directives.dwl (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
%dw 2.0
|
||||
var number = 1234
|
||||
fun foo(func,name="Mariano") = func(name)
|
||||
input payload application/test arg="value"
|
||||
output application/json
|
||||
---
|
||||
{
|
||||
foo: "bar"
|
||||
}
|
||||
samples/DataWeave/functions.dwl (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
%dw 2.0
|
||||
var x=(param1, param2) -> { "$param1": param2 }
|
||||
var y=(param1, param2 = "c") -> { "$param1": param2 }
|
||||
var toUser = (user) -> { name: user.name, lastName: user.lastName }
|
||||
fun z(param1, param2) = { "$param1": param2 }
|
||||
var a = { name: "Mariano" , toUser: ((param1, param2) -> { "$param1": param2 }) }
|
||||
var applyFirst = (array, func) -> (func(array[0]) ++ array[1 to -1])
|
||||
|
||||
var nested = (array, func) -> (a) -> (b) -> (c) -> array map func(a ++ b ++ c)
|
||||
|
||||
|
||||
fun f2(a1, a2) = ""
|
||||
fun f3(a1:String, a2:Number):String = a1
|
||||
fun f4(a1:String, a2:(a:Number) -> Number):String = a1
|
||||
---
|
||||
result: {
|
||||
a: x("a", "b"),
|
||||
b: y("a"),
|
||||
c: y("a", "b"),
|
||||
users: { (in1 map ((user) -> { user: (toUser(user) ++ user) })) },
|
||||
d: z("a", "b"),
|
||||
e: a.toUser("name","Mariano"),
|
||||
f: a.toUser("name","Mariano").name,
|
||||
f: applyFirst("mariano", (s) -> upper(s) ),
|
||||
g: [] map (s) -> upper(s),
|
||||
h: 1 f2 2
|
||||
}
|
||||
samples/DataWeave/literals.dwl (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
%dw 2.0
|
||||
---
|
||||
{
|
||||
"boolean":{
|
||||
"true" : true,
|
||||
"false": false
|
||||
},
|
||||
"Number": {
|
||||
"int": 123,
|
||||
"decimal": 123.23
|
||||
},
|
||||
"string": {
|
||||
"singleQuote" : 'A String',
|
||||
"doubleQuote" : "A String"
|
||||
},
|
||||
"regex": /foo/,
|
||||
"date": {
|
||||
a: |2003-10-01|,
|
||||
b: |2005-045|,
|
||||
c: |2003-W14-3|,
|
||||
d: |23:57:59|,
|
||||
e: |23:57:30.700|,
|
||||
f: |23:50:30Z|,
|
||||
g: |+13:00|,
|
||||
h: |Z|,
|
||||
i: |-02:00|,
|
||||
j: |2005-06-02T15:10:16|,
|
||||
k: |2005-06-02T15:10:16Z|,
|
||||
l: |2005-06-02T15:10:16+03:00|,
|
||||
m: |P12Y7M11D|,
|
||||
n: |P12Y5M|,
|
||||
o: |P45DT9H20M8S|,
|
||||
p: |PT9H20M8S|
|
||||
}
|
||||
}
|
||||
|
||||
samples/DataWeave/match.dwl (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
{
|
||||
// Regex Pattern Matching (Can be named or unnamed)
|
||||
a: in0.phones map $ match {
|
||||
case matches /\+(\d+)\s\((\d+)\)\s(\d+\-\d+)/ -> { country: $[0], area: $[1], number: $[2] }
|
||||
case matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: $[1], number: $[2] }
|
||||
case phone matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: phone[1], number: phone[2] }
|
||||
},
|
||||
// Type Pattern Matching (Can be named or unnamed)
|
||||
b: in0.object match {
|
||||
case is Object -> { object: $ }
|
||||
case is Number -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case y is Boolean -> { boolean: y }
|
||||
},
|
||||
// Literal Pattern Matching (Can be named or unnamed)
|
||||
c: in0.value match {
|
||||
case "Emiliano" -> { string: $ }
|
||||
case 123 -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case value: "Mariano" -> { name: value }
|
||||
},
|
||||
// Boolean Expression Pattern Matching (Always named)
|
||||
d: in0.value match {
|
||||
case x if x > 30 -> { biggerThan30: x }
|
||||
case x if x == 9 -> { nine: x }
|
||||
},
|
||||
// Default matches
|
||||
e: in0.value match {
|
||||
case "Emiliano" -> "string"
|
||||
case 3.14 -> number
|
||||
else -> "1234"
|
||||
}
|
||||
}
|
||||
samples/EBNF/grammar.ebnf (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
(*
|
||||
Source: https://github.com/sunjay/lion
|
||||
License: MIT
|
||||
*)
|
||||
|
||||
Statement = ( NamedFunction | AnonymousFunction | Assignment | Expr ) , "\n" ;
|
||||
Expr = AnonymousFunction | Term | "(" , Expr , ")" ,
|
||||
{ AnonymousFunction | Term | "(" , Expr , ")" } ;
|
||||
|
||||
Assignment = Symbol , "=" , Expr ;
|
||||
|
||||
AnonymousFunction = "\" , FunctionRHS ;
|
||||
NamedFunction = Symbol , FunctionRHS ;
|
||||
|
||||
FunctionRHS = FunctionParams , "=" , FunctionBody ;
|
||||
FunctionParams = FunctionParam , { FunctionParam } ;
|
||||
FunctionParam = Term ;
|
||||
FunctionBody = Expr ;
|
||||
|
||||
Term = Symbol | Number | SingleWordString ;
|
||||
SingleWordString = '"' , Symbol , '"' ;
|
||||
(* Symbol is a collection of valid symbol characters, not defined here *)
|
||||
(* Number is a valid numeric literal *)
|
||||
|
||||
samples/EBNF/material.ebnf (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
(*
|
||||
Source: https://github.com/io7m/jsom0
|
||||
License: ISC
|
||||
*)
|
||||
|
||||
name =
|
||||
"name" , string , ";" ;
|
||||
|
||||
diffuse =
|
||||
"diffuse" , real , real , real , ";" ;
|
||||
|
||||
ambient =
|
||||
"ambient" , real , real , real , ";" ;
|
||||
|
||||
specular =
|
||||
"specular" , real , real , real , real , ";" ;
|
||||
|
||||
shininess =
|
||||
"shininess" , real , ";" ;
|
||||
|
||||
alpha =
|
||||
"alpha" , real , ";" ;
|
||||
|
||||
mapping =
|
||||
"map_chrome" | "map_uv" ;
|
||||
|
||||
texture =
|
||||
"texture" , string , real , mapping , ";" ;
|
||||
|
||||
material =
|
||||
"material" , ";" ,
|
||||
name ,
|
||||
diffuse ,
|
||||
ambient ,
|
||||
specular ,
|
||||
shininess ,
|
||||
alpha ,
|
||||
[ texture ] ,
|
||||
"end" , ";" ;
|
||||
|
||||
samples/EBNF/object.ebnf (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
(*
|
||||
Source: https://github.com/io7m/jsom0
|
||||
License: ISC
|
||||
*)
|
||||
|
||||
vertex_p3n3_name =
|
||||
"vertex_p3n3" ;
|
||||
|
||||
vertex_p3n3t2_name =
|
||||
"vertex_p3n3t2" ;
|
||||
|
||||
vertex_type =
|
||||
vertex_p3n3_name | vertex_p3n3t2_name ;
|
||||
|
||||
vertex_position =
|
||||
"position" , real , real , real , ";" ;
|
||||
|
||||
vertex_normal =
|
||||
"normal" , real , real , real , ";" ;
|
||||
|
||||
vertex_uv =
|
||||
"uv" , real , real , ";" ;
|
||||
|
||||
vertex_p3n3 =
|
||||
vertex_p3n3_name , vertex_position , vertex_normal , "end" , ";" ;
|
||||
|
||||
vertex_p3n3t2 =
|
||||
vertex_p3n3t2_name , vertex_position , vertex_normal , vertex_uv , "end" , ";" ;
|
||||
|
||||
vertex =
|
||||
vertex_p3n3 | vertex_p3n3t2 ;
|
||||
|
||||
vertex_array =
|
||||
"array" , positive , vertex_type , { vertex } , "end" , ";" ;
|
||||
|
||||
vertices =
|
||||
"vertices" , ";" , vertex_array , "end" , ";" ;
|
||||
|
||||
triangle =
|
||||
"triangle" , natural , natural , natural , ";" ;
|
||||
|
||||
triangle_array =
|
||||
"array" , positive, "triangle" , { triangle } , "end" , ";" ;
|
||||
|
||||
triangles =
|
||||
"triangles" , ";" , triangle_array , "end" , ";" ;
|
||||
|
||||
name =
|
||||
"name" , string , ";" ;
|
||||
|
||||
material_name =
|
||||
"material_name" , string , ";" ;
|
||||
|
||||
object =
|
||||
"object" , ";" ,
|
||||
name ,
|
||||
material_name ,
|
||||
vertices ,
|
||||
triangles ,
|
||||
"end" , ";" ;
|
||||
|
||||
samples/EBNF/types.ebnf (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
(*
|
||||
Source: https://github.com/io7m/jsom0
|
||||
License: ISC
|
||||
*)
|
||||
|
||||
digit_without_zero =
|
||||
"1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ;
|
||||
|
||||
digit =
|
||||
"0" | digit_without_zero ;
|
||||
|
||||
positive =
|
||||
digit_without_zero , { digit } ;
|
||||
|
||||
natural =
|
||||
"0" | positive ;
|
||||
|
||||
real =
|
||||
[ "-" ] , digit , [ "." , { digit } ] ;
|
||||
|
||||
samples/Easybuild/bzip2-1.0.6-GCC-4.9.2.eb (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
# not really (there's an EB_bzip2 easyblock), but fine for use in unit tests
|
||||
easyblock = 'ConfigureMake'
|
||||
|
||||
name = 'bzip2'
|
||||
version = '1.0.6'
|
||||
|
||||
homepage = 'http://www.bzip.org/'
|
||||
description = """bzip2 is a freely available, patent free, high-quality data compressor. It typically
|
||||
compresses files to within 10% to 15% of the best available techniques (the PPM family of statistical
|
||||
compressors), whilst being around twice as fast at compression and six times faster at decompression."""
|
||||
|
||||
toolchain = {'name': 'GCC', 'version': '4.9.2'}
|
||||
toolchainopts = {'pic': True}
|
||||
|
||||
sources = [SOURCE_TAR_GZ]
|
||||
source_urls = ['http://www.bzip.org/%(version)s']
|
||||
|
||||
builddependencies = [('gzip', '1.6')]
|
||||
|
||||
moduleclass = 'tools'
|
||||
samples/Edje Data Collection/mild.edc (new file, 2061 lines)
(diff suppressed because it is too large)
samples/Emacs Lisp/filenames/Cask (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
(package "composer" "0.0.7" "Interface to PHP Composer")
|
||||
(source "melpa" "https://melpa.org/packages/")
|
||||
|
||||
(package-file "composer.el")
|
||||
|
||||
(depends-on "f")
|
||||
(depends-on "s")
|
||||
(depends-on "request")
|
||||
(depends-on "seq")
|
||||
samples/Erlang/filenames/Emakefile (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
{"src/*", [
|
||||
report,
|
||||
verbose,
|
||||
{i, "include"},
|
||||
{outdir, "ebin"},
|
||||
debug_info
|
||||
]}.
|
||||
samples/Fantom/sample1.fan (new file, 97 lines)
@@ -0,0 +1,97 @@
|
||||
/*
|
||||
* Author: Robert Koeninger
|
||||
* License: WTFPL (http://www.wtfpl.net/)
|
||||
*/
|
||||
|
||||
class Spelling {
|
||||
|
||||
** Load sample text and offer corrections for input
|
||||
static Void main(Str[] args) {
|
||||
text := File.os("big.txt").readAllStr
|
||||
counts := Str:Int[:] { def = 0 }
|
||||
text.split.each |word| { counts[word] += 1 }
|
||||
args.each |arg| { echo(correction(counts, arg)) }
|
||||
}
|
||||
|
||||
static const Range letters := Range.makeInclusive(97, 122)
|
||||
|
||||
** Most probable spelling correction for `word`.
|
||||
static Str correction(Str:Int counts, Str word) {
|
||||
candidates(counts, word).max |x, y| { counts[x] <=> counts[y] }
|
||||
}
|
||||
|
||||
** Generate possible spelling corrections for `word`.
|
||||
static Str[] candidates(Str:Int counts, Str word) {
|
||||
result := known(counts, Str[word])
|
||||
if (result.size > 0) return result
|
||||
|
||||
result = known(counts, edits1(word))
|
||||
if (result.size > 0) return result
|
||||
|
||||
result = known(counts, edits2(word))
|
||||
if (result.size > 0) return result
|
||||
|
||||
return Str[word]
|
||||
}
|
||||
|
||||
** The subset of `words` that appear in the map of `counts`.
|
||||
static Str[] known(Str:Int counts, Str[] words) {
|
||||
words.findAll |word, i| { counts[word] > 0 }.unique
|
||||
}
|
||||
|
||||
** All edits that are one edit away from `word`.
|
||||
static Str[] edits1(Str word) {
|
||||
edits := Str[,]
|
||||
|
||||
for (i := 0; i < word.size; ++i) {
|
||||
edits.add(delete(word, i))
|
||||
|
||||
if (i < word.size - 1) {
|
||||
edits.add(transpose(word, i))
|
||||
}
|
||||
|
||||
edits.addAll(replace(word, i))
|
||||
edits.addAll(insert(word, i))
|
||||
}
|
||||
|
||||
edits = edits.unique
|
||||
edits.remove(word)
|
||||
return edits
|
||||
}
|
||||
|
||||
** Word with `i`th letter removed.
|
||||
static Str delete(Str word, Int i) {
|
||||
left := word.getRange(Range.makeExclusive(0, i))
|
||||
right := word.getRange(Range.makeExclusive(i + 1, word.size))
|
||||
return left + right
|
||||
}
|
||||
|
||||
** Word with `i`th and `i+1`st letter swapped.
|
||||
static Str transpose(Str word, Int i) {
|
||||
left := word.getRange(Range.makeExclusive(0, i))
|
||||
right := word.getRange(Range.makeExclusive(i, word.size))
|
||||
first := right.get(0).toChar
|
||||
second := right.get(1).toChar
|
||||
rest := right.getRange(Range.makeExclusive(2, right.size))
|
||||
return left + second + first + rest
|
||||
}
|
||||
|
||||
** Word with `i`th letter replaced with every other letter.
|
||||
static Str[] replace(Str word, Int i) {
|
||||
left := word.getRange(Range.makeExclusive(0, i))
|
||||
right := word.getRange(Range.makeExclusive(i + 1, word.size))
|
||||
return letters.map |ch| { left + ch.toChar + right }
|
||||
}
|
||||
|
||||
** Word with each letter inserted at `i`.
|
||||
static Str[] insert(Str word, Int i) {
|
||||
left := word.getRange(Range.makeExclusive(0, i))
|
||||
right := word.getRange(Range.makeExclusive(i, word.size))
|
||||
return letters.map |ch| { left + ch.toChar + right }
|
||||
}
|
||||
|
||||
** All edits that are two edits away from `word`.
|
||||
static Str[] edits2(Str word) {
|
||||
(Str[])(edits1(word).map |w| { edits1(w) }.flatten)
|
||||
}
|
||||
}
|
||||
samples/Fantom/sample2.fan (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Author: Robert Koeninger
|
||||
* License: WTFPL (http://www.wtfpl.net/)
|
||||
*/
|
||||
|
||||
mixin Expr
|
||||
{
|
||||
abstract Obj? eval()
|
||||
}
|
||||
|
||||
class Constant : Expr
|
||||
{
|
||||
Obj? value
|
||||
|
||||
new make(Obj? value) { this.value = value }
|
||||
override Obj? eval() { value }
|
||||
}
|
||||
|
||||
enum class Op
|
||||
{
|
||||
plus,
|
||||
minus
|
||||
}
|
||||
|
||||
class Infix : Expr
|
||||
{
|
||||
Op op
|
||||
Expr left
|
||||
Expr right
|
||||
|
||||
new make(Op op, Expr left, Expr right)
|
||||
{
|
||||
this.op = op
|
||||
this.left = left
|
||||
this.right = right
|
||||
}
|
||||
|
||||
override Obj? eval()
|
||||
{
|
||||
switch (op)
|
||||
{
|
||||
case Op.plus:
|
||||
return (Int)left.eval() + (Int)right.eval()
|
||||
case Op.minus:
|
||||
return (Int)left.eval() - (Int)right.eval()
|
||||
default:
|
||||
throw Err("undefined Op")
|
||||
}
|
||||
}
|
||||
}
|
||||
samples/GLSL/SyLens.shader (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
#version 120
|
||||
|
||||
/*
|
||||
Original Lens Distortion Algorithm from SSontech (Syntheyes)
|
||||
http://www.ssontech.com/content/lensalg.htm
|
||||
|
||||
r2 is radius squared.
|
||||
|
||||
r2 = image_aspect*image_aspect*u*u + v*v
|
||||
f = 1 + r2*(k + kcube*sqrt(r2))
|
||||
u' = f*u
|
||||
v' = f*v
|
||||
|
||||
*/
|
||||
|
||||
// Controls
|
||||
uniform float kCoeff, kCube, uShift, vShift;
|
||||
uniform float chroma_red, chroma_green, chroma_blue;
|
||||
uniform bool apply_disto;
|
||||
|
||||
// Uniform inputs
|
||||
uniform sampler2D input1;
|
||||
uniform float adsk_input1_w, adsk_input1_h, adsk_input1_aspect, adsk_input1_frameratio;
|
||||
uniform float adsk_result_w, adsk_result_h;
|
||||
|
||||
float distortion_f(float r) {
|
||||
float f = 1 + (r*r)*(kCoeff + kCube * r);
|
||||
return f;
|
||||
}
|
||||
|
||||
|
||||
float inverse_f(float r)
|
||||
{
|
||||
|
||||
// Build a lookup table on the radius, as a fixed-size table.
|
||||
// We will use a vec3 since we will store the multiplied number in the Z coordinate.
|
||||
// So to recap: x will be the radius, y will be the f(x) distortion, and Z will be x * y;
|
||||
vec3[48] lut;
|
||||
|
||||
// Since our LUT is shader-global, check if it's been computed already
|
||||
// Flame has no overflow bbox so we can safely max out at the image edge, plus some cushion
|
||||
float max_r = sqrt((adsk_input1_frameratio * adsk_input1_frameratio) + 1) + 0.1;
|
||||
float incr = max_r / 48;
|
||||
float lut_r = 0;
|
||||
float f;
|
||||
for(int i=0; i < 48; i++) {
|
||||
f = distortion_f(lut_r);
|
||||
lut[i] = vec3(lut_r, f, lut_r * f);
|
||||
lut_r += incr;
|
||||
}
|
||||
|
||||
float t;
|
||||
// Now find the neighbouring elements
|
||||
// only iterate to 46 since we will need
|
||||
// 47 as i+1
|
||||
for(int i=0; i < 47; i++) {
|
||||
if(lut[i].z < r && lut[i+1].z > r) {
|
||||
// BAM! our value is between these two segments
|
||||
// get the T interpolant and mix
|
||||
t = (r - lut[i].z) / (lut[i+1].z - lut[i].z);
|
||||
return mix(lut[i].y, lut[i+1].y, t );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
float aberrate(float f, float chroma)
|
||||
{
|
||||
return f + (f * chroma);
|
||||
}
|
||||
|
||||
vec3 chromaticize_and_invert(float f)
|
||||
{
|
||||
vec3 rgb_f = vec3(aberrate(f, chroma_red), aberrate(f, chroma_green), aberrate(f, chroma_blue));
|
||||
// We need to DIVIDE by F when we redistort, and x / y == x * (1 / y)
|
||||
if(apply_disto) {
|
||||
rgb_f = 1 / rgb_f;
|
||||
}
|
||||
return rgb_f;
|
||||
}
|
||||
|
||||
void main(void)
|
||||
{
|
||||
vec2 px, uv;
|
||||
float f = 1;
|
||||
float r = 1;
|
||||
|
||||
px = gl_FragCoord.xy;
|
||||
|
||||
// Make sure we are still centered
|
||||
px.x -= (adsk_result_w - adsk_input1_w) / 2;
|
||||
px.y -= (adsk_result_h - adsk_input1_h) / 2;
|
||||
|
||||
// Push the destination coordinates into the [0..1] range
|
||||
uv.x = px.x / adsk_input1_w;
|
||||
uv.y = px.y / adsk_input1_h;
|
||||
|
||||
|
||||
// And to Syntheyes UV which are [1..-1] on both X and Y
|
||||
uv.x = (uv.x *2 ) - 1;
|
||||
uv.y = (uv.y *2 ) - 1;
|
||||
|
||||
// Add UV shifts
|
||||
uv.x += uShift;
|
||||
uv.y += vShift;
|
||||
|
||||
// Make the X value the aspect value, so that the X coordinates go to [-aspect..aspect]
|
||||
uv.x = uv.x * adsk_input1_frameratio;
|
||||
|
||||
// Compute the radius
|
||||
r = sqrt(uv.x*uv.x + uv.y*uv.y);
|
||||
|
||||
// If we are redistorting, account for the oversize plate in the input, assume that
|
||||
// the input aspect is the same
|
||||
if(apply_disto) {
|
||||
r = r / (float(adsk_input1_w) / float(adsk_result_w));
|
||||
}
|
||||
|
||||
// Apply or remove disto, per channel honoring chromatic aberration
|
||||
if(apply_disto) {
|
||||
f = inverse_f(r);
|
||||
} else {
|
||||
f = distortion_f(r);
|
||||
}
|
||||
|
||||
vec2[3] rgb_uvs = vec2[](uv, uv, uv);
|
||||
|
||||
// Compute distortions per component
|
||||
vec3 rgb_f = chromaticize_and_invert(f);
|
||||
|
||||
// Apply the disto coefficients, per component
|
||||
rgb_uvs[0] = rgb_uvs[0] * rgb_f.rr;
|
||||
rgb_uvs[1] = rgb_uvs[1] * rgb_f.gg;
|
||||
rgb_uvs[2] = rgb_uvs[2] * rgb_f.bb;
|
||||
|
||||
// Convert all the UVs back to the texture space, per color component
|
||||
for(int i=0; i < 3; i++) {
|
||||
uv = rgb_uvs[i];
|
||||
|
||||
// Back from [-aspect..aspect] to [-1..1]
|
||||
uv.x = uv.x / adsk_input1_frameratio;
|
||||
|
||||
// Remove UV shifts
|
||||
uv.x -= uShift;
|
||||
uv.y -= vShift;
|
||||
|
||||
// Back to OGL UV
|
||||
uv.x = (uv.x + 1) / 2;
|
||||
uv.y = (uv.y + 1) / 2;
|
||||
|
||||
rgb_uvs[i] = uv;
|
||||
}
|
||||
|
||||
// Sample the input plate, per component
|
||||
vec4 sampled;
|
||||
sampled.r = texture2D(input1, rgb_uvs[0]).r;
|
||||
sampled.g = texture2D(input1, rgb_uvs[1]).g;
|
||||
sampled.b = texture2D(input1, rgb_uvs[2]).b;
|
||||
|
||||
// and assign to the output
|
||||
gl_FragColor.rgba = vec4(sampled.rgb, 1.0 );
|
||||
}
|
||||
samples/GLSL/islandScene.shader (new file, 630 lines)
@@ -0,0 +1,630 @@
|
||||
//// High quality (Some browsers may freeze or crash)
|
||||
//#define HIGHQUALITY
|
||||
|
||||
//// Medium quality (Should be fine on all systems, works on Intel HD2000 on Win7 but quite slow)
|
||||
//#define MEDIUMQUALITY
|
||||
|
||||
//// Defaults
|
||||
//#define REFLECTIONS
|
||||
#define SHADOWS
|
||||
//#define GRASS
|
||||
//#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
//#define DETAILED_NOISE
|
||||
//#define LIGHT_AA // 2 sample SSAA
|
||||
//#define HEAVY_AA // 2x2 RG SSAA
|
||||
//#define TONEMAP
|
||||
|
||||
//// Configurations
|
||||
#ifdef MEDIUMQUALITY
|
||||
#define SHADOWS
|
||||
#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
#define TONEMAP
|
||||
#endif
|
||||
|
||||
#ifdef HIGHQUALITY
|
||||
#define REFLECTIONS
|
||||
#define SHADOWS
|
||||
//#define GRASS
|
||||
#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
#define DETAILED_NOISE
|
||||
#define LIGHT_AA
|
||||
#define TONEMAP
|
||||
#endif
|
||||
|
||||
// Constants
|
||||
const float eps = 1e-5;
|
||||
const float PI = 3.14159265359;
|
||||
|
||||
const vec3 sunDir = vec3(0.79057,-0.47434, 0.0);
|
||||
const vec3 skyCol = vec3(0.3, 0.5, 0.8);
|
||||
const vec3 sandCol = vec3(0.9, 0.8, 0.5);
|
||||
const vec3 treeCol = vec3(0.8, 0.65, 0.3);
|
||||
const vec3 grassCol = vec3(0.4, 0.5, 0.18);
|
||||
const vec3 leavesCol = vec3(0.3, 0.6, 0.2);
|
||||
const vec3 leavesPos = vec3(-5.1,13.4, 0.0);
|
||||
|
||||
#ifdef TONEMAP
|
||||
const vec3 sunCol = vec3(1.8, 1.7, 1.6);
|
||||
#else
|
||||
const vec3 sunCol = vec3(0.9, 0.85, 0.8);
|
||||
#endif
|
||||
|
||||
const float exposure = 1.1; // Only used when tonemapping
|
||||
|
||||
// Description : Array and textureless GLSL 2D/3D/4D simplex
|
||||
// noise functions.
|
||||
// Author : Ian McEwan, Ashima Arts.
|
||||
// License : Copyright (C) 2011 Ashima Arts. All rights reserved.
|
||||
// Distributed under the MIT License. See LICENSE file.
|
||||
// https://github.com/ashima/webgl-noise
|
||||
vec3 mod289(vec3 x) {
|
||||
return x - floor(x * (1.0 / 289.0)) * 289.0;
|
||||
}
|
||||
|
||||
vec4 mod289(vec4 x) {
|
||||
return x - floor(x * (1.0 / 289.0)) * 289.0;
|
||||
}
|
||||
|
||||
vec4 permute(vec4 x) {
|
||||
return mod289(((x*34.0)+1.0)*x);
|
||||
}
|
||||
|
||||
vec4 taylorInvSqrt(vec4 r) {
|
||||
return 1.79284291400159 - 0.85373472095314 * r;
|
||||
}
|
||||
|
||||
float snoise(vec3 v) {
|
||||
const vec2 C = vec2(1.0/6.0, 1.0/3.0) ;
|
||||
const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
|
||||
|
||||
// First corner
|
||||
vec3 i = floor(v + dot(v, C.yyy) );
|
||||
vec3 x0 = v - i + dot(i, C.xxx) ;
|
||||
|
||||
// Other corners
|
||||
vec3 g = step(x0.yzx, x0.xyz);
|
||||
vec3 l = 1.0 - g;
|
||||
vec3 i1 = min( g.xyz, l.zxy );
|
||||
vec3 i2 = max( g.xyz, l.zxy );
|
||||
|
||||
// x0 = x0 - 0.0 + 0.0 * C.xxx;
|
||||
// x1 = x0 - i1 + 1.0 * C.xxx;
|
||||
// x2 = x0 - i2 + 2.0 * C.xxx;
|
||||
// x3 = x0 - 1.0 + 3.0 * C.xxx;
|
||||
vec3 x1 = x0 - i1 + C.xxx;
|
||||
vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y
|
||||
vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y
|
||||
|
||||
// Permutations
|
||||
i = mod289(i);
|
||||
vec4 p = permute( permute( permute(
|
||||
i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
|
||||
+ i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
|
||||
+ i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
|
||||
|
||||
// Gradients: 7x7 points over a square, mapped onto an octahedron.
|
||||
// The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294)
|
||||
float n_ = 0.142857142857; // 1.0/7.0
|
||||
vec3 ns = n_ * D.wyz - D.xzx;
|
||||
|
||||
vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7)
|
||||
|
||||
vec4 x_ = floor(j * ns.z);
|
||||
vec4 y_ = floor(j - 7.0 * x_ ); // mod(j,N)
|
||||
|
||||
vec4 x = x_ *ns.x + ns.yyyy;
|
||||
vec4 y = y_ *ns.x + ns.yyyy;
|
||||
vec4 h = 1.0 - abs(x) - abs(y);
|
||||
|
||||
vec4 b0 = vec4( x.xy, y.xy );
|
||||
vec4 b1 = vec4( x.zw, y.zw );
|
||||
|
||||
//vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0;
|
||||
//vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0;
|
||||
vec4 s0 = floor(b0)*2.0 + 1.0;
|
||||
vec4 s1 = floor(b1)*2.0 + 1.0;
|
||||
vec4 sh = -step(h, vec4(0.0));
|
||||
|
||||
vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
|
||||
vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
|
||||
|
||||
vec3 p0 = vec3(a0.xy,h.x);
|
||||
vec3 p1 = vec3(a0.zw,h.y);
|
||||
vec3 p2 = vec3(a1.xy,h.z);
|
||||
vec3 p3 = vec3(a1.zw,h.w);
|
||||
|
||||
//Normalise gradients
|
||||
vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
|
||||
p0 *= norm.x;
|
||||
p1 *= norm.y;
|
||||
p2 *= norm.z;
|
||||
p3 *= norm.w;
|
||||
|
||||
// Mix final noise value
|
||||
vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
|
||||
m = m * m;
|
||||
return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
|
||||
dot(p2,x2), dot(p3,x3) ) );
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Main
|
||||
float fbm(vec3 p)
|
||||
{
|
||||
float final = snoise(p);
|
||||
p *= 1.94; final += snoise(p) * 0.5;
|
||||
#ifdef DETAILED_NOISE
|
||||
p *= 3.75; final += snoise(p) * 0.25;
|
||||
return final / 1.75;
|
||||
#else
|
||||
return final / 1.5;
|
||||
#endif
|
||||
}
|
||||
|
||||
float waterHeight(vec3 p)
|
||||
{
|
||||
float d = length(p.xz);
|
||||
float h = sin(d * 1.5 + iGlobalTime * 3.0) * 12.0 / d; // Island waves
|
||||
#ifdef SMALL_WAVES
|
||||
h += fbm(p*0.5); // Other waves
|
||||
#endif
|
||||
return h;
|
||||
}
|
||||
|
||||
vec3 bump(vec3 pos, vec3 rayDir)
|
||||
{
|
||||
float s = 2.0;
|
||||
|
||||
// Fade out waves to reduce aliasing
|
||||
float dist = dot(pos, rayDir);
|
||||
s *= dist < 2.0 ? 1.0 : 1.4142 / sqrt(dist);
|
||||
|
||||
// Calculate normal from heightmap
|
||||
vec2 e = vec2(1e-2, 0.0);
|
||||
vec3 p = vec3(pos.x, iGlobalTime*0.5, pos.z)*0.7;
|
||||
float m = waterHeight(p)*s;
|
||||
return normalize(vec3(
|
||||
waterHeight(p+e.xyy)*s-m,
|
||||
1.0,
|
||||
waterHeight(p+e.yxy)*s-m
|
||||
));
|
||||
}
|
||||
|
||||
// Ray intersections
|
||||
vec4 intersectSphere(vec3 rpos, vec3 rdir, vec3 pos, float rad)
|
||||
{
|
||||
vec3 op = pos - rpos;
|
||||
float b = dot(op, rdir);
|
||||
float det = b*b - dot(op, op) + rad*rad;
|
||||
|
||||
if (det > 0.0)
|
||||
{
|
||||
det = sqrt(det);
|
||||
float t = b - det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos+rdir*t-pos), t);
|
||||
}
|
||||
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectCylinder(vec3 rpos, vec3 rdir, vec3 pos, float rad)
|
||||
{
|
||||
vec3 op = pos - rpos;
|
||||
vec2 rdir2 = normalize(rdir.yz);
|
||||
float b = dot(op.yz, rdir2);
|
||||
float det = b*b - dot(op.yz, op.yz) + rad*rad;
|
||||
|
||||
if (det > 0.0)
|
||||
{
|
||||
det = sqrt(det);
|
||||
float t = b - det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t);
|
||||
t = b + det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t);
|
||||
}
|
||||
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectPlane(vec3 rayPos, vec3 rayDir, vec3 n, float d)
|
||||
{
|
||||
float t = -(dot(rayPos, n) + d) / dot(rayDir, n);
|
||||
return vec4(n * sign(dot(rayDir, n)), t);
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
vec3 rotate(vec3 p, float theta)
|
||||
{
|
||||
float c = cos(theta), s = sin(theta);
|
||||
return vec3(p.x * c + p.z * s, p.y,
|
||||
p.z * c - p.x * s);
|
||||
}
|
||||
|
||||
float impulse(float k, float x) // by iq
|
||||
{
|
||||
float h = k*x;
|
||||
return h * exp(1.0 - h);
|
||||
}
|
||||
|
||||
// Raymarched parts of scene
|
||||
float grass(vec3 pos)
|
||||
{
|
||||
float h = length(pos - vec3(0.0, -7.0, 0.0)) - 8.0;
|
||||
|
||||
if (h > 2.0) return h; // Optimization (Avoid noise if too far away)
|
||||
|
||||
return h + snoise(pos * 3.0) * 0.1 + pos.y * 0.9;
|
||||
}
|
||||
|
||||
float tree(vec3 pos)
|
||||
{
|
||||
pos.y -= 0.5;
|
||||
float s = sin(pos.y*0.03);
|
||||
float c = cos(pos.y*0.03);
|
||||
mat2 m = mat2(c, -s, s, c);
|
||||
vec3 p = vec3(m*pos.xy, pos.z);
|
||||
|
||||
float width = 1.0 - pos.y * 0.02 - clamp(sin(pos.y * 8.0) * 0.1, 0.05, 0.1);
|
||||
|
||||
return max(length(p.xz) - width, pos.y - 12.5);
|
||||
}
|
||||
|
||||
vec2 scene(vec3 pos)
|
||||
{
|
||||
float vtree = tree(pos);
|
||||
#ifdef GRASS
|
||||
float vgrass = grass(pos);
|
||||
float v = min(vtree, vgrass);
|
||||
#else
|
||||
float v = vtree;
|
||||
#endif
|
||||
return vec2(v, v == vtree ? 2.0 : 1.0);
|
||||
}
|
||||
|
||||
vec3 normal(vec3 pos)
|
||||
{
|
||||
vec2 eps = vec2(1e-3, 0.0);
|
||||
float h = scene(pos).x;
|
||||
return normalize(vec3(
|
||||
scene(pos-eps.xyy).x-h,
|
||||
scene(pos-eps.yxy).x-h,
|
||||
scene(pos-eps.yyx).x-h
|
||||
));
|
||||
}
|
||||
|
||||
float plantsShadow(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
// Soft shadow taken from iq
|
||||
float k = 6.0;
|
||||
float t = 0.0;
|
||||
float s = 1.0;
|
||||
for (int i = 0; i < 30; i++)
|
||||
{
|
||||
vec3 pos = rayPos+rayDir*t;
|
||||
vec2 res = scene(pos);
|
||||
if (res.x < 0.001) return 0.0;
|
||||
s = min(s, k*res.x/t);
|
||||
t += max(res.x, 0.01);
|
||||
}
|
||||
|
||||
return s*s*(3.0 - 2.0*s);
|
||||
}
|
||||
|
||||
// Ray-traced parts of scene
|
||||
vec4 intersectWater(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float h = sin(20.5 + iGlobalTime * 2.0) * 0.03;
|
||||
float t = -(rayPos.y + 2.5 + h) / rayDir.y;
|
||||
return vec4(0.0, 1.0, 0.0, t);
|
||||
}
|
||||
|
||||
vec4 intersectSand(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
return intersectSphere(rayPos, rayDir, vec3(0.0,-24.1,0.0), 24.1);
|
||||
}
|
||||
|
||||
vec4 intersectTreasure(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectLeaf(vec3 rayPos, vec3 rayDir, float openAmount)
|
||||
{
|
||||
vec3 dir = normalize(vec3(0.0, 1.0, openAmount));
|
||||
float offset = 0.0;
|
||||
|
||||
vec4 res = intersectPlane(rayPos, rayDir, dir, 0.0);
|
||||
vec3 pos = rayPos+rayDir*res.w;
|
||||
#ifdef RAGGED_LEAVES
|
||||
offset = snoise(pos*0.8) * 0.3;
|
||||
#endif
|
||||
if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res.w = 0.0;
|
||||
|
||||
vec4 res2 = intersectPlane(rayPos, rayDir, vec3(dir.xy, -dir.z), 0.0);
|
||||
pos = rayPos+rayDir*res2.w;
|
||||
#ifdef RAGGED_LEAVES
|
||||
offset = snoise(pos*0.8) * 0.3;
|
||||
#endif
|
||||
if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res2.w = 0.0;
|
||||
|
||||
if (res2.w > 0.0 && res2.w < res.w || res.w <= 0.0)
|
||||
res = res2;
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
vec4 leaves(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float t = 1e20;
|
||||
vec3 n = vec3(0.0);
|
||||
|
||||
rayPos -= leavesPos;
|
||||
|
||||
float sway = impulse(15.0, fract(iGlobalTime / PI * 0.125));
|
||||
float upDownSway = sway * -sin(iGlobalTime) * 0.06;
|
||||
float openAmount = sway * max(-cos(iGlobalTime) * 0.4, 0.0);
|
||||
|
||||
float angleOffset = -0.1;
|
||||
for (float k = 0.0; k < 6.2; k += 0.75)
|
||||
{
|
||||
// Left-right
|
||||
float alpha = k + (k - PI) * sway * 0.015;
|
||||
vec3 p = rotate(rayPos, alpha);
|
||||
vec3 d = rotate(rayDir, alpha);
|
||||
|
||||
// Up-down
|
||||
angleOffset *= -1.0;
|
||||
float theta = -0.4 +
|
||||
angleOffset +
|
||||
cos(k) * 0.35 +
|
||||
upDownSway +
|
||||
sin(iGlobalTime+k*10.0) * 0.03 * (sway + 0.2);
|
||||
|
||||
p = rotate(p.xzy, theta).xzy;
|
||||
d = rotate(d.xzy, theta).xzy;
|
||||
|
||||
// Shift
|
||||
p -= vec3(5.4, 0.0, 0.0);
|
||||
|
||||
// Intersect individual leaf
|
||||
vec4 res = intersectLeaf(p, d, 1.0+openAmount);
|
||||
if (res.w > 0.0 && res.w < t)
|
||||
{
|
||||
t = res.w;
|
||||
n = res.xyz;
|
||||
}
|
||||
}
|
||||
|
||||
return vec4(n, t);
|
||||
}
|
||||
|
||||
// Lighting
|
||||
float shadow(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
// Intersect sand
|
||||
//vec4 resSand = intersectSand(rayPos, rayDir);
|
||||
//if (resSand.w > 0.0) return 0.0;
|
||||
|
||||
// Intersect plants
|
||||
s = min(s, plantsShadow(rayPos, rayDir));
|
||||
if (s < 0.0001) return 0.0;
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < 1e7) return 0.0;
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
vec3 light(vec3 p, vec3 n)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
#ifdef SHADOWS
|
||||
s = shadow(p-sunDir*0.01, -sunDir);
|
||||
#endif
|
||||
|
||||
vec3 col = sunCol * min(max(dot(n, sunDir), 0.0), s);
|
||||
col += skyCol * (-n.y * 0.5 + 0.5) * 0.3;
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 lightLeaves(vec3 p, vec3 n)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
#ifdef SHADOWS
|
||||
s = shadow(p-sunDir*0.01, -sunDir);
|
||||
#endif
|
||||
|
||||
float ao = min(length(p - leavesPos) * 0.1, 1.0);
|
||||
|
||||
float ns = dot(n, sunDir);
|
||||
float d = sqrt(max(ns, 0.0));
|
||||
vec3 col = sunCol * min(d, s);
|
||||
col += sunCol * max(-ns, 0.0) * vec3(0.3, 0.3, 0.1) * ao;
|
||||
col += skyCol * (-n.y * 0.5 + 0.5) * 0.3 * ao;
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 sky(vec3 n)
|
||||
{
|
||||
return skyCol * (1.0 - n.y * 0.8);
|
||||
}
|
||||
|
||||
// Ray-marching
|
||||
vec4 plants(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float t = 0.0;
|
||||
|
||||
for (int i = 0; i < 40; i++)
|
||||
{
|
||||
vec3 pos = rayPos+rayDir*t;
|
||||
vec2 res = scene(pos);
|
||||
float h = res.x;
|
||||
|
||||
if (h < 0.001)
|
||||
{
|
||||
vec3 col = res.y == 2.0 ? treeCol : grassCol;
|
||||
float uvFact = res.y == 2.0 ? 1.0 : 10.0;
|
||||
|
||||
vec3 n = normal(pos);
|
||||
vec2 uv = vec2(n.x, pos.y * 0.5) * 0.2 * uvFact;
|
||||
vec3 tex = texture2D(iChannel0, uv).rgb * 0.6 + 0.4;
|
||||
float ao = min(length(pos - leavesPos) * 0.1, 1.0);
|
||||
return vec4(col * light(pos, n) * ao * tex, t);
|
||||
}
|
||||
|
||||
t += h;
|
||||
}
|
||||
|
||||
return vec4(sky(rayDir), 1e8);
|
||||
}
|
||||
|
||||
// Final combination
|
||||
vec3 traceReflection(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
vec3 col = vec3(0.0);
|
||||
float t = 1e20;
|
||||
|
||||
// Intersect plants
|
||||
vec4 resPlants = plants(rayPos, rayDir);
|
||||
if (resPlants.w > 0.0 && resPlants.w < t)
|
||||
{
|
||||
t = resPlants.w;
|
||||
col = resPlants.xyz;
|
||||
}
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resLeaves.w;
|
||||
vec2 uv = (pos.xz - leavesPos.xz) * 0.3;
|
||||
float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5;
|
||||
|
||||
t = resLeaves.w;
|
||||
col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex;
|
||||
}
|
||||
|
||||
if (t > 1e7) return sky(rayDir);
|
||||
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 trace(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
vec3 col = vec3(0.0);
|
||||
float t = 1e20;
|
||||
|
||||
// Intersect sand
|
||||
vec4 resSand = intersectSand(rayPos, rayDir);
|
||||
if (resSand.w > 0.0)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resSand.w;
|
||||
t = resSand.w;
|
||||
|
||||
col = sandCol * light(pos, resSand.xyz);
|
||||
}
|
||||
|
||||
// Intersect treasure chest
|
||||
vec4 resTreasure = intersectTreasure(rayPos, rayDir);
|
||||
if (resTreasure.w > 0.0 && resTreasure.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resTreasure.w;
|
||||
t = resTreasure.w;
|
||||
col = leavesCol * light(pos, resTreasure.xyz);
|
||||
}
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resLeaves.w;
|
||||
vec2 uv = (pos.xz - leavesPos.xz) * 0.3;
|
||||
float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5;
|
||||
|
||||
t = resLeaves.w;
|
||||
col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex;
|
||||
}
|
||||
|
||||
// Intersect plants
|
||||
vec4 resPlants = plants(rayPos, rayDir);
|
||||
if (resPlants.w > 0.0 && resPlants.w < t)
|
||||
{
|
||||
t = resPlants.w;
|
||||
col = resPlants.xyz;
|
||||
}
|
||||
|
||||
// Intersect water
|
||||
vec4 resWater = intersectWater(rayPos, rayDir);
|
||||
if (resWater.w > 0.0 && resWater.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resWater.w;
|
||||
float dist = t - resWater.w;
|
||||
vec3 n = bump(pos, rayDir);
|
||||
|
||||
float ct = -min(dot(n,rayDir), 0.0);
|
||||
float fresnel = 0.9 - 0.9 * pow(1.0 - ct, 5.0);
|
||||
|
||||
vec3 trans = col * exp(-dist * vec3(1.0, 0.7, 0.4) * 3.0);
|
||||
vec3 reflDir = normalize(reflect(rayDir, n));
|
||||
vec3 refl = sky(reflDir);
|
||||
|
||||
#ifdef REFLECTIONS
|
||||
if (dot(pos, rayDir) < -2.0)
|
||||
refl = traceReflection(pos, reflDir).rgb;
|
||||
#endif
|
||||
|
||||
t = resWater.w;
|
||||
col = mix(refl, trans, fresnel);
|
||||
}
|
||||
|
||||
if (t > 1e7) return sky(rayDir);
|
||||
|
||||
return col;
|
||||
}
|
||||
|
||||
// Ray-generation
|
||||
vec3 camera(vec2 px)
|
||||
{
|
||||
vec2 rd = (px / iResolution.yy - vec2(iResolution.x/iResolution.y*0.5-0.5, 0.0)) * 2.0 - 1.0;
|
||||
float t = sin(iGlobalTime * 0.1) * 0.2;
|
||||
vec3 rayDir = normalize(vec3(rd.x, rd.y, 1.0));
|
||||
vec3 rayPos = vec3(0.0, 3.0, -18.0);
|
||||
return trace(rayPos, rayDir);
|
||||
}
|
||||
|
||||
void main(void)
|
||||
{
|
||||
#ifdef HEAVY_AA
|
||||
vec3 col = camera(gl_FragCoord.xy+vec2(0.0,0.5))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.25,0.0))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.5,0.75))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.75,0.25))*0.25;
|
||||
#else
|
||||
vec3 col = camera(gl_FragCoord.xy);
|
||||
#ifdef LIGHT_AA
|
||||
col = col * 0.5 + camera(gl_FragCoord.xy+vec2(0.5,0.5))*0.5;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef TONEMAP
|
||||
// Optimized Haarm-Peter Duiker’s curve
|
||||
vec3 x = max(vec3(0.0),col*exposure-0.004);
|
||||
col = (x*(6.2*x+.5))/(x*(6.2*x+1.7)+0.06);
|
||||
#else
|
||||
col = pow(col, vec3(0.4545));
|
||||
#endif
|
||||
|
||||
gl_FragColor = vec4(col, 1.0);
|
||||
}
|
||||
samples/GLSL/pntriangles.tesc (new file, 98 lines)
@@ -0,0 +1,98 @@
/**
 * The MIT License (MIT)
 *
 * Copyright (c) 2016 Sascha Willems
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#version 450

#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable

// PN patch data
struct PnPatch
{
    float b210;
    float b120;
    float b021;
    float b012;
    float b102;
    float b201;
    float b111;
    float n110;
    float n011;
    float n101;
};

// tessellation levels
layout (binding = 0) uniform UBO
{
    float tessLevel;
} ubo;

layout(vertices = 3) out;

layout(location = 0) in vec3 inNormal[];
layout(location = 1) in vec2 inUV[];

layout(location = 0) out vec3 outNormal[3];
layout(location = 3) out vec2 outUV[3];
layout(location = 6) out PnPatch outPatch[3];

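// PN-triangles helper terms: wij projects the edge from vertex i to vertex j onto
// the normal at vertex i; vij is the corresponding scalar used for the quadratic
// normal coefficients (n110/n011/n101).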
float wij(int i, int j)
{
    return dot(gl_in[j].gl_Position.xyz - gl_in[i].gl_Position.xyz, inNormal[i]);
}

float vij(int i, int j)
{
    vec3 Pj_minus_Pi = gl_in[j].gl_Position.xyz - gl_in[i].gl_Position.xyz;
    vec3 Ni_plus_Nj = inNormal[i] + inNormal[j];
    return 2.0 * dot(Pj_minus_Pi, Ni_plus_Nj) / dot(Pj_minus_Pi, Pj_minus_Pi);
}

void main()
{
    // get data
    gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
    outNormal[gl_InvocationID] = inNormal[gl_InvocationID];
    outUV[gl_InvocationID] = inUV[gl_InvocationID];

    // set base
    float P0 = gl_in[0].gl_Position[gl_InvocationID];
    float P1 = gl_in[1].gl_Position[gl_InvocationID];
    float P2 = gl_in[2].gl_Position[gl_InvocationID];
    float N0 = inNormal[0][gl_InvocationID];
    float N1 = inNormal[1][gl_InvocationID];
    float N2 = inNormal[2][gl_InvocationID];

    // compute control points
    outPatch[gl_InvocationID].b210 = (2.0 * P0 + P1 - wij(0,1) * N0) / 3.0;
    outPatch[gl_InvocationID].b120 = (2.0 * P1 + P0 - wij(1,0) * N1) / 3.0;
    outPatch[gl_InvocationID].b021 = (2.0 * P1 + P2 - wij(1,2) * N1) / 3.0;
    outPatch[gl_InvocationID].b012 = (2.0 * P2 + P1 - wij(2,1) * N2) / 3.0;
    outPatch[gl_InvocationID].b102 = (2.0 * P2 + P0 - wij(2,0) * N2) / 3.0;
    outPatch[gl_InvocationID].b201 = (2.0 * P0 + P2 - wij(0,2) * N0) / 3.0;
    float E = ( outPatch[gl_InvocationID].b210
              + outPatch[gl_InvocationID].b120
              + outPatch[gl_InvocationID].b021
              + outPatch[gl_InvocationID].b012
              + outPatch[gl_InvocationID].b102
              + outPatch[gl_InvocationID].b201 ) / 6.0;
    float V = (P0 + P1 + P2) / 3.0;
    outPatch[gl_InvocationID].b111 = E + (E - V) * 0.5;
    outPatch[gl_InvocationID].n110 = N0 + N1 - vij(0,1) * (P1 - P0);
    outPatch[gl_InvocationID].n011 = N1 + N2 - vij(1,2) * (P2 - P1);
    outPatch[gl_InvocationID].n101 = N2 + N0 - vij(2,0) * (P0 - P2);

    // set tess levels
    gl_TessLevelOuter[gl_InvocationID] = ubo.tessLevel;
    gl_TessLevelInner[0] = ubo.tessLevel;
}

samples/GLSL/pntriangles.tese (new file, 103 lines)
@@ -0,0 +1,103 @@
/**
 * The MIT License (MIT)
 *
 * Copyright (c) 2016 Sascha Willems
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#version 450

#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable

// PN patch data
struct PnPatch
{
    float b210;
    float b120;
    float b021;
    float b012;
    float b102;
    float b201;
    float b111;
    float n110;
    float n011;
    float n101;
};

layout (binding = 1) uniform UBO
{
    mat4 projection;
    mat4 model;
    float tessAlpha;
} ubo;

layout(triangles, fractional_odd_spacing, ccw) in;

layout(location = 0) in vec3 iNormal[];
layout(location = 3) in vec2 iTexCoord[];
layout(location = 6) in PnPatch iPnPatch[];

layout(location = 0) out vec3 oNormal;
layout(location = 1) out vec2 oTexCoord;

#define uvw gl_TessCoord

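// The evaluation stage blends plain barycentric interpolation with the cubic PN
// patch: ubo.tessAlpha = 0.0 reproduces the flat triangle, 1.0 gives the fully
// curved PN-triangle surface.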
void main()
{
    vec3 uvwSquared = uvw * uvw;
    vec3 uvwCubed = uvwSquared * uvw;

    // extract control points
    vec3 b210 = vec3(iPnPatch[0].b210, iPnPatch[1].b210, iPnPatch[2].b210);
    vec3 b120 = vec3(iPnPatch[0].b120, iPnPatch[1].b120, iPnPatch[2].b120);
    vec3 b021 = vec3(iPnPatch[0].b021, iPnPatch[1].b021, iPnPatch[2].b021);
    vec3 b012 = vec3(iPnPatch[0].b012, iPnPatch[1].b012, iPnPatch[2].b012);
    vec3 b102 = vec3(iPnPatch[0].b102, iPnPatch[1].b102, iPnPatch[2].b102);
    vec3 b201 = vec3(iPnPatch[0].b201, iPnPatch[1].b201, iPnPatch[2].b201);
    vec3 b111 = vec3(iPnPatch[0].b111, iPnPatch[1].b111, iPnPatch[2].b111);

    // extract control normals
    vec3 n110 = normalize(vec3(iPnPatch[0].n110, iPnPatch[1].n110, iPnPatch[2].n110));
    vec3 n011 = normalize(vec3(iPnPatch[0].n011, iPnPatch[1].n011, iPnPatch[2].n011));
    vec3 n101 = normalize(vec3(iPnPatch[0].n101, iPnPatch[1].n101, iPnPatch[2].n101));

    // compute texcoords
    oTexCoord = gl_TessCoord[2] * iTexCoord[0] + gl_TessCoord[0] * iTexCoord[1] + gl_TessCoord[1] * iTexCoord[2];

    // barycentric normal
    vec3 barNormal = gl_TessCoord[2] * iNormal[0] + gl_TessCoord[0] * iNormal[1] + gl_TessCoord[1] * iNormal[2];
    vec3 pnNormal = iNormal[0] * uvwSquared[2] + iNormal[1] * uvwSquared[0] + iNormal[2] * uvwSquared[1]
                  + n110 * uvw[2] * uvw[0] + n011 * uvw[0] * uvw[1] + n101 * uvw[2] * uvw[1];
    oNormal = ubo.tessAlpha * pnNormal + (1.0 - ubo.tessAlpha) * barNormal;

    // compute interpolated pos
    vec3 barPos = gl_TessCoord[2] * gl_in[0].gl_Position.xyz
                + gl_TessCoord[0] * gl_in[1].gl_Position.xyz
                + gl_TessCoord[1] * gl_in[2].gl_Position.xyz;

    // save some computations
    uvwSquared *= 3.0;

    // compute PN position
    vec3 pnPos = gl_in[0].gl_Position.xyz * uvwCubed[2]
               + gl_in[1].gl_Position.xyz * uvwCubed[0]
               + gl_in[2].gl_Position.xyz * uvwCubed[1]
               + b210 * uvwSquared[2] * uvw[0]
               + b120 * uvwSquared[0] * uvw[2]
               + b201 * uvwSquared[2] * uvw[1]
               + b021 * uvwSquared[0] * uvw[1]
               + b102 * uvwSquared[1] * uvw[2]
               + b012 * uvwSquared[1] * uvw[0]
               + b111 * 6.0 * uvw[0] * uvw[1] * uvw[2];

    // final position and normal
    vec3 finalPos = (1.0 - ubo.tessAlpha) * barPos + ubo.tessAlpha * pnPos;
    gl_Position = ubo.projection * ubo.model * vec4(finalPos, 1.0);
}

samples/GN/BUILD.2.gn (new file, 59 lines)
@@ -0,0 +1,59 @@
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("../gni/isolate.gni")

group("gn_all") {
  testonly = true

  if (v8_test_isolation_mode != "noop") {
    deps = [
      ":check-static-initializers_run",
      ":jsfunfuzz_run",
      ":run-deopt-fuzzer_run",
      ":run-gcmole_run",
      ":run-valgrind_run",
    ]
  }
}

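# Each v8_isolate_run() target below wraps a single .isolate file and depends on
# the top-level d8_run target (the template itself is presumably defined in the
# imported isolate.gni).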
v8_isolate_run("check-static-initializers") {
|
||||
deps = [
|
||||
"..:d8_run",
|
||||
]
|
||||
|
||||
isolate = "check-static-initializers.isolate"
|
||||
}
|
||||
|
||||
v8_isolate_run("jsfunfuzz") {
|
||||
deps = [
|
||||
"..:d8_run",
|
||||
]
|
||||
|
||||
isolate = "jsfunfuzz/jsfunfuzz.isolate"
|
||||
}
|
||||
|
||||
v8_isolate_run("run-deopt-fuzzer") {
|
||||
deps = [
|
||||
"..:d8_run",
|
||||
]
|
||||
|
||||
isolate = "run-deopt-fuzzer.isolate"
|
||||
}
|
||||
|
||||
v8_isolate_run("run-gcmole") {
|
||||
deps = [
|
||||
"..:d8_run",
|
||||
]
|
||||
|
||||
isolate = "gcmole/run-gcmole.isolate"
|
||||
}
|
||||
|
||||
v8_isolate_run("run-valgrind") {
|
||||
deps = [
|
||||
"..:d8_run",
|
||||
]
|
||||
|
||||
isolate = "run-valgrind.isolate"
|
||||
}
|
||||
samples/GN/BUILD.3.gn (new file, 1646 lines)
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.