mirror of
https://github.com/KevinMidboe/linguist.git
synced 2025-10-29 17:50:22 +00:00
Compare commits
799 Commits
v4.7.4
...
puppet-fix
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
110b42fe16 | ||
|
|
e93f41f097 | ||
|
|
994bc1f135 | ||
|
|
44f03e64c1 | ||
|
|
4166f2e89d | ||
|
|
1a8f19c6f2 | ||
|
|
c0e242358a | ||
|
|
eb38c8dcf8 | ||
|
|
f146b4afbd | ||
|
|
db15d0f5d2 | ||
|
|
e6d57c771d | ||
|
|
eef0335c5f | ||
|
|
461c27c066 | ||
|
|
59d67d6743 | ||
|
|
7aeeb82d3d | ||
|
|
c98ca20076 | ||
|
|
4e0b5f02aa | ||
|
|
8da7cb805e | ||
|
|
e5e81a8560 | ||
|
|
dd53fa1585 | ||
|
|
354a8f079a | ||
|
|
f38d6bd124 | ||
|
|
e80b92e407 | ||
|
|
fa6ae1116f | ||
|
|
b7e27a9f58 | ||
|
|
69ba4c5586 | ||
|
|
c39d7fd6e8 | ||
|
|
44ed47cea1 | ||
|
|
de51cb08d2 | ||
|
|
3dd2d08190 | ||
|
|
3b625e1954 | ||
|
|
5c6f690b97 | ||
|
|
3bbfc907f3 | ||
|
|
053b8bca97 | ||
|
|
7fb3db6203 | ||
|
|
ba09394f85 | ||
|
|
c59c88f16e | ||
|
|
8a6e74799a | ||
|
|
4268769d2e | ||
|
|
6601864084 | ||
|
|
d57aa37fb7 | ||
|
|
e72347fd98 | ||
|
|
1b429ea46b | ||
|
|
9468ad4947 | ||
|
|
733ef63193 | ||
|
|
9ca6a5841e | ||
|
|
41ace5fba0 | ||
|
|
cc4295b3b3 | ||
|
|
1e4ce80fd9 | ||
|
|
74a71fd90d | ||
|
|
9b08318456 | ||
|
|
fa5b6b03dc | ||
|
|
cb59296fe0 | ||
|
|
f1be771611 | ||
|
|
b66fcb2529 | ||
|
|
f7fe1fee66 | ||
|
|
94367cc460 | ||
|
|
72bec1fddc | ||
|
|
4e2eba4ef8 | ||
|
|
10457b6639 | ||
|
|
d58cbc68a6 | ||
|
|
01de40faaa | ||
|
|
62d285fce6 | ||
|
|
0056095e8c | ||
|
|
d6dc3a3991 | ||
|
|
b524461b7c | ||
|
|
76d41697aa | ||
|
|
32147b629e | ||
|
|
e7b5e25bf8 | ||
|
|
d761658f8b | ||
|
|
3719214aba | ||
|
|
47b109be36 | ||
|
|
1ec4db97c2 | ||
|
|
9fe5fe0de2 | ||
|
|
b36ea7ac9d | ||
|
|
625b06c30d | ||
|
|
28bce533b2 | ||
|
|
93ec1922cb | ||
|
|
5d09fb67dd | ||
|
|
93dcb61742 | ||
|
|
3a03594685 | ||
|
|
5ce2c254f9 | ||
|
|
d7814c4899 | ||
|
|
50c08bf29e | ||
|
|
34928baee6 | ||
|
|
27bb41aa4d | ||
|
|
1415f4b52d | ||
|
|
ae8ffcad22 | ||
|
|
f43633bf10 | ||
|
|
a604de9846 | ||
|
|
3e224e0039 | ||
|
|
15b04f86c3 | ||
|
|
42af436c20 | ||
|
|
2b08c66f0b | ||
|
|
f98ab593fb | ||
|
|
f951ec07de | ||
|
|
e9ac71590f | ||
|
|
210cd19876 | ||
|
|
f473c555ac | ||
|
|
48e4394d87 | ||
|
|
e1ce88920d | ||
|
|
675cee1d72 | ||
|
|
1c4baf6dc2 | ||
|
|
8f2820e9cc | ||
|
|
04c268e535 | ||
|
|
ec749b3f8d | ||
|
|
08b63e7033 | ||
|
|
7867b946b9 | ||
|
|
a4d12cc8e4 | ||
|
|
a1165b74b1 | ||
|
|
0fa1fa5581 | ||
|
|
d8b91bd5c4 | ||
|
|
9b941a34f0 | ||
|
|
9d8392dab8 | ||
|
|
2c78dd2c66 | ||
|
|
3988f3e7a7 | ||
|
|
d9a4e831b4 | ||
|
|
45c27f26a2 | ||
|
|
0fbc29bf68 | ||
|
|
5569d2056d | ||
|
|
be262d0b4f | ||
|
|
33ce2d7264 | ||
|
|
c486f56204 | ||
|
|
9f3b7d0ba5 | ||
|
|
79f20e8057 | ||
|
|
cd30c7613c | ||
|
|
5aa53c0711 | ||
|
|
c17cdca896 | ||
|
|
ecdae83364 | ||
|
|
31aafa2c78 | ||
|
|
8a911b8ff3 | ||
|
|
9233f1d17f | ||
|
|
77eb36a982 | ||
|
|
4e6e58a099 | ||
|
|
c87976330f | ||
|
|
0e9109c3fc | ||
|
|
12f9295dd7 | ||
|
|
581723748b | ||
|
|
0980e304b1 | ||
|
|
d46a529b6a | ||
|
|
1d2ec4dbc3 | ||
|
|
829eea0139 | ||
|
|
78b2853d70 | ||
|
|
202f3c08cd | ||
|
|
b958779e3d | ||
|
|
00dc775daf | ||
|
|
009a4e67b6 | ||
|
|
faaa4470af | ||
|
|
2a320cb988 | ||
|
|
74931d1bd5 | ||
|
|
3ca93a84b9 | ||
|
|
aa27f18ea6 | ||
|
|
d3e2ea3f71 | ||
|
|
53aa1209ab | ||
|
|
b2a486fed2 | ||
|
|
4f1e5c34b1 | ||
|
|
85c9833081 | ||
|
|
33899b9d6b | ||
|
|
417239004a | ||
|
|
6a1423d28f | ||
|
|
96a23ce388 | ||
|
|
e8d7eed3aa | ||
|
|
9d419c4ab9 | ||
|
|
4eefc1f58e | ||
|
|
0b94b9cda7 | ||
|
|
c736038d94 | ||
|
|
ec562138f8 | ||
|
|
50013e8dd7 | ||
|
|
416c5d1185 | ||
|
|
8869912d31 | ||
|
|
43fa563b77 | ||
|
|
41c6aee8c3 | ||
|
|
8cf575c37d | ||
|
|
4e20928e04 | ||
|
|
3e37bd2680 | ||
|
|
a29f5b2d46 | ||
|
|
4efc6f8c95 | ||
|
|
359699c454 | ||
|
|
346aa99fcf | ||
|
|
d147778677 | ||
|
|
e520209e49 | ||
|
|
338cc16239 | ||
|
|
67ea35094b | ||
|
|
6f0393fcbd | ||
|
|
2923d50d7e | ||
|
|
4e26f609ef | ||
|
|
e86d6e8dd2 | ||
|
|
5fa02ad1fb | ||
|
|
5a06240f69 | ||
|
|
d6e0f74c80 | ||
|
|
a5c08bb203 | ||
|
|
c6dc29abb1 | ||
|
|
ffd984bb7e | ||
|
|
dc5473559b | ||
|
|
8e9c224952 | ||
|
|
d43f111723 | ||
|
|
de9ff713a4 | ||
|
|
98783560ec | ||
|
|
8f31fbbd55 | ||
|
|
e4cdbd2b2b | ||
|
|
ba52e48ceb | ||
|
|
a44ebe493b | ||
|
|
eb0e75e11e | ||
|
|
22c2cf4967 | ||
|
|
39e3688fb8 | ||
|
|
6b83e5fb7b | ||
|
|
dd2e5ffe07 | ||
|
|
f6b6c4e165 | ||
|
|
608ed60b5c | ||
|
|
2ce2945058 | ||
|
|
c8d376754e | ||
|
|
ecaef91fa1 | ||
|
|
d265b78e7e | ||
|
|
5a5bf7d5e5 | ||
|
|
e46781b903 | ||
|
|
9543a8c8e9 | ||
|
|
6ac1ac9232 | ||
|
|
1bbb919fef | ||
|
|
71dfed0e45 | ||
|
|
a2db058ce4 | ||
|
|
12695fee2f | ||
|
|
4a775dca37 | ||
|
|
d7c689fd6b | ||
|
|
20b8188384 | ||
|
|
26310d9515 | ||
|
|
e38cc75da5 | ||
|
|
8d55fc1bd5 | ||
|
|
7e63399196 | ||
|
|
520e5a5cfe | ||
|
|
5d85692c24 | ||
|
|
676861fff3 | ||
|
|
6589bd9dc7 | ||
|
|
e32a4f13ef | ||
|
|
4e4d851f71 | ||
|
|
a3628f86da | ||
|
|
fe70965906 | ||
|
|
c863435c84 | ||
|
|
eeec48198a | ||
|
|
82167063da | ||
|
|
3ae89b48ba | ||
|
|
cd9401c424 | ||
|
|
e7e8a7d835 | ||
|
|
7654032d2e | ||
|
|
05b536fc61 | ||
|
|
ebe85788ab | ||
|
|
524337d07b | ||
|
|
f8ce42e169 | ||
|
|
71032cd252 | ||
|
|
41593b3ea7 | ||
|
|
bed8add2f5 | ||
|
|
e424e8e88c | ||
|
|
07d4f218a3 | ||
|
|
67ed060d37 | ||
|
|
3abe081560 | ||
|
|
d3f3c0345c | ||
|
|
855f1a1f86 | ||
|
|
0406a5b326 | ||
|
|
0108ef4386 | ||
|
|
daefff86ff | ||
|
|
fdb962518f | ||
|
|
6564078061 | ||
|
|
39ea9be5f8 | ||
|
|
152b5ade5e | ||
|
|
c525e3fbef | ||
|
|
88c74fa9c2 | ||
|
|
6a54ee767f | ||
|
|
2ea1ff2736 | ||
|
|
a1901fceff | ||
|
|
b4035a3804 | ||
|
|
fc67fc525c | ||
|
|
f0659d3aa5 | ||
|
|
a7a123a8db | ||
|
|
0e5327a77a | ||
|
|
ecd4ae3bda | ||
|
|
7a8bd628e1 | ||
|
|
8e19aea39e | ||
|
|
6fcba83f3e | ||
|
|
d6d7d38eb8 | ||
|
|
c8094d3775 | ||
|
|
de478d2f2d | ||
|
|
991dcef18b | ||
|
|
f30e9270f1 | ||
|
|
1d7ba18b15 | ||
|
|
35a06d6cb8 | ||
|
|
4cf7feb275 | ||
|
|
30298a9ef8 | ||
|
|
cc5f1c57ca | ||
|
|
82af10e3fd | ||
|
|
63c8d2284c | ||
|
|
697380336c | ||
|
|
5fd8d71858 | ||
|
|
5bc88814e2 | ||
|
|
00efd6a463 | ||
|
|
81ca6e7766 | ||
|
|
cd288a8ee4 | ||
|
|
a8d84f3d55 | ||
|
|
600115afed | ||
|
|
e57273c839 | ||
|
|
65491d460e | ||
|
|
8dc4a1308f | ||
|
|
6841b4d259 | ||
|
|
a53423b6e0 | ||
|
|
02f3ba1840 | ||
|
|
e1216ea4ee | ||
|
|
a3227c2c27 | ||
|
|
9f1c950a1f | ||
|
|
c7a0d7b83d | ||
|
|
53c9b2b435 | ||
|
|
b4a77abd82 | ||
|
|
8a622823b0 | ||
|
|
3310d925b6 | ||
|
|
65201b322a | ||
|
|
b71bf19e37 | ||
|
|
1f43664a51 | ||
|
|
7cda13afcb | ||
|
|
e0d890240b | ||
|
|
abf7bee464 | ||
|
|
e73a4ecd0e | ||
|
|
70779c9986 | ||
|
|
1fc4c9fdc6 | ||
|
|
fdec52c89a | ||
|
|
6e40de47da | ||
|
|
28be72892e | ||
|
|
6df0e4591d | ||
|
|
879c63a25e | ||
|
|
ab2e640759 | ||
|
|
b61fe90d12 | ||
|
|
e6c849d92c | ||
|
|
5e4e38b39a | ||
|
|
22d4865c52 | ||
|
|
3247d46e81 | ||
|
|
dad3191238 | ||
|
|
35a13b3633 | ||
|
|
56fb48ea96 | ||
|
|
983ff20d3c | ||
|
|
2d51a5dba4 | ||
|
|
df98c86acd | ||
|
|
98118eb70b | ||
|
|
1ec84da277 | ||
|
|
3112e6deda | ||
|
|
be316c2943 | ||
|
|
68c45be47d | ||
|
|
4584963dd2 | ||
|
|
f382abc2f3 | ||
|
|
9d57e1e1b5 | ||
|
|
2a4150b104 | ||
|
|
09612ae42e | ||
|
|
49e9ee48d0 | ||
|
|
a8719f3e82 | ||
|
|
04e1cc6d0a | ||
|
|
dd7b125869 | ||
|
|
426818120c | ||
|
|
50bd2cc3c8 | ||
|
|
00647be113 | ||
|
|
e930ee1a8e | ||
|
|
48b64c2d31 | ||
|
|
f95365946c | ||
|
|
5ddccaac83 | ||
|
|
51d7c8f905 | ||
|
|
ed71855612 | ||
|
|
742faebd8b | ||
|
|
6763b73d9c | ||
|
|
5c3744dfd6 | ||
|
|
cb5bc91fe3 | ||
|
|
9171ee602b | ||
|
|
4da0463768 | ||
|
|
8d0a2d9dc1 | ||
|
|
6a2cebea7d | ||
|
|
ae10395b3a | ||
|
|
510abc7cee | ||
|
|
e8a700e4e3 | ||
|
|
c13e384e18 | ||
|
|
4204078b19 | ||
|
|
17fc3d0640 | ||
|
|
7715254212 | ||
|
|
2f50aa460a | ||
|
|
6adec161fa | ||
|
|
c802ba3a1d | ||
|
|
06e80f3889 | ||
|
|
216d63f575 | ||
|
|
ff042a87a4 | ||
|
|
56f0f93bbb | ||
|
|
99a3a5b85b | ||
|
|
1868d1d190 | ||
|
|
705e234044 | ||
|
|
0dd78704f7 | ||
|
|
257425141d | ||
|
|
6482a60c6e | ||
|
|
1466b70f10 | ||
|
|
b87146056b | ||
|
|
b29696d684 | ||
|
|
514a8d54db | ||
|
|
abfe89d8ff | ||
|
|
22609dc297 | ||
|
|
93341be396 | ||
|
|
f2ae3b6223 | ||
|
|
e84204a274 | ||
|
|
7024c7cb37 | ||
|
|
91e0823b04 | ||
|
|
b056df06f4 | ||
|
|
3f4b8368e8 | ||
|
|
d9edfb7088 | ||
|
|
2d62a475d1 | ||
|
|
6bf223e641 | ||
|
|
311a687740 | ||
|
|
44e532c9a2 | ||
|
|
1d48ff51d5 | ||
|
|
5076539df5 | ||
|
|
36ba378344 | ||
|
|
c1203b7dad | ||
|
|
fad13d901c | ||
|
|
1e022f53e3 | ||
|
|
da2f4ed711 | ||
|
|
6a7439141a | ||
|
|
9fcf546ae6 | ||
|
|
1f1ca3e689 | ||
|
|
01b14de046 | ||
|
|
284486a2ed | ||
|
|
9fae24099c | ||
|
|
2fab4045e4 | ||
|
|
05205ddbf1 | ||
|
|
329c9a7144 | ||
|
|
80a3ea0cd9 | ||
|
|
6c20525375 | ||
|
|
f56c31bacb | ||
|
|
fa817b6a1d | ||
|
|
17168d5fdc | ||
|
|
6493b48434 | ||
|
|
c148ecfd9b | ||
|
|
abbc132977 | ||
|
|
b96ed4b56a | ||
|
|
0019f60ba7 | ||
|
|
a1b236ddfa | ||
|
|
badcb87845 | ||
|
|
10be4be18f | ||
|
|
59b3e48bd1 | ||
|
|
7624eb459f | ||
|
|
1c7f516534 | ||
|
|
cba9b95416 | ||
|
|
228c26948b | ||
|
|
6333f39743 | ||
|
|
68728bcc94 | ||
|
|
16bd70d84f | ||
|
|
4361ccda32 | ||
|
|
018922349c | ||
|
|
03c674a648 | ||
|
|
5066f66dcd | ||
|
|
77a4883763 | ||
|
|
482aa15585 | ||
|
|
b8892250d5 | ||
|
|
94928bc78f | ||
|
|
ead63163b4 | ||
|
|
265d576510 | ||
|
|
a5eb6e9e15 | ||
|
|
776a6a0619 | ||
|
|
4514363c84 | ||
|
|
39cd635086 | ||
|
|
7165611679 | ||
|
|
ead0593976 | ||
|
|
adaf4011bc | ||
|
|
4a031107ac | ||
|
|
85516563f7 | ||
|
|
aa32a5f58b | ||
|
|
49cdc4a930 | ||
|
|
144a85b775 | ||
|
|
a027904278 | ||
|
|
5ac2cdde50 | ||
|
|
5c705b3367 | ||
|
|
e78e9e4747 | ||
|
|
81f9079f43 | ||
|
|
16eaa533b6 | ||
|
|
2e521a6c74 | ||
|
|
62192e17e8 | ||
|
|
0b6f17c676 | ||
|
|
6d9b5390c4 | ||
|
|
56a65d0975 | ||
|
|
e4c6c1d245 | ||
|
|
a9f366aed2 | ||
|
|
96bd08e391 | ||
|
|
02fe28eb25 | ||
|
|
e77530b390 | ||
|
|
d0370a3b4c | ||
|
|
ebce4890b2 | ||
|
|
1bc87aadb3 | ||
|
|
53a532dc76 | ||
|
|
0669a83e40 | ||
|
|
c4ab3b276f | ||
|
|
920f825496 | ||
|
|
f28573420e | ||
|
|
c471990aa3 | ||
|
|
baf56666d4 | ||
|
|
cbbc05f7b8 | ||
|
|
051906727b | ||
|
|
cfd5cbaba0 | ||
|
|
caaad886c3 | ||
|
|
2305f9051c | ||
|
|
392ab2960f | ||
|
|
1e134b5754 | ||
|
|
d356ea28af | ||
|
|
a015138dcd | ||
|
|
ff99d1bac8 | ||
|
|
f4af4727a1 | ||
|
|
2c3069db77 | ||
|
|
8845cd9c58 | ||
|
|
090f765c7e | ||
|
|
b58c0e8f3e | ||
|
|
b4ff170603 | ||
|
|
3bc540a283 | ||
|
|
25b761b506 | ||
|
|
1e502808c9 | ||
|
|
27e0c8f78a | ||
|
|
897218678e | ||
|
|
4eb33fe3be | ||
|
|
0614055efd | ||
|
|
e70f3f595a | ||
|
|
28af996bf9 | ||
|
|
8bd8f0960c | ||
|
|
255db77f1f | ||
|
|
8b0b14c9a6 | ||
|
|
95e83311b6 | ||
|
|
6c07476c45 | ||
|
|
1968f8193c | ||
|
|
5155ad89e8 | ||
|
|
c2505e8b7b | ||
|
|
b38f4b786b | ||
|
|
1a04c79738 | ||
|
|
a464c234b8 | ||
|
|
aa2319a052 | ||
|
|
f22181f47d | ||
|
|
3191ff498d | ||
|
|
91ea482ea6 | ||
|
|
905d87a112 | ||
|
|
8eae4e56ef | ||
|
|
5466fcfd2f | ||
|
|
176a0e9926 | ||
|
|
aa049b4677 | ||
|
|
5c19f1f546 | ||
|
|
9ae19a1f94 | ||
|
|
8cf3b7ad51 | ||
|
|
03d16835aa | ||
|
|
7174130e46 | ||
|
|
59f64c47b1 | ||
|
|
f3655e8a1e | ||
|
|
f97d796f90 | ||
|
|
d342aa4841 | ||
|
|
5c655e3b20 | ||
|
|
9a0ac4a477 | ||
|
|
8ea9632ccf | ||
|
|
03ef4f30e8 | ||
|
|
12228fb525 | ||
|
|
92897046ed | ||
|
|
91aa843a4e | ||
|
|
c3145d3c08 | ||
|
|
1ad2123896 | ||
|
|
9b9a256c60 | ||
|
|
0f3644d23a | ||
|
|
04d3023f76 | ||
|
|
5c7aa5406a | ||
|
|
bce676e902 | ||
|
|
7c9fd59a99 | ||
|
|
b89d1a2e77 | ||
|
|
f8c5015b20 | ||
|
|
f28cdc8a15 | ||
|
|
0e147f1f66 | ||
|
|
a91705724d | ||
|
|
5e3e8133fb | ||
|
|
5a3758f1c7 | ||
|
|
57237106f3 | ||
|
|
99ad2368b0 | ||
|
|
21d7f99a4e | ||
|
|
24b368a30c | ||
|
|
7c8bc8561d | ||
|
|
ce37cd665d | ||
|
|
bd0f4f6f78 | ||
|
|
4867db8831 | ||
|
|
e6ab516fb7 | ||
|
|
7501b82df1 | ||
|
|
aa6b881971 | ||
|
|
3928734d0f | ||
|
|
c7868a95bc | ||
|
|
2012647f78 | ||
|
|
84471a5463 | ||
|
|
57a3c14f2b | ||
|
|
d9914307eb | ||
|
|
71cdf46197 | ||
|
|
8a27884c70 | ||
|
|
b881e3e6cb | ||
|
|
ca718d8f2a | ||
|
|
c6625b1b8a | ||
|
|
16a6d680c4 | ||
|
|
270fa8f5d3 | ||
|
|
b1f5e93b4a | ||
|
|
79a61c72e1 | ||
|
|
3f04c11537 | ||
|
|
b2270613d7 | ||
|
|
0fe854421b | ||
|
|
de074f421e | ||
|
|
27590c39bd | ||
|
|
67191d4d5e | ||
|
|
00764f3d59 | ||
|
|
4a2cb32149 | ||
|
|
1a11664239 | ||
|
|
9520cbb44c | ||
|
|
1aea6b2cdb | ||
|
|
6ff950341a | ||
|
|
b9501e42b2 | ||
|
|
065c809dd5 | ||
|
|
5b9ea4a78f | ||
|
|
b72c4d4400 | ||
|
|
d46e214985 | ||
|
|
799c47ce7a | ||
|
|
b5121e59dd | ||
|
|
f6a7b4929f | ||
|
|
162b77ab5a | ||
|
|
92904efd45 | ||
|
|
93fabe487f | ||
|
|
74d704bea2 | ||
|
|
ee1bd50dd1 | ||
|
|
07096f84f5 | ||
|
|
a9b3bd632b | ||
|
|
eec324890e | ||
|
|
ca6ac8f0db | ||
|
|
60ab4a5fe7 | ||
|
|
10eb5830f0 | ||
|
|
835ceae6f6 | ||
|
|
abe3aa47f6 | ||
|
|
53e34072ed | ||
|
|
f83f761d0a | ||
|
|
9c18bf3a89 | ||
|
|
f6e1ab444e | ||
|
|
0ae8b2959d | ||
|
|
46b0b1e5e2 | ||
|
|
b44dfb4ab8 | ||
|
|
868e528810 | ||
|
|
0a4c850ef1 | ||
|
|
b3c4232251 | ||
|
|
0c38df47b9 | ||
|
|
bfd4005760 | ||
|
|
fc9fad15a3 | ||
|
|
b5091e88ad | ||
|
|
2610808b6d | ||
|
|
3cfee4f214 | ||
|
|
70fd116eaf | ||
|
|
62aac9c2f7 | ||
|
|
afcf1c6c22 | ||
|
|
f3f0365b13 | ||
|
|
9bc12843fe | ||
|
|
5e3ceddf69 | ||
|
|
d377e23193 | ||
|
|
e6dabd59ad | ||
|
|
f0c7380132 | ||
|
|
697ad4c568 | ||
|
|
1efd9b384d | ||
|
|
c1e71dc215 | ||
|
|
d2c7d27d13 | ||
|
|
1efd4c83f9 | ||
|
|
0f7677423f | ||
|
|
2a0b0e9f93 | ||
|
|
faec60188f | ||
|
|
709a688858 | ||
|
|
2448ff8314 | ||
|
|
311202102d | ||
|
|
6812a22706 | ||
|
|
fb727ce731 | ||
|
|
6af499e352 | ||
|
|
66ec33cf8e | ||
|
|
f2694f3a74 | ||
|
|
d069d0e444 | ||
|
|
56ee61b17c | ||
|
|
b945726017 | ||
|
|
6f8a7d1070 | ||
|
|
b032886c21 | ||
|
|
988739d566 | ||
|
|
8cd80801e8 | ||
|
|
c3b7a1a6fb | ||
|
|
9d0eff75ad | ||
|
|
3ccb548b6d | ||
|
|
eeedd53f32 | ||
|
|
11a3b5b73c | ||
|
|
eacc48e8c7 | ||
|
|
5b72b4d353 | ||
|
|
3f940ce8b8 | ||
|
|
b2e3ea2334 | ||
|
|
4637da8c32 | ||
|
|
6b88c5ba86 | ||
|
|
5fdb596214 | ||
|
|
c989b02285 | ||
|
|
c8301dc20b | ||
|
|
ca4ea03828 | ||
|
|
ae27c71d5a | ||
|
|
3d1555e278 | ||
|
|
54fab9eb4e | ||
|
|
8fea8a0b47 | ||
|
|
f14ae8e51b | ||
|
|
6b60e5e786 | ||
|
|
40413dfcc7 | ||
|
|
07f5ad1daa | ||
|
|
57f5a3e780 | ||
|
|
3be007526c | ||
|
|
9bfbd0550c | ||
|
|
0301a5dcdf | ||
|
|
db994a1197 | ||
|
|
855c13ea2a | ||
|
|
bfa7eced44 | ||
|
|
b1d103b1f3 | ||
|
|
fc816d3429 | ||
|
|
04a4e8c8e6 | ||
|
|
ab69fd01ac | ||
|
|
cc6106f31b | ||
|
|
ead85379ed | ||
|
|
f8d6be55ee | ||
|
|
a241d75043 | ||
|
|
864a6c0a20 | ||
|
|
1c20c54191 | ||
|
|
4d722d1fd1 | ||
|
|
b67254e986 | ||
|
|
041cf9c94e | ||
|
|
b08c5a8421 | ||
|
|
125eaa4cc3 | ||
|
|
6b001cf861 | ||
|
|
5c4129f85b | ||
|
|
789607d9bc | ||
|
|
d46530989c | ||
|
|
fa56879790 | ||
|
|
41713d7719 | ||
|
|
17a9463588 | ||
|
|
fb9f271720 | ||
|
|
8de50edb41 | ||
|
|
ab33fccddd | ||
|
|
bd95ac0beb | ||
|
|
7b3efb185f | ||
|
|
a0065febe2 | ||
|
|
9374784651 | ||
|
|
aa6af3deed | ||
|
|
a19e501b44 | ||
|
|
889a395340 | ||
|
|
eb8eb28ca7 | ||
|
|
697b3351e6 | ||
|
|
9fd80bfd67 | ||
|
|
7b58b1ea59 | ||
|
|
c454396c26 | ||
|
|
2e9d8f5520 | ||
|
|
c8ea3fba5a | ||
|
|
56af13047c | ||
|
|
c46900396a | ||
|
|
b235ed1223 | ||
|
|
16d9612603 | ||
|
|
721e5b4656 | ||
|
|
9b8b39f444 | ||
|
|
e32a837fb2 | ||
|
|
9961f8bc1c | ||
|
|
c066867d59 | ||
|
|
21093165e1 | ||
|
|
df88de14e3 | ||
|
|
94de431aa5 | ||
|
|
502557a97f | ||
|
|
52938f6dbf | ||
|
|
d87fad649c | ||
|
|
d8666e5309 | ||
|
|
9d11128362 | ||
|
|
ee17ab3e26 | ||
|
|
06af36dac2 | ||
|
|
51d6d741e5 | ||
|
|
b593a8ae67 | ||
|
|
7b30240a7f | ||
|
|
71f124faa5 | ||
|
|
15232fc072 | ||
|
|
0a7aab947c | ||
|
|
5906fa81bb | ||
|
|
33dc865c30 | ||
|
|
0d469e2966 | ||
|
|
3c5bcb434c | ||
|
|
4d2b38497d | ||
|
|
fc5ae1cfbc | ||
|
|
7e76d1cc6b | ||
|
|
cf834e8a21 | ||
|
|
ee61466042 | ||
|
|
35884d482c | ||
|
|
802de8112c | ||
|
|
9a76cfc85f | ||
|
|
dc41dd888d | ||
|
|
827ad80311 | ||
|
|
9e3d8ac4e9 | ||
|
|
1b327e29ba | ||
|
|
26a35ea43d | ||
|
|
81ebef2e29 | ||
|
|
1068cfb4b5 | ||
|
|
73b1737dc7 | ||
|
|
1d86f40fcd | ||
|
|
59fb481138 | ||
|
|
16e22b3b77 | ||
|
|
aa701c6766 | ||
|
|
ed4bbe97d1 | ||
|
|
f05c437221 | ||
|
|
483488a2fa | ||
|
|
b36c4f2428 | ||
|
|
924fddf698 | ||
|
|
23e55e92ca | ||
|
|
0cfdbfb91c |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,5 +1,7 @@
|
||||
*.gem
|
||||
/Gemfile.lock
|
||||
.bundle/
|
||||
.idea
|
||||
benchmark/
|
||||
lib/linguist/samples.json
|
||||
/grammars
|
||||
|
||||
364
.gitmodules
vendored
364
.gitmodules
vendored
@@ -7,15 +7,12 @@
|
||||
[submodule "vendor/grammars/sublime-cirru"]
|
||||
path = vendor/grammars/sublime-cirru
|
||||
url = https://github.com/Cirru/sublime-cirru
|
||||
[submodule "vendor/grammars/Sublime-Logos"]
|
||||
path = vendor/grammars/Sublime-Logos
|
||||
url = https://github.com/Cykey/Sublime-Logos
|
||||
[submodule "vendor/grammars/SublimeBrainfuck"]
|
||||
path = vendor/grammars/SublimeBrainfuck
|
||||
url = https://github.com/Drako/SublimeBrainfuck
|
||||
[submodule "vendor/grammars/awk-sublime"]
|
||||
path = vendor/grammars/awk-sublime
|
||||
url = https://github.com/JohnNilsson/awk-sublime
|
||||
url = https://github.com/github-linguist/awk-sublime
|
||||
[submodule "vendor/grammars/Sublime-SQF-Language"]
|
||||
path = vendor/grammars/Sublime-SQF-Language
|
||||
url = https://github.com/JonBons/Sublime-SQF-Language
|
||||
@@ -25,21 +22,15 @@
|
||||
[submodule "vendor/grammars/Sublime-REBOL"]
|
||||
path = vendor/grammars/Sublime-REBOL
|
||||
url = https://github.com/Oldes/Sublime-REBOL
|
||||
[submodule "vendor/grammars/autoitv3-tmbundle"]
|
||||
path = vendor/grammars/autoitv3-tmbundle
|
||||
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
|
||||
[submodule "vendor/grammars/Sublime-VimL"]
|
||||
path = vendor/grammars/Sublime-VimL
|
||||
url = https://github.com/SalGnt/Sublime-VimL
|
||||
[submodule "vendor/grammars/boo-sublime"]
|
||||
path = vendor/grammars/boo-sublime
|
||||
url = https://github.com/Shammah/boo-sublime
|
||||
[submodule "vendor/grammars/language-viml"]
|
||||
path = vendor/grammars/language-viml
|
||||
url = https://github.com/Alhadis/language-viml
|
||||
[submodule "vendor/grammars/ColdFusion"]
|
||||
path = vendor/grammars/ColdFusion
|
||||
url = https://github.com/SublimeText/ColdFusion
|
||||
[submodule "vendor/grammars/NSIS"]
|
||||
path = vendor/grammars/NSIS
|
||||
url = https://github.com/SublimeText/NSIS
|
||||
url = https://github.com/github-linguist/NSIS
|
||||
[submodule "vendor/grammars/NimLime"]
|
||||
path = vendor/grammars/NimLime
|
||||
url = https://github.com/Varriount/NimLime
|
||||
@@ -76,9 +67,6 @@
|
||||
[submodule "vendor/grammars/language-javascript"]
|
||||
path = vendor/grammars/language-javascript
|
||||
url = https://github.com/atom/language-javascript
|
||||
[submodule "vendor/grammars/language-python"]
|
||||
path = vendor/grammars/language-python
|
||||
url = https://github.com/atom/language-python
|
||||
[submodule "vendor/grammars/language-shellscript"]
|
||||
path = vendor/grammars/language-shellscript
|
||||
url = https://github.com/atom/language-shellscript
|
||||
@@ -88,9 +76,6 @@
|
||||
[submodule "vendor/grammars/language-yaml"]
|
||||
path = vendor/grammars/language-yaml
|
||||
url = https://github.com/atom/language-yaml
|
||||
[submodule "vendor/grammars/sublime-sourcepawn"]
|
||||
path = vendor/grammars/sublime-sourcepawn
|
||||
url = https://github.com/austinwagner/sublime-sourcepawn
|
||||
[submodule "vendor/grammars/Sublime-Lasso"]
|
||||
path = vendor/grammars/Sublime-Lasso
|
||||
url = https://github.com/bfad/Sublime-Lasso
|
||||
@@ -103,15 +88,9 @@
|
||||
[submodule "vendor/grammars/bro-sublime"]
|
||||
path = vendor/grammars/bro-sublime
|
||||
url = https://github.com/bro/bro-sublime
|
||||
[submodule "vendor/grammars/sublime_man_page_support"]
|
||||
path = vendor/grammars/sublime_man_page_support
|
||||
url = https://github.com/carsonoid/sublime_man_page_support
|
||||
[submodule "vendor/grammars/sublime-MuPAD"]
|
||||
path = vendor/grammars/sublime-MuPAD
|
||||
url = https://github.com/ccreutzig/sublime-MuPAD
|
||||
[submodule "vendor/grammars/nesC.tmbundle"]
|
||||
path = vendor/grammars/nesC.tmbundle
|
||||
url = https://github.com/cdwilson/nesC.tmbundle
|
||||
[submodule "vendor/grammars/haxe-sublime-bundle"]
|
||||
path = vendor/grammars/haxe-sublime-bundle
|
||||
url = https://github.com/clemos/haxe-sublime-bundle
|
||||
@@ -133,9 +112,6 @@
|
||||
[submodule "vendor/grammars/fancy-tmbundle"]
|
||||
path = vendor/grammars/fancy-tmbundle
|
||||
url = https://github.com/fancy-lang/fancy-tmbundle
|
||||
[submodule "vendor/grammars/monkey.tmbundle"]
|
||||
path = vendor/grammars/monkey.tmbundle
|
||||
url = https://github.com/gingerbeardman/monkey.tmbundle
|
||||
[submodule "vendor/grammars/dart-sublime-bundle"]
|
||||
path = vendor/grammars/dart-sublime-bundle
|
||||
url = https://github.com/guillermooo/dart-sublime-bundle
|
||||
@@ -151,9 +127,6 @@
|
||||
[submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"]
|
||||
path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL
|
||||
url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL
|
||||
[submodule "vendor/grammars/sublime-rust"]
|
||||
path = vendor/grammars/sublime-rust
|
||||
url = https://github.com/jhasse/sublime-rust
|
||||
[submodule "vendor/grammars/sublime-befunge"]
|
||||
path = vendor/grammars/sublime-befunge
|
||||
url = https://github.com/johanasplund/sublime-befunge
|
||||
@@ -166,18 +139,12 @@
|
||||
[submodule "vendor/grammars/fish-tmbundle"]
|
||||
path = vendor/grammars/fish-tmbundle
|
||||
url = https://github.com/l15n/fish-tmbundle
|
||||
[submodule "vendor/grammars/sublime-idris"]
|
||||
path = vendor/grammars/sublime-idris
|
||||
url = https://github.com/laughedelic/sublime-idris
|
||||
[submodule "vendor/grammars/moonscript-tmbundle"]
|
||||
path = vendor/grammars/moonscript-tmbundle
|
||||
url = https://github.com/leafo/moonscript-tmbundle
|
||||
[submodule "vendor/grammars/Isabelle.tmbundle"]
|
||||
path = vendor/grammars/Isabelle.tmbundle
|
||||
url = https://github.com/lsf37/Isabelle.tmbundle
|
||||
[submodule "vendor/grammars/x86-assembly-textmate-bundle"]
|
||||
path = vendor/grammars/x86-assembly-textmate-bundle
|
||||
url = https://github.com/lunixbochs/x86-assembly-textmate-bundle
|
||||
[submodule "vendor/grammars/Alloy.tmbundle"]
|
||||
path = vendor/grammars/Alloy.tmbundle
|
||||
url = https://github.com/macekond/Alloy.tmbundle
|
||||
@@ -207,10 +174,7 @@
|
||||
url = https://github.com/mokus0/Agda.tmbundle
|
||||
[submodule "vendor/grammars/Julia.tmbundle"]
|
||||
path = vendor/grammars/Julia.tmbundle
|
||||
url = https://github.com/nanoant/Julia.tmbundle
|
||||
[submodule "vendor/grammars/assembly.tmbundle"]
|
||||
path = vendor/grammars/assembly.tmbundle
|
||||
url = https://github.com/nanoant/assembly.tmbundle
|
||||
url = https://github.com/JuliaEditorSupport/Julia.tmbundle
|
||||
[submodule "vendor/grammars/ooc.tmbundle"]
|
||||
path = vendor/grammars/ooc.tmbundle
|
||||
url = https://github.com/nilium/ooc.tmbundle
|
||||
@@ -220,9 +184,6 @@
|
||||
[submodule "vendor/grammars/sublime-tea"]
|
||||
path = vendor/grammars/sublime-tea
|
||||
url = https://github.com/pferruggiaro/sublime-tea
|
||||
[submodule "vendor/grammars/puppet-textmate-bundle"]
|
||||
path = vendor/grammars/puppet-textmate-bundle
|
||||
url = https://github.com/puppet-textmate-bundle/puppet-textmate-bundle
|
||||
[submodule "vendor/grammars/abap.tmbundle"]
|
||||
path = vendor/grammars/abap.tmbundle
|
||||
url = https://github.com/pvl/abap.tmbundle
|
||||
@@ -235,9 +196,6 @@
|
||||
[submodule "vendor/grammars/sublime-robot-plugin"]
|
||||
path = vendor/grammars/sublime-robot-plugin
|
||||
url = https://github.com/shellderp/sublime-robot-plugin
|
||||
[submodule "vendor/grammars/actionscript3-tmbundle"]
|
||||
path = vendor/grammars/actionscript3-tmbundle
|
||||
url = https://github.com/honzabrecka/actionscript3-tmbundle
|
||||
[submodule "vendor/grammars/Sublime-QML"]
|
||||
path = vendor/grammars/Sublime-QML
|
||||
url = https://github.com/skozlovf/Sublime-QML
|
||||
@@ -283,9 +241,6 @@
|
||||
[submodule "vendor/grammars/cpp-qt.tmbundle"]
|
||||
path = vendor/grammars/cpp-qt.tmbundle
|
||||
url = https://github.com/textmate/cpp-qt.tmbundle
|
||||
[submodule "vendor/grammars/css.tmbundle"]
|
||||
path = vendor/grammars/css.tmbundle
|
||||
url = https://github.com/textmate/css.tmbundle
|
||||
[submodule "vendor/grammars/d.tmbundle"]
|
||||
path = vendor/grammars/d.tmbundle
|
||||
url = https://github.com/textmate/d.tmbundle
|
||||
@@ -313,9 +268,6 @@
|
||||
[submodule "vendor/grammars/groovy.tmbundle"]
|
||||
path = vendor/grammars/groovy.tmbundle
|
||||
url = https://github.com/textmate/groovy.tmbundle
|
||||
[submodule "vendor/grammars/haskell.tmbundle"]
|
||||
path = vendor/grammars/haskell.tmbundle
|
||||
url = https://github.com/textmate/haskell.tmbundle
|
||||
[submodule "vendor/grammars/html.tmbundle"]
|
||||
path = vendor/grammars/html.tmbundle
|
||||
url = https://github.com/textmate/html.tmbundle
|
||||
@@ -340,9 +292,6 @@
|
||||
[submodule "vendor/grammars/latex.tmbundle"]
|
||||
path = vendor/grammars/latex.tmbundle
|
||||
url = https://github.com/textmate/latex.tmbundle
|
||||
[submodule "vendor/grammars/less.tmbundle"]
|
||||
path = vendor/grammars/less.tmbundle
|
||||
url = https://github.com/textmate/less.tmbundle
|
||||
[submodule "vendor/grammars/lilypond.tmbundle"]
|
||||
path = vendor/grammars/lilypond.tmbundle
|
||||
url = https://github.com/textmate/lilypond.tmbundle
|
||||
@@ -367,9 +316,6 @@
|
||||
[submodule "vendor/grammars/nemerle.tmbundle"]
|
||||
path = vendor/grammars/nemerle.tmbundle
|
||||
url = https://github.com/textmate/nemerle.tmbundle
|
||||
[submodule "vendor/grammars/ninja.tmbundle"]
|
||||
path = vendor/grammars/ninja.tmbundle
|
||||
url = https://github.com/textmate/ninja.tmbundle
|
||||
[submodule "vendor/grammars/objective-c.tmbundle"]
|
||||
path = vendor/grammars/objective-c.tmbundle
|
||||
url = https://github.com/textmate/objective-c.tmbundle
|
||||
@@ -384,7 +330,7 @@
|
||||
url = https://github.com/textmate/php-smarty.tmbundle
|
||||
[submodule "vendor/grammars/php.tmbundle"]
|
||||
path = vendor/grammars/php.tmbundle
|
||||
url = https://github.com/textmate/php.tmbundle
|
||||
url = https://github.com/brandonblack/php.tmbundle
|
||||
[submodule "vendor/grammars/postscript.tmbundle"]
|
||||
path = vendor/grammars/postscript.tmbundle
|
||||
url = https://github.com/textmate/postscript.tmbundle
|
||||
@@ -397,12 +343,6 @@
|
||||
[submodule "vendor/grammars/r.tmbundle"]
|
||||
path = vendor/grammars/r.tmbundle
|
||||
url = https://github.com/textmate/r.tmbundle
|
||||
[submodule "vendor/grammars/restructuredtext.tmbundle"]
|
||||
path = vendor/grammars/restructuredtext.tmbundle
|
||||
url = https://github.com/textmate/restructuredtext.tmbundle
|
||||
[submodule "vendor/grammars/ruby-haml.tmbundle"]
|
||||
path = vendor/grammars/ruby-haml.tmbundle
|
||||
url = https://github.com/textmate/ruby-haml.tmbundle
|
||||
[submodule "vendor/grammars/scheme.tmbundle"]
|
||||
path = vendor/grammars/scheme.tmbundle
|
||||
url = https://github.com/textmate/scheme.tmbundle
|
||||
@@ -451,15 +391,12 @@
|
||||
[submodule "vendor/grammars/llvm.tmbundle"]
|
||||
path = vendor/grammars/llvm.tmbundle
|
||||
url = https://github.com/whitequark/llvm.tmbundle
|
||||
[submodule "vendor/grammars/sublime-nix"]
|
||||
path = vendor/grammars/sublime-nix
|
||||
url = https://github.com/wmertens/sublime-nix
|
||||
[submodule "vendor/grammars/oz-tmbundle"]
|
||||
path = vendor/grammars/oz-tmbundle
|
||||
url = https://github.com/eregon/oz-tmbundle
|
||||
[submodule "vendor/grammars/ebundles"]
|
||||
path = vendor/grammars/ebundles
|
||||
url = https://github.com/ericzou/ebundles
|
||||
[submodule "vendor/grammars/language-batchfile"]
|
||||
path = vendor/grammars/language-batchfile
|
||||
url = https://github.com/mmims/language-batchfile
|
||||
[submodule "vendor/grammars/sublime-mask"]
|
||||
path = vendor/grammars/sublime-mask
|
||||
url = https://github.com/tenbits/sublime-mask
|
||||
@@ -482,9 +419,6 @@
|
||||
[submodule "vendor/grammars/Scalate.tmbundle"]
|
||||
path = vendor/grammars/Scalate.tmbundle
|
||||
url = https://github.com/scalate/Scalate.tmbundle
|
||||
[submodule "vendor/grammars/Elm.tmLanguage"]
|
||||
path = vendor/grammars/Elm.tmLanguage
|
||||
url = https://github.com/deadfoxygrandpa/Elm.tmLanguage
|
||||
[submodule "vendor/grammars/sublime-bsv"]
|
||||
path = vendor/grammars/sublime-bsv
|
||||
url = https://github.com/thotypous/sublime-bsv
|
||||
@@ -500,9 +434,6 @@
|
||||
[submodule "vendor/grammars/Sublime-Nit"]
|
||||
path = vendor/grammars/Sublime-Nit
|
||||
url = https://github.com/R4PaSs/Sublime-Nit
|
||||
[submodule "vendor/grammars/language-hy"]
|
||||
path = vendor/grammars/language-hy
|
||||
url = https://github.com/rwtolbert/language-hy
|
||||
[submodule "vendor/grammars/Racket"]
|
||||
path = vendor/grammars/Racket
|
||||
url = https://github.com/soegaard/racket-highlight-for-github
|
||||
@@ -512,24 +443,9 @@
|
||||
[submodule "vendor/grammars/liquid.tmbundle"]
|
||||
path = vendor/grammars/liquid.tmbundle
|
||||
url = https://github.com/bastilian/validcode-textmate-bundles
|
||||
[submodule "vendor/grammars/ats.sublime"]
|
||||
path = vendor/grammars/ats.sublime
|
||||
url = https://github.com/steinwaywhw/ats-mode-sublimetext
|
||||
[submodule "vendor/grammars/Modelica"]
|
||||
path = vendor/grammars/Modelica
|
||||
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
|
||||
[submodule "vendor/grammars/sublime-apl"]
|
||||
path = vendor/grammars/sublime-apl
|
||||
url = https://github.com/StoneCypher/sublime-apl
|
||||
[submodule "vendor/grammars/CLIPS-sublime"]
|
||||
path = vendor/grammars/CLIPS-sublime
|
||||
url = https://github.com/psicomante/CLIPS-sublime
|
||||
[submodule "vendor/grammars/Creole"]
|
||||
path = vendor/grammars/Creole
|
||||
url = https://github.com/Siddley/Creole
|
||||
[submodule "vendor/grammars/GDScript-sublime"]
|
||||
path = vendor/grammars/GDScript-sublime
|
||||
url = https://github.com/beefsack/GDScript-sublime
|
||||
[submodule "vendor/grammars/sublime-golo"]
|
||||
path = vendor/grammars/sublime-golo
|
||||
url = https://github.com/TypeUnsafe/sublime-golo
|
||||
@@ -542,9 +458,6 @@
|
||||
[submodule "vendor/grammars/G-Code"]
|
||||
path = vendor/grammars/G-Code
|
||||
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
|
||||
[submodule "vendor/grammars/grace-tmbundle"]
|
||||
path = vendor/grammars/grace-tmbundle
|
||||
url = https://github.com/zmthy/grace-tmbundle
|
||||
[submodule "vendor/grammars/sublime-text-ox"]
|
||||
path = vendor/grammars/sublime-text-ox
|
||||
url = https://github.com/andreashetland/sublime-text-ox
|
||||
@@ -554,9 +467,6 @@
|
||||
[submodule "vendor/grammars/ec.tmbundle"]
|
||||
path = vendor/grammars/ec.tmbundle
|
||||
url = https://github.com/ecere/ec.tmbundle
|
||||
[submodule "vendor/grammars/InnoSetup"]
|
||||
path = vendor/grammars/InnoSetup
|
||||
url = https://github.com/idleberg/InnoSetup-Sublime-Text
|
||||
[submodule "vendor/grammars/gap-tmbundle"]
|
||||
path = vendor/grammars/gap-tmbundle
|
||||
url = https://github.com/dhowden/gap-tmbundle
|
||||
@@ -578,9 +488,6 @@
|
||||
[submodule "vendor/grammars/SublimeClarion"]
|
||||
path = vendor/grammars/SublimeClarion
|
||||
url = https://github.com/fushnisoft/SublimeClarion
|
||||
[submodule "vendor/grammars/oracle.tmbundle"]
|
||||
path = vendor/grammars/oracle.tmbundle
|
||||
url = https://github.com/mulander/oracle.tmbundle.git
|
||||
[submodule "vendor/grammars/BrightScript.tmbundle"]
|
||||
path = vendor/grammars/BrightScript.tmbundle
|
||||
url = https://github.com/cmink/BrightScript.tmbundle
|
||||
@@ -590,18 +497,12 @@
|
||||
[submodule "vendor/grammars/asciidoc.tmbundle"]
|
||||
path = vendor/grammars/asciidoc.tmbundle
|
||||
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
|
||||
[submodule "vendor/grammars/sublime-text-pig-latin"]
|
||||
path = vendor/grammars/sublime-text-pig-latin
|
||||
url = https://github.com/goblindegook/sublime-text-pig-latin
|
||||
[submodule "vendor/grammars/Lean.tmbundle"]
|
||||
path = vendor/grammars/Lean.tmbundle
|
||||
url = https://github.com/leanprover/Lean.tmbundle
|
||||
[submodule "vendor/grammars/ampl"]
|
||||
path = vendor/grammars/ampl
|
||||
url = https://github.com/ampl/sublime-ampl
|
||||
[submodule "vendor/grammars/openscad.tmbundle"]
|
||||
path = vendor/grammars/openscad.tmbundle
|
||||
url = https://github.com/tbuser/openscad.tmbundle
|
||||
[submodule "vendor/grammars/sublime-varnish"]
|
||||
path = vendor/grammars/sublime-varnish
|
||||
url = https://github.com/brandonwamboldt/sublime-varnish
|
||||
@@ -637,7 +538,7 @@
|
||||
url = https://github.com/ShaneWilton/sublime-smali
|
||||
[submodule "vendor/grammars/language-jsoniq"]
|
||||
path = vendor/grammars/language-jsoniq
|
||||
url = http://github.com/wcandillon/language-jsoniq
|
||||
url = https://github.com/wcandillon/language-jsoniq
|
||||
[submodule "vendor/grammars/atom-fsharp"]
|
||||
path = vendor/grammars/atom-fsharp
|
||||
url = https://github.com/fsprojects/atom-fsharp
|
||||
@@ -653,6 +554,9 @@
|
||||
[submodule "vendor/grammars/language-ncl"]
|
||||
path = vendor/grammars/language-ncl
|
||||
url = https://github.com/rpavlick/language-ncl.git
|
||||
[submodule "vendor/grammars/pawn-sublime-language"]
|
||||
path = vendor/grammars/pawn-sublime-language
|
||||
url = https://github.com/Southclaw/pawn-sublime-language.git
|
||||
[submodule "vendor/grammars/atom-language-purescript"]
|
||||
path = vendor/grammars/atom-language-purescript
|
||||
url = https://github.com/purescript-contrib/atom-language-purescript
|
||||
@@ -676,10 +580,7 @@
|
||||
url = https://github.com/CausalityLtd/sublime-pony
|
||||
[submodule "vendor/grammars/X10"]
|
||||
path = vendor/grammars/X10
|
||||
url = git@github.com:x10-lang/x10-highlighting.git
|
||||
[submodule "vendor/grammars/language-babel"]
|
||||
path = vendor/grammars/language-babel
|
||||
url = https://github.com/gandm/language-babel
|
||||
url = https://github.com/x10-lang/x10-highlighting
|
||||
[submodule "vendor/grammars/UrWeb-Language-Definition"]
|
||||
path = vendor/grammars/UrWeb-Language-Definition
|
||||
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
|
||||
@@ -691,7 +592,7 @@
|
||||
url = https://github.com/freemarker/FreeMarker.tmbundle
|
||||
[submodule "vendor/grammars/MagicPython"]
|
||||
path = vendor/grammars/MagicPython
|
||||
url = git@github.com:MagicStack/MagicPython.git
|
||||
url = https://github.com/MagicStack/MagicPython
|
||||
[submodule "vendor/grammars/language-click"]
|
||||
path = vendor/grammars/language-click
|
||||
url = https://github.com/stenverbois/language-click.git
|
||||
@@ -706,4 +607,235 @@
|
||||
url = https://github.com/erkyrath/language-inform7
|
||||
[submodule "vendor/grammars/atom-language-stan"]
|
||||
path = vendor/grammars/atom-language-stan
|
||||
url = git@github.com:jrnold/atom-language-stan.git
|
||||
url = https://github.com/jrnold/atom-language-stan
|
||||
[submodule "vendor/grammars/language-yang"]
|
||||
path = vendor/grammars/language-yang
|
||||
url = https://github.com/DzonyKalafut/language-yang.git
|
||||
[submodule "vendor/grammars/language-less"]
|
||||
path = vendor/grammars/language-less
|
||||
url = https://github.com/atom/language-less.git
|
||||
[submodule "vendor/grammars/language-povray"]
|
||||
path = vendor/grammars/language-povray
|
||||
url = https://github.com/c-lipka/language-povray
|
||||
[submodule "vendor/grammars/sublime-terra"]
|
||||
path = vendor/grammars/sublime-terra
|
||||
url = https://github.com/pyk/sublime-terra
|
||||
[submodule "vendor/grammars/SublimePuppet"]
|
||||
path = vendor/grammars/SublimePuppet
|
||||
url = https://github.com/russCloak/SublimePuppet
|
||||
[submodule "vendor/grammars/sublimeassembly"]
|
||||
path = vendor/grammars/sublimeassembly
|
||||
url = https://github.com/Nessphoro/sublimeassembly
|
||||
[submodule "vendor/grammars/monkey"]
|
||||
path = vendor/grammars/monkey
|
||||
url = https://github.com/gingerbeardman/monkey.tmbundle
|
||||
[submodule "vendor/grammars/assembly"]
|
||||
path = vendor/grammars/assembly
|
||||
url = https://github.com/nanoant/assembly.tmbundle
|
||||
[submodule "vendor/grammars/boo"]
|
||||
path = vendor/grammars/boo
|
||||
url = https://github.com/Shammah/boo-sublime
|
||||
[submodule "vendor/grammars/logos"]
|
||||
path = vendor/grammars/logos
|
||||
url = https://github.com/Cykey/Sublime-Logos
|
||||
[submodule "vendor/grammars/pig-latin"]
|
||||
path = vendor/grammars/pig-latin
|
||||
url = https://github.com/goblindegook/sublime-text-pig-latin
|
||||
[submodule "vendor/grammars/sourcepawn"]
|
||||
path = vendor/grammars/sourcepawn
|
||||
url = https://github.com/github-linguist/sublime-sourcepawn
|
||||
[submodule "vendor/grammars/gdscript"]
|
||||
path = vendor/grammars/gdscript
|
||||
url = https://github.com/beefsack/GDScript-sublime
|
||||
[submodule "vendor/grammars/nesC"]
|
||||
path = vendor/grammars/nesC
|
||||
url = https://github.com/cdwilson/nesC.tmbundle
|
||||
[submodule "vendor/grammars/ats"]
|
||||
path = vendor/grammars/ats
|
||||
url = https://github.com/steinwaywhw/ats-mode-sublimetext
|
||||
[submodule "vendor/grammars/grace"]
|
||||
path = vendor/grammars/grace
|
||||
url = https://github.com/zmthy/grace-tmbundle
|
||||
[submodule "vendor/grammars/ejs-tmbundle"]
|
||||
path = vendor/grammars/ejs-tmbundle
|
||||
url = https://github.com/gregory-m/ejs-tmbundle
|
||||
[submodule "vendor/grammars/nix"]
|
||||
path = vendor/grammars/nix
|
||||
url = https://github.com/wmertens/sublime-nix
|
||||
[submodule "vendor/grammars/idris"]
|
||||
path = vendor/grammars/idris
|
||||
url = https://github.com/idris-hackers/idris-sublime.git
|
||||
[submodule "vendor/grammars/atomic-dreams"]
|
||||
path = vendor/grammars/atomic-dreams
|
||||
url = https://github.com/PJB3005/atomic-dreams
|
||||
[submodule "vendor/grammars/language-apl"]
|
||||
path = vendor/grammars/language-apl
|
||||
url = https://github.com/Alhadis/language-apl.git
|
||||
[submodule "vendor/grammars/language-graphql"]
|
||||
path = vendor/grammars/language-graphql
|
||||
url = https://github.com/rmosolgo/language-graphql
|
||||
[submodule "vendor/grammars/language-toc-wow"]
|
||||
path = vendor/grammars/language-toc-wow
|
||||
url = https://github.com/nebularg/language-toc-wow
|
||||
[submodule "vendor/grammars/sublime-autoit"]
|
||||
path = vendor/grammars/sublime-autoit
|
||||
url = https://github.com/AutoIt/SublimeAutoItScript
|
||||
[submodule "vendor/grammars/TLA"]
|
||||
path = vendor/grammars/TLA
|
||||
url = https://github.com/agentultra/TLAGrammar
|
||||
[submodule "vendor/grammars/sublime-clips"]
|
||||
path = vendor/grammars/sublime-clips
|
||||
url = https://github.com/psicomante/CLIPS-sublime
|
||||
[submodule "vendor/grammars/creole"]
|
||||
path = vendor/grammars/creole
|
||||
url = https://github.com/Siddley/Creole
|
||||
[submodule "vendor/grammars/language-csound"]
|
||||
path = vendor/grammars/language-csound
|
||||
url = https://github.com/nwhetsell/language-csound
|
||||
[submodule "vendor/grammars/language-wavefront"]
|
||||
path = vendor/grammars/language-wavefront
|
||||
url = https://github.com/Alhadis/language-wavefront
|
||||
[submodule "vendor/grammars/nu.tmbundle"]
|
||||
path = vendor/grammars/nu.tmbundle
|
||||
url = https://github.com/jsallis/nu.tmbundle
|
||||
[submodule "vendor/grammars/Elm"]
|
||||
path = vendor/grammars/Elm
|
||||
url = https://github.com/elm-community/Elm.tmLanguage
|
||||
[submodule "vendor/grammars/language-restructuredtext"]
|
||||
path = vendor/grammars/language-restructuredtext
|
||||
url = https://github.com/Lukasa/language-restructuredtext
|
||||
[submodule "vendor/grammars/atom-language-clean"]
|
||||
path = vendor/grammars/atom-language-clean
|
||||
url = https://github.com/timjs/atom-language-clean.git
|
||||
[submodule "vendor/grammars/language-turing"]
|
||||
path = vendor/grammars/language-turing
|
||||
url = https://github.com/Alhadis/language-turing
|
||||
[submodule "vendor/grammars/atom-language-srt"]
|
||||
path = vendor/grammars/atom-language-srt
|
||||
url = https://github.com/314eter/atom-language-srt
|
||||
[submodule "vendor/grammars/language-agc"]
|
||||
path = vendor/grammars/language-agc
|
||||
url = https://github.com/Alhadis/language-agc
|
||||
[submodule "vendor/grammars/language-blade"]
|
||||
path = vendor/grammars/language-blade
|
||||
url = https://github.com/jawee/language-blade
|
||||
[submodule "vendor/grammars/SublimeGDB"]
|
||||
path = vendor/grammars/SublimeGDB
|
||||
url = https://github.com/quarnster/SublimeGDB
|
||||
[submodule "vendor/grammars/language-roff"]
|
||||
path = vendor/grammars/language-roff
|
||||
url = https://github.com/Alhadis/language-roff
|
||||
[submodule "vendor/grammars/language-haskell"]
|
||||
path = vendor/grammars/language-haskell
|
||||
url = https://github.com/atom-haskell/language-haskell
|
||||
[submodule "vendor/grammars/language-asn1"]
|
||||
path = vendor/grammars/language-asn1
|
||||
url = https://github.com/ajLangley12/language-asn1
|
||||
[submodule "vendor/grammars/atom-language-1c-bsl"]
|
||||
path = vendor/grammars/atom-language-1c-bsl
|
||||
url = https://github.com/xDrivenDevelopment/atom-language-1c-bsl.git
|
||||
[submodule "vendor/grammars/sublime-rexx"]
|
||||
path = vendor/grammars/sublime-rexx
|
||||
url = https://github.com/mblocker/rexx-sublime
|
||||
[submodule "vendor/grammars/blitzmax"]
|
||||
path = vendor/grammars/blitzmax
|
||||
url = https://github.com/textmate/blitzmax.tmbundle
|
||||
[submodule "vendor/grammars/cython"]
|
||||
path = vendor/grammars/cython
|
||||
url = https://github.com/textmate/cython.tmbundle
|
||||
[submodule "vendor/grammars/forth"]
|
||||
path = vendor/grammars/forth
|
||||
url = https://github.com/textmate/forth.tmbundle
|
||||
[submodule "vendor/grammars/parrot"]
|
||||
path = vendor/grammars/parrot
|
||||
url = https://github.com/textmate/parrot.tmbundle
|
||||
[submodule "vendor/grammars/secondlife-lsl"]
|
||||
path = vendor/grammars/secondlife-lsl
|
||||
url = https://github.com/textmate/secondlife-lsl.tmbundle
|
||||
[submodule "vendor/grammars/vhdl"]
|
||||
path = vendor/grammars/vhdl
|
||||
url = https://github.com/textmate/vhdl.tmbundle
|
||||
[submodule "vendor/grammars/language-rpm-spec"]
|
||||
path = vendor/grammars/language-rpm-spec
|
||||
url = https://github.com/waveclaw/language-rpm-spec
|
||||
[submodule "vendor/grammars/language-emacs-lisp"]
|
||||
path = vendor/grammars/language-emacs-lisp
|
||||
url = https://github.com/Alhadis/language-emacs-lisp
|
||||
[submodule "vendor/grammars/language-babel"]
|
||||
path = vendor/grammars/language-babel
|
||||
url = https://github.com/github-linguist/language-babel
|
||||
[submodule "vendor/CodeMirror"]
|
||||
path = vendor/CodeMirror
|
||||
url = https://github.com/codemirror/CodeMirror
|
||||
[submodule "vendor/grammars/MQL5-sublime"]
|
||||
path = vendor/grammars/MQL5-sublime
|
||||
url = https://github.com/mqsoft/MQL5-sublime
|
||||
[submodule "vendor/grammars/actionscript3-tmbundle"]
|
||||
path = vendor/grammars/actionscript3-tmbundle
|
||||
url = https://github.com/simongregory/actionscript3-tmbundle
|
||||
[submodule "vendor/grammars/ABNF.tmbundle"]
|
||||
path = vendor/grammars/ABNF.tmbundle
|
||||
url = https://github.com/sanssecours/ABNF.tmbundle
|
||||
[submodule "vendor/grammars/EBNF.tmbundle"]
|
||||
path = vendor/grammars/EBNF.tmbundle
|
||||
url = https://github.com/sanssecours/EBNF.tmbundle
|
||||
[submodule "vendor/grammars/language-haml"]
|
||||
path = vendor/grammars/language-haml
|
||||
url = https://github.com/ezekg/language-haml
|
||||
[submodule "vendor/grammars/language-ninja"]
|
||||
path = vendor/grammars/language-ninja
|
||||
url = https://github.com/khyo/language-ninja
|
||||
[submodule "vendor/grammars/language-fontforge"]
|
||||
path = vendor/grammars/language-fontforge
|
||||
url = https://github.com/Alhadis/language-fontforge
|
||||
[submodule "vendor/grammars/language-gn"]
|
||||
path = vendor/grammars/language-gn
|
||||
url = https://github.com/devoncarew/language-gn
|
||||
[submodule "vendor/grammars/rascal-syntax-highlighting"]
|
||||
path = vendor/grammars/rascal-syntax-highlighting
|
||||
url = https://github.com/usethesource/rascal-syntax-highlighting
|
||||
[submodule "vendor/grammars/atom-language-perl6"]
|
||||
path = vendor/grammars/atom-language-perl6
|
||||
url = https://github.com/perl6/atom-language-perl6
|
||||
[submodule "vendor/grammars/reason"]
|
||||
path = vendor/grammars/reason
|
||||
url = https://github.com/facebook/reason
|
||||
[submodule "vendor/grammars/language-xcompose"]
|
||||
path = vendor/grammars/language-xcompose
|
||||
url = https://github.com/samcv/language-xcompose
|
||||
[submodule "vendor/grammars/SublimeEthereum"]
|
||||
path = vendor/grammars/SublimeEthereum
|
||||
url = https://github.com/davidhq/SublimeEthereum.git
|
||||
[submodule "vendor/grammars/atom-language-rust"]
|
||||
path = vendor/grammars/atom-language-rust
|
||||
url = https://github.com/zargony/atom-language-rust
|
||||
[submodule "vendor/grammars/language-css"]
|
||||
path = vendor/grammars/language-css
|
||||
url = https://github.com/atom/language-css
|
||||
[submodule "vendor/grammars/language-regexp"]
|
||||
path = vendor/grammars/language-regexp
|
||||
url = https://github.com/Alhadis/language-regexp
|
||||
[submodule "vendor/grammars/Terraform.tmLanguage"]
|
||||
path = vendor/grammars/Terraform.tmLanguage
|
||||
url = https://github.com/alexlouden/Terraform.tmLanguage
|
||||
[submodule "vendor/grammars/shaders-tmLanguage"]
|
||||
path = vendor/grammars/shaders-tmLanguage
|
||||
url = https://github.com/tgjones/shaders-tmLanguage
|
||||
[submodule "vendor/grammars/language-meson"]
|
||||
path = vendor/grammars/language-meson
|
||||
url = https://github.com/TingPing/language-meson
|
||||
[submodule "vendor/grammars/atom-language-p4"]
|
||||
path = vendor/grammars/atom-language-p4
|
||||
url = https://github.com/TakeshiTseng/atom-language-p4
|
||||
[submodule "vendor/grammars/language-jison"]
|
||||
path = vendor/grammars/language-jison
|
||||
url = https://github.com/cdibbs/language-jison
|
||||
[submodule "vendor/grammars/openscad.tmbundle"]
|
||||
path = vendor/grammars/openscad.tmbundle
|
||||
url = https://github.com/tbuser/openscad.tmbundle
|
||||
[submodule "vendor/grammars/marko-tmbundle"]
|
||||
path = vendor/grammars/marko-tmbundle
|
||||
url = https://github.com/marko-js/marko-tmbundle
|
||||
[submodule "vendor/grammars/language-jolie"]
|
||||
path = vendor/grammars/language-jolie
|
||||
url = https://github.com/fmontesi/language-jolie
|
||||
|
||||
18
.travis.yml
18
.travis.yml
@@ -1,17 +1,33 @@
|
||||
language: ruby
|
||||
sudo: false
|
||||
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- libicu-dev
|
||||
- libicu48
|
||||
|
||||
before_install: script/travis/before_install
|
||||
|
||||
script:
|
||||
- bundle exec rake
|
||||
- script/licensed verify
|
||||
|
||||
rvm:
|
||||
- 2.0.0
|
||||
- 2.1
|
||||
- 2.2
|
||||
- 2.3.3
|
||||
- 2.4.0
|
||||
|
||||
matrix:
|
||||
allow_failures:
|
||||
- rvm: 2.4.0
|
||||
|
||||
notifications:
|
||||
disabled: true
|
||||
|
||||
git:
|
||||
submodules: false
|
||||
depth: 3
|
||||
|
||||
cache: bundler
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
# Contributing
|
||||
|
||||
[code-of-conduct]: http://todogroup.org/opencodeofconduct/#Linguist/opensource@github.com
|
||||
|
||||
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. This project adheres to the [Open Code of Conduct][code-of-conduct]. By participating, you are expected to uphold this code.
|
||||
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. This project adheres to the [Contributor Covenant Code of Conduct](http://contributor-covenant.org/). By participating, you are expected to uphold this code.
|
||||
|
||||
The majority of contributions won't need to touch any Ruby code at all.
|
||||
|
||||
@@ -12,15 +10,15 @@ We try only to add new extensions once they have some usage on GitHub. In most c
|
||||
|
||||
To add support for a new extension:
|
||||
|
||||
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
|
||||
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
|
||||
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
1. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
|
||||
1. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
|
||||
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
|
||||
In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:
|
||||
|
||||
0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
|
||||
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
|
||||
0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
|
||||
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
|
||||
1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
|
||||
|
||||
## Adding a language
|
||||
@@ -29,18 +27,17 @@ We try only to add languages once they have some usage on GitHub. In most cases
|
||||
|
||||
To add support for a new language:
|
||||
|
||||
0. Add an entry for your language to [`languages.yml`][languages].
|
||||
0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution.
|
||||
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
|
||||
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
|
||||
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
|
||||
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
1. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now.
|
||||
1. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses].
|
||||
1. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
|
||||
1. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
|
||||
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
|
||||
|
||||
In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:
|
||||
|
||||
0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
|
||||
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
|
||||
0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
|
||||
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
|
||||
1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.
|
||||
|
||||
Remember, the goal here is to try and avoid false positives!
|
||||
|
||||
@@ -78,28 +75,47 @@ Sometimes getting the tests running can be too much work, especially if you don'

Here's our current build status: [](https://travis-ci.org/github/linguist)

## Maintainers

## Releasing
Linguist is maintained with :heart: by:

- **@Alhadis**
- **@BenEddy** (GitHub staff)
- **@Caged** (GitHub staff)
- **@grantr** (GitHub staff)
- **@larsbrinkhoff**
- **@lildude** (GitHub staff)
- **@pchaigno**
- **@rafer** (GitHub staff)
- **@shreyasjoshis** (GitHub staff)

As Linguist is a production dependency for GitHub we have a couple of workflow restrictions:

- Anyone with commit rights can merge Pull Requests provided that there is a :+1: from a GitHub member of staff
- Releases are performed by GitHub staff so we can ensure GitHub.com always stays up to date with the latest release of Linguist and there are no regressions in production.

### Releasing

If you are the current maintainer of this gem:

0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
0. Make sure your local dependencies are up to date: `script/bootstrap`
0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
0. Ensure that samples are updated: `bundle exec rake samples`
0. Ensure that tests are green: `bundle exec rake test`
0. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
0. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
0. Build a local gem: `bundle exec rake build_gem`
0. Test the gem:
    0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
    0. Install the new gem locally
    0. Test behavior locally, branch deploy, whatever needs to happen
0. Merge github/linguist PR
0. Tag and push: `git tag vx.xx.xx; git push --tags`
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`
1. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
1. Make sure your local dependencies are up to date: `script/bootstrap`
1. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
1. Ensure that samples are updated: `bundle exec rake samples`
1. Ensure that tests are green: `bundle exec rake test`
1. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
1. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
1. Build a local gem: `bundle exec rake build_gem`
1. Test the gem:
    1. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
    1. Install the new gem locally
    1. Test behavior locally, branch deploy, whatever needs to happen
1. Merge github/linguist PR
1. Tag and push: `git tag vx.xx.xx; git push --tags`
1. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`

[grammars]: /grammars.yml
[languages]: /lib/linguist/languages.yml
[licenses]: https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11
[samples]: /samples
[new-issue]: https://github.com/github/linguist/issues/new
Gemfile (1 change)
@@ -1,4 +1,3 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gemspec :name => "github-linguist-grammars"
gem 'byebug' if RUBY_VERSION >= '2.0'
LICENSE (2 changes)
@@ -1,4 +1,4 @@
Copyright (c) 2011-2016 GitHub, Inc.
Copyright (c) 2017 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md (43 changes)
@@ -15,10 +15,16 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md

The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
1. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
1. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
1. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
1. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

### There's a problem with the syntax highlighting of a file

Linguist detects the language of a file but the actual syntax-highlighting is powered by a set of language grammars which are included in this project as a set of submodules [and may be found here](https://github.com/github/linguist/blob/master/vendor/README.md).

If you experience an issue with the syntax-highlighting on GitHub, **please report the issue to the upstream grammar repository, not here.** Grammars are updated every time we build the Linguist gem and so upstream bug fixes are automatically incorporated as they are fixed.

## Overrides

@@ -26,13 +32,15 @@ Linguist supports a number of different custom overrides strategies for language

### Using gitattributes

Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-or-vim-modelines).
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, `linguist-vendored`, and `linguist-generated`. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines).

```
$ cat .gitattributes
*.rb linguist-language=Java
```

#### Vendored code

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths.
@@ -43,7 +51,9 @@ special-vendored-path/* linguist-vendored
jquery.js linguist-vendored=false
```

Similar to vendored files, Linguist excludes documentation files from your project's language stats. (Unlike vendored files, documentation files are displayed in diffs on github.com.) [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.
#### Documentation

Just like vendored files, Linguist excludes documentation files from your project's language stats. [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.

Use the `linguist-documentation` attribute to mark or unmark paths as documentation.

@@ -53,12 +63,24 @@ project-docs/* linguist-documentation
docs/formatter.rb linguist-documentation=false
```

#### Generated code

Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an added bonus, unlike vendored and documentation files, these files are suppressed in diffs.

```
$ cat .gitattributes
Api.elm linguist-generated=true
```

### Using Emacs or Vim modelines

Alternatively, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com
If you do not want to use `.gitattributes` to override the syntax highlighting used on GitHub.com, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com

##### Vim
```
# Some examples of various styles:
vim: syntax=java
vim: set syntax=ruby:
vim: set filetype=prolog:
vim: set ft=cpp:
```
@@ -111,4 +133,9 @@ lib/linguist.rb

Please check out our [contributing guidelines](CONTRIBUTING.md).

##
## License

The language grammars included in this gem are covered by their repositories'
respective licenses. `grammars.yml` specifies the repository for each grammar.

All other files are covered by the MIT license, see `LICENSE`.
Rakefile (9 changes)
@@ -4,6 +4,7 @@ require 'rake/testtask'
require 'yaml'
require 'yajl'
require 'open-uri'
require 'json'

task :default => :test

@@ -40,18 +41,14 @@ task :samples do
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", Yajl.dump(languages))
  `gem build github-linguist.gemspec`
  File.delete("lib/linguist/languages.json")
end

task :build_grammars_gem do
  rm_rf "grammars"
  sh "script/convert-grammars"
  sh "gem", "build", "github-linguist-grammars.gemspec"
end

namespace :benchmark do
  benchmark_path = "benchmark/results"
@@ -1,5 +1,7 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'optparse'
|
||||
@@ -23,7 +25,7 @@ class GitLinguist
|
||||
if @incremental && stats = load_language_stats
|
||||
old_commit_oid, old_stats = stats
|
||||
|
||||
# A cache with NULL oid means that we want to froze
|
||||
# A cache with NULL oid means that we want to freeze
|
||||
# these language stats in place and stop computing
|
||||
# them (for performance reasons)
|
||||
return old_stats if old_commit_oid == NULL_OID
|
||||
@@ -102,16 +104,22 @@ def git_linguist(args)
|
||||
commit = nil
|
||||
|
||||
parser = OptionParser.new do |opts|
|
||||
opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
opts.banner = <<-HELP
|
||||
Linguist v#{Linguist::VERSION}
|
||||
Detect language type and determine language breakdown for a given Git repository.
|
||||
|
||||
Usage:
|
||||
git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
|
||||
HELP
|
||||
|
||||
opts.on("-f", "--force", "Force a full rescan") { incremental = false }
|
||||
opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
opts.on("-c", "--commit=COMMIT", "Commit to index") { |v| commit = v}
|
||||
end
|
||||
|
||||
parser.parse!(args)
|
||||
|
||||
git_dir = `git rev-parse --git-dir`.strip
|
||||
raise "git-linguist must be ran in a Git repository" unless $?.success?
|
||||
raise "git-linguist must be run in a Git repository (#{Dir.pwd})" unless $?.success?
|
||||
wrapper = GitLinguist.new(git_dir, commit, incremental)
|
||||
|
||||
case args.pop
|
||||
|
||||
bin/linguist (35 changes)
@@ -1,29 +1,37 @@
|
||||
#!/usr/bin/env ruby
|
||||
|
||||
# linguist — detect language type for a file, or, given a directory, determine language breakdown
|
||||
# usage: linguist <path> [<--breakdown>]
|
||||
#
|
||||
$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')
|
||||
|
||||
require 'linguist'
|
||||
require 'rugged'
|
||||
require 'json'
|
||||
require 'optparse'
|
||||
|
||||
path = ARGV[0] || Dir.pwd
|
||||
|
||||
# special case if not given a directory but still given the --breakdown option
|
||||
# special case if not given a directory
|
||||
# but still given the --breakdown or --json options/
|
||||
if path == "--breakdown"
|
||||
path = Dir.pwd
|
||||
breakdown = true
|
||||
elsif path == "--json"
|
||||
path = Dir.pwd
|
||||
json_breakdown = true
|
||||
end
|
||||
|
||||
ARGV.shift
|
||||
breakdown = true if ARGV[0] == "--breakdown"
|
||||
json_breakdown = true if ARGV[0] == "--json"
|
||||
|
||||
if File.directory?(path)
|
||||
rugged = Rugged::Repository.new(path)
|
||||
repo = Linguist::Repository.new(rugged, rugged.head.target_id)
|
||||
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
|
||||
percentage = ((size / repo.size.to_f) * 100)
|
||||
percentage = sprintf '%.2f' % percentage
|
||||
puts "%-7s %s" % ["#{percentage}%", language]
|
||||
if !json_breakdown
|
||||
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
|
||||
percentage = ((size / repo.size.to_f) * 100)
|
||||
percentage = sprintf '%.2f' % percentage
|
||||
puts "%-7s %s" % ["#{percentage}%", language]
|
||||
end
|
||||
end
|
||||
if breakdown
|
||||
puts
|
||||
@@ -35,6 +43,8 @@ if File.directory?(path)
|
||||
end
|
||||
puts
|
||||
end
|
||||
elsif json_breakdown
|
||||
puts JSON.dump(repo.breakdown_by_file)
|
||||
end
|
||||
elsif File.file?(path)
|
||||
blob = Linguist::FileBlob.new(path, Dir.pwd)
|
||||
@@ -63,5 +73,12 @@ elsif File.file?(path)
|
||||
puts " appears to be a vendored file"
|
||||
end
|
||||
else
|
||||
abort "usage: linguist <path>"
|
||||
abort <<-HELP
|
||||
Linguist v#{Linguist::VERSION}
|
||||
Detect language type for a file, or, given a repository, determine language breakdown.
|
||||
|
||||
Usage: linguist <path>
|
||||
linguist <path> [--breakdown] [--json]
|
||||
linguist [--breakdown] [--json]
|
||||
HELP
|
||||
end
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
require File.expand_path('../lib/linguist/version', __FILE__)
|
||||
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'github-linguist-grammars'
|
||||
s.version = Linguist::VERSION
|
||||
s.summary = "Language grammars for use with github-linguist"
|
||||
|
||||
s.authors = "GitHub"
|
||||
s.homepage = "https://github.com/github/linguist"
|
||||
|
||||
s.files = ['lib/linguist/grammars.rb'] + Dir['grammars/*']
|
||||
|
||||
s.add_development_dependency 'plist', '~>3.1'
|
||||
end
|
||||
@@ -10,20 +10,21 @@ Gem::Specification.new do |s|
|
||||
s.homepage = "https://github.com/github/linguist"
|
||||
s.license = "MIT"
|
||||
|
||||
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb'] + ['LICENSE']
|
||||
s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
|
||||
s.executables = ['linguist', 'git-linguist']
|
||||
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.3'
|
||||
s.add_dependency 'escape_utils', '~> 1.1.0'
|
||||
s.add_dependency 'mime-types', '>= 1.19'
|
||||
s.add_dependency 'rugged', '>= 0.23.0b'
|
||||
s.add_dependency 'rugged', '>= 0.25.1'
|
||||
|
||||
s.add_development_dependency 'minitest', '>= 5.0'
|
||||
s.add_development_dependency 'mocha'
|
||||
s.add_development_dependency 'plist', '~>3.1'
|
||||
s.add_development_dependency 'pry'
|
||||
s.add_development_dependency 'rake'
|
||||
s.add_development_dependency 'yajl-ruby'
|
||||
s.add_development_dependency 'color-proximity', '~> 0.2.1'
|
||||
s.add_development_dependency 'licensee', '6.0.0b1'
|
||||
|
||||
s.add_development_dependency 'licensed'
|
||||
s.add_development_dependency 'licensee', '~> 8.8.0'
|
||||
end
|
||||
|
||||
grammars.yml (344 changes; Normal file → Executable file)
@@ -1,67 +1,49 @@
|
||||
---
|
||||
http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
|
||||
- text.xml.genshi
|
||||
http://svn.textmate.org/trunk/Review/Bundles/BlitzMax.tmbundle:
|
||||
- source.blitzmax
|
||||
http://svn.textmate.org/trunk/Review/Bundles/Cython.tmbundle:
|
||||
- source.cython
|
||||
http://svn.textmate.org/trunk/Review/Bundles/Forth.tmbundle:
|
||||
- source.forth
|
||||
http://svn.textmate.org/trunk/Review/Bundles/Parrot.tmbundle:
|
||||
- source.parrot.pir
|
||||
http://svn.textmate.org/trunk/Review/Bundles/SecondLife%20LSL.tmbundle:
|
||||
- source.lsl
|
||||
http://svn.textmate.org/trunk/Review/Bundles/VHDL.tmbundle:
|
||||
- source.vhdl
|
||||
http://svn.textmate.org/trunk/Review/Bundles/XQuery.tmbundle:
|
||||
- source.xquery
|
||||
https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
|
||||
- source.systemverilog
|
||||
- source.ucfconstraints
|
||||
https://fan.googlecode.com/hg-history/Build%201.0.55/adm/tools/textmate/Fan.tmbundle/Syntaxes/Fan.tmLanguage:
|
||||
- source.fan
|
||||
https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
|
||||
- text.xml.genshi
|
||||
vendor/grammars/ABNF.tmbundle:
|
||||
- source.abnf
|
||||
vendor/grammars/Agda.tmbundle:
|
||||
- source.agda
|
||||
vendor/grammars/Alloy.tmbundle:
|
||||
- source.alloy
|
||||
vendor/grammars/AutoHotkey/:
|
||||
vendor/grammars/AutoHotkey:
|
||||
- source.ahk
|
||||
vendor/grammars/BrightScript.tmbundle/:
|
||||
vendor/grammars/BrightScript.tmbundle:
|
||||
- source.brightauthorproject
|
||||
- source.brightscript
|
||||
vendor/grammars/CLIPS-sublime:
|
||||
- source.clips
|
||||
vendor/grammars/ColdFusion:
|
||||
- source.cfscript
|
||||
- source.cfscript.cfc
|
||||
- text.cfml.basic
|
||||
- text.html.cfm
|
||||
vendor/grammars/Creole:
|
||||
- text.html.creole
|
||||
vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
vendor/grammars/Elm.tmLanguage:
|
||||
vendor/grammars/EBNF.tmbundle:
|
||||
- source.ebnf
|
||||
vendor/grammars/Elm/Syntaxes:
|
||||
- source.elm
|
||||
- text.html.mediawiki.elm-build-output
|
||||
- text.html.mediawiki.elm-documentation
|
||||
vendor/grammars/FreeMarker.tmbundle:
|
||||
- text.html.ftl
|
||||
vendor/grammars/G-Code/:
|
||||
vendor/grammars/G-Code:
|
||||
- source.LS
|
||||
- source.MCPOST
|
||||
- source.MOD
|
||||
- source.apt
|
||||
- source.gcode
|
||||
vendor/grammars/GDScript-sublime/:
|
||||
- source.gdscript
|
||||
vendor/grammars/Handlebars:
|
||||
- text.html.handlebars
|
||||
vendor/grammars/IDL-Syntax:
|
||||
- source.webidl
|
||||
vendor/grammars/InnoSetup/:
|
||||
- source.inno
|
||||
vendor/grammars/Isabelle.tmbundle:
|
||||
- source.isabelle.root
|
||||
- source.isabelle.theory
|
||||
vendor/grammars/JSyntax/:
|
||||
vendor/grammars/JSyntax:
|
||||
- source.j
|
||||
vendor/grammars/Julia.tmbundle:
|
||||
- source.julia
|
||||
@@ -69,10 +51,14 @@ vendor/grammars/Lean.tmbundle:
|
||||
- source.lean
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/MQL5-sublime:
|
||||
- source.mql5
|
||||
vendor/grammars/MagicPython:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
vendor/grammars/Modelica/:
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/Modelica:
|
||||
- source.modelica
|
||||
vendor/grammars/NSIS:
|
||||
- source.nsis
|
||||
@@ -82,7 +68,7 @@ vendor/grammars/NimLime:
|
||||
- source.nimcfg
|
||||
vendor/grammars/PHP-Twig.tmbundle:
|
||||
- text.html.twig
|
||||
vendor/grammars/PogoScript.tmbundle/:
|
||||
vendor/grammars/PogoScript.tmbundle:
|
||||
- source.pogoscript
|
||||
vendor/grammars/RDoc.tmbundle:
|
||||
- text.rdoc
|
||||
@@ -97,10 +83,10 @@ vendor/grammars/Scalate.tmbundle:
|
||||
- text.html.ssp
|
||||
vendor/grammars/Slash.tmbundle:
|
||||
- text.html.slash
|
||||
vendor/grammars/Stata.tmbundle/:
|
||||
vendor/grammars/Stata.tmbundle:
|
||||
- source.mata
|
||||
- source.stata
|
||||
vendor/grammars/Stylus/:
|
||||
vendor/grammars/Stylus:
|
||||
- source.stylus
|
||||
vendor/grammars/Sublime-Coq:
|
||||
- source.coq
|
||||
@@ -108,11 +94,9 @@ vendor/grammars/Sublime-HTTP:
|
||||
- source.httpspec
|
||||
vendor/grammars/Sublime-Lasso:
|
||||
- file.lasso
|
||||
vendor/grammars/Sublime-Logos:
|
||||
- source.logos
|
||||
vendor/grammars/Sublime-Loom:
|
||||
- source.loomscript
|
||||
vendor/grammars/Sublime-Modula-2/:
|
||||
vendor/grammars/Sublime-Modula-2:
|
||||
- source.modula2
|
||||
vendor/grammars/Sublime-Nit:
|
||||
- source.nit
|
||||
@@ -127,20 +111,29 @@ vendor/grammars/Sublime-SQF-Language:
|
||||
vendor/grammars/Sublime-Text-2-OpenEdge-ABL:
|
||||
- source.abl
|
||||
- text.html.abl
|
||||
vendor/grammars/Sublime-VimL:
|
||||
- source.viml
|
||||
vendor/grammars/SublimeBrainfuck:
|
||||
- source.bf
|
||||
vendor/grammars/SublimeClarion/:
|
||||
vendor/grammars/SublimeClarion:
|
||||
- source.clarion
|
||||
vendor/grammars/SublimePapyrus/:
|
||||
- source.compiled-papyrus
|
||||
- source.papyrus
|
||||
- source.papyrus-assembly
|
||||
vendor/grammars/SublimeEthereum:
|
||||
- source.solidity
|
||||
vendor/grammars/SublimeGDB/:
|
||||
- source.disasm
|
||||
- source.gdb
|
||||
- source.gdb.session
|
||||
- source.gdbregs
|
||||
vendor/grammars/SublimePapyrus:
|
||||
- source.papyrus.skyrim
|
||||
vendor/grammars/SublimePuppet:
|
||||
- source.puppet
|
||||
vendor/grammars/SublimeXtend:
|
||||
- source.xtend
|
||||
vendor/grammars/TXL/:
|
||||
vendor/grammars/TLA:
|
||||
- source.tla
|
||||
vendor/grammars/TXL:
|
||||
- source.txl
|
||||
vendor/grammars/Terraform.tmLanguage:
|
||||
- source.terraform
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/UrWeb-Language-Definition:
|
||||
@@ -153,7 +146,7 @@ vendor/grammars/X10:
|
||||
- source.x10
|
||||
vendor/grammars/abap.tmbundle:
|
||||
- source.abap
|
||||
vendor/grammars/actionscript3-tmbundle:
|
||||
vendor/grammars/actionscript3-tmbundle/:
|
||||
- source.actionscript.3
|
||||
- text.html.asdoc
|
||||
- text.xml.flex-config
|
||||
@@ -168,40 +161,60 @@ vendor/grammars/antlr.tmbundle:
|
||||
vendor/grammars/apache.tmbundle:
|
||||
- source.apache-config
|
||||
- source.apache-config.mod_perl
|
||||
vendor/grammars/api-blueprint-sublime-plugin/:
|
||||
vendor/grammars/api-blueprint-sublime-plugin:
|
||||
- text.html.markdown.source.gfm.apib
|
||||
- text.html.markdown.source.gfm.mson
|
||||
vendor/grammars/applescript.tmbundle:
|
||||
- source.applescript
|
||||
vendor/grammars/asciidoc.tmbundle/:
|
||||
vendor/grammars/asciidoc.tmbundle:
|
||||
- text.html.asciidoc
|
||||
vendor/grammars/asp.tmbundle:
|
||||
- source.asp
|
||||
- text.html.asp
|
||||
vendor/grammars/assembly.tmbundle:
|
||||
vendor/grammars/assembly:
|
||||
- objdump.x86asm
|
||||
- source.x86asm
|
||||
vendor/grammars/atom-fsharp/:
|
||||
vendor/grammars/atom-fsharp:
|
||||
- source.fsharp
|
||||
- source.fsharp.fsi
|
||||
- source.fsharp.fsl
|
||||
- source.fsharp.fsx
|
||||
vendor/grammars/atom-language-purescript/:
|
||||
vendor/grammars/atom-language-1c-bsl:
|
||||
- source.bsl
|
||||
- source.sdbl
|
||||
vendor/grammars/atom-language-clean:
|
||||
- source.clean
|
||||
- text.restructuredtext.clean
|
||||
vendor/grammars/atom-language-p4:
|
||||
- source.p4
|
||||
vendor/grammars/atom-language-perl6:
|
||||
- source.meta-info
|
||||
- source.perl6fe
|
||||
- source.quoting.perl6fe
|
||||
- source.regexp.perl6fe
|
||||
vendor/grammars/atom-language-purescript:
|
||||
- source.purescript
|
||||
vendor/grammars/atom-language-stan/:
|
||||
vendor/grammars/atom-language-rust:
|
||||
- source.rust
|
||||
vendor/grammars/atom-language-srt:
|
||||
- text.srt
|
||||
vendor/grammars/atom-language-stan:
|
||||
- source.stan
|
||||
vendor/grammars/atom-salt:
|
||||
- source.python.salt
|
||||
- source.yaml.salt
|
||||
vendor/grammars/ats.sublime:
|
||||
vendor/grammars/atomic-dreams:
|
||||
- source.dm
|
||||
- source.dmf
|
||||
vendor/grammars/ats:
|
||||
- source.ats
|
||||
vendor/grammars/autoitv3-tmbundle:
|
||||
- source.autoit.3
|
||||
vendor/grammars/awk-sublime:
|
||||
- source.awk
|
||||
vendor/grammars/bison.tmbundle:
|
||||
- source.bison
|
||||
vendor/grammars/boo-sublime:
|
||||
vendor/grammars/blitzmax:
|
||||
- source.blitzmax
|
||||
vendor/grammars/boo:
|
||||
- source.boo
|
||||
vendor/grammars/bro-sublime:
|
||||
- source.bro
|
||||
@@ -214,7 +227,6 @@ vendor/grammars/capnproto.tmbundle:
|
||||
vendor/grammars/carto-atom:
|
||||
- source.css.mss
|
||||
vendor/grammars/ceylon-sublimetext:
|
||||
- module.ceylon
|
||||
- source.ceylon
|
||||
vendor/grammars/chapel-tmbundle:
|
||||
- source.chapel
|
||||
@@ -226,11 +238,13 @@ vendor/grammars/cool-tmbundle:
|
||||
vendor/grammars/cpp-qt.tmbundle:
|
||||
- source.c++.qt
|
||||
- source.qmake
|
||||
vendor/grammars/css.tmbundle:
|
||||
- source.css
|
||||
vendor/grammars/creole:
|
||||
- text.html.creole
|
||||
vendor/grammars/cucumber-tmbundle:
|
||||
- source.ruby.rspec.cucumber.steps
|
||||
- text.gherkin.feature
|
||||
vendor/grammars/cython:
|
||||
- source.cython
|
||||
vendor/grammars/d.tmbundle:
|
||||
- source.d
|
||||
vendor/grammars/dart-sublime-bundle:
|
||||
@@ -246,12 +260,12 @@ vendor/grammars/dylan.tmbundle:
|
||||
- source.dylan
|
||||
- source.lid
|
||||
- source.makegen
|
||||
vendor/grammars/ebundles/Bundles/MSDOS batch file.tmbundle:
|
||||
- source.dosbatch
|
||||
vendor/grammars/ec.tmbundle/:
|
||||
vendor/grammars/ec.tmbundle:
|
||||
- source.c.ec
|
||||
vendor/grammars/eiffel.tmbundle:
|
||||
- source.eiffel
|
||||
vendor/grammars/ejs-tmbundle:
|
||||
- text.html.js
|
||||
vendor/grammars/elixir-tmbundle:
|
||||
- source.elixir
|
||||
- text.elixir
|
||||
@@ -266,18 +280,22 @@ vendor/grammars/fancy-tmbundle:
|
||||
- source.fancy
|
||||
vendor/grammars/fish-tmbundle:
|
||||
- source.fish
|
||||
vendor/grammars/forth:
|
||||
- source.forth
|
||||
vendor/grammars/fortran.tmbundle:
|
||||
- source.fortran
|
||||
- source.fortran.modern
|
||||
vendor/grammars/gap-tmbundle/:
|
||||
vendor/grammars/gap-tmbundle:
|
||||
- source.gap
|
||||
vendor/grammars/gdscript:
|
||||
- source.gdscript
|
||||
vendor/grammars/gettext.tmbundle:
|
||||
- source.po
|
||||
vendor/grammars/gnuplot-tmbundle:
|
||||
- source.gnuplot
|
||||
vendor/grammars/go-tmbundle:
|
||||
- source.go
|
||||
vendor/grammars/grace-tmbundle/:
|
||||
vendor/grammars/grace:
|
||||
- source.grace
|
||||
vendor/grammars/gradle.tmbundle:
|
||||
- source.groovy.gradle
|
||||
@@ -285,9 +303,6 @@ vendor/grammars/graphviz.tmbundle:
|
||||
- source.dot
|
||||
vendor/grammars/groovy.tmbundle:
|
||||
- source.groovy
|
||||
vendor/grammars/haskell.tmbundle:
|
||||
- source.haskell
|
||||
- text.tex.latex.haskell
|
||||
vendor/grammars/haxe-sublime-bundle:
|
||||
- source.erazor
|
||||
- source.haxe.2
|
||||
@@ -300,6 +315,8 @@ vendor/grammars/idl.tmbundle:
|
||||
- source.idl
|
||||
- source.idl-dlm
|
||||
- text.idl-idldoc
|
||||
vendor/grammars/idris:
|
||||
- source.idris
|
||||
vendor/grammars/ini.tmbundle:
|
||||
- source.ini
|
||||
vendor/grammars/io.tmbundle:
|
||||
@@ -324,10 +341,20 @@ vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-babel/:
|
||||
vendor/grammars/language-agc:
|
||||
- source.agc
|
||||
vendor/grammars/language-apl:
|
||||
- source.apl
|
||||
vendor/grammars/language-asn1:
|
||||
- source.asn
|
||||
vendor/grammars/language-babel:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-click/:
|
||||
vendor/grammars/language-batchfile:
|
||||
- source.batchfile
|
||||
vendor/grammars/language-blade:
|
||||
- text.html.php.blade
|
||||
vendor/grammars/language-click:
|
||||
- source.click
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
@@ -336,42 +363,109 @@ vendor/grammars/language-coffee-script:
|
||||
- source.litcoffee
|
||||
vendor/grammars/language-crystal:
|
||||
- source.crystal
|
||||
- text.html.ecr
|
||||
vendor/grammars/language-csharp:
|
||||
- source.cake
|
||||
- source.cs
|
||||
- source.csx
|
||||
- source.nant-build
|
||||
vendor/grammars/language-csound:
|
||||
- source.csound
|
||||
- source.csound-document
|
||||
- source.csound-score
|
||||
vendor/grammars/language-css:
|
||||
- source.css
|
||||
vendor/grammars/language-emacs-lisp:
|
||||
- source.emacs.lisp
|
||||
vendor/grammars/language-fontforge:
|
||||
- source.fontforge
|
||||
- source.opentype
|
||||
- text.sfd
|
||||
vendor/grammars/language-gfm:
|
||||
- source.gfm
|
||||
vendor/grammars/language-hy:
|
||||
- source.hy
|
||||
vendor/grammars/language-gn:
|
||||
- source.gn
|
||||
vendor/grammars/language-graphql:
|
||||
- source.graphql
|
||||
vendor/grammars/language-haml:
|
||||
- text.haml
|
||||
- text.hamlc
|
||||
vendor/grammars/language-haskell:
|
||||
- hint.haskell
|
||||
- hint.message.haskell
|
||||
- hint.type.haskell
|
||||
- source.c2hs
|
||||
- source.cabal
|
||||
- source.haskell
|
||||
- source.hsc2hs
|
||||
- text.tex.latex.haskell
|
||||
vendor/grammars/language-inform7:
|
||||
- source.inform7
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
- source.js.regexp.replacement
|
||||
vendor/grammars/language-jsoniq/:
|
||||
- source.jsdoc
|
||||
vendor/grammars/language-jison:
|
||||
- source.jison
|
||||
- source.jisonlex
|
||||
- source.jisonlex-injection
|
||||
vendor/grammars/language-jolie:
|
||||
- source.jolie
|
||||
vendor/grammars/language-jsoniq:
|
||||
- source.jq
|
||||
- source.xq
|
||||
vendor/grammars/language-less:
|
||||
- source.css.less
|
||||
vendor/grammars/language-maxscript:
|
||||
- source.maxscript
|
||||
vendor/grammars/language-meson:
|
||||
- source.meson
|
||||
vendor/grammars/language-ncl:
|
||||
- source.ncl
|
||||
vendor/grammars/language-python:
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-ninja:
|
||||
- source.ninja
|
||||
vendor/grammars/language-povray:
|
||||
- source.pov-ray sdl
|
||||
vendor/grammars/language-regexp:
|
||||
- source.regexp
|
||||
- source.regexp.extended
|
||||
vendor/grammars/language-renpy:
|
||||
- source.renpy
|
||||
vendor/grammars/language-restructuredtext:
|
||||
- text.restructuredtext
|
||||
vendor/grammars/language-roff:
|
||||
- source.ditroff
|
||||
- source.ditroff.desc
|
||||
- source.ideal
|
||||
- source.pic
|
||||
- text.roff
|
||||
- text.runoff
|
||||
vendor/grammars/language-rpm-spec:
|
||||
- source.changelogs.rpm-spec
|
||||
- source.rpm-spec
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-supercollider:
|
||||
- source.supercollider
|
||||
vendor/grammars/language-toc-wow:
|
||||
- source.toc
|
||||
vendor/grammars/language-turing:
|
||||
- source.turing
|
||||
vendor/grammars/language-viml:
|
||||
- source.viml
|
||||
vendor/grammars/language-wavefront:
|
||||
- source.wavefront.mtl
|
||||
- source.wavefront.obj
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-xcompose:
|
||||
- config.xcompose
|
||||
vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/language-yang:
|
||||
- source.yang
|
||||
vendor/grammars/latex.tmbundle:
|
||||
- text.bibtex
|
||||
- text.log.latex
|
||||
@@ -379,8 +473,6 @@ vendor/grammars/latex.tmbundle:
|
||||
- text.tex.latex
|
||||
- text.tex.latex.beamer
|
||||
- text.tex.latex.memoir
|
||||
vendor/grammars/less.tmbundle:
|
||||
- source.css.less
|
||||
vendor/grammars/lilypond.tmbundle:
|
||||
- source.lilypond
|
||||
vendor/grammars/liquid.tmbundle:
|
||||
@@ -389,6 +481,8 @@ vendor/grammars/lisp.tmbundle:
|
||||
- source.lisp
|
||||
vendor/grammars/llvm.tmbundle:
|
||||
- source.llvm
|
||||
vendor/grammars/logos:
|
||||
- source.logos
|
||||
vendor/grammars/logtalk.tmbundle:
|
||||
- source.logtalk
|
||||
vendor/grammars/lua.tmbundle:
|
||||
@@ -397,6 +491,8 @@ vendor/grammars/make.tmbundle:
|
||||
- source.makefile
|
||||
vendor/grammars/mako-tmbundle:
|
||||
- text.html.mako
|
||||
vendor/grammars/marko-tmbundle:
|
||||
- text.marko
|
||||
vendor/grammars/mathematica-tmbundle:
|
||||
- source.mathematica
|
||||
vendor/grammars/matlab.tmbundle:
|
||||
@@ -404,20 +500,22 @@ vendor/grammars/matlab.tmbundle:
|
||||
- source.octave
|
||||
vendor/grammars/maven.tmbundle:
|
||||
- text.xml.pom
|
||||
vendor/grammars/mediawiki.tmbundle/:
|
||||
vendor/grammars/mediawiki.tmbundle:
|
||||
- text.html.mediawiki
|
||||
vendor/grammars/mercury-tmlanguage:
|
||||
- source.mercury
|
||||
vendor/grammars/monkey.tmbundle:
|
||||
vendor/grammars/monkey:
|
||||
- source.monkey
|
||||
vendor/grammars/moonscript-tmbundle:
|
||||
- source.moonscript
|
||||
vendor/grammars/nemerle.tmbundle:
|
||||
- source.nemerle
|
||||
vendor/grammars/nesC.tmbundle:
|
||||
vendor/grammars/nesC:
|
||||
- source.nesc
|
||||
vendor/grammars/ninja.tmbundle:
|
||||
- source.ninja
|
||||
vendor/grammars/nix:
|
||||
- source.nix
|
||||
vendor/grammars/nu.tmbundle:
|
||||
- source.nu
|
||||
vendor/grammars/objective-c.tmbundle:
|
||||
- source.objc
|
||||
- source.objc++
|
||||
@@ -432,21 +530,25 @@ vendor/grammars/ooc.tmbundle:
|
||||
- source.ooc
|
||||
vendor/grammars/opa.tmbundle:
|
||||
- source.opa
|
||||
vendor/grammars/openscad.tmbundle/:
|
||||
vendor/grammars/openscad.tmbundle:
|
||||
- source.scad
|
||||
vendor/grammars/oracle.tmbundle:
|
||||
- source.plsql.oracle
|
||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
|
||||
- source.oz
|
||||
vendor/grammars/parrot:
|
||||
- source.parrot.pir
|
||||
vendor/grammars/pascal.tmbundle:
|
||||
- source.pascal
|
||||
vendor/grammars/perl.tmbundle/:
|
||||
vendor/grammars/pawn-sublime-language:
|
||||
- source.pawn
|
||||
vendor/grammars/perl.tmbundle:
|
||||
- source.perl
|
||||
- source.perl.6
|
||||
vendor/grammars/php-smarty.tmbundle:
|
||||
- text.html.smarty
|
||||
vendor/grammars/php.tmbundle:
|
||||
- text.html.php
|
||||
vendor/grammars/pig-latin:
|
||||
- source.pig_latin
|
||||
vendor/grammars/pike-textmate:
|
||||
- source.pike
|
||||
vendor/grammars/postscript.tmbundle:
|
||||
@@ -457,26 +559,24 @@ vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
- source.puppet
|
||||
vendor/grammars/python-django.tmbundle:
|
||||
- source.python.django
|
||||
- text.html.django
|
||||
vendor/grammars/r.tmbundle:
|
||||
- source.r
|
||||
- text.tex.latex.rd
|
||||
vendor/grammars/restructuredtext.tmbundle:
|
||||
- text.restructuredtext
|
||||
vendor/grammars/ruby-haml.tmbundle:
|
||||
- text.haml
|
||||
vendor/grammars/rascal-syntax-highlighting:
|
||||
- source.rascal
|
||||
vendor/grammars/reason:
|
||||
- source.reason
|
||||
vendor/grammars/ruby-slim.tmbundle:
|
||||
- text.slim
|
||||
vendor/grammars/ruby.tmbundle:
|
||||
- source.ruby
|
||||
- text.html.erb
|
||||
vendor/grammars/sas.tmbundle:
|
||||
- source.SASLog
|
||||
- source.sas
|
||||
- source.sas_log
|
||||
vendor/grammars/sass-textmate-bundle:
|
||||
- source.sass
|
||||
vendor/grammars/scala.tmbundle:
|
||||
@@ -486,10 +586,17 @@ vendor/grammars/scheme.tmbundle:
|
||||
- source.scheme
|
||||
vendor/grammars/scilab.tmbundle:
|
||||
- source.scilab
|
||||
vendor/grammars/smali-sublime/smali.tmLanguage:
|
||||
vendor/grammars/secondlife-lsl:
|
||||
- source.lsl
|
||||
vendor/grammars/shaders-tmLanguage:
|
||||
- source.hlsl
|
||||
- source.shaderlab
|
||||
vendor/grammars/smali-sublime:
|
||||
- source.smali
|
||||
vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/st2-zonefile:
|
||||
@@ -499,23 +606,23 @@ vendor/grammars/standard-ml.tmbundle:
|
||||
- source.ml
|
||||
vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-aspectj/:
|
||||
vendor/grammars/sublime-aspectj:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-autoit:
|
||||
- source.autoit
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
- source.cirru
|
||||
vendor/grammars/sublime-clips:
|
||||
- source.clips
|
||||
vendor/grammars/sublime-glsl:
|
||||
- source.essl
|
||||
- source.glsl
|
||||
vendor/grammars/sublime-golo/:
|
||||
vendor/grammars/sublime-golo:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-idris:
|
||||
- source.idris
|
||||
vendor/grammars/sublime-mask:
|
||||
- source.mask
|
||||
vendor/grammars/sublime-netlinx:
|
||||
@@ -523,31 +630,29 @@ vendor/grammars/sublime-netlinx:
|
||||
- source.netlinx.erb
|
||||
vendor/grammars/sublime-nginx:
|
||||
- source.nginx
|
||||
vendor/grammars/sublime-nix:
|
||||
- source.nix
|
||||
vendor/grammars/sublime-opal/:
|
||||
vendor/grammars/sublime-opal:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-pony:
|
||||
- source.pony
|
||||
vendor/grammars/sublime-rexx:
|
||||
- source.rexx
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
- source.rust
|
||||
vendor/grammars/sublime-sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sublime-spintools/:
|
||||
vendor/grammars/sublime-spintools:
|
||||
- source.regexp.spin
|
||||
- source.spin
|
||||
vendor/grammars/sublime-tea:
|
||||
- source.tea
|
||||
vendor/grammars/sublime-text-ox/:
|
||||
vendor/grammars/sublime-terra:
|
||||
- source.terra
|
||||
vendor/grammars/sublime-text-ox:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime-typescript/:
|
||||
vendor/grammars/sublime-typescript:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
- text.error-list
|
||||
- text.find-refs
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -555,10 +660,9 @@ vendor/grammars/sublime_cobol:
|
||||
- source.cobol
|
||||
- source.jcl
|
||||
- source.opencobol
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimeprolog/:
|
||||
vendor/grammars/sublimeassembly:
|
||||
- source.assembly
|
||||
vendor/grammars/sublimeprolog:
|
||||
- source.prolog
|
||||
- source.prolog.eclipse
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
@@ -577,11 +681,11 @@ vendor/grammars/turtle.tmbundle:
|
||||
- source.turtle
|
||||
vendor/grammars/verilog.tmbundle:
|
||||
- source.verilog
|
||||
vendor/grammars/vhdl:
|
||||
- source.vhdl
|
||||
vendor/grammars/vue-syntax-highlight:
|
||||
- text.html.vue
|
||||
vendor/grammars/x86-assembly-textmate-bundle:
|
||||
- source.asm.x86
|
||||
vendor/grammars/xc.tmbundle/:
|
||||
vendor/grammars/xc.tmbundle:
|
||||
- source.xc
|
||||
vendor/grammars/xml.tmbundle:
|
||||
- text.xml
|
||||
|
||||
@@ -1,5 +1,6 @@
require 'linguist/blob_helper'
require 'linguist/generated'
require 'linguist/grammars'
require 'linguist/heuristics'
require 'linguist/language'
require 'linguist/repository'
@@ -8,8 +9,86 @@ require 'linguist/shebang'
require 'linguist/version'

class << Linguist
  # Public: Detects the Language of the blob.
  #
  # blob - an object that includes the Linguist `BlobHelper` interface;
  #        see Linguist::LazyBlob and Linguist::FileBlob for examples
  #
  # Returns Language or nil.
  def detect(blob, allow_empty: false)
    # Bail early if the blob is binary or empty.
    return nil if blob.likely_binary? || blob.binary? || (!allow_empty && blob.empty?)

    Linguist.instrument("linguist.detection", :blob => blob) do
      # Call each strategy until one candidate is returned.
      languages = []
      returning_strategy = nil

      STRATEGIES.each do |strategy|
        returning_strategy = strategy
        candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
          strategy.call(blob, languages)
        end
        if candidates.size == 1
          languages = candidates
          break
        elsif candidates.size > 1
          # More than one candidate was found, pass them to the next strategy.
          languages = candidates
        else
          # No candidates, try the next strategy
        end
      end

      Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)

      languages.first
    end
  end

  # Internal: The strategies used to detect the language of a file.
  #
  # A strategy is an object that has a `.call` method that takes two arguments:
  #
  #   blob      - An object that quacks like a blob.
  #   languages - An Array of candidate Language objects that were returned by the
  #               previous strategy.
  #
  # A strategy should return an Array of Language candidates.
  #
  # Strategies are called in turn until a single Language is returned.
  STRATEGIES = [
    Linguist::Strategy::Modeline,
    Linguist::Strategy::Filename,
    Linguist::Shebang,
    Linguist::Strategy::Extension,
    Linguist::Heuristics,
    Linguist::Classifier
  ]
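As a rough usage sketch of the API this adds (the path below is an arbitrary example), a caller can hand `Linguist.detect` any blob-like object, such as the `Linguist::FileBlob` used by `bin/linguist`:

```
require 'linguist'

# FileBlob wraps a file on disk, given its path and the repository root.
blob = Linguist::FileBlob.new("lib/linguist.rb", Dir.pwd)

language = Linguist.detect(blob)
puts language ? language.name : "unknown"
```

Because the strategies run in the order listed in `STRATEGIES`, a modeline or filename match can settle the answer before the heuristics or the Bayesian classifier are ever consulted.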
# Public: Set an instrumenter.
|
||||
#
|
||||
# class CustomInstrumenter
|
||||
# def instrument(name, payload = {})
|
||||
# warn "Instrumenting #{name}: #{payload[:blob]}"
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# Linguist.instrumenter = CustomInstrumenter.new
|
||||
#
|
||||
# The instrumenter must conform to the `ActiveSupport::Notifications`
|
||||
# interface, which defines `#instrument` and accepts:
|
||||
#
|
||||
# name - the String name of the event (e.g. "linguist.detected")
|
||||
# payload - a Hash of the exception context.
|
||||
attr_accessor :instrumenter
|
||||
|
||||
# Internal: Perform instrumentation on a block
|
||||
#
|
||||
# Linguist.instrument("linguist.dosomething", :blob => blob) do
|
||||
# # logic to instrument here.
|
||||
# end
|
||||
#
|
||||
def instrument(*args, &bk)
|
||||
if instrumenter
|
||||
instrumenter.instrument(*args, &bk)
|
||||
@@ -17,4 +96,5 @@ class << Linguist
|
||||
yield
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
@@ -63,7 +63,7 @@ module Linguist
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
_, *segments = name.downcase.split(".", -1)
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
|
||||
@@ -6,7 +6,7 @@ require 'yaml'
|
||||
|
||||
module Linguist
|
||||
# DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
|
||||
# like `Language.detect` over `Blob#language`. Functions are much easier to
|
||||
# like `Linguist.detect` over `Blob#language`. Functions are much easier to
|
||||
# cache and compose.
|
||||
#
|
||||
# Avoid adding additional bloat to this module.
|
||||
@@ -325,7 +325,7 @@ module Linguist
|
||||
#
|
||||
# Returns a Language or nil if none is detected
|
||||
def language
|
||||
@language ||= Language.detect(self)
|
||||
@language ||= Linguist.detect(self)
|
||||
end
|
||||
|
||||
# Internal: Get the TextMate compatible scope for the blob
|
||||
|
||||
@@ -95,7 +95,7 @@ module Linguist
|
||||
# Returns sorted Array of result pairs. Each pair contains the
|
||||
# String language name and a Float score.
|
||||
def classify(tokens, languages)
|
||||
return [] if tokens.nil?
|
||||
return [] if tokens.nil? || languages.empty?
|
||||
tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
|
||||
scores = {}
|
||||
|
||||
|
||||
@@ -9,11 +9,12 @@
|
||||
|
||||
## Documentation directories ##
|
||||
|
||||
- ^docs?/
|
||||
- ^[Dd]ocs?/
|
||||
- (^|/)[Dd]ocumentation/
|
||||
- (^|/)javadoc/
|
||||
- ^man/
|
||||
- (^|/)[Jj]avadoc/
|
||||
- ^[Mm]an/
|
||||
- ^[Ee]xamples/
|
||||
- ^[Dd]emos?/
|
||||
|
||||
## Documentation files ##
|
||||
|
||||
@@ -27,4 +28,4 @@
|
||||
- (^|/)[Rr]eadme(\.|$)
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
- ^[Ss]amples?/
|
||||
|
||||
@@ -3,7 +3,7 @@ module Linguist
|
||||
# Public: Is the blob a generated file?
|
||||
#
|
||||
# name - String filename
|
||||
# data - String blob data. A block also maybe passed in for lazy
|
||||
# data - String blob data. A block also may be passed in for lazy
|
||||
# loading. This behavior is deprecated and you should always
|
||||
# pass in a String.
|
||||
#
|
||||
@@ -56,9 +56,12 @@ module Linguist
|
||||
generated_net_specflow_feature_file? ||
|
||||
composer_lock? ||
|
||||
node_modules? ||
|
||||
go_vendor? ||
|
||||
npm_shrinkwrap? ||
|
||||
godeps? ||
|
||||
generated_by_zephir? ||
|
||||
minified_files? ||
|
||||
has_source_map? ||
|
||||
source_map? ||
|
||||
compiled_coffeescript? ||
|
||||
generated_parser? ||
|
||||
@@ -67,12 +70,18 @@ module Linguist
|
||||
compiled_cython_file? ||
|
||||
generated_go? ||
|
||||
generated_protocol_buffer? ||
|
||||
generated_javascript_protocol_buffer? ||
|
||||
generated_apache_thrift? ||
|
||||
generated_jni_header? ||
|
||||
vcr_cassette? ||
|
||||
generated_module? ||
|
||||
generated_unity3d_meta? ||
|
||||
generated_racc?
|
||||
generated_racc? ||
|
||||
generated_jflex? ||
|
||||
generated_grammarkit? ||
|
||||
generated_roxygen2? ||
|
||||
generated_jison? ||
|
||||
generated_yarn_lock?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
@@ -102,6 +111,21 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
# Internal: Does the blob contain a source map reference?
|
||||
#
|
||||
# We assume that if one of the last 2 lines starts with a source map
|
||||
# reference, then the current file was generated from other files.
|
||||
#
|
||||
# We use the last 2 lines because the last line might be empty.
|
||||
#
|
||||
# We only handle JavaScript, no CSS support yet.
|
||||
#
|
||||
# Returns true or false.
|
||||
def has_source_map?
|
||||
return false unless extname.downcase == '.js'
|
||||
lines.last(2).any? { |line| line.start_with?('//# sourceMappingURL') }
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated source map?
|
||||
#
|
||||
# Source Maps usually have .css.map or .js.map extensions. In case they
|
||||
@@ -255,16 +279,25 @@ module Linguist
|
||||
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
|
||||
# Internal: Is the blob a Javascript source file generated by the
|
||||
# Protocol Buffer compiler?
|
||||
#
|
||||
# Returns true of false.
|
||||
def generated_javascript_protocol_buffer?
|
||||
return false unless extname == ".js"
|
||||
return false unless lines.count > 6
|
||||
|
||||
return lines[5].include?("GENERATED CODE -- DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp', '.php']
|
||||
|
||||
# Internal: Is the blob generated by Apache Thrift compiler?
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_apache_thrift?
|
||||
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
|
||||
return lines.first(6).any? { |l| l.include?("Autogenerated by Thrift Compiler") }
|
||||
end
|
||||
|
||||
# Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
|
||||
@@ -285,6 +318,21 @@ module Linguist
|
||||
!!name.match(/node_modules\//)
|
||||
end
|
||||
|
||||
# Internal: Is the blob part of the Go vendor/ tree,
|
||||
# not meant for humans in pull requests.
|
||||
#
|
||||
# Returns true or false.
|
||||
def go_vendor?
|
||||
!!name.match(/vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/)
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated npm shrinkwrap file?
|
||||
#
|
||||
# Returns true or false.
|
||||
def npm_shrinkwrap?
|
||||
!!name.match(/npm-shrinkwrap\.json/)
|
||||
end
|
||||
|
||||
# Internal: Is the blob part of Godeps/,
|
||||
# which are not meant for humans in pull requests.
|
||||
#
|
||||
@@ -300,7 +348,7 @@ module Linguist
|
||||
!!name.match(/composer\.lock/)
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated by Zephir
|
||||
# Internal: Is the blob generated by Zephir?
|
||||
#
|
||||
# Returns true or false.
|
||||
def generated_by_zephir?
|
||||
@@ -337,14 +385,14 @@ module Linguist
|
||||
# on the first line.
|
||||
#
|
||||
# GFortran module files contain:
|
||||
# GFORTRAN module version 'x' created from
|
||||
# GFORTRAN module version 'x' created from
|
||||
# on the first line.
|
||||
#
|
||||
# Return true of false
|
||||
def generated_module?
|
||||
return false unless extname == '.mod'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("PCBNEW-LibModule-V") ||
|
||||
return lines[0].include?("PCBNEW-LibModule-V") ||
|
||||
lines[0].include?("GFORTRAN module version '")
|
||||
end
|
||||
|
||||
@@ -373,5 +421,73 @@ module Linguist
|
||||
return false unless lines.count > 2
|
||||
return lines[2].start_with?("# This file is automatically generated by Racc")
|
||||
end
|
||||
|
||||
# Internal: Is this a JFlex-generated file?
|
||||
#
|
||||
# A JFlex-generated file contains:
|
||||
# /* The following code was generated by JFlex x.y.z on d/at/e ti:me */
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_jflex?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("/* The following code was generated by JFlex ")
|
||||
end
|
||||
|
||||
# Internal: Is this a GrammarKit-generated file?
|
||||
#
|
||||
# A GrammarKit-generated file typically contain:
|
||||
# // This is a generated file. Not intended for manual editing.
|
||||
# on the first line. This is not always the case, as it's possible to
|
||||
# customize the class header.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_grammarkit?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
|
||||
end
|
||||
|
||||
# Internal: Is this a roxygen2-generated file?
|
||||
#
|
||||
# A roxygen2-generated file typically contain:
|
||||
# % Generated by roxygen2: do not edit by hand
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_roxygen2?
|
||||
return false unless extname == '.Rd'
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("% Generated by roxygen2: do not edit by hand")
|
||||
end
|
||||
|
||||
# Internal: Is this a Jison-generated file?
|
||||
#
|
||||
# Jison-generated parsers typically contain:
|
||||
# /* parser generated by jison
|
||||
# on the first line.
|
||||
#
|
||||
# Jison-generated lexers typically contain:
|
||||
# /* generated by jison-lex
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_jison?
|
||||
return false unless extname == '.js'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("/* parser generated by jison ") ||
|
||||
lines[0].start_with?("/* generated by jison-lex ")
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated yarn lockfile?
|
||||
#
|
||||
# Returns true or false.
|
||||
def generated_yarn_lock?
|
||||
return false unless name.match(/yarn\.lock/)
|
||||
return false unless lines.count > 0
|
||||
return lines[0].include?("# THIS IS AN AUTOGENERATED FILE")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
# Note: This file is included in the github-linguist-grammars gem, not the
|
||||
# github-linguist gem.
|
||||
|
||||
module Linguist
|
||||
module Grammars
|
||||
# Get the path to the directory containing the language grammar JSON files.
|
||||
|
||||
@@ -86,6 +86,14 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".builds" do |data|
|
||||
if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
|
||||
Language["XML"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
@@ -102,6 +110,12 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cls" do |data|
|
||||
if /\\\w+{/.match(data)
|
||||
Language["TeX"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
@@ -111,11 +125,18 @@ module Linguist
|
||||
end
|
||||
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
# see http://dlang.org/spec/grammar
|
||||
# ModuleDeclaration | ImportDeclaration | FuncDeclaration | unittest
|
||||
if /^module\s+[\w.]*\s*;|import\s+[\w\s,.:]*;|\w+\s+\w+\s*\(.*\)(?:\(.*\))?\s*{[^}]*}|unittest\s*(?:\(.*\))?\s*{[^}]*}/.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
# see http://dtrace.org/guide/chp-prog.html, http://dtrace.org/guide/chp-profile.html, http://dtrace.org/guide/chp-opt.html
|
||||
elsif /^(\w+:\w*:\w*:\w*|BEGIN|END|provider\s+|(tick|profile)-\w+\s+{[^}]*}|#pragma\s+D\s+(option|attributes|depends_on)\s|#pragma\s+ident\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
# path/target : dependency \
|
||||
# target : \
|
||||
# : dependency
|
||||
# path/file.ext1 : some/path/../file.ext2
|
||||
elsif /([\/\\].*:\s+.*\s\\$|: \\$|^ : |^[\w\s\/\\.]+\w+\.\w+\s*:\s+[\w\s\/\\.]+\w+\.\w+)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
@@ -127,12 +148,32 @@ module Linguist
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".es" do |data|
|
||||
if /^\s*(?:%%|main\s*\(.*?\)\s*->)/.match(data)
|
||||
Language["Erlang"]
|
||||
elsif /(?:\/\/|("|')use strict\1|export\s+default\s|\/\*.*?\*\/)/m.match(data)
|
||||
Language["JavaScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for", ".f" do |data|
|
||||
fortran_rx = /^([c*][^abd-z]| (subroutine|program|end|data)\s|\s*!)/i
|
||||
|
||||
disambiguate ".f" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
elsif data.include?("flowop")
|
||||
Language["Filebench WML"]
|
||||
elsif fortran_rx.match(data)
|
||||
Language["Fortran"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif fortran_rx.match(data)
|
||||
Language["Fortran"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -171,13 +212,21 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".inc" do |data|
|
||||
if /^<\?(?:php)?/.match(data)
|
||||
Language["PHP"]
|
||||
elsif /^\s*#(declare|local|macro|while)\s/.match(data)
|
||||
Language["POV-Ray SDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
Language["Roff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
@@ -208,7 +257,7 @@ module Linguist
|
||||
Language["MUF"]
|
||||
elsif /^\s*;/.match(data)
|
||||
Language["M"]
|
||||
elsif /^\s*\(\*/.match(data)
|
||||
elsif /\*\)$/.match(data)
|
||||
Language["Mathematica"]
|
||||
elsif /^\s*%/.match(data)
|
||||
Language["Matlab"]
|
||||
@@ -217,6 +266,16 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".md" do |data|
|
||||
if /(^[-a-z0-9=#!\*\[|>])|<\//i.match(data) || data.empty?
|
||||
Language["Markdown"]
|
||||
elsif /^(;;|\(define_)/.match(data)
|
||||
Language["GCC Machine Description"]
|
||||
else
|
||||
Language["Markdown"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
@@ -228,7 +287,7 @@ module Linguist
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
elsif /^\s*MODULE [\w\.]+;/i.match(data) || /^\s*END [\w\.]+;/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
@@ -237,9 +296,9 @@ module Linguist
|
||||
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
Language["Roff"]
|
||||
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
|
||||
Language["GAS"]
|
||||
Language["Unix Assembly"]
|
||||
else
|
||||
Language["MAXScript"]
|
||||
end
|
||||
@@ -247,7 +306,7 @@ module Linguist
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
Language["Roff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
@@ -276,7 +335,7 @@ module Linguist
end

disambiguate ".pl" do |data|
if /^[^#]+:-/.match(data)
if /^[^#]*:-/.match(data)
Language["Prolog"]
elsif /use strict|use\s+v?5\./.match(data)
Language["Perl"]
@@ -285,11 +344,19 @@ module Linguist
end
end

disambiguate ".pm", ".t" do |data|
if /use strict|use\s+v?5\./.match(data)
Language["Perl"]
elsif /^(use v6|(my )?class|module)/.match(data)
disambiguate ".pm" do |data|
if /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
Language["Perl6"]
elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
Language["Perl"]
end
end

disambiguate ".pod" do |data|
if /^=\w+\b/.match(data)
Language["Pod"]
else
Language["Perl"]
end
end

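The tightened `.pm` patterns above are easiest to read against concrete first lines; an illustrative check (plain Ruby, with made-up sample snippets):

    # Illustrative sketch: the Perl 6 vs Perl 5 checks from the ".pm" block above.
    perl6_rx = /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/
    perl5_rx = /\buse\s+(?:strict\b|v?5\.)/

    puts !!perl6_rx.match("use v6;\nclass Cat { }")       # true  -> Perl6
    puts !!perl5_rx.match("use strict;\nuse warnings;")   # true  -> Perl
    puts !!perl6_rx.match("use strict;\nuse warnings;")   # false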
@@ -305,14 +372,38 @@ module Linguist
end
end

disambiguate ".props" do |data|
if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
Language["XML"]
elsif /\w+\s*=\s*/i.match(data)
Language["INI"]
end
end

disambiguate ".r" do |data|
if /\bRebol\b/i.match(data)
Language["Rebol"]
elsif data.include?("<-")
elsif /<-|^\s*#/.match(data)
Language["R"]
end
end

disambiguate ".rno" do |data|
if /^\.!|^\.end lit(?:eral)?\b/i.match(data)
Language["RUNOFF"]
elsif /^\.\\" /.match(data)
Language["Roff"]
end
end

disambiguate ".rpy" do |data|
if /(^(import|from|class|def)\s)/m.match(data)
Language["Python"]
else
Language["Ren'Py"]
end
end

disambiguate ".rs" do |data|
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
Language["Rust"]
@@ -330,13 +421,13 @@ module Linguist
end

disambiguate ".sql" do |data|
if /^\\i\b|AS \$\$|LANGUAGE '+plpgsql'+/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
if /^\\i\b|AS \$\$|LANGUAGE '?plpgsql'?/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
#Postgres
Language["PLpgSQL"]
elsif /(alter module)|(language sql)|(begin( NOT)+ atomic)/i.match(data) || /signal SQLSTATE '[0-9]+'/i.match(data)
#IBM db2
Language["SQLPL"]
elsif /pragma|\$\$PLSQL_|XMLTYPE|sysdate|systimestamp|\.nextval|connect by|AUTHID (DEFINER|CURRENT_USER)/i.match(data) || /constructor\W+function/i.match(data)
elsif /\$\$PLSQL_|XMLTYPE|sysdate|systimestamp|\.nextval|connect by|AUTHID (DEFINER|CURRENT_USER)/i.match(data) || /constructor\W+function/i.match(data)
#Oracle
Language["PLSQL"]
elsif ! /begin|boolean|package|exception/i.match(data)
@@ -344,9 +435,33 @@ module Linguist
Language["SQL"]
end
end
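Since the `.sql` branches above differ only in a handful of dialect markers, a small illustrative probe makes the intent clearer (plain Ruby; the SQL snippets are invented examples):

    # Illustrative sketch: dialect markers used by the ".sql" heuristic above.
    pgsql  = "CREATE FUNCTION add(a int, b int) RETURNS int AS $$ SELECT a + b $$ LANGUAGE plpgsql;"
    db2    = "BEGIN\n  SIGNAL SQLSTATE '75002' SET MESSAGE_TEXT = 'Oops';\nEND"
    oracle = "CREATE OR REPLACE FUNCTION f RETURN DATE AUTHID DEFINER IS BEGIN RETURN sysdate; END;"

    puts !!(pgsql  =~ /AS \$\$|LANGUAGE '?plpgsql'?/i)            # true  -> PLpgSQL
    puts !!(db2    =~ /signal SQLSTATE '[0-9]+'/i)                # true  -> SQLPL
    puts !!(oracle =~ /sysdate|AUTHID (DEFINER|CURRENT_USER)/i)   # true  -> PLSQL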

disambiguate ".srt" do |data|
if /^(\d{2}:\d{2}:\d{2},\d{3})\s*(-->)\s*(\d{2}:\d{2}:\d{2},\d{3})$/.match(data)
Language["SubRip Text"]
end
end

disambiguate ".t" do |data|
if /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
Language["Turing"]
elsif /^\s*(?:use\s+v6\s*;|\bmodule\b|\b(?:my\s+)?class\b)/.match(data)
Language["Perl6"]
elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
Language["Perl"]
end
end

disambiguate ".toc" do |data|
if /^## |@no-lib-strip@/.match(data)
Language["World of Warcraft Addon Data"]
elsif /^\\(contentsline|defcounter|beamer|boolfalse)/.match(data)
Language["TeX"]
end
end

disambiguate ".ts" do |data|
if data.include?("<TS ")
if data.include?("<TS")
Language["XML"]
else
Language["TypeScript"]
@@ -361,5 +476,13 @@ module Linguist
Language["Scilab"]
end
end

disambiguate ".tsx" do |data|
if /^\s*(import.+(from\s+|require\()['"]react|\/\/\/\s*<reference\s)/.match(data)
Language["TypeScript"]
elsif /^\s*<\?xml\s+version/i.match(data)
Language["XML"]
end
end
end
end

@@ -11,6 +11,7 @@ require 'linguist/samples'
require 'linguist/file_blob'
require 'linguist/blob_helper'
require 'linguist/strategy/filename'
require 'linguist/strategy/extension'
require 'linguist/strategy/modeline'
require 'linguist/shebang'

@@ -20,10 +21,11 @@ module Linguist
#
# Languages are defined in `lib/linguist/languages.yml`.
class Language
@languages = []
@index = {}
@name_index = {}
@alias_index = {}
@languages = []
@index = {}
@name_index = {}
@alias_index = {}
@language_id_index = {}

@extension_index = Hash.new { |h,k| h[k] = [] }
@interpreter_index = Hash.new { |h,k| h[k] = [] }
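The two `Hash.new { |h,k| h[k] = [] }` indexes above rely on the block both building and storing the default, so registration code can append with `<<` on first access; a minimal illustration in plain Ruby (the keys and values here are placeholders):

    # Illustrative sketch: why the index hashes use a storing default block.
    index = Hash.new { |h, k| h[k] = [] }
    index[".example"] << "LanguageA"   # key created and stored on first touch
    index[".example"] << "LanguageB"
    p index                            # => {".example"=>["LanguageA", "LanguageB"]}

    naive = Hash.new([])
    naive[".example"] << "LanguageA"   # mutates the shared default; key never stored
    p naive                            # => {}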
@@ -84,54 +86,11 @@ module Linguist
@filename_index[filename] << language
end

@language_id_index[language.language_id] = language

language
end

STRATEGIES = [
Linguist::Strategy::Modeline,
Linguist::Shebang,
Linguist::Strategy::Filename,
Linguist::Heuristics,
Linguist::Classifier
]

# Public: Detects the Language of the blob.
#
# blob - an object that includes the Linguist `BlobHelper` interface;
# see Linguist::LazyBlob and Linguist::FileBlob for examples
#
# Returns Language or nil.
def self.detect(blob)
# Bail early if the blob is binary or empty.
return nil if blob.likely_binary? || blob.binary? || blob.empty?

Linguist.instrument("linguist.detection", :blob => blob) do
# Call each strategy until one candidate is returned.
languages = []
returning_strategy = nil

STRATEGIES.each do |strategy|
returning_strategy = strategy
candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
strategy.call(blob, languages)
end
if candidates.size == 1
languages = candidates
break
elsif candidates.size > 1
# More than one candidate was found, pass them to the next strategy.
languages = candidates
else
# No candidates, try the next strategy
end
end

Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)

languages.first
end
end
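For reference, the strategy chain above is normally driven through `Language.detect` with a blob; a minimal sketch against a file on disk (the path here is only an example):

    # Illustrative sketch: running detection end to end with the API shown above.
    require 'linguist'

    blob     = Linguist::FileBlob.new("lib/linguist/language.rb")
    language = Linguist::Language.detect(blob)
    puts language && language.name   # expected "Ruby" for a Ruby source file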

# Public: Get all Languages
#
# Returns an Array of Languages
@@ -171,46 +130,46 @@ module Linguist

# Public: Look up Languages by filename.
#
# The behaviour of this method recently changed.
# See the second example below.
#
# filename - The path String.
#
# Examples
#
# Language.find_by_filename('Cakefile')
# # => [#<Language name="CoffeeScript">]
# Language.find_by_filename('foo.rb')
# # => [#<Language name="Ruby">]
# # => []
#
# Returns all matching Languages or [] if none were found.
def self.find_by_filename(filename)
basename = File.basename(filename)

# find the first extension with language definitions
extname = FileBlob.new(filename).extensions.detect do |e|
!@extension_index[e].empty?
end

(@filename_index[basename] + @extension_index[extname]).compact.uniq
@filename_index[basename]
end

# Public: Look up Languages by file extension.
#
# extname - The extension String.
# The behaviour of this method recently changed.
# See the second example below.
#
# filename - The path String.
#
# Examples
#
# Language.find_by_extension('.rb')
# Language.find_by_extension('dummy.rb')
# # => [#<Language name="Ruby">]
#
# Language.find_by_extension('rb')
# # => [#<Language name="Ruby">]
# # => []
#
# Returns all matching Languages or [] if none were found.
def self.find_by_extension(extname)
extname = ".#{extname}" unless extname.start_with?(".")
@extension_index[extname.downcase]
end
def self.find_by_extension(filename)
# find the first extension with language definitions
extname = FileBlob.new(filename.downcase).extensions.detect do |e|
!@extension_index[e].empty?
end

# DEPRECATED
def self.find_by_shebang(data)
@interpreter_index[Shebang.interpreter(data)]
@extension_index[extname]
end
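The behaviour change documented above is easiest to see side by side; an illustrative session matching the examples in the comments:

    # Illustrative sketch: filename lookups no longer fall back to extensions.
    require 'linguist'

    Linguist::Language.find_by_filename('Cakefile')   # => [#<Language name="CoffeeScript">]
    Linguist::Language.find_by_filename('foo.rb')     # => []  (was [#<Language name="Ruby">])
    Linguist::Language.find_by_extension('dummy.rb')  # => [#<Language name="Ruby">]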

# Public: Look up Languages by interpreter.
@@ -227,6 +186,19 @@ module Linguist
@interpreter_index[interpreter]
end

# Public: Look up Languages by its language_id.
#
# language_id - Integer of language_id
#
# Examples
#
# Language.find_by_id(100)
# # => [#<Language name="Elixir">]
#
# Returns the matching Language
def self.find_by_id(language_id)
@language_id_index[language_id.to_i]
end

# Public: Look up Language by its name.
#
@@ -243,7 +215,14 @@ module Linguist
# Returns the Language or nil if none was found.
def self.[](name)
return nil if name.to_s.empty?
name && (@index[name.downcase] || @index[name.split(',').first.downcase])

lang = @index[name.downcase]
return lang if lang

name = name.split(',').first
return nil if name.to_s.empty?

@index[name.downcase]
end

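The rewritten `Language.[]` above first tries the full name and only then the part before a comma; an illustrative use (the comma form simply mirrors the fallback in the code, not a documented calling convention):

    # Illustrative sketch: the name lookup defined above.
    require 'linguist'

    Linguist::Language["Ruby"]            # => #<Language name="Ruby">
    Linguist::Language["ruby"]            # name is downcased before the index lookup
    Linguist::Language["Nonexistent"]     # => nil
    Linguist::Language["Ruby, whatever"]  # falls back to the text before the first comma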
# Public: A List of popular languages
@@ -277,17 +256,6 @@ module Linguist
@colors ||= all.select(&:color).sort_by { |lang| lang.name.downcase }
end

# Public: A List of languages compatible with Ace.
#
# TODO: Remove this method in a 5.x release. Every language now needs an ace_mode
# key, so this function isn't doing anything unique anymore.
#
# Returns an Array of Languages.
def self.ace_modes
warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set."
@ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase }
end

# Internal: Initialize a new Language
#
# attributes - A hash of attributes
@@ -304,7 +272,7 @@ module Linguist
@color = attributes[:color]

# Set aliases
@aliases = [default_alias_name] + (attributes[:aliases] || [])
@aliases = [default_alias] + (attributes[:aliases] || [])

# Load the TextMate scope name or try to guess one
@tm_scope = attributes[:tm_scope] || begin
@@ -318,10 +286,12 @@ module Linguist
end

@ace_mode = attributes[:ace_mode]
@codemirror_mode = attributes[:codemirror_mode]
@codemirror_mime_type = attributes[:codemirror_mime_type]
@wrap = attributes[:wrap] || false

# Set legacy search term
@search_term = attributes[:search_term] || default_alias_name
# Set the language_id
@language_id = attributes[:language_id]

# Set extensions or default to [].
@extensions = attributes[:extensions] || []
@@ -374,16 +344,16 @@ module Linguist
# Returns an Array of String names
attr_reader :aliases

# Deprecated: Get code search term
# Public: Get language_id (used in GitHub search)
#
# Examples
#
# # => "ruby"
# # => "python"
# # => "perl"
# # => "1"
# # => "2"
# # => "3"
#
# Returns the name String
attr_reader :search_term
# Returns the integer language_id
attr_reader :language_id

# Public: Get the name of a TextMate-compatible scope
#
@@ -401,6 +371,31 @@ module Linguist
# Returns a String name or nil
attr_reader :ace_mode

# Public: Get CodeMirror mode
#
# Maps to a directory in the `mode/` source code.
# https://github.com/codemirror/CodeMirror/tree/master/mode
#
# Examples
#
# # => "nil"
# # => "javascript"
# # => "clike"
#
# Returns a String name or nil
attr_reader :codemirror_mode

# Public: Get CodeMirror MIME type mode
#
# Examples
#
# # => "nil"
# # => "text/x-javascript"
# # => "text/x-csrc"
#
# Returns a String name or nil
attr_reader :codemirror_mime_type

# Public: Should language lines be wrapped
#
# Returns true or false
@@ -433,22 +428,6 @@ module Linguist
# Returns the extensions Array
attr_reader :filenames

# Deprecated: Get primary extension
#
# Defaults to the first extension but can be overridden
# in the languages.yml.
#
# The primary extension can not be nil. Tests should verify this.
#
# This method is only used by app/helpers/gists_helper.rb for creating
# the language dropdown. It really should be using `name` instead.
# Would like to drop primary extension.
#
# Returns the extension String.
def primary_extension
extensions.first
end

# Public: Get URL escaped name.
#
# Examples
@@ -462,12 +441,13 @@ module Linguist
EscapeUtils.escape_url(name).gsub('+', '%20')
end

# Internal: Get default alias name
# Public: Get default alias name
#
# Returns the alias name String
def default_alias_name
def default_alias
name.downcase.gsub(/\s/, '-')
end
alias_method :default_alias_name, :default_alias

# Public: Get Language group
#
@@ -577,10 +557,12 @@ module Linguist
:aliases => options['aliases'],
:tm_scope => options['tm_scope'],
:ace_mode => options['ace_mode'],
:codemirror_mode => options['codemirror_mode'],
:codemirror_mime_type => options['codemirror_mime_type'],
:wrap => options['wrap'],
:group_name => options['group'],
:searchable => options.fetch('searchable', true),
:search_term => options['search_term'],
:language_id => options['language_id'],
:extensions => Array(options['extensions']),
:interpreters => options['interpreters'].sort,
:filenames => options['filenames'],

3991  lib/linguist/languages.yml  Normal file → Executable file
File diff suppressed because it is too large
@@ -28,6 +28,7 @@ module Linguist
@oid = oid
@path = path
@mode = mode
@data = nil
end

def git_attributes

@@ -26,4 +26,4 @@
- Shell
- Swift
- TeX
- VimL
- Vim script

@@ -30,6 +30,9 @@ module Linguist
@repository = repo
@commit_oid = commit_oid

@old_commit_oid = nil
@old_stats = nil

raise TypeError, 'commit_oid must be a commit SHA1' unless commit_oid.is_a?(String)
end


@@ -21,7 +21,7 @@ module Linguist
def self.cache
@cache ||= begin
serializer = defined?(Yajl) ? Yajl : YAML
serializer.load(File.read(PATH))
serializer.load(File.read(PATH, encoding: 'utf-8'))
end
end


@@ -42,10 +42,10 @@ module Linguist
return unless script

# "python2.6" -> "python2"
script.sub! /(\.\d+)$/, ''
script.sub!(/(\.\d+)$/, '')

# #! perl -> perl
script.sub! /^#!\s*/, ''
script.sub!(/^#!\s*/, '')

# Check for multiline shebang hacks that call `exec`
if script == 'sh' &&

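The two substitutions above normalise an interpreter token before lookup; a tiny illustrative run of just those two steps in plain Ruby:

    # Illustrative sketch: the normalisation steps shown in the hunk above.
    script = "python2.6"
    script.sub!(/(\.\d+)$/, '')   # strip the version suffix
    puts script                   # => python2

    script = "#! perl"
    script.sub!(/^#!\s*/, '')     # strip the shebang prefix
    puts script                   # => perl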
10  lib/linguist/strategy/extension.rb  Normal file
@@ -0,0 +1,10 @@
module Linguist
module Strategy
# Detects language based on extension
class Extension
def self.call(blob, _)
Language.find_by_extension(blob.name.to_s)
end
end
end
end
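The new strategy follows the same `call(blob, candidates)` shape as the other entries in `STRATEGIES`; an illustrative direct invocation (assuming a checkout of this repository; the second argument is ignored here):

    # Illustrative sketch: calling the new extension strategy directly.
    require 'linguist'

    blob = Linguist::FileBlob.new("lib/linguist/strategy/extension.rb")
    p Linguist::Strategy::Extension.call(blob, [])   # => [#<Language name="Ruby">]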
@@ -1,9 +1,10 @@
module Linguist
module Strategy
# Detects language based on filename and/or extension
# Detects language based on filename
class Filename
def self.call(blob, _)
Language.find_by_filename(blob.name.to_s)
name = blob.name.to_s
Language.find_by_filename(name)
end
end
end

@@ -1,8 +1,102 @@
module Linguist
module Strategy
class Modeline
EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
VimModeline = /vim:\s*set.*\s(?:ft|filetype)=(\w+)\s?.*:/i
EMACS_MODELINE = /
-\*-
(?:
# Short form: `-*- ruby -*-`
\s* (?= [^:;\s]+ \s* -\*-)
|
# Longer form: `-*- foo:bar; mode: ruby; -*-`
(?:
.*? # Preceding variables: `-*- foo:bar bar:baz;`
[;\s] # Which are delimited by spaces or semicolons
|
(?<=-\*-) # Not preceded by anything: `-*-mode:ruby-*-`
)
mode # Major mode indicator
\s*:\s* # Allow whitespace around colon: `mode : ruby`
)
([^:;\s]+) # Name of mode

# Ensure the mode is terminated correctly
(?=
# Followed by semicolon or whitespace
[\s;]
|
# Touching the ending sequence: `ruby-*-`
(?<![-*]) # Don't allow stuff like `ruby--*-` to match; it'll invalidate the mode
-\*- # Emacs has no problems reading `ruby --*-`, however.
)
.*? # Anything between a cleanly-terminated mode and the ending -*-
-\*-
/xi

VIM_MODELINE = /

# Start modeline. Could be `vim:`, `vi:` or `ex:`
(?:
(?:\s|^)
vi
(?:m[<=>]?\d+|m)? # Version-specific modeline
|
[\t\x20] # `ex:` requires whitespace, because "ex:" might be short for "example:"
ex
)

# If the option-list begins with `set ` or `se `, it indicates an alternative
# modeline syntax partly-compatible with older versions of Vi. Here, the colon
# serves as a terminator for an option sequence, delimited by whitespace.
(?=
# So we have to ensure the modeline ends with a colon
: (?=\s* set? \s [^\n:]+ :) |

# Otherwise, it isn't valid syntax and should be ignored
: (?!\s* set? \s)
)

# Possible (unrelated) `option=value` pairs to skip past
(?:
# Option separator. Vim uses whitespace or colons to separate options (except if
# the alternate "vim: set " form is used, where only whitespace is used)
(?:
\s
|
\s* : \s* # Note that whitespace around colons is accepted too:
) # vim: noai : ft=ruby:noexpandtab

# Option's name. All recognised Vim options have an alphanumeric form.
\w*

# Possible value. Not every option takes an argument.
(?:
# Whitespace between name and value is allowed: `vim: ft =ruby`
\s*=

# Option's value. Might be blank; `vim: ft= ` says "use no filetype".
(?:
[^\\\s] # Beware of escaped characters: titlestring=\ ft=ruby
| # will be read by Vim as { titlestring: " ft=ruby" }.
\\.
)*
)?
)*

# The actual filetype declaration
[\s:] (?:filetype|ft|syntax) \s*=

# Language's name
(\w+)

# Ensure it's followed by a legal separator
(?=\s|:|$)
/xi

MODELINES = [EMACS_MODELINE, VIM_MODELINE]

# Scope of the search for modelines
# Number of lines to check at the beginning and at the end of the file
SEARCH_SCOPE = 5

# Public: Detects language based on Vim and Emacs modelines
#
@@ -15,14 +109,16 @@ module Linguist
# Returns an Array with one Language if the blob has a Vim or Emacs modeline
# that matches a Language name or alias. Returns an empty array if no match.
def self.call(blob, _ = nil)
Array(Language.find_by_alias(modeline(blob.data)))
header = blob.lines.first(SEARCH_SCOPE).join("\n")
footer = blob.lines.last(SEARCH_SCOPE).join("\n")
Array(Language.find_by_alias(modeline(header + footer)))
end

# Public: Get the modeline from the first n-lines of the file
#
# Returns a String or nil
def self.modeline(data)
match = data.match(EmacsModeline) || data.match(VimModeline)
match = MODELINES.map { |regex| data.match(regex) }.reject(&:nil?).first
match[1] if match
end
end

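The verbose regexes above are easiest to sanity-check with a couple of modeline strings; an illustrative probe through the `modeline` helper defined above (the sample lines are invented):

    # Illustrative sketch: what the Emacs and Vim modeline patterns above extract.
    require 'linguist'

    emacs = "# -*- mode: ruby; coding: utf-8 -*-"
    vim   = "# vim: set ft=ruby ts=2 sw=2 :"

    puts Linguist::Strategy::Modeline.modeline(emacs)  # => "ruby"
    puts Linguist::Strategy::Modeline.modeline(vim)    # => "ruby"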
@@ -15,15 +15,25 @@
# Dependencies
- ^[Dd]ependencies/

# Distributions
- (^|/)dist/

# C deps
# https://github.com/joyent/node
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)configure.ac$
- (^|/)config.guess$
- (^|/)config.sub$

# stuff autogenerated by autoconf - still C deps
- (^|/)aclocal.m4
- (^|/)libtool.m4
- (^|/)ltoptions.m4
- (^|/)ltsugar.m4
- (^|/)ltversion.m4
- (^|/)lt~obsolete.m4

# Linters
- cpplint.py

@@ -40,6 +50,9 @@
# Go dependencies
- Godeps/_workspace/

# GNU indent profiles
- .indent.pro

# Minified JavaScript and CSS
- (\.|-)min\.(js|css)$

@@ -146,13 +159,19 @@
- (^|/)tiny_mce([^.]*)\.js$
- (^|/)tiny_mce/(langs|plugins|themes|utils)

# Ace Editor
- (^|/)ace-builds/

# Fontello CSS files
- (^|/)fontello(.*?)\.css$

# MathJax
- (^|/)MathJax/

# Chart.js
- (^|/)Chart\.js$

# Codemirror
# CodeMirror
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
@@ -183,6 +202,7 @@

# django
- (^|/)admin_media/
- (^|/)env/

# Fabric
- ^fabfile\.py$
@@ -215,6 +235,15 @@
# Fabric
- Fabric.framework/

# BuddyBuild
- BuddyBuildSDK.framework/

# Realm
- Realm.framework

# RealmSwift
- RealmSwift.framework

# git config files
- gitattributes$
- gitignore$
@@ -302,3 +331,6 @@

# Android Google APIs
- (^|/)\.google_apis/

# Jenkins Pipeline
- ^Jenkinsfile$

@@ -1,3 +1,3 @@
module Linguist
VERSION = "4.7.4"
VERSION = "5.0.9"
end

@@ -1,6 +1,7 @@
{
"repository": "https://github.com/github/linguist",
"dependencies": {
"season": "~>5.0"
}
"season": "~>5.4"
},
"license": "MIT"
}

@@ -0,0 +1,265 @@
|
||||
&НаСервереБезКонтекста
|
||||
Функция ПолучитьКонтактноеЛицоПоЭлектроннойПочте(ЭлектроннаяПочта)
|
||||
Запрос = Новый Запрос;
|
||||
Запрос.Текст = "ВЫБРАТЬ КонтактноеЛицо ИЗ Справочник.Контрагенты ГДЕ ЭлектроннаяПочта = &ЭлектроннаяПочта";
|
||||
Запрос.Параметры.Вставить("ЭлектроннаяПочта", СокрЛП(ЭлектроннаяПочта));
|
||||
Выборка = Запрос.Выполнить().Выбрать();
|
||||
КонтактноеЛицо = "";
|
||||
Если Выборка.Следующий() Тогда
|
||||
КонтактноеЛицо = Выборка.КонтактноеЛицо;
|
||||
КонецЕсли;
|
||||
Возврат КонтактноеЛицо;
|
||||
КонецФункции
|
||||
|
||||
&НаСервереБезКонтекста
|
||||
Функция ПолучитьКонтактноеЛицоПоПолучателю(Получатель)
|
||||
Запрос = Новый Запрос;
|
||||
Запрос.Текст = "ВЫБРАТЬ КонтактноеЛицо ИЗ Справочник.Контрагенты ГДЕ Ссылка = &Получатель";
|
||||
Запрос.Параметры.Вставить("Получатель", Получатель);
|
||||
Выборка = Запрос.Выполнить().Выбрать();
|
||||
КонтактноеЛицо = "";
|
||||
Если Выборка.Следующий() Тогда
|
||||
КонтактноеЛицо = Выборка.КонтактноеЛицо;
|
||||
КонецЕсли;
|
||||
Возврат КонтактноеЛицо;
|
||||
КонецФункции
|
||||
|
||||
&НаСервереБезКонтекста
|
||||
Процедура ДобавитьПолучателей(Получатель, Получатели)
|
||||
Запрос = Новый Запрос;
|
||||
Запрос.Текст = "ВЫБРАТЬ ЭлектроннаяПочта ИЗ Справочник.Контрагенты ГДЕ Ссылка ";
|
||||
Если ТипЗнч(Получатели) = Тип("Массив") Тогда
|
||||
Запрос.Текст = Запрос.Текст + "В (&Получатели)";
|
||||
Иначе
|
||||
Запрос.Текст = Запрос.Текст + "= &Получатели";
|
||||
КонецЕсли;
|
||||
Запрос.Параметры.Вставить("Получатели", Получатели);
|
||||
Выборка = Запрос.Выполнить().Выбрать();
|
||||
Пока Выборка.Следующий() Цикл
|
||||
Если Получатель <> "" Тогда
|
||||
Получатель = Получатель + "; ";
|
||||
КонецЕсли;
|
||||
Получатель = Получатель + Выборка.ЭлектроннаяПочта;
|
||||
КонецЦикла;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаСервере
|
||||
Процедура ПриСозданииНаСервере(Отказ, СтандартнаяОбработка)
|
||||
Если Параметры.Ключ.Пустая() Тогда
|
||||
Заголовок = "Исходящее письмо (Создание)";
|
||||
Объект.Дата = ТекущаяДата();
|
||||
ПоШаблону = Параметры.Свойство("ПоШаблону");
|
||||
ВходящееПисьмо = Параметры.ВходящееПисьмо;
|
||||
Если ПоШаблону = Истина Тогда
|
||||
Элементы.ЗаполнитьПоШаблону.Видимость = Истина;
|
||||
РаботаСПочтой.ЗаполнитьПисьмоПоШаблону(Объект, Содержимое);
|
||||
ИначеЕсли Не ВходящееПисьмо.Пустая() Тогда
|
||||
РаботаСПочтой.ЗаполнитьОтветНаПисьмо(ВходящееПисьмо, Объект, Содержимое);
|
||||
КонецЕсли;
|
||||
Адресаты = Параметры.Адресаты;
|
||||
Если Адресаты <> Неопределено Тогда
|
||||
Запрос = Новый Запрос;
|
||||
Запрос.Текст = "ВЫБРАТЬ
|
||||
| Контрагенты.ЭлектроннаяПочта
|
||||
|ИЗ
|
||||
| Справочник.Контрагенты КАК Контрагенты
|
||||
|ГДЕ
|
||||
| Контрагенты.Ссылка В(&Адресаты)
|
||||
| И Контрагенты.ЭлектроннаяПочта <> """"";
|
||||
Запрос.УстановитьПараметр("Адресаты", Адресаты);
|
||||
Получатель = "";
|
||||
Выборка = Запрос.Выполнить().Выбрать();
|
||||
Пока Выборка.Следующий() Цикл
|
||||
Если Получатель <> "" Тогда
|
||||
Получатель = Получатель + "; ";
|
||||
КонецЕсли;
|
||||
Получатель = Получатель + Выборка.ЭлектроннаяПочта;
|
||||
КонецЦикла;
|
||||
Объект.Получатель = Получатель;
|
||||
КонецЕсли;
|
||||
КонецЕсли;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаСервере
|
||||
Процедура ПриЧтенииНаСервере(ТекущийОбъект)
|
||||
Содержимое = ТекущийОбъект.Содержимое.Получить();
|
||||
Заголовок = ТекущийОбъект.Наименование + " (Исходящее письмо)";
|
||||
Если РаботаСПочтой.ПисьмоОтправлено(ТекущийОбъект.Ссылка) Тогда
|
||||
Заголовок = Заголовок + " - Отправлено";
|
||||
КонецЕсли;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаСервере
|
||||
Процедура ПередЗаписьюНаСервере(Отказ, ТекущийОбъект, ПараметрыЗаписи)
|
||||
ТекущийОбъект.Содержимое = Новый ХранилищеЗначения(Содержимое, Новый СжатиеДанных());
|
||||
ТекущийОбъект.Текст = Содержимое.ПолучитьТекст();
|
||||
КонецПроцедуры
|
||||
|
||||
&НаСервере
|
||||
Функция ОтправитьПисьмо(Ошибка)
|
||||
Если Не Записать() Тогда
|
||||
Ошибка = "ОшибкаЗаписи";
|
||||
Возврат Ложь;
|
||||
КонецЕсли;
|
||||
Если Не РаботаСПочтой.ОтправитьПисьмо(Объект.Ссылка) Тогда
|
||||
Ошибка = "ОшибкаОтправки";
|
||||
Возврат Ложь;
|
||||
КонецЕсли;
|
||||
Заголовок = Заголовок + " - Отправлено";
|
||||
Возврат Истина;
|
||||
КонецФункции
|
||||
|
||||
&НаКлиенте
|
||||
Функция ОтправитьПисьмоКлиент()
|
||||
Ошибка = "";
|
||||
Если Не ОтправитьПисьмо(Ошибка) Тогда
|
||||
Если Ошибка = "ОшибкаОтправки" Тогда
|
||||
Кнопки = Новый СписокЗначений;
|
||||
Кнопки.Добавить(1, "Настроить почту");
|
||||
Кнопки.Добавить(2, "Закрыть");
|
||||
|
||||
Оп = Новый ОписаниеОповещения(
|
||||
"ОтправитьПисьмоКлиентВопросЗавершение",
|
||||
ЭтотОбъект);
|
||||
ПоказатьВопрос(Оп,
|
||||
"Не указаны настройки интернет почты!",
|
||||
Кнопки, , 1);
|
||||
КонецЕсли;
|
||||
Возврат Ложь;
|
||||
КонецЕсли;
|
||||
|
||||
НавигационнаяСсылка = ПолучитьНавигационнуюСсылку(Объект.Ссылка);
|
||||
ПоказатьОповещениеПользователя("Письмо отправлено", НавигационнаяСсылка, Объект.Наименование);
|
||||
ОповеститьОбИзменении(Объект.Ссылка);
|
||||
Возврат Истина;
|
||||
КонецФункции
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ОтправитьПисьмоКлиентВопросЗавершение(Результат, Параметры) Экспорт
|
||||
Если Результат = 1 Тогда
|
||||
ОткрытьФорму("ОбщаяФорма.НастройкаПочты");
|
||||
КонецЕсли;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура Отправить(Команда)
|
||||
ОтправитьПисьмоКлиент();
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ОтправитьИЗакрыть(Команда)
|
||||
Если Не ОтправитьПисьмоКлиент() Тогда
|
||||
Возврат;
|
||||
КонецЕсли;
|
||||
Закрыть();
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ВставитьСтрокуВТекущуюПозицию(Поле, Документ, Строка)
|
||||
Перем Начало, Конец;
|
||||
Поле.ПолучитьГраницыВыделения(Начало, Конец);
|
||||
Позиция = Документ.ПолучитьПозициюПоЗакладке(Начало);
|
||||
Документ.Удалить(Начало, Конец);
|
||||
Начало = Документ.ПолучитьЗакладкуПоПозиции(Позиция);
|
||||
Документ.Вставить(Начало, Строка);
|
||||
Позиция = Позиция + СтрДлина(Строка);
|
||||
Закладка = Документ.ПолучитьЗакладкуПоПозиции(Позиция);
|
||||
Поле.УстановитьГраницыВыделения(Закладка, Закладка);
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ВставитьКонтактноеЛицо(Команда)
|
||||
Если Объект.Контрагент.Пустая() Тогда
|
||||
Сообщить("Выберите контрагента");
|
||||
Иначе
|
||||
КонтактноеЛицо = ПолучитьКонтактноеЛицоПоПолучателю(Объект.Контрагент);
|
||||
ВставитьСтрокуВТекущуюПозицию(Элементы.Содержимое, Содержимое, КонтактноеЛицо + " ");
|
||||
КонецЕсли;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаСервере
|
||||
Процедура ПослеЗаписиНаСервере(ТекущийОбъект, ПараметрыЗаписи)
|
||||
Заголовок = ТекущийОбъект.Наименование + " (Исходящее письмо)";
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура КонтрагентПриИзменении(Элемент)
|
||||
ДобавитьПолучателей(Объект.Получатель, Объект.Контрагент);
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ВыделитьВажное(Команда)
|
||||
Перем Начало, Конец;
|
||||
ВсеВажное = Истина;
|
||||
Элементы.Содержимое.ПолучитьГраницыВыделения(Начало, Конец);
|
||||
Если Начало = Конец Тогда
|
||||
Возврат;
|
||||
КонецЕсли;
|
||||
|
||||
НаборТекстовыхЭлементов = Новый Массив();
|
||||
Для Каждого ТекстовыйЭлемент Из Содержимое.СформироватьЭлементы(Начало, Конец) Цикл
|
||||
Если Тип(ТекстовыйЭлемент) = Тип("ТекстФорматированногоДокумента") Тогда
|
||||
НаборТекстовыхЭлементов.Добавить(ТекстовыйЭлемент);
|
||||
КонецЕсли;
|
||||
КонецЦикла;
|
||||
|
||||
Для Каждого ТекстовыйЭлемент Из НаборТекстовыхЭлементов Цикл
|
||||
Если ТекстовыйЭлемент.Шрифт.Жирный <> Истина И
|
||||
ТекстовыйЭлемент.ЦветТекста <> Новый Цвет(255, 0, 0) Тогда
|
||||
ВсеВажное = Ложь;
|
||||
Прервать;
|
||||
КонецЕсли;
|
||||
КонецЦикла;
|
||||
|
||||
Для Каждого ТекстовыйЭлемент Из НаборТекстовыхЭлементов Цикл
|
||||
ТекстовыйЭлемент.Шрифт = Новый Шрифт(ТекстовыйЭлемент.Шрифт, , , Не ВсеВажное);
|
||||
ТекстовыйЭлемент.ЦветТекста = Новый Цвет(?(ВсеВажное, 0, 255), 0, 0);
|
||||
КонецЦикла;
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ЗаполнитьПоШаблону(Команда)
|
||||
Если Объект.Контрагент.Пустая() Тогда
|
||||
Сообщить("Выберите контрагента");
|
||||
Иначе
|
||||
НайтиИЗаменить("[Контрагент]", Объект.Контрагент);
|
||||
НайтиИЗаменить("[КонтактноеЛицо]", ПолучитьКонтактноеЛицоПоПолучателю(Объект.Контрагент));
|
||||
КонецЕсли;
|
||||
НайтиИЗаменить("[ДатаПисьма]", Объект.Дата);
|
||||
КонецПроцедуры
|
||||
|
||||
&НаКлиенте
|
||||
Процедура НайтиИЗаменить(СтрокаДляПоиска, СтрокаДляЗамены)
|
||||
Перем ВставленныйТекст, ШрифтОформления, ЦветТекстаОформления, ЦветФонаОформления, НавигационнаяСсылкаОформления;
|
||||
РезультатПоиска = Содержимое.НайтиТекст(СтрокаДляПоиска);
|
||||
Пока ((РезультатПоиска <> Неопределено) И (РезультатПоиска.ЗакладкаНачала <> Неопределено) И (РезультатПоиска.ЗакладкаКонца <> Неопределено)) Цикл
|
||||
ПозицияНачалаСледующегоЦиклаПоиска = Содержимое.ПолучитьПозициюПоЗакладке(РезультатПоиска.ЗакладкаНачала) + СтрДлина(СтрокаДляЗамены);
|
||||
МассивЭлементовДляОформления = Содержимое.ПолучитьЭлементы(РезультатПоиска.ЗакладкаНачала, РезультатПоиска.ЗакладкаКонца);
|
||||
Для Каждого ЭлементДляОформления Из МассивЭлементовДляОформления Цикл
|
||||
Если Тип(ЭлементДляОформления) = Тип("ТекстФорматированногоДокумента") Тогда
|
||||
ШрифтОформления = ЭлементДляОформления.Шрифт;
|
||||
ЦветТекстаОформления = ЭлементДляОформления.ЦветТекста;
|
||||
ЦветФонаОформления = ЭлементДляОформления.ЦветФона;
|
||||
НавигационнаяСсылкаОформления = ЭлементДляОформления.НавигационнаяССылка;
|
||||
Прервать;
|
||||
КонецЕсли;
|
||||
КонецЦикла;
|
||||
Содержимое.Удалить(РезультатПоиска.ЗакладкаНачала, РезультатПоиска.ЗакладкаКонца);
|
||||
ВставленныйТекст = Содержимое.Вставить(РезультатПоиска.ЗакладкаНачала, СтрокаДляЗамены);
|
||||
Если ВставленныйТекст <> Неопределено И ШрифтОформления <> Неопределено Тогда
|
||||
ВставленныйТекст.Шрифт = ШрифтОформления;
|
||||
КонецЕсли;
|
||||
Если ВставленныйТекст <> Неопределено И ЦветТекстаОформления <> Неопределено Тогда
|
||||
ВставленныйТекст.ЦветТекста = ЦветТекстаОформления;
|
||||
КонецЕсли;
|
||||
Если ВставленныйТекст <> Неопределено И ЦветФонаОформления <> Неопределено Тогда
|
||||
ВставленныйТекст.ЦветФона = ЦветФонаОформления;
|
||||
КонецЕсли;
|
||||
Если ВставленныйТекст <> Неопределено И НавигационнаяСсылкаОформления <> Неопределено Тогда
|
||||
ВставленныйТекст.НавигационнаяССылка = НавигационнаяСсылкаОформления;
|
||||
КонецЕсли;
|
||||
|
||||
РезультатПоиска = Содержимое.НайтиТекст(СтрокаДляПоиска, Содержимое.ПолучитьЗакладкуПоПозиции(ПозицияНачалаСледующегоЦиклаПоиска));
|
||||
КонецЦикла;
|
||||
КонецПроцедуры
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
&НаСервере
|
||||
Функция ПечатнаяФорма(ПараметрКоманды)
|
||||
ТабличныйДокумент = Новый ТабличныйДокумент;
|
||||
ТабличныйДокумент.ОтображатьСетку = Истина;
|
||||
ТабличныйДокумент.ОтображатьЗаголовки = Истина;
|
||||
|
||||
Сформирован = Ложь;
|
||||
ТабМакет = Справочники.Товары.ПолучитьМакет("МакетПрайсЛиста");
|
||||
|
||||
Шапка = ТабМакет.ПолучитьОбласть("Шапка");
|
||||
ТабличныйДокумент.Вывести(Шапка);
|
||||
|
||||
ОбластьНоменклатура = ТабМакет.ПолучитьОбласть("ОбластьНоменклатура");
|
||||
|
||||
Запрос = Новый Запрос;
|
||||
Запрос.Текст = "ВЫБРАТЬ
|
||||
| Товары.Код КАК Код,
|
||||
| Товары.Наименование КАК Наименование,
|
||||
| Товары.Артикул КАК Артикул,
|
||||
| Товары.ФайлКартинки КАК Картинка,
|
||||
| Товары.Описание КАК Описание,
|
||||
| Товары.Вид КАК Вид,
|
||||
| ЦеныТоваров.Цена КАК Цена
|
||||
|ИЗ
|
||||
| РегистрСведений.ЦеныТоваров КАК ЦеныТоваров
|
||||
| ЛЕВОЕ СОЕДИНЕНИЕ Справочник.Товары КАК Товары
|
||||
| ПО ЦеныТоваров.Товар = Товары.Ссылка
|
||||
|ГДЕ
|
||||
| Товары.ЭтоГруппа = ЛОЖЬ
|
||||
| И ЦеныТоваров.ВидЦен = &ВидЦен
|
||||
|
|
||||
|УПОРЯДОЧИТЬ ПО
|
||||
| Вид,
|
||||
| Товары.Родитель.Код,
|
||||
| Код";
|
||||
|
||||
Запрос.УстановитьПараметр("ВидЦен", Справочники.ВидыЦен.НайтиПоНаименованию("Розничная"));
|
||||
|
||||
Выборка = Запрос.Выполнить().Выбрать();
|
||||
Пока Выборка.Следующий() Цикл
|
||||
ОбластьНоменклатура.Параметры.Заполнить(Выборка);
|
||||
|
||||
Описание = "";
|
||||
|
||||
Чтение = Новый ЧтениеHTML();
|
||||
Чтение.УстановитьСтроку(Выборка.Описание);
|
||||
|
||||
ДокDOM = Новый ПостроительDOM();
|
||||
HTML = ДокDOM.Прочитать(Чтение);
|
||||
|
||||
Если Не HTML.ЭлементДокумента = Неопределено Тогда
|
||||
Для Каждого Узел из HTML.ЭлементДокумента.ДочерниеУзлы Цикл
|
||||
Если Узел.ИмяУзла = "body" Тогда
|
||||
Для Каждого ЭлементОписания из Узел.ДочерниеУзлы Цикл
|
||||
Описание = Описание + ЭлементОписания.ТекстовоеСодержимое;
|
||||
КонецЦикла;
|
||||
КонецЕсли;
|
||||
КонецЦикла;
|
||||
КонецЕсли;
|
||||
ОбластьНоменклатура.Параметры.Описание = Описание;
|
||||
|
||||
Если (Выборка.Картинка <> Null) Тогда
|
||||
ОбластьНоменклатура.Параметры.ПараметрКартинки = Новый Картинка(Выборка.Картинка.ДанныеФайла.Получить());
|
||||
КонецЕсли;
|
||||
|
||||
ТабличныйДокумент.Вывести(ОбластьНоменклатура, Выборка.Уровень());
|
||||
Сформирован = Истина;
|
||||
КонецЦикла;
|
||||
|
||||
Если Сформирован Тогда
|
||||
Возврат ТабличныйДокумент;
|
||||
Иначе
|
||||
Возврат Неопределено;
|
||||
КонецЕсли;
|
||||
КонецФункции
|
||||
|
||||
&НаКлиенте
|
||||
Процедура ОбработкаКоманды(ПараметрКоманды, ПараметрыВыполненияКоманды)
|
||||
ТабличныйДокумент = ПечатнаяФорма(ПараметрКоманды);
|
||||
|
||||
Если ТабличныйДокумент <> Неопределено Тогда
|
||||
ТабличныйДокумент.Показать();
|
||||
КонецЕсли;
|
||||
|
||||
КонецПроцедуры
|
||||
@@ -0,0 +1,109 @@
|
||||
// Процедура на основании анализа типа данных заменяет их на данные, удаляющие
|
||||
// информацию из узла в котором их не должно быть
|
||||
//
|
||||
// Параметры:
|
||||
// Данные – Объект, набор записей,... который нужно преобразовать
|
||||
//
|
||||
Процедура УдалениеДанных(Данные)
|
||||
|
||||
// Получаем объект описания метаданного, соответствующий данным
|
||||
ОбъектМетаданных = ?(ТипЗнч(Данные) = Тип("УдалениеОбъекта"), Данные.Ссылка.Метаданные(), Данные.Метаданные());
|
||||
// Проверяем тип, интересуют только те типы, которые реализованы на мобильной платформе
|
||||
Если Метаданные.Справочники.Содержит(ОбъектМетаданных)
|
||||
ИЛИ Метаданные.Документы.Содержит(ОбъектМетаданных) Тогда
|
||||
|
||||
// Перенос удаления объекта для объектных
|
||||
Данные = Новый УдалениеОбъекта(Данные.Ссылка);
|
||||
|
||||
ИначеЕсли Метаданные.РегистрыСведений.Содержит(ОбъектМетаданных)
|
||||
ИЛИ Метаданные.РегистрыНакопления.Содержит(ОбъектМетаданных)
|
||||
ИЛИ Метаданные.Последовательности.Содержит(ОбъектМетаданных) Тогда
|
||||
|
||||
// Очищаем данные
|
||||
Данные.Очистить();
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
// Функция формирует пакет обмена, который будет отправлен узлу "УзелОбмена"
|
||||
//
|
||||
// Параметры:
|
||||
// УзелОбмена – узел плана обмена "мобильные", с которым осуществляется обмен
|
||||
//
|
||||
// Возвращаемое значение:
|
||||
// сформированный пакет, помещенный в хранилище значения
|
||||
Функция СформироватьПакетОбмена(УзелОбмена) Экспорт
|
||||
|
||||
ЗаписьXML = Новый ЗаписьXML;
|
||||
|
||||
ЗаписьXML.УстановитьСтроку("UTF-8");
|
||||
ЗаписьXML.ЗаписатьОбъявлениеXML();
|
||||
|
||||
ЗаписьСообщения = ПланыОбмена.СоздатьЗаписьСообщения();
|
||||
ЗаписьСообщения.НачатьЗапись(ЗаписьXML, УзелОбмена);
|
||||
|
||||
ЗаписьXML.ЗаписатьСоответствиеПространстваИмен("xsi", "http://www.w3.org/2001/XMLSchema-instance");
|
||||
ЗаписьXML.ЗаписатьСоответствиеПространстваИмен("v8", "http://v8.1c.ru/data");
|
||||
|
||||
ТипДанныхУдаления = Тип("УдалениеОбъекта");
|
||||
|
||||
ВыборкаИзменений = ПланыОбмена.ВыбратьИзменения(УзелОбмена, ЗаписьСообщения.НомерСообщения);
|
||||
Пока ВыборкаИзменений.Следующий() Цикл
|
||||
|
||||
Данные = ВыборкаИзменений.Получить();
|
||||
|
||||
// Если перенос данных не нужен, то, возможно, необходимо записать удаление данных
|
||||
Если Не ОбменМобильныеПереопределяемый.НуженПереносДанных(Данные, УзелОбмена) Тогда
|
||||
|
||||
// Получаем значение с возможным удалением данных
|
||||
УдалениеДанных(Данные);
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
// Записываем данные в сообщение
|
||||
ОбменМобильныеПереопределяемый.ЗаписатьДанные(ЗаписьXML, Данные);
|
||||
|
||||
КонецЦикла;
|
||||
|
||||
ЗаписьСообщения.ЗакончитьЗапись();
|
||||
|
||||
Возврат Новый ХранилищеЗначения(ЗаписьXML.Закрыть(), Новый СжатиеДанных(9));
|
||||
|
||||
КонецФункции
|
||||
|
||||
// Процедура вносит в информационную базу данные, которые присланы из узла "УзелОбмена"
|
||||
//
|
||||
// Параметры:
|
||||
// УзелОбмена – узел плана обмена "мобильные", с которым осуществляется обмен
|
||||
// ДанныеОбмена - пакет обмена полученный из узла УзелОбмена, помещен в ХранилищеЗначения
|
||||
//
|
||||
Процедура ПринятьПакетОбмена(УзелОбмена, ДанныеОбмена) Экспорт
|
||||
|
||||
ЧтениеXML = Новый ЧтениеXML;
|
||||
ЧтениеXML.УстановитьСтроку(ДанныеОбмена.Получить());
|
||||
ЧтениеСообщения = ПланыОбмена.СоздатьЧтениеСообщения();
|
||||
ЧтениеСообщения.НачатьЧтение(ЧтениеXML);
|
||||
ПланыОбмена.УдалитьРегистрациюИзменений(ЧтениеСообщения.Отправитель,ЧтениеСообщения.НомерПринятого);
|
||||
|
||||
НачатьТранзакцию();
|
||||
Пока ВозможностьЧтенияXML(ЧтениеXML) Цикл
|
||||
|
||||
Данные = ОбменМобильныеПереопределяемый.ПрочитатьДанные(ЧтениеXML);
|
||||
|
||||
Если Не Данные = Неопределено Тогда
|
||||
|
||||
Данные.ОбменДанными.Отправитель = ЧтениеСообщения.Отправитель;
|
||||
Данные.ОбменДанными.Загрузка = Истина;
|
||||
|
||||
Данные.Записать();
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
КонецЦикла;
|
||||
ЗафиксироватьТранзакцию();
|
||||
|
||||
ЧтениеСообщения.ЗакончитьЧтение();
|
||||
ЧтениеXML.Закрыть();
|
||||
|
||||
КонецПроцедуры
|
||||
302
samples/1C Enterprise/Document.РасходТовара.ObjectModule.bsl
Normal file
302
samples/1C Enterprise/Document.РасходТовара.ObjectModule.bsl
Normal file
@@ -0,0 +1,302 @@
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// ПРОЦЕДУРЫ И ФУНКЦИИ
|
||||
|
||||
//
|
||||
|
||||
|
||||
|
||||
// Формирование печатной формы документа
|
||||
|
||||
//
|
||||
|
||||
// Параметры:
|
||||
|
||||
// Нет.
|
||||
|
||||
//
|
||||
|
||||
// Возвращаемое значение:
|
||||
|
||||
// ТабличныйДокумент - Сформированный табличный документ.
|
||||
|
||||
Процедура ПечатнаяФорма(ТабличныйДокумент) Экспорт
|
||||
|
||||
|
||||
Макет = Документы.РасходТовара.ПолучитьМакет("МакетПечати");
|
||||
|
||||
|
||||
// Заголовок
|
||||
|
||||
Область = Макет.ПолучитьОбласть("Заголовок");
|
||||
|
||||
ТабличныйДокумент.Вывести(Область);
|
||||
|
||||
|
||||
// Шапка
|
||||
|
||||
Шапка = Макет.ПолучитьОбласть("Шапка");
|
||||
|
||||
Шапка.Параметры.Заполнить(ЭтотОбъект);
|
||||
|
||||
ТабличныйДокумент.Вывести(Шапка);
|
||||
|
||||
|
||||
// Товары
|
||||
|
||||
Область = Макет.ПолучитьОбласть("ТоварыШапка");
|
||||
|
||||
ТабличныйДокумент.Вывести(Область);
|
||||
|
||||
ОбластьТовары = Макет.ПолучитьОбласть("Товары");
|
||||
|
||||
|
||||
Для каждого ТекСтрокаТовары Из Товары Цикл
|
||||
|
||||
|
||||
ОбластьТовары.Параметры.Заполнить(ТекСтрокаТовары);
|
||||
|
||||
ТабличныйДокумент.Вывести(ОбластьТовары);
|
||||
|
||||
|
||||
КонецЦикла;
|
||||
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
|
||||
// Формирование печатной формы документа
|
||||
//
|
||||
// Параметры:
|
||||
// Нет.
|
||||
//
|
||||
// Возвращаемое значение:
|
||||
// ТабличныйДокумент - Сформированный табличный документ.
|
||||
Процедура Пересчитать() Экспорт
|
||||
|
||||
Для каждого ТекСтрокаТовары Из Товары Цикл
|
||||
|
||||
ТекСтрокаТовары.Сумма = ТекСтрокаТовары.Количество * ТекСтрокаТовары.Цена;
|
||||
|
||||
КонецЦикла;
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// ОБРАБОТЧИКИ СОБЫТИЙ ОБЪЕКТА
|
||||
|
||||
|
||||
Процедура ОбработкаПроведения(Отказ, Режим)
|
||||
|
||||
// Формирование движений регистров накопления ТоварныеЗапасы и Продажи.
|
||||
Движения.ТоварныеЗапасы.Записывать = Истина;
|
||||
Движения.Продажи.Записывать = Истина;
|
||||
Если Режим = РежимПроведенияДокумента.Оперативный Тогда
|
||||
Движения.ТоварныеЗапасы.БлокироватьДляИзменения = Истина;
|
||||
КонецЕсли;
|
||||
|
||||
// Создадим запрос, чтобы получать информацию об услугах
|
||||
Запрос = Новый Запрос("ВЫБРАТЬ
|
||||
| ТоварыВДокументе.НомерСтроки КАК НомерСтроки
|
||||
|ИЗ
|
||||
| Документ.РасходТовара.Товары КАК ТоварыВДокументе
|
||||
|ГДЕ
|
||||
| ТоварыВДокументе.Ссылка = &Ссылка
|
||||
| И ТоварыВДокументе.Товар.Вид = ЗНАЧЕНИЕ(Перечисление.ВидыТоваров.Услуга)");
|
||||
|
||||
Запрос.УстановитьПараметр("Ссылка", Ссылка);
|
||||
РезультатУслуги = Запрос.Выполнить().Выгрузить();
|
||||
РезультатУслуги.Индексы.Добавить("НомерСтроки");
|
||||
|
||||
Для каждого ТекСтрокаТовары Из Товары Цикл
|
||||
|
||||
Строка = РезультатУслуги.Найти(ТекСтрокаТовары.НомерСтроки, "НомерСтроки");
|
||||
Если Строка = Неопределено Тогда
|
||||
|
||||
// Не услуга
|
||||
Движение = Движения.ТоварныеЗапасы.Добавить();
|
||||
Движение.ВидДвижения = ВидДвиженияНакопления.Расход;
|
||||
Движение.Период = Дата;
|
||||
Движение.Товар = ТекСтрокаТовары.Товар;
|
||||
Движение.Склад = Склад;
|
||||
Движение.Количество = ТекСтрокаТовары.Количество;
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
Движение = Движения.Продажи.Добавить();
|
||||
Движение.Период = Дата;
|
||||
Движение.Товар = ТекСтрокаТовары.Товар;
|
||||
Движение.Покупатель = Покупатель;
|
||||
Движение.Количество = ТекСтрокаТовары.Количество;
|
||||
Движение.Сумма = ТекСтрокаТовары.Сумма;
|
||||
|
||||
КонецЦикла;
|
||||
|
||||
// Формирование движения регистра накопления Взаиморасчеты.
|
||||
Движения.Взаиморасчеты.Записывать = Истина;
|
||||
Движение = Движения.Взаиморасчеты.Добавить();
|
||||
Движение.ВидДвижения = ВидДвиженияНакопления.Расход;
|
||||
Движение.Период = Дата;
|
||||
Движение.Контрагент = Покупатель;
|
||||
Движение.Валюта = Валюта;
|
||||
|
||||
Если Валюта.Пустая() Тогда
|
||||
Движение.Сумма = Товары.Итог("Сумма");
|
||||
Иначе
|
||||
|
||||
Курс = РегистрыСведений.КурсыВалют.ПолучитьПоследнее(Дата, Новый Структура("Валюта", Валюта)).Курс;
|
||||
|
||||
Если Курс = 0 Тогда
|
||||
Движение.Сумма = Товары.Итог("Сумма");
|
||||
Иначе
|
||||
Движение.Сумма = Товары.Итог("Сумма") / Курс;
|
||||
КонецЕсли;
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
//Запишем движения
|
||||
Движения.Записать();
|
||||
|
||||
//Контроль остатков при оперативном проведении
|
||||
Если Режим = РежимПроведенияДокумента.Оперативный Тогда
|
||||
// Создадим запрос, чтобы контролировать остатки по товарам
|
||||
Запрос = Новый Запрос("ВЫБРАТЬ
|
||||
| ТоварыВДокументе.Товар КАК Товар,
|
||||
| СУММА(ТоварыВДокументе.Количество) КАК Количество,
|
||||
| МАКСИМУМ(ТоварыВДокументе.НомерСтроки) КАК НомерСтроки
|
||||
|
|
||||
|ПОМЕСТИТЬ ТребуетсяТовара
|
||||
|
|
||||
|ИЗ
|
||||
| Документ.РасходТовара.Товары КАК ТоварыВДокументе
|
||||
|
|
||||
|ГДЕ
|
||||
| ТоварыВДокументе.Ссылка = &Ссылка
|
||||
| И ТоварыВДокументе.Товар.Вид = ЗНАЧЕНИЕ(Перечисление.ВидыТоваров.Товар)
|
||||
|
|
||||
|СГРУППИРОВАТЬ ПО
|
||||
| ТоварыВДокументе.Товар
|
||||
|
|
||||
|ИНДЕКСИРОВАТЬ ПО
|
||||
| Товар
|
||||
|;
|
||||
|
|
||||
|////////////////////////////////////////////////////////////////////////////////
|
||||
|ВЫБРАТЬ
|
||||
| ПРЕДСТАВЛЕНИЕ(ТребуетсяТовара.Товар) КАК ТоварПредставление,
|
||||
| ВЫБОР
|
||||
| КОГДА - ЕСТЬNULL(ТоварныеЗапасыОстатки.КоличествоОстаток, 0) > ТоварыВДокументе.Количество
|
||||
| ТОГДА ТоварыВДокументе.Количество
|
||||
| ИНАЧЕ - ЕСТЬNULL(ТоварныеЗапасыОстатки.КоличествоОстаток, 0)
|
||||
| КОНЕЦ КАК Нехватка,
|
||||
| ТоварыВДокументе.Количество - ВЫБОР
|
||||
| КОГДА - ЕСТЬNULL(ТоварныеЗапасыОстатки.КоличествоОстаток, 0) > ТоварыВДокументе.Количество
|
||||
| ТОГДА ТоварыВДокументе.Количество
|
||||
| ИНАЧЕ - ЕСТЬNULL(ТоварныеЗапасыОстатки.КоличествоОстаток, 0)
|
||||
| КОНЕЦ КАК МаксимальноеКоличество,
|
||||
| ТребуетсяТовара.НомерСтроки КАК НомерСтроки
|
||||
|
|
||||
|ИЗ
|
||||
| ТребуетсяТовара КАК ТребуетсяТовара
|
||||
| ЛЕВОЕ СОЕДИНЕНИЕ РегистрНакопления.ТоварныеЗапасы.Остатки(
|
||||
| ,
|
||||
| Товар В
|
||||
| (ВЫБРАТЬ
|
||||
| ТребуетсяТовара.Товар
|
||||
| ИЗ
|
||||
| ТребуетсяТовара)
|
||||
| И Склад = &Склад) КАК ТоварныеЗапасыОстатки
|
||||
| ПО ТребуетсяТовара.Товар = ТоварныеЗапасыОстатки.Товар
|
||||
| ЛЕВОЕ СОЕДИНЕНИЕ Документ.РасходТовара.Товары КАК ТоварыВДокументе
|
||||
| ПО ТребуетсяТовара.Товар = ТоварыВДокументе.Товар
|
||||
| И ТребуетсяТовара.НомерСтроки = ТоварыВДокументе.НомерСтроки
|
||||
|
|
||||
|ГДЕ
|
||||
| ТоварыВДокументе.Ссылка = &Ссылка И
|
||||
| 0 > ЕСТЬNULL(ТоварныеЗапасыОстатки.КоличествоОстаток, 0)
|
||||
|
|
||||
|УПОРЯДОЧИТЬ ПО
|
||||
| НомерСтроки");
|
||||
|
||||
Запрос.УстановитьПараметр("Склад", Склад);
|
||||
Запрос.УстановитьПараметр("Ссылка", Ссылка);
|
||||
РезультатСНехваткой = Запрос.Выполнить();
|
||||
|
||||
ВыборкаРезультатаСНехваткой = РезультатСНехваткой.Выбрать();
|
||||
|
||||
// Выдадим ошибки для строк, в которых не хватает остатка
|
||||
Пока ВыборкаРезультатаСНехваткой.Следующий() Цикл
|
||||
|
||||
Сообщение = Новый СообщениеПользователю();
|
||||
Сообщение.Текст = НСтр("ru = 'Не хватает '", "ru")
|
||||
+ ВыборкаРезультатаСНехваткой.Нехватка
|
||||
+ НСтр("ru = ' единиц товара'", "ru") + """"
|
||||
+ ВыборкаРезультатаСНехваткой.ТоварПредставление
|
||||
+ """"
|
||||
+ НСтр("ru = ' на складе'", "ru")
|
||||
+ """"
|
||||
+ Склад
|
||||
+ """."
|
||||
+ НСтр("ru = 'Максимальное количество: '", "ru")
|
||||
+ ВыборкаРезультатаСНехваткой.МаксимальноеКоличество
|
||||
+ ".";
|
||||
Сообщение.Поле = НСтр("ru = 'Товары'", "ru")
|
||||
+ "["
|
||||
+ (ВыборкаРезультатаСНехваткой.НомерСтроки - 1)
|
||||
+ "]."
|
||||
+ НСтр("ru = 'Количество'", "ru");
|
||||
Сообщение.УстановитьДанные(ЭтотОбъект);
|
||||
Сообщение.Сообщить();
|
||||
Отказ = Истина;
|
||||
|
||||
КонецЦикла;
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
Процедура ОбработкаПроверкиЗаполнения(Отказ, ПроверяемыеРеквизиты)
|
||||
// Проверим заполненность поля "Покупатель"
|
||||
|
||||
Если Покупатель.Пустая() Тогда
|
||||
|
||||
|
||||
// Если поле Покупатель не заполнено, сообщим об этом пользователю
|
||||
|
||||
Сообщение = Новый СообщениеПользователю();
|
||||
|
||||
Сообщение.Текст = НСтр("ru = 'Не указан Покупатель, для которого выписывается накладная!'", "ru");
|
||||
|
||||
Сообщение.Поле = НСтр("ru = 'Покупатель'", "ru");
|
||||
Сообщение.УстановитьДанные(ЭтотОбъект);
|
||||
|
||||
|
||||
Сообщение.Сообщить();
|
||||
|
||||
|
||||
|
||||
// Сообщим платформе, что мы сами обработали проверку заполнения поля "Покупатель"
|
||||
|
||||
ПроверяемыеРеквизиты.Удалить(ПроверяемыеРеквизиты.Найти("Покупатель"));
|
||||
|
||||
// Так как информация в документе не консистентна, то продолжать работу дальше смысла нет
|
||||
|
||||
Отказ = Истина;
|
||||
|
||||
|
||||
КонецЕсли;
|
||||
|
||||
|
||||
//Если склад не заполнен, то проверим есть ли в документе что-то кроме услуг
|
||||
Если Склад.Пустая() И Товары.Количество() > 0 Тогда
|
||||
|
||||
// Создадим запрос, чтобы получать информацию об товарах
|
||||
Запрос = Новый Запрос("ВЫБРАТЬ
|
||||
| Количество(*) КАК Количество
|
||||
|ИЗ
|
||||
| Справочник.Товары КАК Товары
|
||||
|ГДЕ
|
||||
| Товары.Ссылка В (&ТоварыВДокументе)
|
||||
| И Товары.Вид = ЗНАЧЕНИЕ(Перечисление.ВидыТоваров.Товар)");
|
||||
|
||||
20
samples/1C Enterprise/ci_before_script.os
Normal file
20
samples/1C Enterprise/ci_before_script.os
Normal file
@@ -0,0 +1,20 @@
|
||||
Каталог = ОбъединитьПути(ТекущийКаталог(), "libs\oscript-library\src");
|
||||
Загрузчик_Оригинал_ИмяФайла = ОбъединитьПути(Каталог, "package-loader.os");
|
||||
|
||||
Файлы = НайтиФайлы(Каталог, , Ложь);
|
||||
Для Каждого ВыбФайл Из Файлы Цикл
|
||||
|
||||
Если ВыбФайл.ЭтоФайл() Тогда
|
||||
Продолжить;
|
||||
КонецЕсли;
|
||||
|
||||
Загрузчик_ИмяФайла = ОбъединитьПути(ВыбФайл.ПолноеИмя, "package-loader.os");
|
||||
Загрузчик_Файл = Новый Файл(Загрузчик_ИмяФайла);
|
||||
|
||||
Если Загрузчик_Файл.Существует() Тогда
|
||||
Продолжить;
|
||||
КонецЕсли;
|
||||
|
||||
КопироватьФайл(Загрузчик_Оригинал_ИмяФайла, Загрузчик_ИмяФайла);
|
||||
|
||||
КонецЦикла;
|
||||
42
samples/1C Enterprise/test_canCompile.os
Normal file
42
samples/1C Enterprise/test_canCompile.os
Normal file
@@ -0,0 +1,42 @@
|
||||
#Использовать "../libs/oscript-library/src/v8runner"
|
||||
#Использовать "../libs/oscript-library/src/tempfiles"
|
||||
|
||||
Перем Лог;
|
||||
Перем КодВозврата;
|
||||
|
||||
Процедура Инициализация()
|
||||
|
||||
Лог = Логирование.ПолучитьЛог("oscript.app.gitlab-test_CanCompile");
|
||||
КодВозврата = 0;
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
Процедура ВыполнитьТест()
|
||||
|
||||
Конфигуратор = Новый УправлениеКонфигуратором();
|
||||
|
||||
ПараметрыЗапуска = Конфигуратор.ПолучитьПараметрыЗапуска();
|
||||
КомандаЗапуска = "/LoadConfigFromFiles ""%1""";
|
||||
КомандаЗапуска = СтрШаблон(КомандаЗапуска, ТекущийКаталог() + "\source\cf");
|
||||
|
||||
Лог.Информация("Команда обновления конфигурации: " + КомандаЗапуска);
|
||||
|
||||
ПараметрыЗапуска.Добавить(КомандаЗапуска);
|
||||
|
||||
Попытка
|
||||
Конфигуратор.ВыполнитьКоманду(ПараметрыЗапуска);
|
||||
Исключение
|
||||
|
||||
Лог.Ошибка(Конфигуратор.ВыводКоманды());
|
||||
КодВозврата = 1;
|
||||
|
||||
КонецПопытки;
|
||||
|
||||
УдалитьФайлы(Конфигуратор.ПутьКВременнойБазе());
|
||||
|
||||
КонецПроцедуры
|
||||
|
||||
Инициализация();
|
||||
ВыполнитьТест();
|
||||
|
||||
ЗавершитьРаботу(КодВозврата);
|
||||
190
samples/ABNF/toml.abnf
Normal file
190
samples/ABNF/toml.abnf
Normal file
@@ -0,0 +1,190 @@
|
||||
; Source: https://github.com/toml-lang/toml
|
||||
; License: MIT
|
||||
|
||||
;; This is an attempt to define TOML in ABNF according to the grammar defined
;; in RFC 4234 (http://www.ietf.org/rfc/rfc4234.txt).

;; TOML

toml = expression *( newline expression )
expression = (
  ws /
  ws comment /
  ws keyval ws [ comment ] /
  ws table ws [ comment ]
)

;; Newline

newline = (
  %x0A /     ; LF
  %x0D.0A    ; CRLF
)

newlines = 1*newline

;; Whitespace

ws = *(
  %x20 /  ; Space
  %x09    ; Horizontal tab
)

;; Comment

comment-start-symbol = %x23 ; #
non-eol = %x09 / %x20-10FFFF
comment = comment-start-symbol *non-eol

;; Key-Value pairs

keyval-sep = ws %x3D ws ; =
keyval = key keyval-sep val

key = unquoted-key / quoted-key
unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
quoted-key = quotation-mark 1*basic-char quotation-mark ; See Basic Strings

val = integer / float / string / boolean / date-time / array / inline-table

;; Table

table = std-table / array-table

;; Standard Table

std-table-open  = %x5B ws     ; [ Left square bracket
std-table-close = ws %x5D     ; ] Right square bracket
table-key-sep   = ws %x2E ws  ; . Period

std-table = std-table-open key *( table-key-sep key) std-table-close

;; Array Table

array-table-open  = %x5B.5B ws  ; [[ Double left square bracket
array-table-close = ws %x5D.5D  ; ]] Double right square bracket

array-table = array-table-open key *( table-key-sep key) array-table-close

;; Integer

integer = [ minus / plus ] int
minus = %x2D       ; -
plus = %x2B        ; +
digit1-9 = %x31-39 ; 1-9
underscore = %x5F  ; _
int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT )

;; Float

float = integer ( frac / frac exp / exp )
zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT )
frac = decimal-point zero-prefixable-int
decimal-point = %x2E ; .
exp = e integer
e = %x65 / %x45 ; e E

;; String

string = basic-string / ml-basic-string / literal-string / ml-literal-string

;; Basic String

basic-string = quotation-mark *basic-char quotation-mark

quotation-mark = %x22 ; "

basic-char = basic-unescaped / escaped
escaped = escape ( %x22 /         ; "    quotation mark  U+0022
                   %x5C /         ; \    reverse solidus U+005C
                   %x2F /         ; /    solidus         U+002F
                   %x62 /         ; b    backspace       U+0008
                   %x66 /         ; f    form feed       U+000C
                   %x6E /         ; n    line feed       U+000A
                   %x72 /         ; r    carriage return U+000D
                   %x74 /         ; t    tab             U+0009
                   %x75 4HEXDIG / ; uXXXX                U+XXXX
                   %x55 8HEXDIG ) ; UXXXXXXXX            U+XXXXXXXX

basic-unescaped = %x20-21 / %x23-5B / %x5D-10FFFF

escape = %x5C ; \

;; Multiline Basic String

ml-basic-string-delim = quotation-mark quotation-mark quotation-mark
ml-basic-string = ml-basic-string-delim ml-basic-body ml-basic-string-delim
ml-basic-body = *( ml-basic-char / newline / ( escape newline ))

ml-basic-char = ml-basic-unescaped / escaped
ml-basic-unescaped = %x20-5B / %x5D-10FFFF

;; Literal String

literal-string = apostraphe *literal-char apostraphe

apostraphe = %x27 ; ' Apostrophe

literal-char = %x09 / %x20-26 / %x28-10FFFF

;; Multiline Literal String

ml-literal-string-delim = apostraphe apostraphe apostraphe
ml-literal-string = ml-literal-string-delim ml-literal-body ml-literal-string-delim

ml-literal-body = *( ml-literal-char / newline )
ml-literal-char = %x09 / %x20-10FFFF

;; Boolean

boolean = true / false
true  = %x74.72.75.65    ; true
false = %x66.61.6C.73.65 ; false

;; Datetime (as defined in RFC 3339)

date-fullyear  = 4DIGIT
date-month     = 2DIGIT ; 01-12
date-mday      = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year
time-hour      = 2DIGIT ; 00-23
time-minute    = 2DIGIT ; 00-59
time-second    = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules
time-secfrac   = "." 1*DIGIT
time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
time-offset    = "Z" / time-numoffset

partial-time = time-hour ":" time-minute ":" time-second [time-secfrac]
full-date    = date-fullyear "-" date-month "-" date-mday
full-time    = partial-time time-offset

date-time = full-date "T" full-time

;; Array

array-open  = %x5B ws ; [
array-close = ws %x5D ; ]

array = array-open array-values array-close

array-values = [ val [ array-sep ] [ ( comment newlines) / newlines ] /
                 val array-sep [ ( comment newlines) / newlines ] array-values ]

array-sep = ws %x2C ws ; , Comma

;; Inline Table

inline-table-open  = %x7B ws ; {
inline-table-close = ws %x7D ; }
inline-table-sep   = ws %x2C ws ; , Comma

inline-table = inline-table-open inline-table-keyvals inline-table-close

inline-table-keyvals = [ inline-table-keyvals-non-empty ]
inline-table-keyvals-non-empty = key keyval-sep val /
                                 key keyval-sep val inline-table-sep inline-table-keyvals-non-empty

;; Built-in ABNF terms, reproduced here for clarity

; ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
; DIGIT = %x30-39 ; 0-9
; HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F"
7  samples/APL/hashbang  Executable file
@@ -0,0 +1,7 @@
#!/usr/local/bin/apl --script
NEWLINE ← ⎕UCS 10
HEADERS ← 'Content-Type: text/plain', NEWLINE
HEADERS
⍝ ⎕←HEADERS
⍝ ⍕⎕TS
)OFF
33  samples/ASN.1/example.asn  Normal file
@@ -0,0 +1,33 @@
MyShopPurchaseOrders DEFINITIONS AUTOMATIC TAGS ::= BEGIN

PurchaseOrder ::= SEQUENCE {
    dateOfOrder DATE,
    customer    CustomerInfo,
    items       ListOfItems
}

CustomerInfo ::= SEQUENCE {
    companyName    VisibleString (SIZE (3..50)),
    billingAddress Address,
    contactPhone   NumericString (SIZE (7..12))
}

Address::= SEQUENCE {
    street  VisibleString (SIZE (5 .. 50)) OPTIONAL,
    city    VisibleString (SIZE (2..30)),
    state   VisibleString (SIZE(2) ^ FROM ("A".."Z")),
    zipCode NumericString (SIZE(5 | 9))
}

ListOfItems ::= SEQUENCE (SIZE (1..100)) OF Item

Item ::= SEQUENCE {
    itemCode     INTEGER (1..99999),
    color        VisibleString ("Black" | "Blue" | "Brown"),
    power        INTEGER (110 | 220),
    deliveryTime INTEGER (8..12 | 14..19),
    quantity     INTEGER (1..1000),
    unitPrice    REAL (1.00 .. 9999.00),
    isTaxable    BOOLEAN
}
END
318  samples/ATS/basis_ssntype.sats  Normal file
@@ -0,0 +1,318 @@
(*
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2014 Hongwei Xi
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.)
|
||||
*)
|
||||
|
||||
// Source: https://github.com/githwxi/ATS-Postiats-contrib/blob/201d635062d0ea64ff5ba5457a4ea0bb4d5ae202/contrib/libats-/hwxi/teaching/mysession-g/SATS/basis_ssntype.sats
|
||||
|
||||
(*
|
||||
** Basis for g-session types
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
staload
|
||||
"./basis_intset.sats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel_cap(): intGte(1)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
abstype
|
||||
session_msg
|
||||
(i:int, j:int, a:vt@ype)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
abstype ssession_nil
|
||||
abstype ssession_cons(a:type, ssn:type)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
stadef msg = session_msg
|
||||
//
|
||||
stadef nil = ssession_nil
|
||||
//
|
||||
stadef :: = ssession_cons
|
||||
stadef cons = ssession_cons
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
abstype
|
||||
session_append
|
||||
(ssn1: type, ssn2: type)
|
||||
//
|
||||
stadef append = session_append
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
abstype
|
||||
session_choose
|
||||
(
|
||||
i:int, ssn1:type, ssn2:type
|
||||
) (* session_choose *)
|
||||
//
|
||||
stadef choose = session_choose
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
abstype
|
||||
session_repeat
|
||||
(
|
||||
i:int, ssn:type(*body*)
|
||||
) (* session_repeat *)
|
||||
//
|
||||
stadef repeat = session_repeat
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
typedef
|
||||
session_sing
|
||||
(
|
||||
i: int
|
||||
, j: int
|
||||
, a:vt@ype
|
||||
) = cons(msg(i, j, a), nil)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
absvtype
|
||||
channel1_vtype
|
||||
(G:iset, n:int, ssn:type) = ptr
|
||||
//
|
||||
vtypedef
|
||||
channel1
|
||||
(G:iset, n:int, ssn:type) = channel1_vtype(G, n, ssn)
|
||||
//
|
||||
vtypedef
|
||||
cchannel1
|
||||
(G:iset, n:int, ssn:type) = channel1_vtype(ncomp(n, G), n, ssn)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel1_get_nrole
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
(chan: !channel1(G, n, ssn)): int(n)
|
||||
//
|
||||
fun{}
|
||||
channel1_get_group
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
(chan: !channel1(G, n, ssn)): intset(n,G)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun
|
||||
{a:vt0p}
|
||||
channel1_close
|
||||
{n:int}{ssn:type}{G:iset}(chan: channel1(G, n, nil)): void
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel1_skipin
|
||||
{a:vt0p}
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | ismbr(G, i); ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn)
|
||||
) : void // end-of-function
|
||||
praxi
|
||||
lemma_channel1_skipin
|
||||
{a:vt0p}
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | ismbr(G, i); ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn)
|
||||
) : void // lemma_channel1_skipin
|
||||
//
|
||||
fun{}
|
||||
channel1_skipex
|
||||
{a:vt0p}
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | ~ismbr(G, i); ~ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn)
|
||||
) : void // end-of-function
|
||||
praxi
|
||||
lemma_channel1_skipex
|
||||
{a:vt0p}
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | ~ismbr(G, i); ~ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn)
|
||||
) : void // lemma_channel1_skipex
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun
|
||||
{a:vt0p}
|
||||
channel1_send
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | i < n; j < n; ismbr(G, i); ~ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn), int(i), int(j), a
|
||||
) : void // end of [channel1_send]
|
||||
//
|
||||
fun
|
||||
{a:vt0p}
|
||||
channel1_recv
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | i < n; j < n; ~ismbr(G, i); ismbr(G, j)}
|
||||
(
|
||||
!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn), int(i), int(j), &a? >> a
|
||||
) : void // end of [channel1_recv]
|
||||
//
|
||||
fun
|
||||
{a:vt0p}
|
||||
channel1_recv_val
|
||||
{n:int}{ssn:type}{G:iset}
|
||||
{i,j:nat | i < n; j < n; ~ismbr(G, i); ismbr(G, j)}
|
||||
(!channel1(G, n, msg(i, j, a)::ssn) >> channel1(G, n, ssn), int(i), int(j)): (a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{}
|
||||
channel1_append
|
||||
{n:int}
|
||||
{ssn1,ssn2:type}
|
||||
{G:iset}
|
||||
(
|
||||
chan: !channel1(G, n, append(ssn1, ssn2)) >> channel1(G, n, ssn2)
|
||||
, fserv: (!channel1(G, n, ssn1) >> channel1(G, n, nil)) -<lincloptr1> void
|
||||
) : void // end of [channel1_append]
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
datatype
|
||||
choosetag
|
||||
(
|
||||
a:type, b:type, c:type
|
||||
) =
|
||||
| choosetag_l(a, b, a) of ()
|
||||
| choosetag_r(a, b, b) of ()
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel1_choose_l
|
||||
{n:int}
|
||||
{ssn1,ssn2:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, choose(i,ssn1,ssn2)) >> channel1(G, n, ssn1), i: int(i)
|
||||
) : void // end of [channel1_choose_l]
|
||||
//
|
||||
fun{}
|
||||
channel1_choose_r
|
||||
{n:int}
|
||||
{ssn1,ssn2:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, choose(i,ssn1,ssn2)) >> channel1(G, n, ssn2), i: int(i)
|
||||
) : void // end of [channel1_choose_r]
|
||||
//
|
||||
fun{}
|
||||
channel1_choose_tag
|
||||
{n:int}
|
||||
{ssn1,ssn2:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ~isnil(G); ~ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, choose(i,ssn1,ssn2)) >> channel1(G, n, ssn_chosen), i: int(i)
|
||||
) : #[ssn_chosen:type] choosetag(ssn1, ssn2, ssn_chosen)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel1_repeat_0
|
||||
{n:int}
|
||||
{ssn:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, repeat(i,ssn)) >> channel1(G, n, nil), i: int(i)
|
||||
) : void // end of [channel1_repeat_nil]
|
||||
//
|
||||
fun{}
|
||||
channel1_repeat_1
|
||||
{n:int}
|
||||
{ssn:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, repeat(i,ssn)) >> channel1(G, n, append(ssn,repeat(i,ssn))), i: int(i)
|
||||
) : void // end of [channel1_repeat_more]
|
||||
//
|
||||
fun{}
|
||||
channel1_repeat_tag
|
||||
{n:int}
|
||||
{ssn:type}
|
||||
{G:iset}
|
||||
{i:nat | i < n; ~isnil(G); ~ismbr(G, i)}
|
||||
(
|
||||
!channel1(G, n, repeat(i,ssn)) >> channel1(G, n, ssn_chosen), i: int(i)
|
||||
) : #[ssn_chosen:type] choosetag(nil, append(ssn,repeat(i,ssn)), ssn_chosen)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
(*
|
||||
//
|
||||
// HX-2015-03-06:
|
||||
// This one does not work with sschoose!!!
|
||||
//
|
||||
fun{}
|
||||
channel1_link
|
||||
{n:int}{ssn:type}
|
||||
{G1,G2:iset | isnil(G1*G2)}
|
||||
(channel1(G1, n, ssn), channel1(G2, n, ssn)): channel1(G1+G2, n, ssn)
|
||||
*)
|
||||
//
|
||||
fun{}
|
||||
channel1_link
|
||||
{n:int}{ssn:type}
|
||||
{G1,G2:iset | isful(G1+G2,n)}
|
||||
(channel1(G1, n, ssn), channel1(G2, n, ssn)): channel1(G1*G2, n, ssn)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
channel1_link_elim
|
||||
{n:int}{ssn:type}{G:iset}(channel1(G, n, ssn), cchannel1(G, n, ssn)): void
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
cchannel1_create_exn
|
||||
{n:nat}{ssn:type}{G:iset}
|
||||
(
|
||||
nrole: int(n), G: intset(n), fserv: channel1(G, n, ssn) -<lincloptr1> void
|
||||
) : cchannel1(G, n, ssn) // end of [cchannel1_create_exn]
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [basis_ssntype.sats] *)
|
||||
179  samples/ATS/csv_parse.hats  Normal file
@@ -0,0 +1,179 @@
(*
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2014 Hongwei Xi
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.)
|
||||
*)
|
||||
|
||||
// Source: https://github.com/githwxi/ATS-Postiats-contrib/blob/0f26aa0df8542d2ae21df9be1e13208f66f571d6/contrib/libats-/hwxi/teaching/mygrading/HATS/csv_parse.hats
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// Author: Hongwei Xi
|
||||
// Authoremail: gmhwxiATgmailDOTcom
|
||||
// Start time: the first of July, 2016
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#ifdef
|
||||
MYGRADING_HATS
|
||||
#then
|
||||
#else
|
||||
//
|
||||
extern
|
||||
fun
|
||||
csv_parse_line
|
||||
(
|
||||
line: string
|
||||
) : List0_vt(Strptr1)
|
||||
//
|
||||
#endif // #ifdef
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
//
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
//
|
||||
extern
|
||||
fun{}
|
||||
getpos(): int
|
||||
//
|
||||
extern
|
||||
fun{}
|
||||
is_end(): bool
|
||||
//
|
||||
extern
|
||||
fun{}
|
||||
char_at(): int
|
||||
//
|
||||
extern
|
||||
fun{}
|
||||
Strptr1_at(i0: int): Strptr1
|
||||
//
|
||||
extern
|
||||
fun{}
|
||||
rmove(): void
|
||||
extern
|
||||
fun{}
|
||||
rmove_while(test: char -<cloref1> bool): void
|
||||
//
|
||||
in (* in-of-local *)
|
||||
//
|
||||
implement
|
||||
{}(*tmp*)
|
||||
rmove_while
|
||||
(test) = let
|
||||
//
|
||||
val c0 = char_at()
|
||||
//
|
||||
in
|
||||
//
|
||||
if c0 >= 0 then
|
||||
if test(int2char0(c0)) then (rmove(); rmove_while(test)) else ()
|
||||
// end of [if]
|
||||
//
|
||||
end // end of [rmove_while]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
csv_parse_line
|
||||
(line) = let
|
||||
//
|
||||
val line = g1ofg0(line)
|
||||
//
|
||||
var i: int = 0
|
||||
val p_i = addr@i
|
||||
//
|
||||
val n0 = sz2i(length(line))
|
||||
//
|
||||
macdef get_i() = $UN.ptr0_get<int>(p_i)
|
||||
macdef inc_i() = $UN.ptr0_addby<int>(p_i, 1)
|
||||
macdef set_i(i0) = $UN.ptr0_set<int>(p_i, ,(i0))
|
||||
//
|
||||
implement
|
||||
getpos<>() = get_i()
|
||||
//
|
||||
implement
|
||||
is_end<>() = get_i() >= n0
|
||||
//
|
||||
implement
|
||||
char_at<>() = let
|
||||
val i = get_i()
|
||||
val i = ckastloc_gintGte(i, 0)
|
||||
//
|
||||
in
|
||||
if i < n0 then char2u2int0(line[i]) else ~1
|
||||
end // end of [char_at]
|
||||
//
|
||||
implement
|
||||
Strptr1_at<>(i0) = let
|
||||
//
|
||||
val i1 = get_i()
|
||||
val i0 = ckastloc_gintGte(i0, 0)
|
||||
val i1 = ckastloc_gintBtwe(i1, i0, n0)
|
||||
//
|
||||
in
|
||||
$UN.castvwtp0(
|
||||
string_make_substring(line, i2sz(i0), i2sz(i1-i0))
|
||||
) (* $UN.castvwtp0 *)
|
||||
end // end of [Strptr1_at]
|
||||
//
|
||||
implement
|
||||
rmove<>() =
|
||||
if get_i() < n0 then inc_i()
|
||||
//
|
||||
vtypedef res_vt = List0_vt(Strptr1)
|
||||
//
|
||||
fun
|
||||
loop
|
||||
(
|
||||
i: int, res: res_vt
|
||||
) : res_vt =
|
||||
if
|
||||
is_end()
|
||||
then res
|
||||
else let
|
||||
val () =
|
||||
(
|
||||
if i > 0 then rmove()
|
||||
)
|
||||
val i0 = getpos()
|
||||
var f0 =
|
||||
(
|
||||
lam@(c: char) =<clo> c != ','
|
||||
)
|
||||
val () = rmove_while($UN.cast(addr@f0))
|
||||
val s0 = Strptr1_at(i0)
|
||||
in
|
||||
loop(i+1, list_vt_cons(s0, res))
|
||||
end // end of [else]
|
||||
//
|
||||
in
|
||||
list_vt_reverse(loop(0(*i*), list_vt_nil((*void*))))
|
||||
end // end of [csv_parse_line]
|
||||
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [csv_parse.hats] *)
|
||||
694  samples/ATS/intinf_vt.dats  Normal file
@@ -0,0 +1,694 @@
(***********************************************************************)
|
||||
(* *)
|
||||
(* ATS/contrib/atshwxi *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** Copyright (C) 2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
**
|
||||
** Permission is hereby granted, free of charge, to any person obtaining a
|
||||
** copy of this software and associated documentation files (the "Software"),
|
||||
** to deal in the Software without restriction, including without limitation
|
||||
** the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
** and/or sell copies of the Software, and to permit persons to whom the
|
||||
** Software is furnished to do so, subject to the following stated conditions:
|
||||
**
|
||||
** The above copyright notice and this permission notice shall be included in
|
||||
** all copies or substantial portions of the Software.
|
||||
**
|
||||
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
** FROM OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
** IN THE SOFTWARE.
|
||||
*)
|
||||
|
||||
// Source: https://github.com/githwxi/ATS-Postiats-contrib/blob/04a984d9c08c1831f7dda8a05ce356db01f81850/contrib/libats-/hwxi/intinf/DATS/intinf_vt.dats
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// Author: Hongwei Xi
|
||||
// Authoremail: hwxi AT gmail DOT com
|
||||
// Start Time: April, 2013
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
#include
|
||||
"share/atspre_define.hats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
GMP = "{$LIBGMP}/SATS/gmp.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
vtypedef mpz = $GMP.mpz_vt0ype
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
staload "./../SATS/intinf.sats"
|
||||
staload "./../SATS/intinf_vt.sats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
macdef i2u (x) = g1int2uint_int_uint (,(x))
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
|
||||
assume
|
||||
intinf_vtype
|
||||
(i: int) = // HX: [i] is a fake
|
||||
[l:addr] (mpz @ l, mfree_gc_v (l) | ptr l)
|
||||
// end of [intinf_vtype]
|
||||
|
||||
in (* in of [local] *)
|
||||
|
||||
implement{}
|
||||
intinf_make_int
|
||||
(i) = (x) where
|
||||
{
|
||||
//
|
||||
val x = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init_set_int (!(x.2), i)
|
||||
//
|
||||
} (* end of [intinf_make_int] *)
|
||||
|
||||
implement{}
|
||||
intinf_make_uint
|
||||
(i) = (x) where
|
||||
{
|
||||
//
|
||||
val x = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init_set_uint (!(x.2), i)
|
||||
//
|
||||
} (* end of [intinf_make_uint] *)
|
||||
|
||||
implement{}
|
||||
intinf_make_lint
|
||||
(i) = (x) where
|
||||
{
|
||||
//
|
||||
val x = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init_set_lint (!(x.2), i)
|
||||
//
|
||||
} (* end of [intinf_make_lint] *)
|
||||
|
||||
implement{}
|
||||
intinf_make_ulint
|
||||
(i) = (x) where
|
||||
{
|
||||
//
|
||||
val x = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init_set_ulint (!(x.2), i)
|
||||
//
|
||||
} (* end of [intinf_make_ulint] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
intinf_free (x) = let
|
||||
val (pfat, pfgc | p) = x
|
||||
val () = $GMP.mpz_clear (!p) in ptr_free (pfgc, pfat | p)
|
||||
end (* end of [intinf_free] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
intinf_get_int (x) = $GMP.mpz_get_int (!(x.2))
|
||||
implement{}
|
||||
intinf_get_lint (x) = $GMP.mpz_get_lint (!(x.2))
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
intinf_get_strptr
|
||||
(x, base) = $GMP.mpz_get_str_null (base, !(x.2))
|
||||
// end of [intinf_get_strptr]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
fprint_intinf_base
|
||||
(out, x, base) = let
|
||||
val nsz = $GMP.mpz_out_str (out, base, !(x.2))
|
||||
in
|
||||
//
|
||||
if (nsz = 0) then
|
||||
exit_errmsg (1, "libgmp/gmp: fprint_intinf_base")
|
||||
// end of [if]
|
||||
//
|
||||
end (* fprint_intinf_base *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} neg_intinf0
|
||||
(x) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_neg (!(x.2))
|
||||
//
|
||||
} (* end of [neg_intinf0] *)
|
||||
|
||||
implement{
|
||||
} neg_intinf1
|
||||
(x) = (y) where
|
||||
{
|
||||
//
|
||||
val y = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(y.2))
|
||||
val () = $GMP.mpz_neg (!(y.2), !(x.2))
|
||||
//
|
||||
} (* end of [neg_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} abs_intinf0
|
||||
(x) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_abs (!(x.2))
|
||||
//
|
||||
} (* end of [abs_intinf0] *)
|
||||
|
||||
implement{
|
||||
} abs_intinf1
|
||||
(x) = (y) where
|
||||
{
|
||||
//
|
||||
val y = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(y.2))
|
||||
val () = $GMP.mpz_abs (!(y.2), !(x.2))
|
||||
//
|
||||
} (* end of [abs_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
succ_intinf0 (x) = add_intinf0_int (x, 1)
|
||||
implement{}
|
||||
succ_intinf1 (x) = add_intinf1_int (x, 1)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
pred_intinf0 (x) = sub_intinf0_int (x, 1)
|
||||
implement{}
|
||||
pred_intinf1 (x) = sub_intinf1_int (x, 1)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
add_intinf0_int
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_add2_int (!(x.2), y)
|
||||
//
|
||||
} (* end of [add_intinf0_int] *)
|
||||
|
||||
implement{}
|
||||
add_intinf1_int
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_add3_int (!(z.2), !(x.2), y)
|
||||
//
|
||||
} (* end of [add_intinf1_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
add_int_intinf0 (x, y) = add_intinf0_int (y, x)
|
||||
implement{}
|
||||
add_int_intinf1 (x, y) = add_intinf1_int (y, x)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
add_intinf0_intinf1
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_add2_mpz (!(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [add_intinf0_intinf1] *)
|
||||
|
||||
implement{}
|
||||
add_intinf1_intinf0
|
||||
(x, y) = (y) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_add2_mpz (!(y.2), !(x.2))
|
||||
//
|
||||
} (* end of [add_intinf1_intinf0] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
add_intinf1_intinf1
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_add3_mpz (!(z.2), !(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [add_intinf1_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
sub_intinf0_int
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_sub2_int (!(x.2), y)
|
||||
//
|
||||
} (* end of [sub_intinf0_int] *)
|
||||
|
||||
implement{}
|
||||
sub_intinf1_int
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_sub3_int (!(z.2), !(x.2), y)
|
||||
//
|
||||
} (* end of [sub_intinf1_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
sub_int_intinf0 (x, y) = let
|
||||
val z = sub_intinf0_int (y, x) in neg_intinf0 (z)
|
||||
end (* end of [sub_int_intinf0] *)
|
||||
|
||||
implement{}
|
||||
sub_int_intinf1 (x, y) = let
|
||||
val z = sub_intinf1_int (y, x) in neg_intinf0 (z)
|
||||
end (* end of [sub_int_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
sub_intinf0_intinf1
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_sub2_mpz (!(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [sub_intinf0_intinf1] *)
|
||||
|
||||
implement{}
|
||||
sub_intinf1_intinf0
|
||||
(x, y) = neg_intinf0 (sub_intinf0_intinf1 (y, x))
|
||||
// end of [sub_intinf1_intinf0]
|
||||
|
||||
implement{}
|
||||
sub_intinf1_intinf1
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_sub3_mpz (!(z.2), !(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [sub_intinf1_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
mul_intinf0_int
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_mul2_int (!(x.2), y)
|
||||
//
|
||||
} (* end of [mul_intinf0_int] *)
|
||||
|
||||
implement{}
|
||||
mul_intinf1_int
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_mul3_int (!(z.2), !(x.2), y)
|
||||
//
|
||||
} (* end of [mul_intinf1_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
mul_int_intinf0 (x, y) = mul_intinf0_int (y, x)
|
||||
implement{}
|
||||
mul_int_intinf1 (x, y) = mul_intinf1_int (y, x)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
mul_intinf0_intinf1
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_mul2_mpz (!(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [mul_intinf0_intinf1] *)
|
||||
|
||||
implement{}
|
||||
mul_intinf1_intinf0
|
||||
(x, y) = (y) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_mul2_mpz (!(y.2), !(x.2))
|
||||
//
|
||||
} (* end of [mul_intinf0_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
mul_intinf1_intinf1
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_mul3_mpz (!(z.2), !(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [mul_intinf1_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
div_intinf0_int
|
||||
{i,j} (x, y) = let
|
||||
in
|
||||
//
|
||||
if y >= 0 then let
|
||||
val () = $GMP.mpz_tdiv2_q_uint (!(x.2), i2u(y)) in x
|
||||
end else let
|
||||
val () = $GMP.mpz_tdiv2_q_uint (!(x.2), i2u(~y)) in neg_intinf0 (x)
|
||||
end // end of [if]
|
||||
//
|
||||
end (* end of [div_intinf0_int] *)
|
||||
|
||||
implement{}
|
||||
div_intinf1_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
//
|
||||
in
|
||||
//
|
||||
if y >= 0 then let
|
||||
val () = $GMP.mpz_tdiv3_q_uint (!(z.2), !(x.2), i2u(y)) in z
|
||||
end else let
|
||||
val () = $GMP.mpz_tdiv3_q_uint (!(z.2), !(x.2), i2u(~y)) in neg_intinf0 (z)
|
||||
end // end of [if]
|
||||
//
|
||||
end (* end of [div_intinf1_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
div_intinf0_intinf1
|
||||
(x, y) = (x) where
|
||||
{
|
||||
//
|
||||
val () = $GMP.mpz_tdiv2_q_mpz (!(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [div_intinf0_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
div_intinf1_intinf1
|
||||
(x, y) = (z) where
|
||||
{
|
||||
//
|
||||
val z = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(z.2))
|
||||
val () = $GMP.mpz_tdiv3_q_mpz (!(z.2), !(x.2), !(y.2))
|
||||
//
|
||||
} (* end of [div_intinf1_intinf1] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
ndiv_intinf0_int (x, y) = div_intinf0_int (x, y)
|
||||
implement{}
|
||||
ndiv_intinf1_int (x, y) = div_intinf1_int (x, y)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
nmod_intinf0_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val r =
|
||||
$GMP.mpz_fdiv_uint (!(x.2), i2u(y))
|
||||
val () = intinf_free (x)
|
||||
//
|
||||
in
|
||||
$UN.cast{intBtw(0,j)}(r)
|
||||
end (* end of [nmod_intinf0_int] *)
|
||||
|
||||
implement{}
|
||||
nmod_intinf1_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val r = $GMP.mpz_fdiv_uint (!(x.2), i2u(y))
|
||||
//
|
||||
in
|
||||
$UN.cast{intBtw(0,j)}(r)
|
||||
end (* end of [nmod_intinf1_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// comparison-functions
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
lt_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn < 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i < j)}(sgn)
|
||||
end // end of [lt_intinf_int]
|
||||
|
||||
implement{}
|
||||
lt_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn < 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i < j)}(sgn)
|
||||
end // end of [lt_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
lte_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn <= 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i <= j)}(sgn)
|
||||
end // end of [lte_intinf_int]
|
||||
|
||||
implement{}
|
||||
lte_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn <= 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i <= j)}(sgn)
|
||||
end // end of [lte_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
gt_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn > 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i > j)}(sgn)
|
||||
end // end of [gt_intinf_int]
|
||||
|
||||
implement{}
|
||||
gt_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn > 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i > j)}(sgn)
|
||||
end // end of [gt_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
gte_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn >= 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i >= j)}(sgn)
|
||||
end // end of [gte_intinf_int]
|
||||
|
||||
implement{}
|
||||
gte_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn >= 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i >= j)}(sgn)
|
||||
end // end of [gte_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
eq_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn = 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i == j)}(sgn)
|
||||
end // end of [eq_intinf_int]
|
||||
|
||||
implement{}
|
||||
eq_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn = 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i == j)}(sgn)
|
||||
end // end of [eq_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
neq_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val ans = (if sgn != 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i != j)}(sgn)
|
||||
end // end of [neq_intinf_int]
|
||||
|
||||
implement{}
|
||||
neq_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val ans = (if sgn != 0 then true else false): bool
|
||||
//
|
||||
in
|
||||
$UN.cast{bool(i != j)}(sgn)
|
||||
end // end of [neq_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
compare_intinf_int
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(x.2), y)
|
||||
val sgn = (if sgn < 0 then ~1 else (if sgn > 0 then 1 else 0)): int
|
||||
//
|
||||
in
|
||||
$UN.cast{int(sgn(i-j))}(sgn)
|
||||
end // end of [compare_intinf_int]
|
||||
|
||||
implement{}
|
||||
compare_int_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_int (!(y.2), x)
|
||||
val sgn = (if sgn > 0 then ~1 else (if sgn < 0 then 1 else 0)): int
|
||||
//
|
||||
in
|
||||
$UN.cast{int(sgn(i-j))}(sgn)
|
||||
end // end of [compare_int_intinf]
|
||||
|
||||
implement{}
|
||||
compare_intinf_intinf
|
||||
{i,j} (x, y) = let
|
||||
//
|
||||
val sgn = $GMP.mpz_cmp_mpz (!(x.2), !(y.2))
|
||||
val sgn = (if sgn < 0 then ~1 else (if sgn > 0 then 1 else 0)): int
|
||||
//
|
||||
in
|
||||
$UN.cast{int(sgn(i-j))}(sgn)
|
||||
end // end of [compare_intinf_intinf]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
pow_intinf_int
|
||||
(base, exp) = r where
|
||||
{
|
||||
//
|
||||
val r = ptr_alloc<mpz> ()
|
||||
val () = $GMP.mpz_init (!(r.2))
|
||||
val () = $GMP.mpz_pow_uint (!(r.2), !(base.2), i2u(exp))
|
||||
//
|
||||
} (* end of [pow_intinf_int] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
print_intinf (x) = fprint_intinf (stdout_ref, x)
|
||||
implement{}
|
||||
prerr_intinf (x) = fprint_intinf (stderr_ref, x)
|
||||
implement{}
|
||||
fprint_intinf (out, x) = fprint_intinf_base (out, x, 10(*base*))
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [intinf_vt.dats] *)
|
||||
@@ -1,187 +0,0 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* Author: Hongwei Xi *)
|
||||
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
|
||||
(* Start time: December, 2012 *)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: shared by linset_listord (* ordered list *)
|
||||
// HX: shared by linset_avltree (* AVL-tree-based *)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-02:
|
||||
// for sets of nonlinear elements
|
||||
//
|
||||
absvtype set_vtype (a:t@ype+) = ptr
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
vtypedef set (a:t0p) = set_vtype (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
compare_elt_elt (x1: a, x2: a):<> int
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{} linset_nil{a:t0p} ():<> set(a)
|
||||
fun{} linset_make_nil{a:t0p} ():<> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p} linset_sing (x: a):<!wrt> set(a)
|
||||
fun{a:t0p} linset_make_sing (x: a):<!wrt> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_make_list (xs: List(INV(a))):<!wrt> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{}
|
||||
linset_is_nil {a:t0p} (xs: !set(INV(a))):<> bool
|
||||
fun{}
|
||||
linset_isnot_nil {a:t0p} (xs: !set(INV(a))):<> bool
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p} linset_size (!set(INV(a))): size_t
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_is_member (xs: !set(INV(a)), x0: a):<> bool
|
||||
fun{a:t0p}
|
||||
linset_isnot_member (xs: !set(INV(a)), x0: a):<> bool
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_copy (!set(INV(a))):<!wrt> set(a)
|
||||
fun{a:t0p}
|
||||
linset_free (xs: set(INV(a))):<!wrt> void
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_insert
|
||||
(xs: &set(INV(a)) >> _, x0: a):<!wrt> bool
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_takeout
|
||||
(
|
||||
&set(INV(a)) >> _, a, res: &(a?) >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool(b) // endfun
|
||||
fun{a:t0p}
|
||||
linset_takeout_opt (&set(INV(a)) >> _, a):<!wrt> Option_vt(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_remove
|
||||
(xs: &set(INV(a)) >> _, x0: a):<!wrt> bool
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: choosing an element in an unspecified manner
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_choose
|
||||
(
|
||||
xs: !set(INV(a)), x: &a? >> opt (a, b)
|
||||
) :<!wrt> #[b:bool] bool(b)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_choose_opt (xs: !set(INV(a))):<!wrt> Option_vt(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_takeoutmax
|
||||
(
|
||||
xs: &set(INV(a)) >> _, res: &a? >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool (b)
|
||||
fun{a:t0p}
|
||||
linset_takeoutmax_opt (xs: &set(INV(a)) >> _):<!wrt> Option_vt(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_takeoutmin
|
||||
(
|
||||
xs: &set(INV(a)) >> _, res: &a? >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool (b)
|
||||
fun{a:t0p}
|
||||
linset_takeoutmin_opt (xs: &set(INV(a)) >> _):<!wrt> Option_vt(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
fprint_linset$sep (FILEref): void // ", "
|
||||
//
|
||||
fun{a:t0p}
|
||||
fprint_linset (out: FILEref, xs: !set(INV(a))): void
|
||||
//
|
||||
overload fprint with fprint_linset
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{
|
||||
a:t0p}{env:vt0p
|
||||
} linset_foreach$fwork
|
||||
(x: a, env: &(env) >> _): void
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_foreach (set: !set(INV(a))): void
|
||||
fun{
|
||||
a:t0p}{env:vt0p
|
||||
} linset_foreach_env
|
||||
(set: !set(INV(a)), env: &(env) >> _): void
|
||||
// end of [linset_foreach_env]
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_listize (xs: set(INV(a))): List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_listize1 (xs: !set(INV(a))): List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset.hats] *)
|
||||
@@ -1,504 +0,0 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* Author: Hongwei Xi *)
|
||||
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
|
||||
(* Start time: February, 2013 *)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-08:
|
||||
// a set is represented as a sorted list in descending order;
|
||||
// note that descending order is chosen to faciliate set comparison
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "libats/SATS/linset_listord.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
#include "./SHARE/linset.hats" // code reuse
|
||||
#include "./SHARE/linset_node.hats" // code reuse
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
assume
|
||||
set_vtype (elt:t@ype) = List0_vt (elt)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
linset_nil () = list_vt_nil ()
|
||||
implement{}
|
||||
linset_make_nil () = list_vt_nil ()
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_sing
|
||||
(x) = list_vt_cons{a}(x, list_vt_nil)
|
||||
// end of [linset_sing]
|
||||
implement{a}
|
||||
linset_make_sing
|
||||
(x) = list_vt_cons{a}(x, list_vt_nil)
|
||||
// end of [linset_make_sing]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
linset_is_nil (xs) = list_vt_is_nil (xs)
|
||||
implement{}
|
||||
linset_isnot_nil (xs) = list_vt_is_cons (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_size (xs) =
|
||||
let val n = list_vt_length(xs) in i2sz(n) end
|
||||
// end of [linset_size]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_is_member
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun aux
|
||||
{n:nat} .<n>.
|
||||
(
|
||||
xs: !list_vt (a, n)
|
||||
) :<> bool = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons (x, xs) => let
|
||||
val sgn = compare_elt_elt<a> (x0, x) in
|
||||
if sgn > 0 then false else (if sgn < 0 then aux (xs) else true)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil ((*void*)) => false
|
||||
//
|
||||
end // end of [aux]
|
||||
//
|
||||
in
|
||||
aux (xs)
|
||||
end // end of [linset_is_member]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_copy (xs) = list_vt_copy<a> (xs)
|
||||
implement{a}
|
||||
linset_free (xs) = list_vt_free<a> (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_insert
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun
|
||||
mynode_cons
|
||||
{n:nat} .<>.
|
||||
(
|
||||
nx: mynode1 (a), xs: list_vt (a, n)
|
||||
) : list_vt (a, n+1) = let
|
||||
//
|
||||
val xs1 =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
val+@list_vt_cons (_, xs2) = xs1
|
||||
prval () = $UN.cast2void (xs2); val () = (xs2 := xs)
|
||||
//
|
||||
in
|
||||
fold@ (xs1); xs1
|
||||
end // end of [mynode_cons]
|
||||
//
|
||||
fun ins
|
||||
{n:nat} .<n>. // tail-recursive
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n1)
|
||||
) : #[n1:nat | n <= n1; n1 <= n+1] bool =
|
||||
(
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
val nx = mynode_make_elt<a> (x0)
|
||||
val ((*void*)) = xs := mynode_cons (nx, xs)
|
||||
in
|
||||
false
|
||||
end else if sgn < 0 then let
|
||||
val ans = ins (xs1)
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
ans
|
||||
end else let // [x0] is found
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
true (* [x0] in [xs] *)
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
val nx = mynode_make_elt<a> (x0)
|
||||
val ((*void*)) = xs := mynode_cons (nx, xs)
|
||||
in
|
||||
false
|
||||
end // end of [list_vt_nil]
|
||||
) (* end of [ins] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (ins (xs))
|
||||
end // end of [linset_insert]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(*
|
||||
//
|
||||
HX-2013-08:
|
||||
[linset_remove] moved up
|
||||
//
|
||||
implement{a}
|
||||
linset_remove
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun rem
|
||||
{n:nat} .<n>. // tail-recursive
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n1)
|
||||
) : #[n1:nat | n1 <= n; n <= n1+1] bool =
|
||||
(
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
false
|
||||
end else if sgn < 0 then let
|
||||
val ans = rem (xs1)
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
ans
|
||||
end else let // x0 = x
|
||||
val xs1_ = xs1
|
||||
val ((*void*)) = free@{a}{0}(xs)
|
||||
val () = xs := xs1_
|
||||
in
|
||||
true // [x0] in [xs]
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => false
|
||||
) (* end of [rem] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (rem (xs))
|
||||
end // end of [linset_remove]
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
(*
|
||||
** By Brandon Barker
|
||||
*)
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_choose
|
||||
(xs, x0) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons
|
||||
(x, xs1) => let
|
||||
val () = x0 := x
|
||||
prval () = opt_some{a}(x0)
|
||||
in
|
||||
true
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
prval () = opt_none{a}(x0)
|
||||
in
|
||||
false
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [linset_choose]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}{env}
|
||||
linset_foreach_env (xs, env) = let
|
||||
//
|
||||
implement
|
||||
list_vt_foreach$fwork<a><env>
|
||||
(x, env) = linset_foreach$fwork<a><env> (x, env)
|
||||
//
|
||||
in
|
||||
list_vt_foreach_env<a><env> (xs, env)
|
||||
end // end of [linset_foreach_env]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_listize (xs) = xs
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_listize1 (xs) = list_vt_copy (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: functions for processing mynodes
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} mynode_null{a} () =
|
||||
$UN.castvwtp0{mynode(a,null)}(the_null_ptr)
|
||||
// end of [mynode_null]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_make_elt
|
||||
(x) = let
|
||||
//
|
||||
val nx = list_vt_cons{a}{0}(x, _ )
|
||||
//
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}(nx)
|
||||
end // end of [mynode_make_elt]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} mynode_free
|
||||
{a}(nx) = () where {
|
||||
val nx =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
//
|
||||
val+~list_vt_cons (_, nx2) = nx
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx2)
|
||||
//
|
||||
} (* end of [mynode_free] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_get_elt
|
||||
(nx) = (x) where {
|
||||
//
|
||||
val nx1 =
|
||||
$UN.castvwtp1{List1_vt(a)}(nx)
|
||||
//
|
||||
val+list_vt_cons (x, _) = nx1
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx1)
|
||||
//
|
||||
} (* end of [mynode_get_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_set_elt
|
||||
{l} (nx, x0) =
|
||||
{
|
||||
//
|
||||
val nx1 =
|
||||
$UN.castvwtp1{List1_vt(a)}(nx)
|
||||
//
|
||||
val+@list_vt_cons (x, _) = nx1
|
||||
//
|
||||
val () = x := x0
|
||||
//
|
||||
prval () = fold@ (nx1)
|
||||
prval () = $UN.cast2void (nx1)
|
||||
//
|
||||
prval () = __assert (nx) where
|
||||
{
|
||||
extern praxi __assert (nx: !mynode(a?, l) >> mynode (a, l)): void
|
||||
} (* end of [prval] *)
|
||||
//
|
||||
} (* end of [mynode_set_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_getfree_elt
|
||||
(nx) = (x) where {
|
||||
//
|
||||
val nx =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
//
|
||||
val+~list_vt_cons (x, nx2) = nx
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx2)
|
||||
//
|
||||
} (* end of [mynode_getfree_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(*
|
||||
fun{a:t0p}
|
||||
linset_takeout_ngc
|
||||
(set: &set(INV(a)) >> _, x0: a):<!wrt> mynode0 (a)
|
||||
// end of [linset_takeout_ngc]
|
||||
*)
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeout_ngc
|
||||
(set, x0) = let
|
||||
//
|
||||
fun takeout
|
||||
(
|
||||
xs: &List0_vt (a) >> _
|
||||
) : mynode0(a) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
prval pf_x = view@x
|
||||
prval pf_xs1 = view@xs1
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
mynode_null{a}((*void*))
|
||||
end else if sgn < 0 then let
|
||||
val res = takeout (xs1)
|
||||
prval ((*void*)) = fold@ (xs)
|
||||
in
|
||||
res
|
||||
end else let // x0 = x
|
||||
val xs1_ = xs1
|
||||
val res = $UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs))
|
||||
val () = xs := xs1_
|
||||
in
|
||||
res // [x0] in [xs]
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => mynode_null{a}((*void*))
|
||||
//
|
||||
end (* end of [takeout] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (takeout (set))
|
||||
end // end of [linset_takeout_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeoutmax_ngc
|
||||
(xs) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
prval pf_x = view@x
|
||||
prval pf_xs1 = view@xs1
|
||||
val xs_ = xs
|
||||
val () = xs := xs1
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs_))
|
||||
end // end of [list_vt_cons]
|
||||
| @list_vt_nil () => let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
mynode_null{a}((*void*))
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [linset_takeoutmax_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeoutmin_ngc
|
||||
(xs) = let
|
||||
//
|
||||
fun unsnoc
|
||||
{n:pos} .<n>.
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n-1)
|
||||
) :<!wrt> mynode1 (a) = let
|
||||
//
|
||||
val+@list_vt_cons (x, xs1) = xs
|
||||
//
|
||||
prval pf_x = view@x and pf_xs1 = view@xs1
|
||||
//
|
||||
in
|
||||
//
|
||||
case+ xs1 of
|
||||
| list_vt_cons _ =>
|
||||
let val res = unsnoc(xs1) in fold@xs; res end
|
||||
// end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
val xs_ = xs
|
||||
val () = xs := list_vt_nil{a}()
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs_))
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [unsnoc]
|
||||
//
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons _ => unsnoc (xs)
|
||||
| list_vt_nil () => mynode_null{a}((*void*))
|
||||
//
|
||||
end // end of [linset_takeoutmin_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset_listord.dats] *)
|
||||
@@ -1,51 +0,0 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// Author: Hongwei Xi
|
||||
// Authoremail: hwxiATcsDOTbuDOTedu
|
||||
// Time: October, 2010
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
#define ATS_PACKNAME "ATSLIB.libats.linset_listord"
|
||||
#define ATS_STALOADFLAG 0 // no static loading at run-time
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
#include "./SHARE/linset.hats"
|
||||
#include "./SHARE/linset_node.hats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
castfn
|
||||
linset2list {a:t0p} (xs: set (INV(a))):<> List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset_listord.sats] *)
|
||||
70  samples/Alpine Abuild/filenames/APKBUILD  Normal file
@@ -0,0 +1,70 @@
# Contributor: Natanael Copa <ncopa@alpinelinux.org>
# Maintainer: Natanael Copa <ncopa@alpinelinux.org>
pkgname=abuild
pkgver=2.27.0
_ver=${pkgver%_git*}
pkgrel=0
pkgdesc="Script to build Alpine Packages"
url="http://git.alpinelinux.org/cgit/abuild/"
arch="all"
license="GPL2"
depends="fakeroot sudo pax-utils openssl apk-tools>=2.0.7-r1 libc-utils
	attr tar pkgconf patch"
if [ "$CBUILD" = "$CHOST" ]; then
	depends="$depends curl"
fi
makedepends_build="pkgconfig"
makedepends_host="openssl-dev"
makedepends="$makedepends_host $makedepends_build"
install="$pkgname.pre-install $pkgname.pre-upgrade"
subpackages="apkbuild-cpan:cpan apkbuild-gem-resolver:gems"
options="suid"
pkggroups="abuild"
source="http://dev.alpinelinux.org/archive/abuild/abuild-$_ver.tar.xz
	"

_builddir="$srcdir/$pkgname-$_ver"
prepare() {
	cd "$_builddir"
	for i in $source; do
		case $i in
		*.patch)
			msg "Applying $i"
			patch -p1 -i "$srcdir"/$i || return 1
			;;
		esac
	done
	sed -i -e "/^CHOST=/s/=.*/=$CHOST/" abuild.conf
}

build() {
	cd "$_builddir"
	make || return 1
}

package() {
	cd "$_builddir"
	make install DESTDIR="$pkgdir" || return 1
	install -m 644 abuild.conf "$pkgdir"/etc/abuild.conf || return 1
	install -d -m 775 -g abuild "$pkgdir"/var/cache/distfiles || return 1
}

cpan() {
	pkgdesc="Script to generate perl APKBUILD from CPAN"
	depends="perl perl-libwww perl-json"
	arch="noarch"
	mkdir -p "$subpkgdir"/usr/bin
	mv "$pkgdir"/usr/bin/apkbuild-cpan "$subpkgdir"/usr/bin/
}

gems() {
	pkgdesc="APKBUILD dependency resolver for RubyGems"
	depends="ruby ruby-augeas"
	arch="noarch"
	mkdir -p "$subpkgdir"/usr/bin
	mv "$pkgdir"/usr/bin/apkbuild-gem-resolver "$subpkgdir"/usr/bin/
}

md5sums="c67e4c971c54b4d550e16db3ba331f96 abuild-2.27.0.tar.xz"
sha256sums="c8db017e3dd168edb20ceeb91971535cf66b8c95f29d3288f88ac755bffc60e5 abuild-2.27.0.tar.xz"
sha512sums="98e1da4e47f3ab68700b3bc992c83e103f770f3196e433788ee74145f57cd33e5239c87f0a7a15f7266840d5bad893fc8c0d4c826d663df53deaee2678c56984 abuild-2.27.0.tar.xz"
File diff suppressed because it is too large
55
samples/Arduino/octave_changer.ino
Normal file
@@ -0,0 +1,55 @@
|
||||
const int buttons[4] = {2,3,4,5};
|
||||
const int octaves[2] = {6,7};
|
||||
|
||||
void setup() {
|
||||
// initialize the digital pin as an output.
|
||||
// Pin 13 has an LED connected on most Arduino boards:
|
||||
|
||||
pinMode(13,OUTPUT);
|
||||
|
||||
for(int i =0;i<sizeof(buttons)/sizeof(int);i++){
|
||||
pinMode(buttons[i],INPUT );
|
||||
}
|
||||
|
||||
for(int i =0;i<sizeof(octaves)/sizeof(int);i++){
|
||||
pinMode(octaves[i],INPUT );
|
||||
}
|
||||
|
||||
Serial.begin(9600);
|
||||
}
|
||||
|
||||
|
||||
void loop() {
|
||||
delay(1); // wait
|
||||
int output = -1;
|
||||
|
||||
// Serial.print(digitalRead(buttons[0]));
|
||||
|
||||
for(int i =0;i<sizeof(buttons)/sizeof(int);i++){
|
||||
if(digitalRead(buttons[i])==LOW
|
||||
){
|
||||
if(output<=0){
|
||||
output=1;
|
||||
}
|
||||
output+=i+1;
|
||||
}
|
||||
}
|
||||
|
||||
for(int i =0;i<sizeof(octaves)/sizeof(int);i++){
|
||||
if(output<=0){
|
||||
break;
|
||||
}
|
||||
if(digitalRead(octaves[i])==LOW
|
||||
){
|
||||
output*=7*(i==1 ? -1 : 1);
|
||||
}
|
||||
}
|
||||
if(output>=0){
|
||||
Serial.print(output);
|
||||
Serial.println(";");
|
||||
digitalWrite(13,HIGH);
|
||||
}else{
|
||||
digitalWrite(13,LOW);
|
||||
}
|
||||
|
||||
}
|
||||
21
samples/Blade/hello.blade
Normal file
@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
<head>
<title>@yield('title', 'We love GitHub')</title>
@stack('scripts')
@stack('styles')
</head>
<body>
@include('partials.nav')

@yield('content')

<ul>
@foreach($foo as $bar)
<li>{{ $bar }}</li>
@endforeach
</ul>

{!! $raw_content !!}
</body>
</html>
21
samples/Blade/hello.blade.php
Normal file
@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
<head>
<title>@yield('title', 'We love GitHub')</title>
@stack('scripts')
@stack('styles')
</head>
<body>
@include('partials.nav')

@yield('content')

<ul>
@foreach($foo as $bar)
<li>{{ $bar }}</li>
@endforeach
</ul>

{!! $raw_content !!}
</body>
</html>
1178
samples/C++/PackageInfoParser.cpp
Normal file
File diff suppressed because it is too large
File diff suppressed because it is too large
46
samples/C++/bug1163046.--skeleton.re
Normal file
@@ -0,0 +1,46 @@
|
||||
#include <iostream>
|
||||
|
||||
#define YYCTYPE unsigned char
|
||||
#define YYCURSOR cursor
|
||||
#define YYLIMIT cursor
|
||||
#define YYMARKER marker
|
||||
#define YYFILL(n)
|
||||
|
||||
bool scan(const char *text)
|
||||
{
|
||||
YYCTYPE *start = (YYCTYPE *)text;
|
||||
YYCTYPE *cursor = (YYCTYPE *)text;
|
||||
YYCTYPE *marker = (YYCTYPE *)text;
|
||||
next:
|
||||
YYCTYPE *token = cursor;
|
||||
/*!re2c
|
||||
'(This file must be converted with BinHex 4.0)'
|
||||
{
|
||||
if (token == start || *(token - 1) == '\n')
|
||||
return true; else goto next;
|
||||
}
|
||||
[\001-\377]
|
||||
{ goto next; }
|
||||
[\000]
|
||||
{ return false; }
|
||||
*/
|
||||
return false;
|
||||
}
|
||||
|
||||
#define do_scan(str, expect) \
|
||||
res = scan(str) == expect ? 0 : 1; \
|
||||
std::cerr << str << "\t-\t" << (res ? "fail" : "ok") << std::endl; \
|
||||
result += res
|
||||
|
||||
/*!max:re2c */
|
||||
|
||||
int main(int,void**)
|
||||
{
|
||||
int res, result = 0;
|
||||
do_scan("(This file must be converted with BinHex 4.0)", 1);
|
||||
do_scan("x(This file must be converted with BinHex 4.0)", 0);
|
||||
do_scan("(This file must be converted with BinHex 4.0)x", 1);
|
||||
do_scan("x(This file must be converted with BinHex 4.0)x", 0);
|
||||
|
||||
return result;
|
||||
}
|
||||
239
samples/C++/cnokw.re
Normal file
@@ -0,0 +1,239 @@
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
#define ADDEQ 257
|
||||
#define ANDAND 258
|
||||
#define ANDEQ 259
|
||||
#define ARRAY 260
|
||||
#define ASM 261
|
||||
#define AUTO 262
|
||||
#define BREAK 263
|
||||
#define CASE 264
|
||||
#define CHAR 265
|
||||
#define CONST 266
|
||||
#define CONTINUE 267
|
||||
#define DECR 268
|
||||
#define DEFAULT 269
|
||||
#define DEREF 270
|
||||
#define DIVEQ 271
|
||||
#define DO 272
|
||||
#define DOUBLE 273
|
||||
#define ELLIPSIS 274
|
||||
#define ELSE 275
|
||||
#define ENUM 276
|
||||
#define EQL 277
|
||||
#define EXTERN 278
|
||||
#define FCON 279
|
||||
#define FLOAT 280
|
||||
#define FOR 281
|
||||
#define FUNCTION 282
|
||||
#define GEQ 283
|
||||
#define GOTO 284
|
||||
#define ICON 285
|
||||
#define ID 286
|
||||
#define IF 287
|
||||
#define INCR 288
|
||||
#define INT 289
|
||||
#define LEQ 290
|
||||
#define LONG 291
|
||||
#define LSHIFT 292
|
||||
#define LSHIFTEQ 293
|
||||
#define MODEQ 294
|
||||
#define MULEQ 295
|
||||
#define NEQ 296
|
||||
#define OREQ 297
|
||||
#define OROR 298
|
||||
#define POINTER 299
|
||||
#define REGISTER 300
|
||||
#define RETURN 301
|
||||
#define RSHIFT 302
|
||||
#define RSHIFTEQ 303
|
||||
#define SCON 304
|
||||
#define SHORT 305
|
||||
#define SIGNED 306
|
||||
#define SIZEOF 307
|
||||
#define STATIC 308
|
||||
#define STRUCT 309
|
||||
#define SUBEQ 310
|
||||
#define SWITCH 311
|
||||
#define TYPEDEF 312
|
||||
#define UNION 313
|
||||
#define UNSIGNED 314
|
||||
#define VOID 315
|
||||
#define VOLATILE 316
|
||||
#define WHILE 317
|
||||
#define XOREQ 318
|
||||
#define EOI 319
|
||||
|
||||
typedef unsigned int uint;
|
||||
typedef unsigned char uchar;
|
||||
|
||||
#define BSIZE 8192
|
||||
|
||||
#define YYCTYPE uchar
|
||||
#define YYCURSOR cursor
|
||||
#define YYLIMIT s->lim
|
||||
#define YYMARKER s->ptr
|
||||
#define YYFILL(n) {cursor = fill(s, cursor);}
|
||||
|
||||
#define RET(i) {s->cur = cursor; return i;}
|
||||
|
||||
typedef struct Scanner {
|
||||
int fd;
|
||||
uchar *bot, *tok, *ptr, *cur, *pos, *lim, *top, *eof;
|
||||
uint line;
|
||||
} Scanner;
|
||||
|
||||
uchar *fill(Scanner *s, uchar *cursor){
|
||||
if(!s->eof){
|
||||
uint cnt = s->tok - s->bot;
|
||||
if(cnt){
|
||||
memcpy(s->bot, s->tok, s->lim - s->tok);
|
||||
s->tok = s->bot;
|
||||
s->ptr -= cnt;
|
||||
cursor -= cnt;
|
||||
s->pos -= cnt;
|
||||
s->lim -= cnt;
|
||||
}
|
||||
if((s->top - s->lim) < BSIZE){
|
||||
uchar *buf = (uchar*) malloc(((s->lim - s->bot) + BSIZE)*sizeof(uchar));
|
||||
memcpy(buf, s->tok, s->lim - s->tok);
|
||||
s->tok = buf;
|
||||
s->ptr = &buf[s->ptr - s->bot];
|
||||
cursor = &buf[cursor - s->bot];
|
||||
s->pos = &buf[s->pos - s->bot];
|
||||
s->lim = &buf[s->lim - s->bot];
|
||||
s->top = &s->lim[BSIZE];
|
||||
free(s->bot);
|
||||
s->bot = buf;
|
||||
}
|
||||
if((cnt = read(s->fd, (char*) s->lim, BSIZE)) != BSIZE){
|
||||
s->eof = &s->lim[cnt]; *(s->eof)++ = '\n';
|
||||
}
|
||||
s->lim += cnt;
|
||||
}
|
||||
return cursor;
|
||||
}
|
||||
|
||||
int scan(Scanner *s){
|
||||
uchar *cursor = s->cur;
|
||||
std:
|
||||
s->tok = cursor;
|
||||
/*!re2c
|
||||
any = [\000-\377];
|
||||
O = [0-7];
|
||||
D = [0-9];
|
||||
L = [a-zA-Z_];
|
||||
H = [a-fA-F0-9];
|
||||
E = [Ee] [+-]? D+;
|
||||
FS = [fFlL];
|
||||
IS = [uUlL]*;
|
||||
ESC = [\\] ([abfnrtv?'"\\] | "x" H+ | O+);
|
||||
*/
|
||||
|
||||
/*!re2c
|
||||
"/*" { goto comment; }
|
||||
|
||||
L (L|D)* { RET(ID); }
|
||||
|
||||
("0" [xX] H+ IS?) | ("0" D+ IS?) | (D+ IS?) |
|
||||
(['] (ESC|any\[\n\\'])* ['])
|
||||
{ RET(ICON); }
|
||||
|
||||
(D+ E FS?) | (D* "." D+ E? FS?) | (D+ "." D* E? FS?)
|
||||
{ RET(FCON); }
|
||||
|
||||
(["] (ESC|any\[\n\\"])* ["])
|
||||
{ RET(SCON); }
|
||||
|
||||
"..." { RET(ELLIPSIS); }
|
||||
">>=" { RET(RSHIFTEQ); }
|
||||
"<<=" { RET(LSHIFTEQ); }
|
||||
"+=" { RET(ADDEQ); }
|
||||
"-=" { RET(SUBEQ); }
|
||||
"*=" { RET(MULEQ); }
|
||||
"/=" { RET(DIVEQ); }
|
||||
"%=" { RET(MODEQ); }
|
||||
"&=" { RET(ANDEQ); }
|
||||
"^=" { RET(XOREQ); }
|
||||
"|=" { RET(OREQ); }
|
||||
">>" { RET(RSHIFT); }
|
||||
"<<" { RET(LSHIFT); }
|
||||
"++" { RET(INCR); }
|
||||
"--" { RET(DECR); }
|
||||
"->" { RET(DEREF); }
|
||||
"&&" { RET(ANDAND); }
|
||||
"||" { RET(OROR); }
|
||||
"<=" { RET(LEQ); }
|
||||
">=" { RET(GEQ); }
|
||||
"==" { RET(EQL); }
|
||||
"!=" { RET(NEQ); }
|
||||
";" { RET(';'); }
|
||||
"{" { RET('{'); }
|
||||
"}" { RET('}'); }
|
||||
"," { RET(','); }
|
||||
":" { RET(':'); }
|
||||
"=" { RET('='); }
|
||||
"(" { RET('('); }
|
||||
")" { RET(')'); }
|
||||
"[" { RET('['); }
|
||||
"]" { RET(']'); }
|
||||
"." { RET('.'); }
|
||||
"&" { RET('&'); }
|
||||
"!" { RET('!'); }
|
||||
"~" { RET('~'); }
|
||||
"-" { RET('-'); }
|
||||
"+" { RET('+'); }
|
||||
"*" { RET('*'); }
|
||||
"/" { RET('/'); }
|
||||
"%" { RET('%'); }
|
||||
"<" { RET('<'); }
|
||||
">" { RET('>'); }
|
||||
"^" { RET('^'); }
|
||||
"|" { RET('|'); }
|
||||
"?" { RET('?'); }
|
||||
|
||||
|
||||
[ \t\v\f]+ { goto std; }
|
||||
|
||||
"\n"
|
||||
{
|
||||
if(cursor == s->eof) RET(EOI);
|
||||
s->pos = cursor; s->line++;
|
||||
goto std;
|
||||
}
|
||||
|
||||
any
|
||||
{
|
||||
printf("unexpected character: %c\n", *s->tok);
|
||||
goto std;
|
||||
}
|
||||
*/
|
||||
|
||||
comment:
|
||||
/*!re2c
|
||||
"*/" { goto std; }
|
||||
"\n"
|
||||
{
|
||||
if(cursor == s->eof) RET(EOI);
|
||||
s->tok = s->pos = cursor; s->line++;
|
||||
goto comment;
|
||||
}
|
||||
any { goto comment; }
|
||||
*/
|
||||
}
|
||||
|
||||
main(){
|
||||
Scanner in;
|
||||
int t;
|
||||
memset((char*) &in, 0, sizeof(in));
|
||||
in.fd = 0;
|
||||
while((t = scan(&in)) != EOI){
|
||||
/*
|
||||
printf("%d\t%.*s\n", t, in.cur - in.tok, in.tok);
|
||||
printf("%d\n", t);
|
||||
*/
|
||||
}
|
||||
close(in.fd);
|
||||
}
|
||||
123
samples/C++/crypter.cpp
Normal file
@@ -0,0 +1,123 @@
|
||||
// Copyright (c) 2009-2012 The Bitcoin Developers
|
||||
// Distributed under the MIT/X11 software license, see the accompanying
|
||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
// Source - https://github.com/Bradfrogger/Marvelous/blob/master/src/crypter.cpp
|
||||
|
||||
#include <openssl/aes.h>
|
||||
#include <openssl/evp.h>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
#ifdef WIN32
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
#include "crypter.h"
|
||||
|
||||
bool CCrypter::SetKeyFromPassphrase(const SecureString& strKeyData, const std::vector<unsigned char>& chSalt, const unsigned int nRounds, const unsigned int nDerivationMethod)
|
||||
{
|
||||
if (nRounds < 1 || chSalt.size() != WALLET_CRYPTO_SALT_SIZE)
|
||||
return false;
|
||||
|
||||
int i = 0;
|
||||
if (nDerivationMethod == 0)
|
||||
i = EVP_BytesToKey(EVP_aes_256_cbc(), EVP_sha512(), &chSalt[0],
|
||||
(unsigned char *)&strKeyData[0], strKeyData.size(), nRounds, chKey, chIV);
|
||||
|
||||
if (i != (int)WALLET_CRYPTO_KEY_SIZE)
|
||||
{
|
||||
OPENSSL_cleanse(chKey, sizeof(chKey));
|
||||
OPENSSL_cleanse(chIV, sizeof(chIV));
|
||||
return false;
|
||||
}
|
||||
|
||||
fKeySet = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CCrypter::SetKey(const CKeyingMaterial& chNewKey, const std::vector<unsigned char>& chNewIV)
|
||||
{
|
||||
if (chNewKey.size() != WALLET_CRYPTO_KEY_SIZE || chNewIV.size() != WALLET_CRYPTO_KEY_SIZE)
|
||||
return false;
|
||||
|
||||
memcpy(&chKey[0], &chNewKey[0], sizeof chKey);
|
||||
memcpy(&chIV[0], &chNewIV[0], sizeof chIV);
|
||||
|
||||
fKeySet = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CCrypter::Encrypt(const CKeyingMaterial& vchPlaintext, std::vector<unsigned char> &vchCiphertext)
|
||||
{
|
||||
if (!fKeySet)
|
||||
return false;
|
||||
|
||||
// max ciphertext len for a n bytes of plaintext is
|
||||
// n + AES_BLOCK_SIZE - 1 bytes
|
||||
int nLen = vchPlaintext.size();
|
||||
int nCLen = nLen + AES_BLOCK_SIZE, nFLen = 0;
|
||||
vchCiphertext = std::vector<unsigned char> (nCLen);
|
||||
|
||||
EVP_CIPHER_CTX ctx;
|
||||
|
||||
bool fOk = true;
|
||||
|
||||
EVP_CIPHER_CTX_init(&ctx);
|
||||
if (fOk) fOk = EVP_EncryptInit_ex(&ctx, EVP_aes_256_cbc(), NULL, chKey, chIV);
|
||||
if (fOk) fOk = EVP_EncryptUpdate(&ctx, &vchCiphertext[0], &nCLen, &vchPlaintext[0], nLen);
|
||||
if (fOk) fOk = EVP_EncryptFinal_ex(&ctx, (&vchCiphertext[0])+nCLen, &nFLen);
|
||||
EVP_CIPHER_CTX_cleanup(&ctx);
|
||||
|
||||
if (!fOk) return false;
|
||||
|
||||
vchCiphertext.resize(nCLen + nFLen);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool CCrypter::Decrypt(const std::vector<unsigned char>& vchCiphertext, CKeyingMaterial& vchPlaintext)
|
||||
{
|
||||
if (!fKeySet)
|
||||
return false;
|
||||
|
||||
// plaintext will always be equal to or lesser than length of ciphertext
|
||||
int nLen = vchCiphertext.size();
|
||||
int nPLen = nLen, nFLen = 0;
|
||||
|
||||
vchPlaintext = CKeyingMaterial(nPLen);
|
||||
|
||||
EVP_CIPHER_CTX ctx;
|
||||
|
||||
bool fOk = true;
|
||||
|
||||
EVP_CIPHER_CTX_init(&ctx);
|
||||
if (fOk) fOk = EVP_DecryptInit_ex(&ctx, EVP_aes_256_cbc(), NULL, chKey, chIV);
|
||||
if (fOk) fOk = EVP_DecryptUpdate(&ctx, &vchPlaintext[0], &nPLen, &vchCiphertext[0], nLen);
|
||||
if (fOk) fOk = EVP_DecryptFinal_ex(&ctx, (&vchPlaintext[0])+nPLen, &nFLen);
|
||||
EVP_CIPHER_CTX_cleanup(&ctx);
|
||||
|
||||
if (!fOk) return false;
|
||||
|
||||
vchPlaintext.resize(nPLen + nFLen);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool EncryptSecret(const CKeyingMaterial& vMasterKey, const CKeyingMaterial &vchPlaintext, const uint256& nIV, std::vector<unsigned char> &vchCiphertext)
|
||||
{
|
||||
CCrypter cKeyCrypter;
|
||||
std::vector<unsigned char> chIV(WALLET_CRYPTO_KEY_SIZE);
|
||||
memcpy(&chIV[0], &nIV, WALLET_CRYPTO_KEY_SIZE);
|
||||
if(!cKeyCrypter.SetKey(vMasterKey, chIV))
|
||||
return false;
|
||||
return cKeyCrypter.Encrypt(*((const CKeyingMaterial*)&vchPlaintext), vchCiphertext);
|
||||
}
|
||||
|
||||
bool DecryptSecret(const CKeyingMaterial& vMasterKey, const std::vector<unsigned char>& vchCiphertext, const uint256& nIV, CKeyingMaterial& vchPlaintext)
|
||||
{
|
||||
CCrypter cKeyCrypter;
|
||||
std::vector<unsigned char> chIV(WALLET_CRYPTO_KEY_SIZE);
|
||||
memcpy(&chIV[0], &nIV, WALLET_CRYPTO_KEY_SIZE);
|
||||
if(!cKeyCrypter.SetKey(vMasterKey, chIV))
|
||||
return false;
|
||||
return cKeyCrypter.Decrypt(vchCiphertext, *((CKeyingMaterial*)&vchPlaintext));
|
||||
}
|
||||
63
samples/C++/cvsignore.re
Normal file
@@ -0,0 +1,63 @@
|
||||
|
||||
#define YYFILL(n) if (cursor >= limit) break;
|
||||
#define YYCTYPE char
|
||||
#define YYCURSOR cursor
|
||||
#define YYLIMIT limit
|
||||
#define YYMARKER marker
|
||||
|
||||
/*!re2c
|
||||
any = (.|"\n");
|
||||
value = (":" (.\"$")+)?;
|
||||
cvsdat = "Date";
|
||||
cvsid = "Id";
|
||||
cvslog = "Log";
|
||||
cvsrev = "Revision";
|
||||
cvssrc = "Source";
|
||||
*/
|
||||
|
||||
#define APPEND(text) \
|
||||
append(output, outsize, text, sizeof(text) - sizeof(YYCTYPE))
|
||||
|
||||
inline void append(YYCTYPE *output, size_t & outsize, const YYCTYPE * text, size_t len)
|
||||
{
|
||||
memcpy(output + outsize, text, len);
|
||||
outsize += (len / sizeof(YYCTYPE));
|
||||
}
|
||||
|
||||
void scan(YYCTYPE *pText, size_t *pSize, int *pbChanged)
|
||||
{
|
||||
// rule
|
||||
// scan lines
|
||||
// find $ in lines
|
||||
// compact $<keyword>: .. $ to $<keyword>$
|
||||
|
||||
YYCTYPE *output;
|
||||
const YYCTYPE *cursor, *limit, *marker;
|
||||
|
||||
cursor = marker = output = *pText;
|
||||
|
||||
size_t insize = *pSize;
|
||||
size_t outsize = 0;
|
||||
|
||||
limit = cursor + insize;
|
||||
|
||||
while(1) {
|
||||
loop:
|
||||
/*!re2c
|
||||
|
||||
"$" cvsdat value "$" { APPEND(L"$" L"Date$"); goto loop; }
|
||||
"$" cvsid value "$" { APPEND(L"$" L"Id$"); goto loop; }
|
||||
"$" cvslog value "$" { APPEND(L"$" L"Log$"); goto loop; }
|
||||
"$" cvsrev value "$" { APPEND(L"$" L"Revision$"); goto loop; }
|
||||
"$" cvssrc value "$" { APPEND(L"$" L"Source$"); goto loop; }
|
||||
any { output[outsize++] = cursor[-1]; if (cursor >= limit) break; goto loop; }
|
||||
|
||||
*/
|
||||
}
|
||||
output[outsize] = '\0';
|
||||
|
||||
// set the new size
|
||||
*pSize = outsize;
|
||||
|
||||
*pbChanged = (insize == outsize) ? 0 : 1;
|
||||
}
|
||||
109
samples/C++/graphics.cpp
Normal file
@@ -0,0 +1,109 @@
|
||||
// License - https://github.com/TurtleP/Flask/blob/master/LICENSE
|
||||
|
||||
#include <shared.h>
|
||||
|
||||
int currentR = 0xFF;
|
||||
int currentG = 0xFF;
|
||||
int currentB = 0xFF;
|
||||
int currentA = 0xFF;
|
||||
|
||||
int currentScreen = GFX_BOTTOM;
|
||||
|
||||
float transX = 0;
|
||||
float transY = 0;
|
||||
bool isPushed = false;
|
||||
|
||||
u32 getCurrentColor()
|
||||
{
|
||||
return RGBA8(currentR, currentG, currentB, currentA);
|
||||
}
|
||||
|
||||
void setColor(int r, int g, int b)
|
||||
{
|
||||
currentR = r;
|
||||
currentG = g;
|
||||
currentB = b;
|
||||
currentA = currentA;
|
||||
}
|
||||
|
||||
void setColor(int r, int g, int b, int a)
|
||||
{
|
||||
currentR = r;
|
||||
currentG = g;
|
||||
currentB = b;
|
||||
currentA = a;
|
||||
}
|
||||
|
||||
void setScreen(int screen)
|
||||
{
|
||||
currentScreen = screen;
|
||||
}
|
||||
|
||||
int getCurrentScreen()
|
||||
{
|
||||
return currentScreen;
|
||||
}
|
||||
|
||||
void screenShot() //for showing stuff being done
|
||||
{
|
||||
FILE * topScreen = fopen("sdmc:/framebuffer_top.rgb", "w+");
|
||||
|
||||
fwrite(gfxGetFramebuffer(GFX_TOP, GFX_LEFT, NULL, NULL), 288000, 1, topScreen);
|
||||
|
||||
fclose(topScreen);
|
||||
|
||||
FILE * bottomScreen = fopen("sdmc:/framebuffer_bottom.rgb", "w+");;
|
||||
|
||||
fwrite(gfxGetFramebuffer(GFX_BOTTOM, GFX_LEFT, NULL, NULL), 230400, 1, bottomScreen);
|
||||
|
||||
fclose(bottomScreen);
|
||||
}
|
||||
|
||||
void translateCoords(float * x, float * y) {
|
||||
if (isPushed)
|
||||
{
|
||||
*x += transX;
|
||||
*y += transY;
|
||||
}
|
||||
}
|
||||
|
||||
void translate(float dx, float dy)
|
||||
{
|
||||
if (sf2d_get_current_screen() == getCurrentScreen())
|
||||
{
|
||||
transX = transX + dx;
|
||||
transY = transY + dy;
|
||||
}
|
||||
}
|
||||
|
||||
void push()
|
||||
{
|
||||
if (sf2d_get_current_screen() == getCurrentScreen())
|
||||
{
|
||||
isPushed = true;
|
||||
}
|
||||
}
|
||||
|
||||
void pop()
|
||||
{
|
||||
if (sf2d_get_current_screen() == getCurrentScreen())
|
||||
{
|
||||
transX = 0;
|
||||
transY = 0;
|
||||
isPushed = false;
|
||||
}
|
||||
}
|
||||
|
||||
void setScissor(u32 x, u32 y, u32 width, u32 height)
|
||||
{
|
||||
if (sf2d_get_current_screen() == getCurrentScreen())
|
||||
{
|
||||
GPU_SCISSORMODE mode = GPU_SCISSOR_NORMAL;
|
||||
|
||||
if (!x && !y && !width && !height) {
|
||||
mode = GPU_SCISSOR_DISABLE;
|
||||
}
|
||||
|
||||
sf2d_set_scissor_test(mode, x, y, width, height);
|
||||
}
|
||||
}
|
||||
920
samples/C++/json_reader.cpp
Normal file
@@ -0,0 +1,920 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
/*
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
// Source - https://github.com/Ij888/ApacheCordovaRecipes/blob/6e8a2c1d9de7302f74bc3dbac54a021f0499bbb3/jqmsandbox/plugins/cordova-plugin-globalization/src/blackberry10/native/public/json_reader.cpp
|
||||
|
||||
#include <json/reader.h>
|
||||
#include <json/value.h>
|
||||
#include <utility>
|
||||
#include <cstdio>
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <iostream>
|
||||
#include <stdexcept>
|
||||
|
||||
#if _MSC_VER >= 1400 // VC++ 8.0
|
||||
#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
|
||||
#endif
|
||||
|
||||
namespace Json {
|
||||
|
||||
// QNX is strict about declaring C symbols in the std namespace.
|
||||
#ifdef __QNXNTO__
|
||||
using std::memcpy;
|
||||
using std::sprintf;
|
||||
using std::sscanf;
|
||||
#endif
|
||||
|
||||
// Implementation of class Features
|
||||
// ////////////////////////////////
|
||||
|
||||
Features::Features()
|
||||
: allowComments_( true )
|
||||
, strictRoot_( false )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
Features
|
||||
Features::all()
|
||||
{
|
||||
return Features();
|
||||
}
|
||||
|
||||
|
||||
Features
|
||||
Features::strictMode()
|
||||
{
|
||||
Features features;
|
||||
features.allowComments_ = false;
|
||||
features.strictRoot_ = true;
|
||||
return features;
|
||||
}
|
||||
|
||||
// Implementation of class Reader
|
||||
// ////////////////////////////////
|
||||
|
||||
|
||||
static inline bool
|
||||
in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 )
|
||||
{
|
||||
return c == c1 || c == c2 || c == c3 || c == c4;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 )
|
||||
{
|
||||
return c == c1 || c == c2 || c == c3 || c == c4 || c == c5;
|
||||
}
|
||||
|
||||
|
||||
static bool
|
||||
containsNewLine( Reader::Location begin,
|
||||
Reader::Location end )
|
||||
{
|
||||
for ( ;begin < end; ++begin )
|
||||
if ( *begin == '\n' || *begin == '\r' )
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
static std::string codePointToUTF8(unsigned int cp)
|
||||
{
|
||||
std::string result;
|
||||
|
||||
// based on description from http://en.wikipedia.org/wiki/UTF-8
|
||||
|
||||
if (cp <= 0x7f)
|
||||
{
|
||||
result.resize(1);
|
||||
result[0] = static_cast<char>(cp);
|
||||
}
|
||||
else if (cp <= 0x7FF)
|
||||
{
|
||||
result.resize(2);
|
||||
result[1] = static_cast<char>(0x80 | (0x3f & cp));
|
||||
result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
|
||||
}
|
||||
else if (cp <= 0xFFFF)
|
||||
{
|
||||
result.resize(3);
|
||||
result[2] = static_cast<char>(0x80 | (0x3f & cp));
|
||||
result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
|
||||
result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
|
||||
}
|
||||
else if (cp <= 0x10FFFF)
|
||||
{
|
||||
result.resize(4);
|
||||
result[3] = static_cast<char>(0x80 | (0x3f & cp));
|
||||
result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
|
||||
result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
|
||||
result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
// Class Reader
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
Reader::Reader()
|
||||
: features_( Features::all() )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
Reader::Reader( const Features &features )
|
||||
: features_( features )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::parse( const std::string &document,
|
||||
Value &root,
|
||||
bool collectComments )
|
||||
{
|
||||
document_ = document;
|
||||
const char *begin = document_.c_str();
|
||||
const char *end = begin + document_.length();
|
||||
return parse( begin, end, root, collectComments );
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::parse( std::istream& sin,
|
||||
Value &root,
|
||||
bool collectComments )
|
||||
{
|
||||
//std::istream_iterator<char> begin(sin);
|
||||
//std::istream_iterator<char> end;
|
||||
// Those would allow streamed input from a file, if parse() were a
|
||||
// template function.
|
||||
|
||||
// Since std::string is reference-counted, this at least does not
|
||||
// create an extra copy.
|
||||
std::string doc;
|
||||
std::getline(sin, doc, (char)EOF);
|
||||
return parse( doc, root, collectComments );
|
||||
}
|
||||
|
||||
bool
|
||||
Reader::parse( const char *beginDoc, const char *endDoc,
|
||||
Value &root,
|
||||
bool collectComments )
|
||||
{
|
||||
if ( !features_.allowComments_ )
|
||||
{
|
||||
collectComments = false;
|
||||
}
|
||||
|
||||
begin_ = beginDoc;
|
||||
end_ = endDoc;
|
||||
collectComments_ = collectComments;
|
||||
current_ = begin_;
|
||||
lastValueEnd_ = 0;
|
||||
lastValue_ = 0;
|
||||
commentsBefore_ = "";
|
||||
errors_.clear();
|
||||
while ( !nodes_.empty() )
|
||||
nodes_.pop();
|
||||
nodes_.push( &root );
|
||||
|
||||
bool successful = readValue();
|
||||
Token token;
|
||||
skipCommentTokens( token );
|
||||
if ( collectComments_ && !commentsBefore_.empty() )
|
||||
root.setComment( commentsBefore_, commentAfter );
|
||||
if ( features_.strictRoot_ )
|
||||
{
|
||||
if ( !root.isArray() && !root.isObject() )
|
||||
{
|
||||
// Set error location to start of doc, ideally should be first token found in doc
|
||||
token.type_ = tokenError;
|
||||
token.start_ = beginDoc;
|
||||
token.end_ = endDoc;
|
||||
addError( "A valid JSON document must be either an array or an object value.",
|
||||
token );
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return successful;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readValue()
|
||||
{
|
||||
Token token;
|
||||
skipCommentTokens( token );
|
||||
bool successful = true;
|
||||
|
||||
if ( collectComments_ && !commentsBefore_.empty() )
|
||||
{
|
||||
currentValue().setComment( commentsBefore_, commentBefore );
|
||||
commentsBefore_ = "";
|
||||
}
|
||||
|
||||
|
||||
switch ( token.type_ )
|
||||
{
|
||||
case tokenObjectBegin:
|
||||
successful = readObject( token );
|
||||
break;
|
||||
case tokenArrayBegin:
|
||||
successful = readArray( token );
|
||||
break;
|
||||
case tokenNumber:
|
||||
successful = decodeNumber( token );
|
||||
break;
|
||||
case tokenString:
|
||||
successful = decodeString( token );
|
||||
break;
|
||||
case tokenTrue:
|
||||
currentValue() = true;
|
||||
break;
|
||||
case tokenFalse:
|
||||
currentValue() = false;
|
||||
break;
|
||||
case tokenNull:
|
||||
currentValue() = Value();
|
||||
break;
|
||||
default:
|
||||
return addError( "Syntax error: value, object or array expected.", token );
|
||||
}
|
||||
|
||||
if ( collectComments_ )
|
||||
{
|
||||
lastValueEnd_ = current_;
|
||||
lastValue_ = &currentValue();
|
||||
}
|
||||
|
||||
return successful;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
Reader::skipCommentTokens( Token &token )
|
||||
{
|
||||
if ( features_.allowComments_ )
|
||||
{
|
||||
do
|
||||
{
|
||||
readToken( token );
|
||||
}
|
||||
while ( token.type_ == tokenComment );
|
||||
}
|
||||
else
|
||||
{
|
||||
readToken( token );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::expectToken( TokenType type, Token &token, const char *message )
|
||||
{
|
||||
readToken( token );
|
||||
if ( token.type_ != type )
|
||||
return addError( message, token );
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readToken( Token &token )
|
||||
{
|
||||
skipSpaces();
|
||||
token.start_ = current_;
|
||||
Char c = getNextChar();
|
||||
bool ok = true;
|
||||
switch ( c )
|
||||
{
|
||||
case '{':
|
||||
token.type_ = tokenObjectBegin;
|
||||
break;
|
||||
case '}':
|
||||
token.type_ = tokenObjectEnd;
|
||||
break;
|
||||
case '[':
|
||||
token.type_ = tokenArrayBegin;
|
||||
break;
|
||||
case ']':
|
||||
token.type_ = tokenArrayEnd;
|
||||
break;
|
||||
case '"':
|
||||
token.type_ = tokenString;
|
||||
ok = readString();
|
||||
break;
|
||||
case '/':
|
||||
token.type_ = tokenComment;
|
||||
ok = readComment();
|
||||
break;
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
case '-':
|
||||
token.type_ = tokenNumber;
|
||||
readNumber();
|
||||
break;
|
||||
case 't':
|
||||
token.type_ = tokenTrue;
|
||||
ok = match( "rue", 3 );
|
||||
break;
|
||||
case 'f':
|
||||
token.type_ = tokenFalse;
|
||||
ok = match( "alse", 4 );
|
||||
break;
|
||||
case 'n':
|
||||
token.type_ = tokenNull;
|
||||
ok = match( "ull", 3 );
|
||||
break;
|
||||
case ',':
|
||||
token.type_ = tokenArraySeparator;
|
||||
break;
|
||||
case ':':
|
||||
token.type_ = tokenMemberSeparator;
|
||||
break;
|
||||
case 0:
|
||||
token.type_ = tokenEndOfStream;
|
||||
break;
|
||||
default:
|
||||
ok = false;
|
||||
break;
|
||||
}
|
||||
if ( !ok )
|
||||
token.type_ = tokenError;
|
||||
token.end_ = current_;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
Reader::skipSpaces()
|
||||
{
|
||||
while ( current_ != end_ )
|
||||
{
|
||||
Char c = *current_;
|
||||
if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' )
|
||||
++current_;
|
||||
else
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::match( Location pattern,
|
||||
int patternLength )
|
||||
{
|
||||
if ( end_ - current_ < patternLength )
|
||||
return false;
|
||||
int index = patternLength;
|
||||
while ( index-- )
|
||||
if ( current_[index] != pattern[index] )
|
||||
return false;
|
||||
current_ += patternLength;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readComment()
|
||||
{
|
||||
Location commentBegin = current_ - 1;
|
||||
Char c = getNextChar();
|
||||
bool successful = false;
|
||||
if ( c == '*' )
|
||||
successful = readCStyleComment();
|
||||
else if ( c == '/' )
|
||||
successful = readCppStyleComment();
|
||||
if ( !successful )
|
||||
return false;
|
||||
|
||||
if ( collectComments_ )
|
||||
{
|
||||
CommentPlacement placement = commentBefore;
|
||||
if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) )
|
||||
{
|
||||
if ( c != '*' || !containsNewLine( commentBegin, current_ ) )
|
||||
placement = commentAfterOnSameLine;
|
||||
}
|
||||
|
||||
addComment( commentBegin, current_, placement );
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
Reader::addComment( Location begin,
|
||||
Location end,
|
||||
CommentPlacement placement )
|
||||
{
|
||||
assert( collectComments_ );
|
||||
if ( placement == commentAfterOnSameLine )
|
||||
{
|
||||
assert( lastValue_ != 0 );
|
||||
lastValue_->setComment( std::string( begin, end ), placement );
|
||||
}
|
||||
else
|
||||
{
|
||||
if ( !commentsBefore_.empty() )
|
||||
commentsBefore_ += "\n";
|
||||
commentsBefore_ += std::string( begin, end );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readCStyleComment()
|
||||
{
|
||||
while ( current_ != end_ )
|
||||
{
|
||||
Char c = getNextChar();
|
||||
if ( c == '*' && *current_ == '/' )
|
||||
break;
|
||||
}
|
||||
return getNextChar() == '/';
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readCppStyleComment()
|
||||
{
|
||||
while ( current_ != end_ )
|
||||
{
|
||||
Char c = getNextChar();
|
||||
if ( c == '\r' || c == '\n' )
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
Reader::readNumber()
|
||||
{
|
||||
while ( current_ != end_ )
|
||||
{
|
||||
if ( !(*current_ >= '0' && *current_ <= '9') &&
|
||||
!in( *current_, '.', 'e', 'E', '+', '-' ) )
|
||||
break;
|
||||
++current_;
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
Reader::readString()
|
||||
{
|
||||
Char c = 0;
|
||||
while ( current_ != end_ )
|
||||
{
|
||||
c = getNextChar();
|
||||
if ( c == '\\' )
|
||||
getNextChar();
|
||||
else if ( c == '"' )
|
||||
break;
|
||||
}
|
||||
return c == '"';
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readObject( Token &tokenStart )
|
||||
{
|
||||
Token tokenName;
|
||||
std::string name;
|
||||
currentValue() = Value( objectValue );
|
||||
while ( readToken( tokenName ) )
|
||||
{
|
||||
bool initialTokenOk = true;
|
||||
while ( tokenName.type_ == tokenComment && initialTokenOk )
|
||||
initialTokenOk = readToken( tokenName );
|
||||
if ( !initialTokenOk )
|
||||
break;
|
||||
if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object
|
||||
return true;
|
||||
if ( tokenName.type_ != tokenString )
|
||||
break;
|
||||
|
||||
name = "";
|
||||
if ( !decodeString( tokenName, name ) )
|
||||
return recoverFromError( tokenObjectEnd );
|
||||
|
||||
Token colon;
|
||||
if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator )
|
||||
{
|
||||
return addErrorAndRecover( "Missing ':' after object member name",
|
||||
colon,
|
||||
tokenObjectEnd );
|
||||
}
|
||||
Value &value = currentValue()[ name ];
|
||||
nodes_.push( &value );
|
||||
bool ok = readValue();
|
||||
nodes_.pop();
|
||||
if ( !ok ) // error already set
|
||||
return recoverFromError( tokenObjectEnd );
|
||||
|
||||
Token comma;
|
||||
if ( !readToken( comma )
|
||||
|| ( comma.type_ != tokenObjectEnd &&
|
||||
comma.type_ != tokenArraySeparator &&
|
||||
comma.type_ != tokenComment ) )
|
||||
{
|
||||
return addErrorAndRecover( "Missing ',' or '}' in object declaration",
|
||||
comma,
|
||||
tokenObjectEnd );
|
||||
}
|
||||
bool finalizeTokenOk = true;
|
||||
while ( comma.type_ == tokenComment &&
|
||||
finalizeTokenOk )
|
||||
finalizeTokenOk = readToken( comma );
|
||||
if ( comma.type_ == tokenObjectEnd )
|
||||
return true;
|
||||
}
|
||||
return addErrorAndRecover( "Missing '}' or object member name",
|
||||
tokenName,
|
||||
tokenObjectEnd );
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::readArray( Token &tokenStart )
|
||||
{
|
||||
currentValue() = Value( arrayValue );
|
||||
skipSpaces();
|
||||
if ( *current_ == ']' ) // empty array
|
||||
{
|
||||
Token endArray;
|
||||
readToken( endArray );
|
||||
return true;
|
||||
}
|
||||
int index = 0;
|
||||
while ( true )
|
||||
{
|
||||
Value &value = currentValue()[ index++ ];
|
||||
nodes_.push( &value );
|
||||
bool ok = readValue();
|
||||
nodes_.pop();
|
||||
if ( !ok ) // error already set
|
||||
return recoverFromError( tokenArrayEnd );
|
||||
|
||||
Token token;
|
||||
// Accept Comment after last item in the array.
|
||||
ok = readToken( token );
|
||||
while ( token.type_ == tokenComment && ok )
|
||||
{
|
||||
ok = readToken( token );
|
||||
}
|
||||
bool badTokenType = ( token.type_ == tokenArraySeparator &&
|
||||
token.type_ == tokenArrayEnd );
|
||||
if ( !ok || badTokenType )
|
||||
{
|
||||
return addErrorAndRecover( "Missing ',' or ']' in array declaration",
|
||||
token,
|
||||
tokenArrayEnd );
|
||||
}
|
||||
if ( token.type_ == tokenArrayEnd )
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::decodeNumber( Token &token )
|
||||
{
|
||||
bool isDouble = false;
|
||||
for ( Location inspect = token.start_; inspect != token.end_; ++inspect )
|
||||
{
|
||||
isDouble = isDouble
|
||||
|| in( *inspect, '.', 'e', 'E', '+' )
|
||||
|| ( *inspect == '-' && inspect != token.start_ );
|
||||
}
|
||||
if ( isDouble )
|
||||
return decodeDouble( token );
|
||||
Location current = token.start_;
|
||||
bool isNegative = *current == '-';
|
||||
if ( isNegative )
|
||||
++current;
|
||||
Value::UInt threshold = (isNegative ? Value::UInt(-Value::minInt)
|
||||
: Value::maxUInt) / 10;
|
||||
Value::UInt value = 0;
|
||||
while ( current < token.end_ )
|
||||
{
|
||||
Char c = *current++;
|
||||
if ( c < '0' || c > '9' )
|
||||
return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
|
||||
if ( value >= threshold )
|
||||
return decodeDouble( token );
|
||||
value = value * 10 + Value::UInt(c - '0');
|
||||
}
|
||||
if ( isNegative )
|
||||
currentValue() = -Value::Int( value );
|
||||
else if ( value <= Value::UInt(Value::maxInt) )
|
||||
currentValue() = Value::Int( value );
|
||||
else
|
||||
currentValue() = value;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::decodeDouble( Token &token )
|
||||
{
|
||||
double value = 0;
|
||||
const int bufferSize = 32;
|
||||
int count;
|
||||
int length = int(token.end_ - token.start_);
|
||||
if ( length <= bufferSize )
|
||||
{
|
||||
Char buffer[bufferSize];
|
||||
memcpy( buffer, token.start_, length );
|
||||
buffer[length] = 0;
|
||||
count = sscanf( buffer, "%lf", &value );
|
||||
}
|
||||
else
|
||||
{
|
||||
std::string buffer( token.start_, token.end_ );
|
||||
count = sscanf( buffer.c_str(), "%lf", &value );
|
||||
}
|
||||
|
||||
if ( count != 1 )
|
||||
return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
|
||||
currentValue() = value;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::decodeString( Token &token )
|
||||
{
|
||||
std::string decoded;
|
||||
if ( !decodeString( token, decoded ) )
|
||||
return false;
|
||||
currentValue() = decoded;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::decodeString( Token &token, std::string &decoded )
|
||||
{
|
||||
decoded.reserve( token.end_ - token.start_ - 2 );
|
||||
Location current = token.start_ + 1; // skip '"'
|
||||
Location end = token.end_ - 1; // do not include '"'
|
||||
while ( current != end )
|
||||
{
|
||||
Char c = *current++;
|
||||
if ( c == '"' )
|
||||
break;
|
||||
else if ( c == '\\' )
|
||||
{
|
||||
if ( current == end )
|
||||
return addError( "Empty escape sequence in string", token, current );
|
||||
Char escape = *current++;
|
||||
switch ( escape )
|
||||
{
|
||||
case '"': decoded += '"'; break;
|
||||
case '/': decoded += '/'; break;
|
||||
case '\\': decoded += '\\'; break;
|
||||
case 'b': decoded += '\b'; break;
|
||||
case 'f': decoded += '\f'; break;
|
||||
case 'n': decoded += '\n'; break;
|
||||
case 'r': decoded += '\r'; break;
|
||||
case 't': decoded += '\t'; break;
|
||||
case 'u':
|
||||
{
|
||||
unsigned int unicode;
|
||||
if ( !decodeUnicodeCodePoint( token, current, end, unicode ) )
|
||||
return false;
|
||||
decoded += codePointToUTF8(unicode);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return addError( "Bad escape sequence in string", token, current );
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
decoded += c;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
Reader::decodeUnicodeCodePoint( Token &token,
|
||||
Location &current,
|
||||
Location end,
|
||||
unsigned int &unicode )
|
||||
{
|
||||
|
||||
if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) )
|
||||
return false;
|
||||
if (unicode >= 0xD800 && unicode <= 0xDBFF)
|
||||
{
|
||||
// surrogate pairs
|
||||
if (end - current < 6)
|
||||
return addError( "additional six characters expected to parse unicode surrogate pair.", token, current );
|
||||
unsigned int surrogatePair;
|
||||
if (*(current++) == '\\' && *(current++)== 'u')
|
||||
{
|
||||
if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair ))
|
||||
{
|
||||
unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
|
||||
}
|
||||
else
|
||||
return false;
|
||||
}
|
||||
else
|
||||
return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current );
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
Reader::decodeUnicodeEscapeSequence( Token &token,
|
||||
Location &current,
|
||||
Location end,
|
||||
unsigned int &unicode )
|
||||
{
|
||||
if ( end - current < 4 )
|
||||
return addError( "Bad unicode escape sequence in string: four digits expected.", token, current );
|
||||
unicode = 0;
|
||||
for ( int index =0; index < 4; ++index )
|
||||
{
|
||||
Char c = *current++;
|
||||
unicode *= 16;
|
||||
if ( c >= '0' && c <= '9' )
|
||||
unicode += c - '0';
|
||||
else if ( c >= 'a' && c <= 'f' )
|
||||
unicode += c - 'a' + 10;
|
||||
else if ( c >= 'A' && c <= 'F' )
|
||||
unicode += c - 'A' + 10;
|
||||
else
|
||||
return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current );
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::addError( const std::string &message,
|
||||
Token &token,
|
||||
Location extra )
|
||||
{
|
||||
ErrorInfo info;
|
||||
info.token_ = token;
|
||||
info.message_ = message;
|
||||
info.extra_ = extra;
|
||||
errors_.push_back( info );
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::recoverFromError( TokenType skipUntilToken )
|
||||
{
|
||||
int errorCount = int(errors_.size());
|
||||
Token skip;
|
||||
while ( true )
|
||||
{
|
||||
if ( !readToken(skip) )
|
||||
errors_.resize( errorCount ); // discard errors caused by recovery
|
||||
if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream )
|
||||
break;
|
||||
}
|
||||
errors_.resize( errorCount );
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
Reader::addErrorAndRecover( const std::string &message,
|
||||
Token &token,
|
||||
TokenType skipUntilToken )
|
||||
{
|
||||
addError( message, token );
|
||||
return recoverFromError( skipUntilToken );
|
||||
}
|
||||
|
||||
|
||||
Value &
|
||||
Reader::currentValue()
|
||||
{
|
||||
return *(nodes_.top());
|
||||
}
|
||||
|
||||
|
||||
Reader::Char
|
||||
Reader::getNextChar()
|
||||
{
|
||||
if ( current_ == end_ )
|
||||
return 0;
|
||||
return *current_++;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
Reader::getLocationLineAndColumn( Location location,
|
||||
int &line,
|
||||
int &column ) const
|
||||
{
|
||||
Location current = begin_;
|
||||
Location lastLineStart = current;
|
||||
line = 0;
|
||||
while ( current < location && current != end_ )
|
||||
{
|
||||
Char c = *current++;
|
||||
if ( c == '\r' )
|
||||
{
|
||||
if ( *current == '\n' )
|
||||
++current;
|
||||
lastLineStart = current;
|
||||
++line;
|
||||
}
|
||||
else if ( c == '\n' )
|
||||
{
|
||||
lastLineStart = current;
|
||||
++line;
|
||||
}
|
||||
}
|
||||
// column & line start at 1
|
||||
column = int(location - lastLineStart) + 1;
|
||||
++line;
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
Reader::getLocationLineAndColumn( Location location ) const
|
||||
{
|
||||
int line, column;
|
||||
getLocationLineAndColumn( location, line, column );
|
||||
char buffer[18+16+16+1];
|
||||
sprintf( buffer, "Line %d, Column %d", line, column );
|
||||
return buffer;
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
Reader::getFormatedErrorMessages() const
|
||||
{
|
||||
std::string formattedMessage;
|
||||
for ( Errors::const_iterator itError = errors_.begin();
|
||||
itError != errors_.end();
|
||||
++itError )
|
||||
{
|
||||
const ErrorInfo &error = *itError;
|
||||
formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n";
|
||||
formattedMessage += " " + error.message_ + "\n";
|
||||
if ( error.extra_ )
|
||||
formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n";
|
||||
}
|
||||
return formattedMessage;
|
||||
}
|
||||
|
||||
|
||||
std::istream& operator>>( std::istream &sin, Value &root )
|
||||
{
|
||||
Json::Reader reader;
|
||||
bool ok = reader.parse(sin, root, true);
|
||||
//JSON_ASSERT( ok );
|
||||
if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages());
|
||||
return sin;
|
||||
}
|
||||
|
||||
|
||||
} // namespace Json
|
||||
857
samples/C++/json_writer.cpp
Normal file
857
samples/C++/json_writer.cpp
Normal file
@@ -0,0 +1,857 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
/*
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
// Source - https://github.com/Ij888/ApacheCordovaRecipes/blob/6e8a2c1d9de7302f74bc3dbac54a021f0499bbb3/jqmsandbox/plugins/cordova-plugin-globalization/src/blackberry10/native/public/json_writer.cpp
|
||||
|
||||
#include <json/writer.h>
|
||||
#include <utility>
|
||||
#include <assert.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <iomanip>
|
||||
|
||||
#if _MSC_VER >= 1400 // VC++ 8.0
|
||||
#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
|
||||
#endif
|
||||
|
||||
namespace Json {
|
||||
|
||||
static bool isControlCharacter(char ch)
|
||||
{
|
||||
return ch > 0 && ch <= 0x1F;
|
||||
}
|
||||
|
||||
static bool containsControlCharacter( const char* str )
|
||||
{
|
||||
while ( *str )
|
||||
{
|
||||
if ( isControlCharacter( *(str++) ) )
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
static void uintToString( unsigned int value,
|
||||
char *&current )
|
||||
{
|
||||
*--current = 0;
|
||||
do
|
||||
{
|
||||
*--current = (value % 10) + '0';
|
||||
value /= 10;
|
||||
}
|
||||
while ( value != 0 );
|
||||
}
|
||||
|
||||
std::string valueToString( Int value )
|
||||
{
|
||||
char buffer[32];
|
||||
char *current = buffer + sizeof(buffer);
|
||||
bool isNegative = value < 0;
|
||||
if ( isNegative )
|
||||
value = -value;
|
||||
uintToString( UInt(value), current );
|
||||
if ( isNegative )
|
||||
*--current = '-';
|
||||
assert( current >= buffer );
|
||||
return current;
|
||||
}
|
||||
|
||||
|
||||
std::string valueToString( UInt value )
|
||||
{
|
||||
char buffer[32];
|
||||
char *current = buffer + sizeof(buffer);
|
||||
uintToString( value, current );
|
||||
assert( current >= buffer );
|
||||
return current;
|
||||
}
|
||||
|
||||
std::string valueToString( double value )
|
||||
{
|
||||
char buffer[32];
|
||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning.
|
||||
sprintf_s(buffer, sizeof(buffer), "%#.16g", value);
|
||||
#else
|
||||
sprintf(buffer, "%#.16g", value);
|
||||
#endif
|
||||
char* ch = buffer + strlen(buffer) - 1;
|
||||
if (*ch != '0') return buffer; // nothing to truncate, so save time
|
||||
while(ch > buffer && *ch == '0'){
|
||||
--ch;
|
||||
}
|
||||
char* last_nonzero = ch;
|
||||
while(ch >= buffer){
|
||||
switch(*ch){
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
--ch;
|
||||
continue;
|
||||
case '.':
|
||||
// Truncate zeroes to save bytes in output, but keep one.
|
||||
*(last_nonzero+2) = '\0';
|
||||
return buffer;
|
||||
default:
|
||||
return buffer;
|
||||
}
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
|
||||
|
||||
std::string valueToString( bool value )
|
||||
{
|
||||
return value ? "true" : "false";
|
||||
}
|
||||
|
||||
std::string valueToQuotedString( const char *value )
|
||||
{
|
||||
// Not sure how to handle unicode...
|
||||
if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value ))
|
||||
return std::string("\"") + value + "\"";
|
||||
// We have to walk value and escape any special characters.
|
||||
// Appending to std::string is not efficient, but this should be rare.
|
||||
// (Note: forward slashes are *not* rare, but I am not escaping them.)
|
||||
unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL
|
||||
std::string result;
|
||||
result.reserve(maxsize); // to avoid lots of mallocs
|
||||
result += "\"";
|
||||
for (const char* c=value; *c != 0; ++c)
|
||||
{
|
||||
switch(*c)
|
||||
{
|
||||
case '\"':
|
||||
result += "\\\"";
|
||||
break;
|
||||
case '\\':
|
||||
result += "\\\\";
|
||||
break;
|
||||
case '\b':
|
||||
result += "\\b";
|
||||
break;
|
||||
case '\f':
|
||||
result += "\\f";
|
||||
break;
|
||||
case '\n':
|
||||
result += "\\n";
|
||||
break;
|
||||
case '\r':
|
||||
result += "\\r";
|
||||
break;
|
||||
case '\t':
|
||||
result += "\\t";
|
||||
break;
|
||||
//case '/':
|
||||
// Even though \/ is considered a legal escape in JSON, a bare
|
||||
// slash is also legal, so I see no reason to escape it.
|
||||
// (I hope I am not misunderstanding something.
|
||||
// blep notes: actually escaping \/ may be useful in javascript to avoid </
|
||||
// sequence.
|
||||
// Should add a flag to allow this compatibility mode and prevent this
|
||||
// sequence from occurring.
|
||||
default:
|
||||
if ( isControlCharacter( *c ) )
|
||||
{
|
||||
std::ostringstream oss;
|
||||
oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
|
||||
result += oss.str();
|
||||
}
|
||||
else
|
||||
{
|
||||
result += *c;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
result += "\"";
|
||||
return result;
|
||||
}
|
||||
|
||||
// Class Writer
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
Writer::~Writer()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
// Class FastWriter
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
FastWriter::FastWriter()
|
||||
: yamlCompatiblityEnabled_( false )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
FastWriter::enableYAMLCompatibility()
|
||||
{
|
||||
yamlCompatiblityEnabled_ = true;
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
FastWriter::write( const Value &root )
|
||||
{
|
||||
document_ = "";
|
||||
writeValue( root );
|
||||
document_ += "\n";
|
||||
return document_;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
FastWriter::writeValue( const Value &value )
|
||||
{
|
||||
switch ( value.type() )
|
||||
{
|
||||
case nullValue:
|
||||
document_ += "null";
|
||||
break;
|
||||
case intValue:
|
||||
document_ += valueToString( value.asInt() );
|
||||
break;
|
||||
case uintValue:
|
||||
document_ += valueToString( value.asUInt() );
|
||||
break;
|
||||
case realValue:
|
||||
document_ += valueToString( value.asDouble() );
|
||||
break;
|
||||
case stringValue:
|
||||
document_ += valueToQuotedString( value.asCString() );
|
||||
break;
|
||||
case booleanValue:
|
||||
document_ += valueToString( value.asBool() );
|
||||
break;
|
||||
case arrayValue:
|
||||
{
|
||||
document_ += "[";
|
||||
int size = value.size();
|
||||
for ( int index =0; index < size; ++index )
|
||||
{
|
||||
if ( index > 0 )
|
||||
document_ += ",";
|
||||
writeValue( value[index] );
|
||||
}
|
||||
document_ += "]";
|
||||
}
|
||||
break;
|
||||
case objectValue:
|
||||
{
|
||||
Value::Members members( value.getMemberNames() );
|
||||
document_ += "{";
|
||||
for ( Value::Members::iterator it = members.begin();
|
||||
it != members.end();
|
||||
++it )
|
||||
{
|
||||
const std::string &name = *it;
|
||||
if ( it != members.begin() )
|
||||
document_ += ",";
|
||||
document_ += valueToQuotedString( name.c_str() );
|
||||
document_ += yamlCompatiblityEnabled_ ? ": "
|
||||
: ":";
|
||||
writeValue( value[name] );
|
||||
}
|
||||
document_ += "}";
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Class StyledWriter
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
StyledWriter::StyledWriter()
|
||||
: rightMargin_( 74 )
|
||||
, indentSize_( 3 )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
StyledWriter::write( const Value &root )
|
||||
{
|
||||
document_ = "";
|
||||
addChildValues_ = false;
|
||||
indentString_ = "";
|
||||
writeCommentBeforeValue( root );
|
||||
writeValue( root );
|
||||
writeCommentAfterValueOnSameLine( root );
|
||||
document_ += "\n";
|
||||
return document_;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeValue( const Value &value )
|
||||
{
|
||||
switch ( value.type() )
|
||||
{
|
||||
case nullValue:
|
||||
pushValue( "null" );
|
||||
break;
|
||||
case intValue:
|
||||
pushValue( valueToString( value.asInt() ) );
|
||||
break;
|
||||
case uintValue:
|
||||
pushValue( valueToString( value.asUInt() ) );
|
||||
break;
|
||||
case realValue:
|
||||
pushValue( valueToString( value.asDouble() ) );
|
||||
break;
|
||||
case stringValue:
|
||||
pushValue( valueToQuotedString( value.asCString() ) );
|
||||
break;
|
||||
case booleanValue:
|
||||
pushValue( valueToString( value.asBool() ) );
|
||||
break;
|
||||
case arrayValue:
|
||||
writeArrayValue( value);
|
||||
break;
|
||||
case objectValue:
|
||||
{
|
||||
Value::Members members( value.getMemberNames() );
|
||||
if ( members.empty() )
|
||||
pushValue( "{}" );
|
||||
else
|
||||
{
|
||||
writeWithIndent( "{" );
|
||||
indent();
|
||||
Value::Members::iterator it = members.begin();
|
||||
while ( true )
|
||||
{
|
||||
const std::string &name = *it;
|
||||
const Value &childValue = value[name];
|
||||
writeCommentBeforeValue( childValue );
|
||||
writeWithIndent( valueToQuotedString( name.c_str() ) );
|
||||
document_ += " : ";
|
||||
writeValue( childValue );
|
||||
if ( ++it == members.end() )
|
||||
{
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
break;
|
||||
}
|
||||
document_ += ",";
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
}
|
||||
unindent();
|
||||
writeWithIndent( "}" );
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeArrayValue( const Value &value )
|
||||
{
|
||||
unsigned size = value.size();
|
||||
if ( size == 0 )
|
||||
pushValue( "[]" );
|
||||
else
|
||||
{
|
||||
bool isArrayMultiLine = isMultineArray( value );
|
||||
if ( isArrayMultiLine )
|
||||
{
|
||||
writeWithIndent( "[" );
|
||||
indent();
|
||||
bool hasChildValue = !childValues_.empty();
|
||||
unsigned index =0;
|
||||
while ( true )
|
||||
{
|
||||
const Value &childValue = value[index];
|
||||
writeCommentBeforeValue( childValue );
|
||||
if ( hasChildValue )
|
||||
writeWithIndent( childValues_[index] );
|
||||
else
|
||||
{
|
||||
writeIndent();
|
||||
writeValue( childValue );
|
||||
}
|
||||
if ( ++index == size )
|
||||
{
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
break;
|
||||
}
|
||||
document_ += ",";
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
}
|
||||
unindent();
|
||||
writeWithIndent( "]" );
|
||||
}
|
||||
else // output on a single line
|
||||
{
|
||||
assert( childValues_.size() == size );
|
||||
document_ += "[ ";
|
||||
for ( unsigned index =0; index < size; ++index )
|
||||
{
|
||||
if ( index > 0 )
|
||||
document_ += ", ";
|
||||
document_ += childValues_[index];
|
||||
}
|
||||
document_ += " ]";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
StyledWriter::isMultineArray( const Value &value )
|
||||
{
|
||||
int size = value.size();
|
||||
bool isMultiLine = size*3 >= rightMargin_ ;
|
||||
childValues_.clear();
|
||||
for ( int index =0; index < size && !isMultiLine; ++index )
|
||||
{
|
||||
const Value &childValue = value[index];
|
||||
isMultiLine = isMultiLine ||
|
||||
( (childValue.isArray() || childValue.isObject()) &&
|
||||
childValue.size() > 0 );
|
||||
}
|
||||
if ( !isMultiLine ) // check if line length > max line length
|
||||
{
|
||||
childValues_.reserve( size );
|
||||
addChildValues_ = true;
|
||||
int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]'
|
||||
for ( int index =0; index < size && !isMultiLine; ++index )
|
||||
{
|
||||
writeValue( value[index] );
|
||||
lineLength += int( childValues_[index].length() );
|
||||
isMultiLine = isMultiLine && hasCommentForValue( value[index] );
|
||||
}
|
||||
addChildValues_ = false;
|
||||
isMultiLine = isMultiLine || lineLength >= rightMargin_;
|
||||
}
|
||||
return isMultiLine;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::pushValue( const std::string &value )
|
||||
{
|
||||
if ( addChildValues_ )
|
||||
childValues_.push_back( value );
|
||||
else
|
||||
document_ += value;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeIndent()
|
||||
{
|
||||
if ( !document_.empty() )
|
||||
{
|
||||
char last = document_[document_.length()-1];
|
||||
if ( last == ' ' ) // already indented
|
||||
return;
|
||||
if ( last != '\n' ) // Comments may add new-line
|
||||
document_ += '\n';
|
||||
}
|
||||
document_ += indentString_;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeWithIndent( const std::string &value )
|
||||
{
|
||||
writeIndent();
|
||||
document_ += value;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::indent()
|
||||
{
|
||||
indentString_ += std::string( indentSize_, ' ' );
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::unindent()
|
||||
{
|
||||
assert( int(indentString_.size()) >= indentSize_ );
|
||||
indentString_.resize( indentString_.size() - indentSize_ );
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeCommentBeforeValue( const Value &root )
|
||||
{
|
||||
if ( !root.hasComment( commentBefore ) )
|
||||
return;
|
||||
document_ += normalizeEOL( root.getComment( commentBefore ) );
|
||||
document_ += "\n";
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledWriter::writeCommentAfterValueOnSameLine( const Value &root )
|
||||
{
|
||||
if ( root.hasComment( commentAfterOnSameLine ) )
|
||||
document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) );
|
||||
|
||||
if ( root.hasComment( commentAfter ) )
|
||||
{
|
||||
document_ += "\n";
|
||||
document_ += normalizeEOL( root.getComment( commentAfter ) );
|
||||
document_ += "\n";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
StyledWriter::hasCommentForValue( const Value &value )
|
||||
{
|
||||
return value.hasComment( commentBefore )
|
||||
|| value.hasComment( commentAfterOnSameLine )
|
||||
|| value.hasComment( commentAfter );
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
StyledWriter::normalizeEOL( const std::string &text )
|
||||
{
|
||||
std::string normalized;
|
||||
normalized.reserve( text.length() );
|
||||
const char *begin = text.c_str();
|
||||
const char *end = begin + text.length();
|
||||
const char *current = begin;
|
||||
while ( current != end )
|
||||
{
|
||||
char c = *current++;
|
||||
if ( c == '\r' ) // mac or dos EOL
|
||||
{
|
||||
if ( *current == '\n' ) // convert dos EOL
|
||||
++current;
|
||||
normalized += '\n';
|
||||
}
|
||||
else // handle unix EOL & other char
|
||||
normalized += c;
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
|
||||
// Class StyledStreamWriter
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
StyledStreamWriter::StyledStreamWriter( std::string indentation )
|
||||
: document_(NULL)
|
||||
, rightMargin_( 74 )
|
||||
, indentation_( indentation )
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::write( std::ostream &out, const Value &root )
|
||||
{
|
||||
document_ = &out;
|
||||
addChildValues_ = false;
|
||||
indentString_ = "";
|
||||
writeCommentBeforeValue( root );
|
||||
writeValue( root );
|
||||
writeCommentAfterValueOnSameLine( root );
|
||||
*document_ << "\n";
|
||||
document_ = NULL; // Forget the stream, for safety.
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeValue( const Value &value )
|
||||
{
|
||||
switch ( value.type() )
|
||||
{
|
||||
case nullValue:
|
||||
pushValue( "null" );
|
||||
break;
|
||||
case intValue:
|
||||
pushValue( valueToString( value.asInt() ) );
|
||||
break;
|
||||
case uintValue:
|
||||
pushValue( valueToString( value.asUInt() ) );
|
||||
break;
|
||||
case realValue:
|
||||
pushValue( valueToString( value.asDouble() ) );
|
||||
break;
|
||||
case stringValue:
|
||||
pushValue( valueToQuotedString( value.asCString() ) );
|
||||
break;
|
||||
case booleanValue:
|
||||
pushValue( valueToString( value.asBool() ) );
|
||||
break;
|
||||
case arrayValue:
|
||||
writeArrayValue( value);
|
||||
break;
|
||||
case objectValue:
|
||||
{
|
||||
Value::Members members( value.getMemberNames() );
|
||||
if ( members.empty() )
|
||||
pushValue( "{}" );
|
||||
else
|
||||
{
|
||||
writeWithIndent( "{" );
|
||||
indent();
|
||||
Value::Members::iterator it = members.begin();
|
||||
while ( true )
|
||||
{
|
||||
const std::string &name = *it;
|
||||
const Value &childValue = value[name];
|
||||
writeCommentBeforeValue( childValue );
|
||||
writeWithIndent( valueToQuotedString( name.c_str() ) );
|
||||
*document_ << " : ";
|
||||
writeValue( childValue );
|
||||
if ( ++it == members.end() )
|
||||
{
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
break;
|
||||
}
|
||||
*document_ << ",";
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
}
|
||||
unindent();
|
||||
writeWithIndent( "}" );
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeArrayValue( const Value &value )
|
||||
{
|
||||
unsigned size = value.size();
|
||||
if ( size == 0 )
|
||||
pushValue( "[]" );
|
||||
else
|
||||
{
|
||||
bool isArrayMultiLine = isMultineArray( value );
|
||||
if ( isArrayMultiLine )
|
||||
{
|
||||
writeWithIndent( "[" );
|
||||
indent();
|
||||
bool hasChildValue = !childValues_.empty();
|
||||
unsigned index =0;
|
||||
while ( true )
|
||||
{
|
||||
const Value &childValue = value[index];
|
||||
writeCommentBeforeValue( childValue );
|
||||
if ( hasChildValue )
|
||||
writeWithIndent( childValues_[index] );
|
||||
else
|
||||
{
|
||||
writeIndent();
|
||||
writeValue( childValue );
|
||||
}
|
||||
if ( ++index == size )
|
||||
{
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
break;
|
||||
}
|
||||
*document_ << ",";
|
||||
writeCommentAfterValueOnSameLine( childValue );
|
||||
}
|
||||
unindent();
|
||||
writeWithIndent( "]" );
|
||||
}
|
||||
else // output on a single line
|
||||
{
|
||||
assert( childValues_.size() == size );
|
||||
*document_ << "[ ";
|
||||
for ( unsigned index =0; index < size; ++index )
|
||||
{
|
||||
if ( index > 0 )
|
||||
*document_ << ", ";
|
||||
*document_ << childValues_[index];
|
||||
}
|
||||
*document_ << " ]";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
StyledStreamWriter::isMultineArray( const Value &value )
|
||||
{
|
||||
int size = value.size();
|
||||
bool isMultiLine = size*3 >= rightMargin_ ;
|
||||
childValues_.clear();
|
||||
for ( int index =0; index < size && !isMultiLine; ++index )
|
||||
{
|
||||
const Value &childValue = value[index];
|
||||
isMultiLine = isMultiLine ||
|
||||
( (childValue.isArray() || childValue.isObject()) &&
|
||||
childValue.size() > 0 );
|
||||
}
|
||||
if ( !isMultiLine ) // check if line length > max line length
|
||||
{
|
||||
childValues_.reserve( size );
|
||||
addChildValues_ = true;
|
||||
int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]'
|
||||
for ( int index =0; index < size && !isMultiLine; ++index )
|
||||
{
|
||||
writeValue( value[index] );
|
||||
lineLength += int( childValues_[index].length() );
|
||||
isMultiLine = isMultiLine && hasCommentForValue( value[index] );
|
||||
}
|
||||
addChildValues_ = false;
|
||||
isMultiLine = isMultiLine || lineLength >= rightMargin_;
|
||||
}
|
||||
return isMultiLine;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::pushValue( const std::string &value )
|
||||
{
|
||||
if ( addChildValues_ )
|
||||
childValues_.push_back( value );
|
||||
else
|
||||
*document_ << value;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeIndent()
|
||||
{
|
||||
/*
|
||||
Some comments in this method would have been nice. ;-)
|
||||
|
||||
if ( !document_.empty() )
|
||||
{
|
||||
char last = document_[document_.length()-1];
|
||||
if ( last == ' ' ) // already indented
|
||||
return;
|
||||
if ( last != '\n' ) // Comments may add new-line
|
||||
*document_ << '\n';
|
||||
}
|
||||
*/
|
||||
*document_ << '\n' << indentString_;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeWithIndent( const std::string &value )
|
||||
{
|
||||
writeIndent();
|
||||
*document_ << value;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::indent()
|
||||
{
|
||||
indentString_ += indentation_;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::unindent()
|
||||
{
|
||||
assert( indentString_.size() >= indentation_.size() );
|
||||
indentString_.resize( indentString_.size() - indentation_.size() );
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeCommentBeforeValue( const Value &root )
|
||||
{
|
||||
if ( !root.hasComment( commentBefore ) )
|
||||
return;
|
||||
*document_ << normalizeEOL( root.getComment( commentBefore ) );
|
||||
*document_ << "\n";
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root )
|
||||
{
|
||||
if ( root.hasComment( commentAfterOnSameLine ) )
|
||||
*document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) );
|
||||
|
||||
if ( root.hasComment( commentAfter ) )
|
||||
{
|
||||
*document_ << "\n";
|
||||
*document_ << normalizeEOL( root.getComment( commentAfter ) );
|
||||
*document_ << "\n";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
StyledStreamWriter::hasCommentForValue( const Value &value )
|
||||
{
|
||||
return value.hasComment( commentBefore )
|
||||
|| value.hasComment( commentAfterOnSameLine )
|
||||
|| value.hasComment( commentAfter );
|
||||
}
|
||||
|
||||
|
||||
std::string
|
||||
StyledStreamWriter::normalizeEOL( const std::string &text )
|
||||
{
|
||||
std::string normalized;
|
||||
normalized.reserve( text.length() );
|
||||
const char *begin = text.c_str();
|
||||
const char *end = begin + text.length();
|
||||
const char *current = begin;
|
||||
while ( current != end )
|
||||
{
|
||||
char c = *current++;
|
||||
if ( c == '\r' ) // mac or dos EOL
|
||||
{
|
||||
if ( *current == '\n' ) // convert dos EOL
|
||||
++current;
|
||||
normalized += '\n';
|
||||
}
|
||||
else // handle unix EOL & other char
|
||||
normalized += c;
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
|
||||
std::ostream& operator<<( std::ostream &sout, const Value &root )
|
||||
{
|
||||
Json::StyledStreamWriter writer;
|
||||
writer.write(sout, root);
|
||||
return sout;
|
||||
}
|
||||
|
||||
|
||||
} // namespace Json
|
||||
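For reference, a minimal usage sketch of the two writers implemented above. This is not part of the diff; the <json/json.h> include path and the Json::Value member functions used here are assumptions based on the usual jsoncpp layout.

#include <iostream>
#include <json/json.h>   // assumed jsoncpp umbrella header

int main()
{
    Json::Value root;                                // build a small document
    root["name"] = "linguist";
    root["tags"].append( "cpp" );

    Json::StyledWriter writer;                       // buffers the output in a std::string
    std::cout << writer.write( root );

    Json::StyledStreamWriter streamWriter( "   " );  // writes directly to the stream
    streamWriter.write( std::cout, root );

    std::cout << root;                               // operator<< above routes through StyledStreamWriter
    return 0;
}

The string-building StyledWriter is convenient for small values; StyledStreamWriter avoids the intermediate buffer when writing large documents to a file or socket.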
170
samples/C++/program.cp
Normal file
@@ -0,0 +1,170 @@
|
||||
/**
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Sahil Dua ( sahildua2305 | http://sahildua.com )
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.*/
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
#include <cstdio>
|
||||
#include <cstring>
|
||||
#include <cmath>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <set>
|
||||
#include <queue>
|
||||
#include <stack>
|
||||
using namespace std;
|
||||
typedef long long ll;
|
||||
#define DEBUG
|
||||
#define mod 1000000007
|
||||
#define pb push_back
|
||||
|
||||
int r2, c2, n, m;
|
||||
|
||||
bool dfs(vector<string> graph, int r, int c){
|
||||
//cout<<r<<" "<<c<<endl;
|
||||
if(graph[r][c] == 'X'){
|
||||
if(r==r2 && c==c2)
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
else{
|
||||
graph[r][c] = 'X';
|
||||
}
|
||||
if(r>0){
|
||||
if(dfs(graph, r-1, c))
|
||||
return true;
|
||||
}
|
||||
if(c>0){
|
||||
if(dfs(graph, r, c-1))
|
||||
return true;
|
||||
}
|
||||
if(r<(n-1)){
|
||||
if(dfs(graph, r+1, c))
|
||||
return true;
|
||||
}
|
||||
if(c<(m-1)){
|
||||
if(dfs(graph, r, c+1))
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
struct point{
|
||||
int r,c;
|
||||
point(int rr, int cc){
|
||||
r = rr;
|
||||
c = cc;
|
||||
}
|
||||
};
|
||||
|
||||
stack<point> st;
|
||||
|
||||
// if(r>0){
|
||||
// if(dfs(graph, r-1, c))
|
||||
// return true;
|
||||
// }
|
||||
// if(c>0){
|
||||
// if(dfs(graph, r, c-1))
|
||||
// return true;
|
||||
// }
|
||||
// if(r<(n-1)){
|
||||
// if(dfs(graph, r+1, c))
|
||||
// return true;
|
||||
// }
|
||||
// if(c<(m-1)){
|
||||
// if(dfs(graph, r, c+1))
|
||||
// return true;
|
||||
// }
|
||||
|
||||
bool search(vector<string> graph, int rr, int cc){
|
||||
point t(0, 0); // struct point has no default constructor, so initialize explicitly
|
||||
t.r=rr;
|
||||
t.c=cc;
|
||||
st.push(t);
|
||||
|
||||
while(!st.empty()){
|
||||
point u = st.top();
|
||||
st.pop();
|
||||
int r = u.r, c = u.c;
|
||||
cout<<r<<" "<<c<<endl;
|
||||
if(graph[r][c]=='X'){
|
||||
if(r==r2 && c==c2)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
else{
|
||||
graph[r][c] = 'X';
|
||||
}
|
||||
if(r>0){
|
||||
t.r=r-1;
|
||||
t.c=c;
|
||||
st.push(t);
|
||||
}
|
||||
if(c>0){
|
||||
t.r=r;
|
||||
t.c=c-1;
|
||||
st.push(t);
|
||||
}
|
||||
if(r<(n-1)){
|
||||
t.r=r+1;
|
||||
t.c=c;
|
||||
st.push(t);
|
||||
}
|
||||
if(c<(m-1)){
|
||||
t.r=r;
|
||||
t.c=c+1;
|
||||
st.push(t);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
int main(){
|
||||
ios::sync_with_stdio(false);
|
||||
#ifdef DEBUG
|
||||
freopen("input.txt", "r", stdin);
|
||||
#endif // DEBUG
|
||||
|
||||
cin>>n>>m;
|
||||
string temp;
|
||||
vector<string> graph;
|
||||
for(int i=0;i<n;i++){
|
||||
cin>>temp;
|
||||
graph.pb(temp);
|
||||
}
|
||||
int r1,c1;
|
||||
cin>>r1>>c1;
|
||||
cin>>r2>>c2;
|
||||
r2--;
|
||||
c2--;
|
||||
r1--;
|
||||
c1--;
|
||||
graph[r1][c1] = '.';
|
||||
if(search(graph, r1, c1))
|
||||
cout<<"YES\n";
|
||||
else
|
||||
cout<<"NO\n";
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -1,415 +0,0 @@
|
||||
// This defines the interface to the QsciCommand class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCICOMMAND_H
|
||||
#define QSCICOMMAND_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qstring.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintillabase.h>
|
||||
|
||||
|
||||
class QsciScintilla;
|
||||
|
||||
|
||||
//! \brief The QsciCommand class represents an internal editor command that may
|
||||
//! have one or two keys bound to it.
|
||||
//!
|
||||
//! Methods are provided to change the keys bound to the command and to remove
|
||||
//! a key binding. Each command has a user friendly description of the command
|
||||
//! for use in key mapping dialogs.
|
||||
class QSCINTILLA_EXPORT QsciCommand
|
||||
{
|
||||
public:
|
||||
//! This enum defines the different commands that can be assigned to a key.
|
||||
enum Command {
|
||||
//! Move down one line.
|
||||
LineDown = QsciScintillaBase::SCI_LINEDOWN,
|
||||
|
||||
//! Extend the selection down one line.
|
||||
LineDownExtend = QsciScintillaBase::SCI_LINEDOWNEXTEND,
|
||||
|
||||
//! Extend the rectangular selection down one line.
|
||||
LineDownRectExtend = QsciScintillaBase::SCI_LINEDOWNRECTEXTEND,
|
||||
|
||||
//! Scroll the view down one line.
|
||||
LineScrollDown = QsciScintillaBase::SCI_LINESCROLLDOWN,
|
||||
|
||||
//! Move up one line.
|
||||
LineUp = QsciScintillaBase::SCI_LINEUP,
|
||||
|
||||
//! Extend the selection up one line.
|
||||
LineUpExtend = QsciScintillaBase::SCI_LINEUPEXTEND,
|
||||
|
||||
//! Extend the rectangular selection up one line.
|
||||
LineUpRectExtend = QsciScintillaBase::SCI_LINEUPRECTEXTEND,
|
||||
|
||||
//! Scroll the view up one line.
|
||||
LineScrollUp = QsciScintillaBase::SCI_LINESCROLLUP,
|
||||
|
||||
//! Scroll to the start of the document.
|
||||
ScrollToStart = QsciScintillaBase::SCI_SCROLLTOSTART,
|
||||
|
||||
//! Scroll to the end of the document.
|
||||
ScrollToEnd = QsciScintillaBase::SCI_SCROLLTOEND,
|
||||
|
||||
//! Scroll vertically to centre the current line.
|
||||
VerticalCentreCaret = QsciScintillaBase::SCI_VERTICALCENTRECARET,
|
||||
|
||||
//! Move down one paragraph.
|
||||
ParaDown = QsciScintillaBase::SCI_PARADOWN,
|
||||
|
||||
//! Extend the selection down one paragraph.
|
||||
ParaDownExtend = QsciScintillaBase::SCI_PARADOWNEXTEND,
|
||||
|
||||
//! Move up one paragraph.
|
||||
ParaUp = QsciScintillaBase::SCI_PARAUP,
|
||||
|
||||
//! Extend the selection up one paragraph.
|
||||
ParaUpExtend = QsciScintillaBase::SCI_PARAUPEXTEND,
|
||||
|
||||
//! Move left one character.
|
||||
CharLeft = QsciScintillaBase::SCI_CHARLEFT,
|
||||
|
||||
//! Extend the selection left one character.
|
||||
CharLeftExtend = QsciScintillaBase::SCI_CHARLEFTEXTEND,
|
||||
|
||||
//! Extend the rectangular selection left one character.
|
||||
CharLeftRectExtend = QsciScintillaBase::SCI_CHARLEFTRECTEXTEND,
|
||||
|
||||
//! Move right one character.
|
||||
CharRight = QsciScintillaBase::SCI_CHARRIGHT,
|
||||
|
||||
//! Extend the selection right one character.
|
||||
CharRightExtend = QsciScintillaBase::SCI_CHARRIGHTEXTEND,
|
||||
|
||||
//! Extend the rectangular selection right one character.
|
||||
CharRightRectExtend = QsciScintillaBase::SCI_CHARRIGHTRECTEXTEND,
|
||||
|
||||
//! Move left one word.
|
||||
WordLeft = QsciScintillaBase::SCI_WORDLEFT,
|
||||
|
||||
//! Extend the selection left one word.
|
||||
WordLeftExtend = QsciScintillaBase::SCI_WORDLEFTEXTEND,
|
||||
|
||||
//! Move right one word.
|
||||
WordRight = QsciScintillaBase::SCI_WORDRIGHT,
|
||||
|
||||
//! Extend the selection right one word.
|
||||
WordRightExtend = QsciScintillaBase::SCI_WORDRIGHTEXTEND,
|
||||
|
||||
//! Move to the end of the previous word.
|
||||
WordLeftEnd = QsciScintillaBase::SCI_WORDLEFTEND,
|
||||
|
||||
//! Extend the selection to the end of the previous word.
|
||||
WordLeftEndExtend = QsciScintillaBase::SCI_WORDLEFTENDEXTEND,
|
||||
|
||||
//! Move to the end of the next word.
|
||||
WordRightEnd = QsciScintillaBase::SCI_WORDRIGHTEND,
|
||||
|
||||
//! Extend the selection to the end of the next word.
|
||||
WordRightEndExtend = QsciScintillaBase::SCI_WORDRIGHTENDEXTEND,
|
||||
|
||||
//! Move left one word part.
|
||||
WordPartLeft = QsciScintillaBase::SCI_WORDPARTLEFT,
|
||||
|
||||
//! Extend the selection left one word part.
|
||||
WordPartLeftExtend = QsciScintillaBase::SCI_WORDPARTLEFTEXTEND,
|
||||
|
||||
//! Move right one word part.
|
||||
WordPartRight = QsciScintillaBase::SCI_WORDPARTRIGHT,
|
||||
|
||||
//! Extend the selection right one word part.
|
||||
WordPartRightExtend = QsciScintillaBase::SCI_WORDPARTRIGHTEXTEND,
|
||||
|
||||
//! Move to the start of the document line.
|
||||
Home = QsciScintillaBase::SCI_HOME,
|
||||
|
||||
//! Extend the selection to the start of the document line.
|
||||
HomeExtend = QsciScintillaBase::SCI_HOMEEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the start of the document line.
|
||||
HomeRectExtend = QsciScintillaBase::SCI_HOMERECTEXTEND,
|
||||
|
||||
//! Move to the start of the displayed line.
|
||||
HomeDisplay = QsciScintillaBase::SCI_HOMEDISPLAY,
|
||||
|
||||
//! Extend the selection to the start of the displayed line.
|
||||
HomeDisplayExtend = QsciScintillaBase::SCI_HOMEDISPLAYEXTEND,
|
||||
|
||||
//! Move to the start of the displayed or document line.
|
||||
HomeWrap = QsciScintillaBase::SCI_HOMEWRAP,
|
||||
|
||||
//! Extend the selection to the start of the displayed or document
|
||||
//! line.
|
||||
HomeWrapExtend = QsciScintillaBase::SCI_HOMEWRAPEXTEND,
|
||||
|
||||
//! Move to the first visible character in the document line.
|
||||
VCHome = QsciScintillaBase::SCI_VCHOME,
|
||||
|
||||
//! Extend the selection to the first visible character in the document
|
||||
//! line.
|
||||
VCHomeExtend = QsciScintillaBase::SCI_VCHOMEEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the first visible character in
|
||||
//! the document line.
|
||||
VCHomeRectExtend = QsciScintillaBase::SCI_VCHOMERECTEXTEND,
|
||||
|
||||
//! Move to the first visible character of the displayed or document
|
||||
//! line.
|
||||
VCHomeWrap = QsciScintillaBase::SCI_VCHOMEWRAP,
|
||||
|
||||
//! Extend the selection to the first visible character of the
|
||||
//! displayed or document line.
|
||||
VCHomeWrapExtend = QsciScintillaBase::SCI_VCHOMEWRAPEXTEND,
|
||||
|
||||
//! Move to the end of the document line.
|
||||
LineEnd = QsciScintillaBase::SCI_LINEEND,
|
||||
|
||||
//! Extend the selection to the end of the document line.
|
||||
LineEndExtend = QsciScintillaBase::SCI_LINEENDEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the end of the document line.
|
||||
LineEndRectExtend = QsciScintillaBase::SCI_LINEENDRECTEXTEND,
|
||||
|
||||
//! Move to the end of the displayed line.
|
||||
LineEndDisplay = QsciScintillaBase::SCI_LINEENDDISPLAY,
|
||||
|
||||
//! Extend the selection to the end of the displayed line.
|
||||
LineEndDisplayExtend = QsciScintillaBase::SCI_LINEENDDISPLAYEXTEND,
|
||||
|
||||
//! Move to the end of the displayed or document line.
|
||||
LineEndWrap = QsciScintillaBase::SCI_LINEENDWRAP,
|
||||
|
||||
//! Extend the selection to the end of the displayed or document line.
|
||||
LineEndWrapExtend = QsciScintillaBase::SCI_LINEENDWRAPEXTEND,
|
||||
|
||||
//! Move to the start of the document.
|
||||
DocumentStart = QsciScintillaBase::SCI_DOCUMENTSTART,
|
||||
|
||||
//! Extend the selection to the start of the document.
|
||||
DocumentStartExtend = QsciScintillaBase::SCI_DOCUMENTSTARTEXTEND,
|
||||
|
||||
//! Move to the end of the document.
|
||||
DocumentEnd = QsciScintillaBase::SCI_DOCUMENTEND,
|
||||
|
||||
//! Extend the selection to the end of the document.
|
||||
DocumentEndExtend = QsciScintillaBase::SCI_DOCUMENTENDEXTEND,
|
||||
|
||||
//! Move up one page.
|
||||
PageUp = QsciScintillaBase::SCI_PAGEUP,
|
||||
|
||||
//! Extend the selection up one page.
|
||||
PageUpExtend = QsciScintillaBase::SCI_PAGEUPEXTEND,
|
||||
|
||||
//! Extend the rectangular selection up one page.
|
||||
PageUpRectExtend = QsciScintillaBase::SCI_PAGEUPRECTEXTEND,
|
||||
|
||||
//! Move down one page.
|
||||
PageDown = QsciScintillaBase::SCI_PAGEDOWN,
|
||||
|
||||
//! Extend the selection down one page.
|
||||
PageDownExtend = QsciScintillaBase::SCI_PAGEDOWNEXTEND,
|
||||
|
||||
//! Extend the rectangular selection down one page.
|
||||
PageDownRectExtend = QsciScintillaBase::SCI_PAGEDOWNRECTEXTEND,
|
||||
|
||||
//! Stuttered move up one page.
|
||||
StutteredPageUp = QsciScintillaBase::SCI_STUTTEREDPAGEUP,
|
||||
|
||||
//! Stuttered extend the selection up one page.
|
||||
StutteredPageUpExtend = QsciScintillaBase::SCI_STUTTEREDPAGEUPEXTEND,
|
||||
|
||||
//! Stuttered move down one page.
|
||||
StutteredPageDown = QsciScintillaBase::SCI_STUTTEREDPAGEDOWN,
|
||||
|
||||
//! Stuttered extend the selection down one page.
|
||||
StutteredPageDownExtend = QsciScintillaBase::SCI_STUTTEREDPAGEDOWNEXTEND,
|
||||
|
||||
//! Delete the current character.
|
||||
Delete = QsciScintillaBase::SCI_CLEAR,
|
||||
|
||||
//! Delete the previous character.
|
||||
DeleteBack = QsciScintillaBase::SCI_DELETEBACK,
|
||||
|
||||
//! Delete the previous character if not at start of line.
|
||||
DeleteBackNotLine = QsciScintillaBase::SCI_DELETEBACKNOTLINE,
|
||||
|
||||
//! Delete the word to the left.
|
||||
DeleteWordLeft = QsciScintillaBase::SCI_DELWORDLEFT,
|
||||
|
||||
//! Delete the word to the right.
|
||||
DeleteWordRight = QsciScintillaBase::SCI_DELWORDRIGHT,
|
||||
|
||||
//! Delete right to the end of the next word.
|
||||
DeleteWordRightEnd = QsciScintillaBase::SCI_DELWORDRIGHTEND,
|
||||
|
||||
//! Delete the line to the left.
|
||||
DeleteLineLeft = QsciScintillaBase::SCI_DELLINELEFT,
|
||||
|
||||
//! Delete the line to the right.
|
||||
DeleteLineRight = QsciScintillaBase::SCI_DELLINERIGHT,
|
||||
|
||||
//! Delete the current line.
|
||||
LineDelete = QsciScintillaBase::SCI_LINEDELETE,
|
||||
|
||||
//! Cut the current line to the clipboard.
|
||||
LineCut = QsciScintillaBase::SCI_LINECUT,
|
||||
|
||||
//! Copy the current line to the clipboard.
|
||||
LineCopy = QsciScintillaBase::SCI_LINECOPY,
|
||||
|
||||
//! Transpose the current and previous lines.
|
||||
LineTranspose = QsciScintillaBase::SCI_LINETRANSPOSE,
|
||||
|
||||
//! Duplicate the current line.
|
||||
LineDuplicate = QsciScintillaBase::SCI_LINEDUPLICATE,
|
||||
|
||||
//! Select the whole document.
|
||||
SelectAll = QsciScintillaBase::SCI_SELECTALL,
|
||||
|
||||
//! Move the selected lines up one line.
|
||||
MoveSelectedLinesUp = QsciScintillaBase::SCI_MOVESELECTEDLINESUP,
|
||||
|
||||
//! Move the selected lines down one line.
|
||||
MoveSelectedLinesDown = QsciScintillaBase::SCI_MOVESELECTEDLINESDOWN,
|
||||
|
||||
//! Duplicate the selection.
|
||||
SelectionDuplicate = QsciScintillaBase::SCI_SELECTIONDUPLICATE,
|
||||
|
||||
//! Convert the selection to lower case.
|
||||
SelectionLowerCase = QsciScintillaBase::SCI_LOWERCASE,
|
||||
|
||||
//! Convert the selection to upper case.
|
||||
SelectionUpperCase = QsciScintillaBase::SCI_UPPERCASE,
|
||||
|
||||
//! Cut the selection to the clipboard.
|
||||
SelectionCut = QsciScintillaBase::SCI_CUT,
|
||||
|
||||
//! Copy the selection to the clipboard.
|
||||
SelectionCopy = QsciScintillaBase::SCI_COPY,
|
||||
|
||||
//! Paste from the clipboard.
|
||||
Paste = QsciScintillaBase::SCI_PASTE,
|
||||
|
||||
//! Toggle insert/overtype.
|
||||
EditToggleOvertype = QsciScintillaBase::SCI_EDITTOGGLEOVERTYPE,
|
||||
|
||||
//! Insert a platform dependent newline.
|
||||
Newline = QsciScintillaBase::SCI_NEWLINE,
|
||||
|
||||
//! Insert a formfeed.
|
||||
Formfeed = QsciScintillaBase::SCI_FORMFEED,
|
||||
|
||||
//! Indent one level.
|
||||
Tab = QsciScintillaBase::SCI_TAB,
|
||||
|
||||
//! De-indent one level.
|
||||
Backtab = QsciScintillaBase::SCI_BACKTAB,
|
||||
|
||||
//! Cancel any current operation.
|
||||
Cancel = QsciScintillaBase::SCI_CANCEL,
|
||||
|
||||
//! Undo the last command.
|
||||
Undo = QsciScintillaBase::SCI_UNDO,
|
||||
|
||||
//! Redo the last command.
|
||||
Redo = QsciScintillaBase::SCI_REDO,
|
||||
|
||||
//! Zoom in.
|
||||
ZoomIn = QsciScintillaBase::SCI_ZOOMIN,
|
||||
|
||||
//! Zoom out.
|
||||
ZoomOut = QsciScintillaBase::SCI_ZOOMOUT,
|
||||
};
|
||||
|
||||
//! Return the command that will be executed by this instance.
|
||||
Command command() const {return scicmd;}
|
||||
|
||||
//! Execute the command.
|
||||
void execute();
|
||||
|
||||
//! Binds the key \a key to the command. If \a key is 0 then the key
|
||||
//! binding is removed. If \a key is invalid then the key binding is
|
||||
//! unchanged. Valid keys are any visible or control character or any
|
||||
//! of \c Key_Down, \c Key_Up, \c Key_Left, \c Key_Right, \c Key_Home,
|
||||
//! \c Key_End, \c Key_PageUp, \c Key_PageDown, \c Key_Delete,
|
||||
//! \c Key_Insert, \c Key_Escape, \c Key_Backspace, \c Key_Tab and
|
||||
//! \c Key_Return. Keys may be modified with any combination of \c SHIFT,
|
||||
//! \c CTRL, \c ALT and \c META.
|
||||
//!
|
||||
//! \sa key(), setAlternateKey(), validKey()
|
||||
void setKey(int key);
|
||||
|
||||
//! Binds the alternate key \a altkey to the command. If \a altkey is 0
|
||||
//! then the alternate key binding is removed.
|
||||
//!
|
||||
//! \sa alternateKey(), setKey(), validKey()
|
||||
void setAlternateKey(int altkey);
|
||||
|
||||
//! The key that is currently bound to the command is returned.
|
||||
//!
|
||||
//! \sa setKey(), alternateKey()
|
||||
int key() const {return qkey;}
|
||||
|
||||
//! The alternate key that is currently bound to the command is
|
||||
//! returned.
|
||||
//!
|
||||
//! \sa setAlternateKey(), key()
|
||||
int alternateKey() const {return qaltkey;}
|
||||
|
||||
//! If the key \a key is valid then true is returned.
|
||||
static bool validKey(int key);
|
||||
|
||||
//! The user friendly description of the command is returned.
|
||||
QString description() const;
|
||||
|
||||
private:
|
||||
friend class QsciCommandSet;
|
||||
|
||||
QsciCommand(QsciScintilla *qs, Command cmd, int key, int altkey,
|
||||
const char *desc);
|
||||
|
||||
void bindKey(int key,int &qk,int &scik);
|
||||
|
||||
QsciScintilla *qsCmd;
|
||||
Command scicmd;
|
||||
int qkey, scikey, qaltkey, scialtkey;
|
||||
const char *descCmd;
|
||||
|
||||
QsciCommand(const QsciCommand &);
|
||||
QsciCommand &operator=(const QsciCommand &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -1,116 +0,0 @@
|
||||
// This module defines interface to the QsciPrinter class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCIPRINTER_H
|
||||
#define QSCIPRINTER_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qprinter.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintilla.h>
|
||||
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
class QRect;
|
||||
class QPainter;
|
||||
QT_END_NAMESPACE
|
||||
|
||||
class QsciScintillaBase;
|
||||
|
||||
|
||||
//! \brief The QsciPrinter class is a sub-class of the Qt QPrinter class that
|
||||
//! is able to print the text of a Scintilla document.
|
||||
//!
|
||||
//! The class can be further sub-classed to alter the layout of the text, adding
|
||||
//! headers and footers for example.
|
||||
class QSCINTILLA_EXPORT QsciPrinter : public QPrinter
|
||||
{
|
||||
public:
|
||||
//! Constructs a printer paint device with mode \a mode.
|
||||
QsciPrinter(PrinterMode mode = ScreenResolution);
|
||||
|
||||
//! Destroys the QsciPrinter instance.
|
||||
virtual ~QsciPrinter();
|
||||
|
||||
//! Format a page, by adding headers and footers for example, before the
|
||||
//! document text is drawn on it. \a painter is the painter to be used to
|
||||
//! add customised text and graphics. \a drawing is true if the page is
|
||||
//! actually being drawn rather than being sized. \a painter drawing
|
||||
//! methods must only be called when \a drawing is true. \a area is the
|
||||
//! area of the page that will be used to draw the text. This should be
|
||||
//! modified if it is necessary to reserve space for any customised text or
|
||||
//! graphics. By default the area is relative to the printable area of the
|
||||
//! page. Use QPrinter::setFullPage() before calling printRange() if you
|
||||
//! want to try and print over the whole page. \a pagenr is the number of
|
||||
//! the page. The first page is numbered 1.
|
||||
virtual void formatPage(QPainter &painter, bool drawing, QRect &area,
|
||||
int pagenr);
|
||||
|
||||
//! Return the number of points to add to each font when printing.
|
||||
//!
|
||||
//! \sa setMagnification()
|
||||
int magnification() const {return mag;}
|
||||
|
||||
//! Sets the number of points to add to each font when printing to \a
|
||||
//! magnification.
|
||||
//!
|
||||
//! \sa magnification()
|
||||
virtual void setMagnification(int magnification);
|
||||
|
||||
//! Print a range of lines from the Scintilla instance \a qsb. \a from is
|
||||
//! the first line to print and a negative value signifies the first line
|
||||
//! of text. \a to is the last line to print and a negative value
|
||||
//! signifies the last line of text. true is returned if there was no
|
||||
//! error.
|
||||
virtual int printRange(QsciScintillaBase *qsb, int from = -1, int to = -1);
|
||||
|
||||
//! Return the line wrap mode used when printing. The default is
|
||||
//! QsciScintilla::WrapWord.
|
||||
//!
|
||||
//! \sa setWrapMode()
|
||||
QsciScintilla::WrapMode wrapMode() const {return wrap;}
|
||||
|
||||
//! Sets the line wrap mode used when printing to \a wmode.
|
||||
//!
|
||||
//! \sa wrapMode()
|
||||
virtual void setWrapMode(QsciScintilla::WrapMode wmode);
|
||||
|
||||
private:
|
||||
int mag;
|
||||
QsciScintilla::WrapMode wrap;
|
||||
|
||||
QsciPrinter(const QsciPrinter &);
|
||||
QsciPrinter &operator=(const QsciPrinter &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -1,116 +0,0 @@
|
||||
// This module defines interface to the QsciPrinter class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCIPRINTER_H
|
||||
#define QSCIPRINTER_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qprinter.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintilla.h>
|
||||
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
class QRect;
|
||||
class QPainter;
|
||||
QT_END_NAMESPACE
|
||||
|
||||
class QsciScintillaBase;
|
||||
|
||||
|
||||
//! \brief The QsciPrinter class is a sub-class of the Qt QPrinter class that
|
||||
//! is able to print the text of a Scintilla document.
|
||||
//!
|
||||
//! The class can be further sub-classed to alter the layout of the text, adding
|
||||
//! headers and footers for example.
|
||||
class QSCINTILLA_EXPORT QsciPrinter : public QPrinter
|
||||
{
|
||||
public:
|
||||
//! Constructs a printer paint device with mode \a mode.
|
||||
QsciPrinter(PrinterMode mode = ScreenResolution);
|
||||
|
||||
//! Destroys the QsciPrinter instance.
|
||||
virtual ~QsciPrinter();
|
||||
|
||||
//! Format a page, by adding headers and footers for example, before the
|
||||
//! document text is drawn on it. \a painter is the painter to be used to
|
||||
//! add customised text and graphics. \a drawing is true if the page is
|
||||
//! actually being drawn rather than being sized. \a painter drawing
|
||||
//! methods must only be called when \a drawing is true. \a area is the
|
||||
//! area of the page that will be used to draw the text. This should be
|
||||
//! modified if it is necessary to reserve space for any customised text or
|
||||
//! graphics. By default the area is relative to the printable area of the
|
||||
//! page. Use QPrinter::setFullPage() before calling printRange() if you
|
||||
//! want to try and print over the whole page. \a pagenr is the number of
|
||||
//! the page. The first page is numbered 1.
|
||||
virtual void formatPage(QPainter &painter, bool drawing, QRect &area,
|
||||
int pagenr);
|
||||
|
||||
//! Return the number of points to add to each font when printing.
|
||||
//!
|
||||
//! \sa setMagnification()
|
||||
int magnification() const {return mag;}
|
||||
|
||||
//! Sets the number of points to add to each font when printing to \a
|
||||
//! magnification.
|
||||
//!
|
||||
//! \sa magnification()
|
||||
virtual void setMagnification(int magnification);
|
||||
|
||||
//! Print a range of lines from the Scintilla instance \a qsb. \a from is
|
||||
//! the first line to print and a negative value signifies the first line
|
||||
//! of text. \a to is the last line to print and a negative value
|
||||
//! signifies the last line of text. true is returned if there was no
|
||||
//! error.
|
||||
virtual int printRange(QsciScintillaBase *qsb, int from = -1, int to = -1);
|
||||
|
||||
//! Return the line wrap mode used when printing. The default is
|
||||
//! QsciScintilla::WrapWord.
|
||||
//!
|
||||
//! \sa setWrapMode()
|
||||
QsciScintilla::WrapMode wrapMode() const {return wrap;}
|
||||
|
||||
//! Sets the line wrap mode used when printing to \a wmode.
|
||||
//!
|
||||
//! \sa wrapMode()
|
||||
virtual void setWrapMode(QsciScintilla::WrapMode wmode);
|
||||
|
||||
private:
|
||||
int mag;
|
||||
QsciScintilla::WrapMode wrap;
|
||||
|
||||
QsciPrinter(const QsciPrinter &);
|
||||
QsciPrinter &operator=(const QsciPrinter &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
13
samples/C++/simple.re
Normal file
@@ -0,0 +1,13 @@
|
||||
#define NULL ((char*) 0)
|
||||
char *scan(char *p){
|
||||
char *q;
|
||||
#define YYCTYPE char
|
||||
#define YYCURSOR p
|
||||
#define YYLIMIT p
|
||||
#define YYMARKER q
|
||||
#define YYFILL(n)
|
||||
/*!re2c
|
||||
[0-9]+ {return YYCURSOR;}
|
||||
[\000-\377] {return NULL;}
|
||||
*/
|
||||
}
|
||||
557
samples/C++/srs_app_ingest.cpp
Normal file
@@ -0,0 +1,557 @@
|
||||
/*
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2013-2015 SRS(ossrs)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
// Source - https://github.com/REN-I/srs/blob/3aae7854702a37955bce706fcd8122b02ac07f53/trunk/src/app/srs_app_ingest.cpp
|
||||
|
||||
#include <srs_app_ingest.hpp>
|
||||
|
||||
#ifdef SRS_AUTO_INGEST
|
||||
|
||||
#include <stdlib.h>
|
||||
using namespace std;
|
||||
|
||||
#include <srs_kernel_error.hpp>
|
||||
#include <srs_app_config.hpp>
|
||||
#include <srs_kernel_log.hpp>
|
||||
#include <srs_app_ffmpeg.hpp>
|
||||
#include <srs_app_pithy_print.hpp>
|
||||
#include <srs_kernel_utility.hpp>
|
||||
#include <srs_app_utility.hpp>
|
||||
|
||||
// when error, ingester sleep for a while and retry.
|
||||
// ingest never sleep a long time, for we must start the stream ASAP.
|
||||
#define SRS_AUTO_INGESTER_SLEEP_US (int64_t)(3*1000*1000LL)
|
||||
|
||||
SrsIngesterFFMPEG::SrsIngesterFFMPEG()
|
||||
{
|
||||
ffmpeg = NULL;
|
||||
}
|
||||
|
||||
SrsIngesterFFMPEG::~SrsIngesterFFMPEG()
|
||||
{
|
||||
srs_freep(ffmpeg);
|
||||
}
|
||||
|
||||
int SrsIngesterFFMPEG::initialize(SrsFFMPEG* ff, string v, string i)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
ffmpeg = ff;
|
||||
vhost = v;
|
||||
id = i;
|
||||
starttime = srs_get_system_time_ms();
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
string SrsIngesterFFMPEG::uri()
|
||||
{
|
||||
return vhost + "/" + id;
|
||||
}
|
||||
|
||||
int SrsIngesterFFMPEG::alive()
|
||||
{
|
||||
return (int)(srs_get_system_time_ms() - starttime);
|
||||
}
|
||||
|
||||
bool SrsIngesterFFMPEG::equals(string v)
|
||||
{
|
||||
return vhost == v;
|
||||
}
|
||||
|
||||
bool SrsIngesterFFMPEG::equals(string v, string i)
|
||||
{
|
||||
return vhost == v && id == i;
|
||||
}
|
||||
|
||||
int SrsIngesterFFMPEG::start()
|
||||
{
|
||||
return ffmpeg->start();
|
||||
}
|
||||
|
||||
void SrsIngesterFFMPEG::stop()
|
||||
{
|
||||
ffmpeg->stop();
|
||||
}
|
||||
|
||||
int SrsIngesterFFMPEG::cycle()
|
||||
{
|
||||
return ffmpeg->cycle();
|
||||
}
|
||||
|
||||
void SrsIngesterFFMPEG::fast_stop()
|
||||
{
|
||||
ffmpeg->fast_stop();
|
||||
}
|
||||
|
||||
SrsIngester::SrsIngester()
|
||||
{
|
||||
_srs_config->subscribe(this);
|
||||
|
||||
pthread = new SrsReusableThread("ingest", this, SRS_AUTO_INGESTER_SLEEP_US);
|
||||
pprint = SrsPithyPrint::create_ingester();
|
||||
}
|
||||
|
||||
SrsIngester::~SrsIngester()
|
||||
{
|
||||
_srs_config->unsubscribe(this);
|
||||
|
||||
srs_freep(pthread);
|
||||
clear_engines();
|
||||
}
|
||||
|
||||
int SrsIngester::start()
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
if ((ret = parse()) != ERROR_SUCCESS) {
|
||||
clear_engines();
|
||||
ret = ERROR_SUCCESS;
|
||||
return ret;
|
||||
}
|
||||
|
||||
// even no ingesters, we must also start it,
|
||||
// for the reload may add more ingesters.
|
||||
|
||||
// start thread to run all encoding engines.
|
||||
if ((ret = pthread->start()) != ERROR_SUCCESS) {
|
||||
srs_error("st_thread_create failed. ret=%d", ret);
|
||||
return ret;
|
||||
}
|
||||
srs_trace("ingest thread cid=%d, current_cid=%d", pthread->cid(), _srs_context->get_id());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::parse_ingesters(SrsConfDirective* vhost)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
std::vector<SrsConfDirective*> ingesters = _srs_config->get_ingesters(vhost->arg0());
|
||||
|
||||
// create engine
|
||||
for (int i = 0; i < (int)ingesters.size(); i++) {
|
||||
SrsConfDirective* ingest = ingesters[i];
|
||||
if ((ret = parse_engines(vhost, ingest)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::parse_engines(SrsConfDirective* vhost, SrsConfDirective* ingest)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
if (!_srs_config->get_ingest_enabled(ingest)) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
std::string ffmpeg_bin = _srs_config->get_ingest_ffmpeg(ingest);
|
||||
if (ffmpeg_bin.empty()) {
|
||||
ret = ERROR_ENCODER_PARSE;
|
||||
srs_trace("empty ffmpeg ret=%d", ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// get all engines.
|
||||
std::vector<SrsConfDirective*> engines = _srs_config->get_transcode_engines(ingest);
|
||||
|
||||
// create ingesters without engines.
|
||||
if (engines.empty()) {
|
||||
SrsFFMPEG* ffmpeg = new SrsFFMPEG(ffmpeg_bin);
|
||||
if ((ret = initialize_ffmpeg(ffmpeg, vhost, ingest, NULL)) != ERROR_SUCCESS) {
|
||||
srs_freep(ffmpeg);
|
||||
if (ret != ERROR_ENCODER_LOOP) {
|
||||
srs_error("invalid ingest engine. ret=%d", ret);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
SrsIngesterFFMPEG* ingester = new SrsIngesterFFMPEG();
|
||||
if ((ret = ingester->initialize(ffmpeg, vhost->arg0(), ingest->arg0())) != ERROR_SUCCESS) {
|
||||
srs_freep(ingester);
|
||||
return ret;
|
||||
}
|
||||
|
||||
ingesters.push_back(ingester);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// create ingesters with engine
|
||||
for (int i = 0; i < (int)engines.size(); i++) {
|
||||
SrsConfDirective* engine = engines[i];
|
||||
SrsFFMPEG* ffmpeg = new SrsFFMPEG(ffmpeg_bin);
|
||||
if ((ret = initialize_ffmpeg(ffmpeg, vhost, ingest, engine)) != ERROR_SUCCESS) {
|
||||
srs_freep(ffmpeg);
|
||||
if (ret != ERROR_ENCODER_LOOP) {
|
||||
srs_error("invalid ingest engine: %s %s, ret=%d",
|
||||
ingest->arg0().c_str(), engine->arg0().c_str(), ret);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
SrsIngesterFFMPEG* ingester = new SrsIngesterFFMPEG();
|
||||
if ((ret = ingester->initialize(ffmpeg, vhost->arg0(), ingest->arg0())) != ERROR_SUCCESS) {
|
||||
srs_freep(ingester);
|
||||
return ret;
|
||||
}
|
||||
|
||||
ingesters.push_back(ingester);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
void SrsIngester::dispose()
|
||||
{
|
||||
// first, use fast stop to notice all FFMPEG to quit gracefully.
|
||||
std::vector<SrsIngesterFFMPEG*>::iterator it;
|
||||
for (it = ingesters.begin(); it != ingesters.end(); ++it) {
|
||||
SrsIngesterFFMPEG* ingester = *it;
|
||||
ingester->fast_stop();
|
||||
}
|
||||
|
||||
if (!ingesters.empty()) {
|
||||
srs_trace("fast stop all ingesters ok.");
|
||||
}
|
||||
|
||||
// then, use stop to wait FFMPEG quit one by one and send SIGKILL if needed.
|
||||
stop();
|
||||
}
|
||||
|
||||
void SrsIngester::stop()
|
||||
{
|
||||
pthread->stop();
|
||||
clear_engines();
|
||||
}
|
||||
|
||||
int SrsIngester::cycle()
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
std::vector<SrsIngesterFFMPEG*>::iterator it;
|
||||
for (it = ingesters.begin(); it != ingesters.end(); ++it) {
|
||||
SrsIngesterFFMPEG* ingester = *it;
|
||||
|
||||
// start all ffmpegs.
|
||||
if ((ret = ingester->start()) != ERROR_SUCCESS) {
|
||||
srs_error("ingest ffmpeg start failed. ret=%d", ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// check ffmpeg status.
|
||||
if ((ret = ingester->cycle()) != ERROR_SUCCESS) {
|
||||
srs_error("ingest ffmpeg cycle failed. ret=%d", ret);
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
// pithy print
|
||||
show_ingest_log_message();
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
void SrsIngester::on_thread_stop()
|
||||
{
|
||||
}
|
||||
|
||||
void SrsIngester::clear_engines()
|
||||
{
|
||||
std::vector<SrsIngesterFFMPEG*>::iterator it;
|
||||
|
||||
for (it = ingesters.begin(); it != ingesters.end(); ++it) {
|
||||
SrsIngesterFFMPEG* ingester = *it;
|
||||
srs_freep(ingester);
|
||||
}
|
||||
|
||||
ingesters.clear();
|
||||
}
|
||||
|
||||
int SrsIngester::parse()
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
// parse ingesters
|
||||
std::vector<SrsConfDirective*> vhosts;
|
||||
_srs_config->get_vhosts(vhosts);
|
||||
|
||||
for (int i = 0; i < (int)vhosts.size(); i++) {
|
||||
SrsConfDirective* vhost = vhosts[i];
|
||||
if ((ret = parse_ingesters(vhost)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::initialize_ffmpeg(SrsFFMPEG* ffmpeg, SrsConfDirective* vhost, SrsConfDirective* ingest, SrsConfDirective* engine)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
std::string port;
|
||||
if (true) {
|
||||
std::vector<std::string> ip_ports = _srs_config->get_listens();
|
||||
srs_assert(ip_ports.size() > 0);
|
||||
|
||||
std::string ep = ip_ports[0];
|
||||
std::string ip;
|
||||
srs_parse_endpoint(ep, ip, port);
|
||||
}
|
||||
|
||||
std::string output = _srs_config->get_engine_output(engine);
|
||||
// output stream, to other/self server
|
||||
// ie. rtmp://localhost:1935/live/livestream_sd
|
||||
output = srs_string_replace(output, "[vhost]", vhost->arg0());
|
||||
output = srs_string_replace(output, "[port]", port);
|
||||
if (output.empty()) {
|
||||
ret = ERROR_ENCODER_NO_OUTPUT;
|
||||
srs_trace("empty output url, ingest=%s. ret=%d", ingest->arg0().c_str(), ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// find the app and stream in rtmp url
|
||||
std::string url = output;
|
||||
std::string app, stream;
|
||||
size_t pos = std::string::npos;
|
||||
if ((pos = url.rfind("/")) != std::string::npos) {
|
||||
stream = url.substr(pos + 1);
|
||||
url = url.substr(0, pos);
|
||||
}
|
||||
if ((pos = url.rfind("/")) != std::string::npos) {
|
||||
app = url.substr(pos + 1);
|
||||
url = url.substr(0, pos);
|
||||
}
|
||||
if ((pos = app.rfind("?")) != std::string::npos) {
|
||||
app = app.substr(0, pos);
|
||||
}
|
||||
|
||||
std::string log_file = SRS_CONSTS_NULL_FILE; // disabled
|
||||
// write ffmpeg info to log file.
|
||||
if (_srs_config->get_ffmpeg_log_enabled()) {
|
||||
log_file = _srs_config->get_ffmpeg_log_dir();
|
||||
log_file += "/";
|
||||
log_file += "ffmpeg-ingest";
|
||||
log_file += "-";
|
||||
log_file += vhost->arg0();
|
||||
log_file += "-";
|
||||
log_file += app;
|
||||
log_file += "-";
|
||||
log_file += stream;
|
||||
log_file += ".log";
|
||||
}
|
||||
|
||||
// input
|
||||
std::string input_type = _srs_config->get_ingest_input_type(ingest);
|
||||
if (input_type.empty()) {
|
||||
ret = ERROR_ENCODER_NO_INPUT;
|
||||
srs_trace("empty intput type, ingest=%s. ret=%d", ingest->arg0().c_str(), ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
if (srs_config_ingest_is_file(input_type)) {
|
||||
std::string input_url = _srs_config->get_ingest_input_url(ingest);
|
||||
if (input_url.empty()) {
|
||||
ret = ERROR_ENCODER_NO_INPUT;
|
||||
srs_trace("empty intput url, ingest=%s. ret=%d", ingest->arg0().c_str(), ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// for file, set re.
|
||||
ffmpeg->set_iparams("-re");
|
||||
|
||||
if ((ret = ffmpeg->initialize(input_url, output, log_file)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
} else if (srs_config_ingest_is_stream(input_type)) {
|
||||
std::string input_url = _srs_config->get_ingest_input_url(ingest);
|
||||
if (input_url.empty()) {
|
||||
ret = ERROR_ENCODER_NO_INPUT;
|
||||
srs_trace("empty intput url, ingest=%s. ret=%d", ingest->arg0().c_str(), ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
// for stream, no re.
|
||||
ffmpeg->set_iparams("");
|
||||
|
||||
if ((ret = ffmpeg->initialize(input_url, output, log_file)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
} else {
|
||||
ret = ERROR_ENCODER_INPUT_TYPE;
|
||||
srs_error("invalid ingest=%s type=%s, ret=%d",
|
||||
ingest->arg0().c_str(), input_type.c_str(), ret);
|
||||
}
|
||||
|
||||
// set output format to flv for RTMP
|
||||
ffmpeg->set_oformat("flv");
|
||||
|
||||
std::string vcodec = _srs_config->get_engine_vcodec(engine);
|
||||
std::string acodec = _srs_config->get_engine_acodec(engine);
|
||||
// whatever the engine config, use copy as default.
|
||||
bool engine_disabled = !engine || !_srs_config->get_engine_enabled(engine);
|
||||
if (engine_disabled || vcodec.empty() || acodec.empty()) {
|
||||
if ((ret = ffmpeg->initialize_copy()) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
} else {
|
||||
if ((ret = ffmpeg->initialize_transcode(engine)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
srs_trace("parse success, ingest=%s, vhost=%s",
|
||||
ingest->arg0().c_str(), vhost->arg0().c_str());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
void SrsIngester::show_ingest_log_message()
|
||||
{
|
||||
pprint->elapse();
|
||||
|
||||
if ((int)ingesters.size() <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// random choose one ingester to report.
|
||||
int index = rand() % (int)ingesters.size();
|
||||
SrsIngesterFFMPEG* ingester = ingesters.at(index);
|
||||
|
||||
// reportable
|
||||
if (pprint->can_print()) {
|
||||
srs_trace("-> "SRS_CONSTS_LOG_INGESTER" time=%"PRId64", ingesters=%d, #%d(alive=%ds, %s)",
|
||||
pprint->age(), (int)ingesters.size(), index, ingester->alive() / 1000, ingester->uri().c_str());
|
||||
}
|
||||
}
|
||||
|
||||
int SrsIngester::on_reload_vhost_added(string vhost)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
SrsConfDirective* _vhost = _srs_config->get_vhost(vhost);
|
||||
if ((ret = parse_ingesters(_vhost)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
srs_trace("reload add vhost ingesters, vhost=%s", vhost.c_str());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::on_reload_vhost_removed(string vhost)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
std::vector<SrsIngesterFFMPEG*>::iterator it;
|
||||
|
||||
for (it = ingesters.begin(); it != ingesters.end();) {
|
||||
SrsIngesterFFMPEG* ingester = *it;
|
||||
|
||||
if (!ingester->equals(vhost)) {
|
||||
++it;
|
||||
continue;
|
||||
}
|
||||
|
||||
// stop the ffmpeg and free it.
|
||||
ingester->stop();
|
||||
|
||||
srs_trace("reload stop ingester, vhost=%s, id=%s", vhost.c_str(), ingester->uri().c_str());
|
||||
|
||||
srs_freep(ingester);
|
||||
|
||||
// remove the item from ingesters.
|
||||
it = ingesters.erase(it);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::on_reload_ingest_removed(string vhost, string ingest_id)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
std::vector<SrsIngesterFFMPEG*>::iterator it;
|
||||
|
||||
for (it = ingesters.begin(); it != ingesters.end();) {
|
||||
SrsIngesterFFMPEG* ingester = *it;
|
||||
|
||||
if (!ingester->equals(vhost, ingest_id)) {
|
||||
++it;
|
||||
continue;
|
||||
}
|
||||
|
||||
// stop the ffmpeg and free it.
|
||||
ingester->stop();
|
||||
|
||||
srs_trace("reload stop ingester, vhost=%s, id=%s", vhost.c_str(), ingester->uri().c_str());
|
||||
|
||||
srs_freep(ingester);
|
||||
|
||||
// remove the item from ingesters.
|
||||
it = ingesters.erase(it);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::on_reload_ingest_added(string vhost, string ingest_id)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
SrsConfDirective* _vhost = _srs_config->get_vhost(vhost);
|
||||
SrsConfDirective* _ingester = _srs_config->get_ingest_by_id(vhost, ingest_id);
|
||||
|
||||
if ((ret = parse_engines(_vhost, _ingester)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
srs_trace("reload add ingester, "
|
||||
"vhost=%s, id=%s", vhost.c_str(), ingest_id.c_str());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int SrsIngester::on_reload_ingest_updated(string vhost, string ingest_id)
|
||||
{
|
||||
int ret = ERROR_SUCCESS;
|
||||
|
||||
if ((ret = on_reload_ingest_removed(vhost, ingest_id)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
if ((ret = on_reload_ingest_added(vhost, ingest_id)) != ERROR_SUCCESS) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
srs_trace("reload updated ingester, "
|
||||
"vhost=%s, id=%s", vhost.c_str(), ingest_id.c_str());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
samples/C/Arduino.cats (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
/** The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Hongwei Xi
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.*/
|
||||
|
||||
// Source: https://github.com/githwxi/ATS-Postiats-contrib/blob/master/contrib/arduino/CATS/Arduino.cats
|
||||
|
||||
|
||||
/*
|
||||
** The prelude for Arduino
|
||||
*/
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#ifndef ARDUINO_CATS_ARDUINO
|
||||
#define ARDUINO_CATS_ARDUINO
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#include <Arduino.h>
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#define delay_int(ms) delay(ms)
|
||||
#define delay_ulint(ms) delay(ms)
|
||||
|
||||
/* ****** ****** */
|
||||
//
|
||||
#define random_int_1(x) random(x)
|
||||
#define random_int_2(x, y) random(x, y)
|
||||
#define random_lint_1(x) random(x)
|
||||
#define random_lint_2(x, y) random(x, y)
|
||||
//
|
||||
#define randomSeed_int(x) randomSeed(x)
|
||||
#define randomSeed_uint(x) randomSeed(x)
|
||||
//
|
||||
/* ****** ****** */
|
||||
|
||||
#endif // #ifndef(ARDUINO_CATS_ARDUINO)
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
/* end of [Arduino.cats] */
|
||||
samples/C/array.c (new file, 45 lines)
@@ -0,0 +1,45 @@
#include <array.h>

unsigned __bump_up(unsigned n) {
    unsigned base = 1;
    --n;
    while (base < sizeof n * 8) {
        n |= n >> base;
        base *= 2;
    }
    ++n;
    n += (n == 0);
    return n;
}

void *__array_alloc(size_t size, unsigned length) {
    unsigned allocated = __bump_up(length);
    struct __array_header *head = malloc(sizeof *head + allocated * size);
    assert(head);
    head->length = length;
    head->allocated = allocated;
    return (void *) (head + 1);
}

void __array_resize(void **array, size_t size, int difference) {
    if (difference == 0) {
        return;
    }
    struct __array_header *head = __header(*array);
    head->length += difference;
    if (head->length >= head->allocated) {
        head->allocated = __bump_up(head->length);
        head = realloc(head, sizeof *head + head->allocated * size);
        assert(head);
        *array = head + 1;
    }
}

int __array_search(void *array, void *elem, size_t size) {
    for (unsigned i = 0; i < alength(array) * size; i += size) {
        if (memcmp((char *)array + i, elem, size) == 0) {
            return 1;
        }
    }
    return 0;
}
samples/C/array.h (new file, 58 lines)
@@ -0,0 +1,58 @@
#ifndef ARRAY_H
#define ARRAY_H value

#include <assert.h>
#include <stdlib.h>
#include <string.h>

#define array(type, name, initial_length) \
    type *name = __array_alloc(sizeof(type), initial_length)

#define aforeach(it, array) \
    for (unsigned it = 0; \
         it < alength(array); \
         ++it)

#define __header(array) \
    ((struct __array_header *) array - 1)

#define alength(array) \
    (__header(array)->length)

#define afree(array) \
    free(__header(array))

#define apush(array, elem) \
    __array_resize((void **) &array, sizeof *array, 1); \
    array[alength(array)-1] = elem

#define apop(array) \
    aremove(array, (alength(array) - 1))

#define aremove(array, index) \
    assert(alength(array) > index); \
    memmove(array + index, array + index + 1, sizeof *array * (alength(array) - index - 1)); \
    __array_resize((void **) &array, sizeof *array, -1)

#define ainsert(array, index, elem) \
    __array_resize((void **) &array, sizeof *array, index >= alength(array) ? index - alength(array) + 1 : 1); \
    memmove(array + index + 1, array + index, sizeof *array * (alength(array) - index - 1)); \
    array[index] = elem

#define acontains(array, elem) \
    __array_search(array, &elem, sizeof elem)

#define __arrayallocated(array) \
    (__header(array)->allocated)

struct __array_header {
    unsigned length;
    unsigned allocated;
};

unsigned __bump_up(unsigned n);
void *__array_alloc(size_t size, unsigned length);
void __array_resize(void **array, size_t size, int difference);
int __array_search(void *array, void *elem, size_t size);

#endif /* ifndef ARRAY_H */
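The array.c/array.h pair above implements a header-prefixed dynamic array: __bump_up rounds the capacity up to the next power of two (3 -> 4, 5 -> 8), and the macros hide the struct __array_header bookkeeping behind a plain element pointer. Below is a minimal usage sketch, assuming the two files above are compiled together; the main driver is illustrative and not part of the sample.

#include <stdio.h>
#include "array.h"

int main(void) {
    /* Start empty: length 0, capacity rounded up to 1 by __bump_up(). */
    array(int, xs, 0);

    apush(xs, 10);                    /* grows the hidden backing store as needed */
    apush(xs, 20);
    apush(xs, 30);

    int needle = 20;                  /* acontains() takes &elem, so it needs an lvalue */
    printf("contains 20: %d\n", acontains(xs, needle));

    aforeach(i, xs) {
        printf("xs[%u] = %d\n", i, xs[i]);
    }

    aremove(xs, 0);                   /* shifts the tail left, then shrinks length */
    printf("length after remove: %u\n", alength(xs));

    afree(xs);                        /* frees the whole allocation via its hidden header */
    return 0;
}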
@@ -1,99 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2009-2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
|
||||
#include "common.h"
|
||||
#include "repository.h"
|
||||
#include "commit.h"
|
||||
#include "thread-utils.h"
|
||||
#include "util.h"
|
||||
#include "cache.h"
|
||||
|
||||
int git_cache_init(git_cache *cache, size_t size, git_cached_obj_freeptr free_ptr)
|
||||
{
|
||||
if (size < 8)
|
||||
size = 8;
|
||||
size = git__size_t_powerof2(size);
|
||||
|
||||
cache->size_mask = size - 1;
|
||||
cache->lru_count = 0;
|
||||
cache->free_obj = free_ptr;
|
||||
|
||||
git_mutex_init(&cache->lock);
|
||||
|
||||
cache->nodes = git__malloc(size * sizeof(git_cached_obj *));
|
||||
GITERR_CHECK_ALLOC(cache->nodes);
|
||||
|
||||
memset(cache->nodes, 0x0, size * sizeof(git_cached_obj *));
|
||||
return 0;
|
||||
}
|
||||
|
||||
void git_cache_free(git_cache *cache)
|
||||
{
|
||||
size_t i;
|
||||
|
||||
for (i = 0; i < (cache->size_mask + 1); ++i) {
|
||||
if (cache->nodes[i] != NULL)
|
||||
git_cached_obj_decref(cache->nodes[i], cache->free_obj);
|
||||
}
|
||||
|
||||
git__free(cache->nodes);
|
||||
}
|
||||
|
||||
void *git_cache_get(git_cache *cache, const git_oid *oid)
|
||||
{
|
||||
uint32_t hash;
|
||||
git_cached_obj *node = NULL, *result = NULL;
|
||||
|
||||
memcpy(&hash, oid->id, sizeof(hash));
|
||||
|
||||
git_mutex_lock(&cache->lock);
|
||||
{
|
||||
node = cache->nodes[hash & cache->size_mask];
|
||||
|
||||
if (node != NULL && git_oid_cmp(&node->oid, oid) == 0) {
|
||||
git_cached_obj_incref(node);
|
||||
result = node;
|
||||
}
|
||||
}
|
||||
git_mutex_unlock(&cache->lock);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void *git_cache_try_store(git_cache *cache, void *_entry)
|
||||
{
|
||||
git_cached_obj *entry = _entry;
|
||||
uint32_t hash;
|
||||
|
||||
memcpy(&hash, &entry->oid, sizeof(uint32_t));
|
||||
|
||||
/* increase the refcount on this object, because
|
||||
* the cache now owns it */
|
||||
git_cached_obj_incref(entry);
|
||||
|
||||
git_mutex_lock(&cache->lock);
|
||||
{
|
||||
git_cached_obj *node = cache->nodes[hash & cache->size_mask];
|
||||
|
||||
if (node == NULL) {
|
||||
cache->nodes[hash & cache->size_mask] = entry;
|
||||
} else if (git_oid_cmp(&node->oid, &entry->oid) == 0) {
|
||||
git_cached_obj_decref(entry, cache->free_obj);
|
||||
entry = node;
|
||||
} else {
|
||||
git_cached_obj_decref(node, cache->free_obj);
|
||||
cache->nodes[hash & cache->size_mask] = entry;
|
||||
}
|
||||
}
|
||||
git_mutex_unlock(&cache->lock);
|
||||
|
||||
/* increase the refcount again, because we are
|
||||
* returning it to the user */
|
||||
git_cached_obj_incref(entry);
|
||||
|
||||
return entry;
|
||||
}
|
||||
samples/C/cpu.c (deleted, 725 lines)
@@ -1,725 +0,0 @@
|
||||
/* CPU control.
|
||||
* (C) 2001, 2002, 2003, 2004 Rusty Russell
|
||||
*
|
||||
* This code is licenced under the GPL.
|
||||
*/
|
||||
#include <linux/proc_fs.h>
|
||||
#include <linux/smp.h>
|
||||
#include <linux/init.h>
|
||||
#include <linux/notifier.h>
|
||||
#include <linux/sched.h>
|
||||
#include <linux/unistd.h>
|
||||
#include <linux/cpu.h>
|
||||
#include <linux/oom.h>
|
||||
#include <linux/rcupdate.h>
|
||||
#include <linux/export.h>
|
||||
#include <linux/bug.h>
|
||||
#include <linux/kthread.h>
|
||||
#include <linux/stop_machine.h>
|
||||
#include <linux/mutex.h>
|
||||
#include <linux/gfp.h>
|
||||
#include <linux/suspend.h>
|
||||
|
||||
#include "smpboot.h"
|
||||
|
||||
#ifdef CONFIG_SMP
|
||||
/* Serializes the updates to cpu_online_mask, cpu_present_mask */
|
||||
static DEFINE_MUTEX(cpu_add_remove_lock);
|
||||
|
||||
/*
|
||||
* The following two API's must be used when attempting
|
||||
* to serialize the updates to cpu_online_mask, cpu_present_mask.
|
||||
*/
|
||||
void cpu_maps_update_begin(void)
|
||||
{
|
||||
mutex_lock(&cpu_add_remove_lock);
|
||||
}
|
||||
|
||||
void cpu_maps_update_done(void)
|
||||
{
|
||||
mutex_unlock(&cpu_add_remove_lock);
|
||||
}
|
||||
|
||||
static RAW_NOTIFIER_HEAD(cpu_chain);
|
||||
|
||||
/* If set, cpu_up and cpu_down will return -EBUSY and do nothing.
|
||||
* Should always be manipulated under cpu_add_remove_lock
|
||||
*/
|
||||
static int cpu_hotplug_disabled;
|
||||
|
||||
#ifdef CONFIG_HOTPLUG_CPU
|
||||
|
||||
static struct {
|
||||
struct task_struct *active_writer;
|
||||
struct mutex lock; /* Synchronizes accesses to refcount, */
|
||||
/*
|
||||
* Also blocks the new readers during
|
||||
* an ongoing cpu hotplug operation.
|
||||
*/
|
||||
int refcount;
|
||||
} cpu_hotplug = {
|
||||
.active_writer = NULL,
|
||||
.lock = __MUTEX_INITIALIZER(cpu_hotplug.lock),
|
||||
.refcount = 0,
|
||||
};
|
||||
|
||||
void get_online_cpus(void)
|
||||
{
|
||||
might_sleep();
|
||||
if (cpu_hotplug.active_writer == current)
|
||||
return;
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
cpu_hotplug.refcount++;
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(get_online_cpus);
|
||||
|
||||
void put_online_cpus(void)
|
||||
{
|
||||
if (cpu_hotplug.active_writer == current)
|
||||
return;
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
if (!--cpu_hotplug.refcount && unlikely(cpu_hotplug.active_writer))
|
||||
wake_up_process(cpu_hotplug.active_writer);
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(put_online_cpus);
|
||||
|
||||
/*
|
||||
* This ensures that the hotplug operation can begin only when the
|
||||
* refcount goes to zero.
|
||||
*
|
||||
* Note that during a cpu-hotplug operation, the new readers, if any,
|
||||
* will be blocked by the cpu_hotplug.lock
|
||||
*
|
||||
* Since cpu_hotplug_begin() is always called after invoking
|
||||
* cpu_maps_update_begin(), we can be sure that only one writer is active.
|
||||
*
|
||||
* Note that theoretically, there is a possibility of a livelock:
|
||||
* - Refcount goes to zero, last reader wakes up the sleeping
|
||||
* writer.
|
||||
* - Last reader unlocks the cpu_hotplug.lock.
|
||||
* - A new reader arrives at this moment, bumps up the refcount.
|
||||
* - The writer acquires the cpu_hotplug.lock finds the refcount
|
||||
* non zero and goes to sleep again.
|
||||
*
|
||||
* However, this is very difficult to achieve in practice since
|
||||
* get_online_cpus() is not an api which is called all that often.
|
||||
*
|
||||
*/
|
||||
static void cpu_hotplug_begin(void)
|
||||
{
|
||||
cpu_hotplug.active_writer = current;
|
||||
|
||||
for (;;) {
|
||||
mutex_lock(&cpu_hotplug.lock);
|
||||
if (likely(!cpu_hotplug.refcount))
|
||||
break;
|
||||
__set_current_state(TASK_UNINTERRUPTIBLE);
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
schedule();
|
||||
}
|
||||
}
|
||||
|
||||
static void cpu_hotplug_done(void)
|
||||
{
|
||||
cpu_hotplug.active_writer = NULL;
|
||||
mutex_unlock(&cpu_hotplug.lock);
|
||||
}
|
||||
|
||||
#else /* #if CONFIG_HOTPLUG_CPU */
|
||||
static void cpu_hotplug_begin(void) {}
|
||||
static void cpu_hotplug_done(void) {}
|
||||
#endif /* #else #if CONFIG_HOTPLUG_CPU */
|
||||
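The refcount-gated writer described in the comment above (readers bump a count, the lone writer sleeps until it drains, then holds the lock for the whole operation) can be sketched in user space with a mutex and condition variable. This is only a hedged analogue of the pattern, not the kernel code; every name below is invented, and the active_writer recursion shortcut is omitted.

#include <pthread.h>

/* User-space analogue of the cpu_hotplug refcount gate above. */
static struct {
    pthread_mutex_t lock;
    pthread_cond_t  idle;     /* signalled when refcount reaches zero */
    int             refcount;
} gate = { PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, 0 };

static void reader_enter(void)            /* ~ get_online_cpus() */
{
    pthread_mutex_lock(&gate.lock);
    gate.refcount++;
    pthread_mutex_unlock(&gate.lock);
}

static void reader_exit(void)             /* ~ put_online_cpus() */
{
    pthread_mutex_lock(&gate.lock);
    if (--gate.refcount == 0)
        pthread_cond_signal(&gate.idle);  /* wake the sleeping writer, if any */
    pthread_mutex_unlock(&gate.lock);
}

static void writer_begin(void)            /* ~ cpu_hotplug_begin() */
{
    pthread_mutex_lock(&gate.lock);
    while (gate.refcount != 0)            /* a new reader may slip in while we
                                             sleep: the same unlikely livelock
                                             the comment above mentions */
        pthread_cond_wait(&gate.idle, &gate.lock);
    /* returns holding gate.lock, so new readers block until writer_end() */
}

static void writer_end(void)              /* ~ cpu_hotplug_done() */
{
    pthread_mutex_unlock(&gate.lock);
}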
|
||||
/* Need to know about CPUs going up/down? */
|
||||
int __ref register_cpu_notifier(struct notifier_block *nb)
|
||||
{
|
||||
int ret;
|
||||
cpu_maps_update_begin();
|
||||
ret = raw_notifier_chain_register(&cpu_chain, nb);
|
||||
cpu_maps_update_done();
|
||||
return ret;
|
||||
}
|
||||
|
||||
static int __cpu_notify(unsigned long val, void *v, int nr_to_call,
|
||||
int *nr_calls)
|
||||
{
|
||||
int ret;
|
||||
|
||||
ret = __raw_notifier_call_chain(&cpu_chain, val, v, nr_to_call,
|
||||
nr_calls);
|
||||
|
||||
return notifier_to_errno(ret);
|
||||
}
|
||||
|
||||
static int cpu_notify(unsigned long val, void *v)
|
||||
{
|
||||
return __cpu_notify(val, v, -1, NULL);
|
||||
}
|
||||
|
||||
#ifdef CONFIG_HOTPLUG_CPU
|
||||
|
||||
static void cpu_notify_nofail(unsigned long val, void *v)
|
||||
{
|
||||
BUG_ON(cpu_notify(val, v));
|
||||
}
|
||||
EXPORT_SYMBOL(register_cpu_notifier);
|
||||
|
||||
void __ref unregister_cpu_notifier(struct notifier_block *nb)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
raw_notifier_chain_unregister(&cpu_chain, nb);
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
EXPORT_SYMBOL(unregister_cpu_notifier);
|
||||
|
||||
/**
|
||||
* clear_tasks_mm_cpumask - Safely clear tasks' mm_cpumask for a CPU
|
||||
* @cpu: a CPU id
|
||||
*
|
||||
* This function walks all processes, finds a valid mm struct for each one and
|
||||
* then clears a corresponding bit in mm's cpumask. While this all sounds
|
||||
* trivial, there are various non-obvious corner cases, which this function
|
||||
* tries to solve in a safe manner.
|
||||
*
|
||||
* Also note that the function uses a somewhat relaxed locking scheme, so it may
|
||||
* be called only for an already offlined CPU.
|
||||
*/
|
||||
void clear_tasks_mm_cpumask(int cpu)
|
||||
{
|
||||
struct task_struct *p;
|
||||
|
||||
/*
|
||||
* This function is called after the cpu is taken down and marked
|
||||
* offline, so it's not like new tasks will ever get this cpu set in
|
||||
* their mm mask. -- Peter Zijlstra
|
||||
* Thus, we may use rcu_read_lock() here, instead of grabbing
|
||||
* full-fledged tasklist_lock.
|
||||
*/
|
||||
WARN_ON(cpu_online(cpu));
|
||||
rcu_read_lock();
|
||||
for_each_process(p) {
|
||||
struct task_struct *t;
|
||||
|
||||
/*
|
||||
* Main thread might exit, but other threads may still have
|
||||
* a valid mm. Find one.
|
||||
*/
|
||||
t = find_lock_task_mm(p);
|
||||
if (!t)
|
||||
continue;
|
||||
cpumask_clear_cpu(cpu, mm_cpumask(t->mm));
|
||||
task_unlock(t);
|
||||
}
|
||||
rcu_read_unlock();
|
||||
}
|
||||
|
||||
static inline void check_for_tasks(int cpu)
|
||||
{
|
||||
struct task_struct *p;
|
||||
|
||||
write_lock_irq(&tasklist_lock);
|
||||
for_each_process(p) {
|
||||
if (task_cpu(p) == cpu && p->state == TASK_RUNNING &&
|
||||
(p->utime || p->stime))
|
||||
printk(KERN_WARNING "Task %s (pid = %d) is on cpu %d "
|
||||
"(state = %ld, flags = %x)\n",
|
||||
p->comm, task_pid_nr(p), cpu,
|
||||
p->state, p->flags);
|
||||
}
|
||||
write_unlock_irq(&tasklist_lock);
|
||||
}
|
||||
|
||||
struct take_cpu_down_param {
|
||||
unsigned long mod;
|
||||
void *hcpu;
|
||||
};
|
||||
|
||||
/* Take this CPU down. */
|
||||
static int __ref take_cpu_down(void *_param)
|
||||
{
|
||||
struct take_cpu_down_param *param = _param;
|
||||
int err;
|
||||
|
||||
/* Ensure this CPU doesn't handle any more interrupts. */
|
||||
err = __cpu_disable();
|
||||
if (err < 0)
|
||||
return err;
|
||||
|
||||
cpu_notify(CPU_DYING | param->mod, param->hcpu);
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Requires cpu_add_remove_lock to be held */
|
||||
static int __ref _cpu_down(unsigned int cpu, int tasks_frozen)
|
||||
{
|
||||
int err, nr_calls = 0;
|
||||
void *hcpu = (void *)(long)cpu;
|
||||
unsigned long mod = tasks_frozen ? CPU_TASKS_FROZEN : 0;
|
||||
struct take_cpu_down_param tcd_param = {
|
||||
.mod = mod,
|
||||
.hcpu = hcpu,
|
||||
};
|
||||
|
||||
if (num_online_cpus() == 1)
|
||||
return -EBUSY;
|
||||
|
||||
if (!cpu_online(cpu))
|
||||
return -EINVAL;
|
||||
|
||||
cpu_hotplug_begin();
|
||||
|
||||
err = __cpu_notify(CPU_DOWN_PREPARE | mod, hcpu, -1, &nr_calls);
|
||||
if (err) {
|
||||
nr_calls--;
|
||||
__cpu_notify(CPU_DOWN_FAILED | mod, hcpu, nr_calls, NULL);
|
||||
printk("%s: attempt to take down CPU %u failed\n",
|
||||
__func__, cpu);
|
||||
goto out_release;
|
||||
}
|
||||
|
||||
err = __stop_machine(take_cpu_down, &tcd_param, cpumask_of(cpu));
|
||||
if (err) {
|
||||
/* CPU didn't die: tell everyone. Can't complain. */
|
||||
cpu_notify_nofail(CPU_DOWN_FAILED | mod, hcpu);
|
||||
|
||||
goto out_release;
|
||||
}
|
||||
BUG_ON(cpu_online(cpu));
|
||||
|
||||
/*
|
||||
* The migration_call() CPU_DYING callback will have removed all
|
||||
* runnable tasks from the cpu, there's only the idle task left now
|
||||
* that the migration thread is done doing the stop_machine thing.
|
||||
*
|
||||
* Wait for the stop thread to go away.
|
||||
*/
|
||||
while (!idle_cpu(cpu))
|
||||
cpu_relax();
|
||||
|
||||
/* This actually kills the CPU. */
|
||||
__cpu_die(cpu);
|
||||
|
||||
/* CPU is completely dead: tell everyone. Too late to complain. */
|
||||
cpu_notify_nofail(CPU_DEAD | mod, hcpu);
|
||||
|
||||
check_for_tasks(cpu);
|
||||
|
||||
out_release:
|
||||
cpu_hotplug_done();
|
||||
if (!err)
|
||||
cpu_notify_nofail(CPU_POST_DEAD | mod, hcpu);
|
||||
return err;
|
||||
}
|
||||
|
||||
int __ref cpu_down(unsigned int cpu)
|
||||
{
|
||||
int err;
|
||||
|
||||
cpu_maps_update_begin();
|
||||
|
||||
if (cpu_hotplug_disabled) {
|
||||
err = -EBUSY;
|
||||
goto out;
|
||||
}
|
||||
|
||||
err = _cpu_down(cpu, 0);
|
||||
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
return err;
|
||||
}
|
||||
EXPORT_SYMBOL(cpu_down);
|
||||
#endif /*CONFIG_HOTPLUG_CPU*/
|
||||
|
||||
/* Requires cpu_add_remove_lock to be held */
|
||||
static int __cpuinit _cpu_up(unsigned int cpu, int tasks_frozen)
|
||||
{
|
||||
int ret, nr_calls = 0;
|
||||
void *hcpu = (void *)(long)cpu;
|
||||
unsigned long mod = tasks_frozen ? CPU_TASKS_FROZEN : 0;
|
||||
struct task_struct *idle;
|
||||
|
||||
if (cpu_online(cpu) || !cpu_present(cpu))
|
||||
return -EINVAL;
|
||||
|
||||
cpu_hotplug_begin();
|
||||
|
||||
idle = idle_thread_get(cpu);
|
||||
if (IS_ERR(idle)) {
|
||||
ret = PTR_ERR(idle);
|
||||
goto out;
|
||||
}
|
||||
|
||||
ret = __cpu_notify(CPU_UP_PREPARE | mod, hcpu, -1, &nr_calls);
|
||||
if (ret) {
|
||||
nr_calls--;
|
||||
printk(KERN_WARNING "%s: attempt to bring up CPU %u failed\n",
|
||||
__func__, cpu);
|
||||
goto out_notify;
|
||||
}
|
||||
|
||||
/* Arch-specific enabling code. */
|
||||
ret = __cpu_up(cpu, idle);
|
||||
if (ret != 0)
|
||||
goto out_notify;
|
||||
BUG_ON(!cpu_online(cpu));
|
||||
|
||||
/* Now call notifier in preparation. */
|
||||
cpu_notify(CPU_ONLINE | mod, hcpu);
|
||||
|
||||
out_notify:
|
||||
if (ret != 0)
|
||||
__cpu_notify(CPU_UP_CANCELED | mod, hcpu, nr_calls, NULL);
|
||||
out:
|
||||
cpu_hotplug_done();
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int __cpuinit cpu_up(unsigned int cpu)
|
||||
{
|
||||
int err = 0;
|
||||
|
||||
#ifdef CONFIG_MEMORY_HOTPLUG
|
||||
int nid;
|
||||
pg_data_t *pgdat;
|
||||
#endif
|
||||
|
||||
if (!cpu_possible(cpu)) {
|
||||
printk(KERN_ERR "can't online cpu %d because it is not "
|
||||
"configured as may-hotadd at boot time\n", cpu);
|
||||
#if defined(CONFIG_IA64)
|
||||
printk(KERN_ERR "please check additional_cpus= boot "
|
||||
"parameter\n");
|
||||
#endif
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
#ifdef CONFIG_MEMORY_HOTPLUG
|
||||
nid = cpu_to_node(cpu);
|
||||
if (!node_online(nid)) {
|
||||
err = mem_online_node(nid);
|
||||
if (err)
|
||||
return err;
|
||||
}
|
||||
|
||||
pgdat = NODE_DATA(nid);
|
||||
if (!pgdat) {
|
||||
printk(KERN_ERR
|
||||
"Can't online cpu %d due to NULL pgdat\n", cpu);
|
||||
return -ENOMEM;
|
||||
}
|
||||
|
||||
if (pgdat->node_zonelists->_zonerefs->zone == NULL) {
|
||||
mutex_lock(&zonelists_mutex);
|
||||
build_all_zonelists(NULL);
|
||||
mutex_unlock(&zonelists_mutex);
|
||||
}
|
||||
#endif
|
||||
|
||||
cpu_maps_update_begin();
|
||||
|
||||
if (cpu_hotplug_disabled) {
|
||||
err = -EBUSY;
|
||||
goto out;
|
||||
}
|
||||
|
||||
err = _cpu_up(cpu, 0);
|
||||
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
return err;
|
||||
}
|
||||
EXPORT_SYMBOL_GPL(cpu_up);
|
||||
|
||||
#ifdef CONFIG_PM_SLEEP_SMP
|
||||
static cpumask_var_t frozen_cpus;
|
||||
|
||||
void __weak arch_disable_nonboot_cpus_begin(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __weak arch_disable_nonboot_cpus_end(void)
|
||||
{
|
||||
}
|
||||
|
||||
int disable_nonboot_cpus(void)
|
||||
{
|
||||
int cpu, first_cpu, error = 0;
|
||||
|
||||
cpu_maps_update_begin();
|
||||
first_cpu = cpumask_first(cpu_online_mask);
|
||||
/*
|
||||
* We take down all of the non-boot CPUs in one shot to avoid races
|
||||
* with the userspace trying to use the CPU hotplug at the same time
|
||||
*/
|
||||
cpumask_clear(frozen_cpus);
|
||||
arch_disable_nonboot_cpus_begin();
|
||||
|
||||
printk("Disabling non-boot CPUs ...\n");
|
||||
for_each_online_cpu(cpu) {
|
||||
if (cpu == first_cpu)
|
||||
continue;
|
||||
error = _cpu_down(cpu, 1);
|
||||
if (!error)
|
||||
cpumask_set_cpu(cpu, frozen_cpus);
|
||||
else {
|
||||
printk(KERN_ERR "Error taking CPU%d down: %d\n",
|
||||
cpu, error);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
arch_disable_nonboot_cpus_end();
|
||||
|
||||
if (!error) {
|
||||
BUG_ON(num_online_cpus() > 1);
|
||||
/* Make sure the CPUs won't be enabled by someone else */
|
||||
cpu_hotplug_disabled = 1;
|
||||
} else {
|
||||
printk(KERN_ERR "Non-boot CPUs are not disabled\n");
|
||||
}
|
||||
cpu_maps_update_done();
|
||||
return error;
|
||||
}
|
||||
|
||||
void __weak arch_enable_nonboot_cpus_begin(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __weak arch_enable_nonboot_cpus_end(void)
|
||||
{
|
||||
}
|
||||
|
||||
void __ref enable_nonboot_cpus(void)
|
||||
{
|
||||
int cpu, error;
|
||||
|
||||
/* Allow everyone to use the CPU hotplug again */
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 0;
|
||||
if (cpumask_empty(frozen_cpus))
|
||||
goto out;
|
||||
|
||||
printk(KERN_INFO "Enabling non-boot CPUs ...\n");
|
||||
|
||||
arch_enable_nonboot_cpus_begin();
|
||||
|
||||
for_each_cpu(cpu, frozen_cpus) {
|
||||
error = _cpu_up(cpu, 1);
|
||||
if (!error) {
|
||||
printk(KERN_INFO "CPU%d is up\n", cpu);
|
||||
continue;
|
||||
}
|
||||
printk(KERN_WARNING "Error taking CPU%d up: %d\n", cpu, error);
|
||||
}
|
||||
|
||||
arch_enable_nonboot_cpus_end();
|
||||
|
||||
cpumask_clear(frozen_cpus);
|
||||
out:
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
static int __init alloc_frozen_cpus(void)
|
||||
{
|
||||
if (!alloc_cpumask_var(&frozen_cpus, GFP_KERNEL|__GFP_ZERO))
|
||||
return -ENOMEM;
|
||||
return 0;
|
||||
}
|
||||
core_initcall(alloc_frozen_cpus);
|
||||
|
||||
/*
|
||||
* Prevent regular CPU hotplug from racing with the freezer, by disabling CPU
|
||||
* hotplug when tasks are about to be frozen. Also, don't allow the freezer
|
||||
* to continue until any currently running CPU hotplug operation gets
|
||||
* completed.
|
||||
* To modify the 'cpu_hotplug_disabled' flag, we need to acquire the
|
||||
* 'cpu_add_remove_lock'. And this same lock is also taken by the regular
|
||||
* CPU hotplug path and released only after it is complete. Thus, we
|
||||
* (and hence the freezer) will block here until any currently running CPU
|
||||
* hotplug operation gets completed.
|
||||
*/
|
||||
void cpu_hotplug_disable_before_freeze(void)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 1;
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* When tasks have been thawed, re-enable regular CPU hotplug (which had been
|
||||
* disabled while beginning to freeze tasks).
|
||||
*/
|
||||
void cpu_hotplug_enable_after_thaw(void)
|
||||
{
|
||||
cpu_maps_update_begin();
|
||||
cpu_hotplug_disabled = 0;
|
||||
cpu_maps_update_done();
|
||||
}
|
||||
|
||||
/*
|
||||
* When callbacks for CPU hotplug notifications are being executed, we must
|
||||
* ensure that the state of the system with respect to the tasks being frozen
|
||||
* or not, as reported by the notification, remains unchanged *throughout the
|
||||
* duration* of the execution of the callbacks.
|
||||
* Hence we need to prevent the freezer from racing with regular CPU hotplug.
|
||||
*
|
||||
* This synchronization is implemented by mutually excluding regular CPU
|
||||
* hotplug and Suspend/Hibernate call paths by hooking onto the Suspend/
|
||||
* Hibernate notifications.
|
||||
*/
|
||||
static int
|
||||
cpu_hotplug_pm_callback(struct notifier_block *nb,
|
||||
unsigned long action, void *ptr)
|
||||
{
|
||||
switch (action) {
|
||||
|
||||
case PM_SUSPEND_PREPARE:
|
||||
case PM_HIBERNATION_PREPARE:
|
||||
cpu_hotplug_disable_before_freeze();
|
||||
break;
|
||||
|
||||
case PM_POST_SUSPEND:
|
||||
case PM_POST_HIBERNATION:
|
||||
cpu_hotplug_enable_after_thaw();
|
||||
break;
|
||||
|
||||
default:
|
||||
return NOTIFY_DONE;
|
||||
}
|
||||
|
||||
return NOTIFY_OK;
|
||||
}
|
||||
|
||||
|
||||
static int __init cpu_hotplug_pm_sync_init(void)
|
||||
{
|
||||
pm_notifier(cpu_hotplug_pm_callback, 0);
|
||||
return 0;
|
||||
}
|
||||
core_initcall(cpu_hotplug_pm_sync_init);
|
||||
|
||||
#endif /* CONFIG_PM_SLEEP_SMP */
|
||||
|
||||
/**
|
||||
* notify_cpu_starting(cpu) - call the CPU_STARTING notifiers
|
||||
* @cpu: cpu that just started
|
||||
*
|
||||
* This function calls the cpu_chain notifiers with CPU_STARTING.
|
||||
* It must be called by the arch code on the new cpu, before the new cpu
|
||||
* enables interrupts and before the "boot" cpu returns from __cpu_up().
|
||||
*/
|
||||
void __cpuinit notify_cpu_starting(unsigned int cpu)
|
||||
{
|
||||
unsigned long val = CPU_STARTING;
|
||||
|
||||
#ifdef CONFIG_PM_SLEEP_SMP
|
||||
if (frozen_cpus != NULL && cpumask_test_cpu(cpu, frozen_cpus))
|
||||
val = CPU_STARTING_FROZEN;
|
||||
#endif /* CONFIG_PM_SLEEP_SMP */
|
||||
cpu_notify(val, (void *)(long)cpu);
|
||||
}
|
||||
|
||||
#endif /* CONFIG_SMP */
|
||||
|
||||
/*
|
||||
* cpu_bit_bitmap[] is a special, "compressed" data structure that
|
||||
* represents all NR_CPUS bits binary values of 1<<nr.
|
||||
*
|
||||
* It is used by cpumask_of() to get a constant address to a CPU
|
||||
* mask value that has a single bit set only.
|
||||
*/
|
||||
|
||||
/* cpu_bit_bitmap[0] is empty - so we can back into it */
|
||||
#define MASK_DECLARE_1(x) [x+1][0] = (1UL << (x))
|
||||
#define MASK_DECLARE_2(x) MASK_DECLARE_1(x), MASK_DECLARE_1(x+1)
|
||||
#define MASK_DECLARE_4(x) MASK_DECLARE_2(x), MASK_DECLARE_2(x+2)
|
||||
#define MASK_DECLARE_8(x) MASK_DECLARE_4(x), MASK_DECLARE_4(x+4)
|
||||
|
||||
const unsigned long cpu_bit_bitmap[BITS_PER_LONG+1][BITS_TO_LONGS(NR_CPUS)] = {
|
||||
|
||||
MASK_DECLARE_8(0), MASK_DECLARE_8(8),
|
||||
MASK_DECLARE_8(16), MASK_DECLARE_8(24),
|
||||
#if BITS_PER_LONG > 32
|
||||
MASK_DECLARE_8(32), MASK_DECLARE_8(40),
|
||||
MASK_DECLARE_8(48), MASK_DECLARE_8(56),
|
||||
#endif
|
||||
};
|
||||
EXPORT_SYMBOL_GPL(cpu_bit_bitmap);
|
||||
|
||||
const DECLARE_BITMAP(cpu_all_bits, NR_CPUS) = CPU_BITS_ALL;
|
||||
EXPORT_SYMBOL(cpu_all_bits);
|
||||
|
||||
#ifdef CONFIG_INIT_ALL_POSSIBLE
|
||||
static DECLARE_BITMAP(cpu_possible_bits, CONFIG_NR_CPUS) __read_mostly
|
||||
= CPU_BITS_ALL;
|
||||
#else
|
||||
static DECLARE_BITMAP(cpu_possible_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
#endif
|
||||
const struct cpumask *const cpu_possible_mask = to_cpumask(cpu_possible_bits);
|
||||
EXPORT_SYMBOL(cpu_possible_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_online_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_online_mask = to_cpumask(cpu_online_bits);
|
||||
EXPORT_SYMBOL(cpu_online_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_present_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_present_mask = to_cpumask(cpu_present_bits);
|
||||
EXPORT_SYMBOL(cpu_present_mask);
|
||||
|
||||
static DECLARE_BITMAP(cpu_active_bits, CONFIG_NR_CPUS) __read_mostly;
|
||||
const struct cpumask *const cpu_active_mask = to_cpumask(cpu_active_bits);
|
||||
EXPORT_SYMBOL(cpu_active_mask);
|
||||
|
||||
void set_cpu_possible(unsigned int cpu, bool possible)
|
||||
{
|
||||
if (possible)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_possible_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_possible_bits));
|
||||
}
|
||||
|
||||
void set_cpu_present(unsigned int cpu, bool present)
|
||||
{
|
||||
if (present)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_present_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_present_bits));
|
||||
}
|
||||
|
||||
void set_cpu_online(unsigned int cpu, bool online)
|
||||
{
|
||||
if (online)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_online_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_online_bits));
|
||||
}
|
||||
|
||||
void set_cpu_active(unsigned int cpu, bool active)
|
||||
{
|
||||
if (active)
|
||||
cpumask_set_cpu(cpu, to_cpumask(cpu_active_bits));
|
||||
else
|
||||
cpumask_clear_cpu(cpu, to_cpumask(cpu_active_bits));
|
||||
}
|
||||
|
||||
void init_cpu_present(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_present_bits), src);
|
||||
}
|
||||
|
||||
void init_cpu_possible(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_possible_bits), src);
|
||||
}
|
||||
|
||||
void init_cpu_online(const struct cpumask *src)
|
||||
{
|
||||
cpumask_copy(to_cpumask(cpu_online_bits), src);
|
||||
}
|
||||
samples/C/custom_extensions.c (new file, 257 lines)
@@ -0,0 +1,257 @@
|
||||
/* Copyright (c) 2014, Google Inc.
|
||||
*
|
||||
* Permission to use, copy, modify, and/or distribute this software for any
|
||||
* purpose with or without fee is hereby granted, provided that the above
|
||||
* copyright notice and this permission notice appear in all copies.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
|
||||
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
|
||||
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
|
||||
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
|
||||
|
||||
#include <openssl/ssl.h>
|
||||
|
||||
#include <assert.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <openssl/bytestring.h>
|
||||
#include <openssl/err.h>
|
||||
#include <openssl/mem.h>
|
||||
#include <openssl/stack.h>
|
||||
|
||||
#include "internal.h"
|
||||
|
||||
|
||||
void SSL_CUSTOM_EXTENSION_free(SSL_CUSTOM_EXTENSION *custom_extension) {
|
||||
OPENSSL_free(custom_extension);
|
||||
}
|
||||
|
||||
static const SSL_CUSTOM_EXTENSION *custom_ext_find(
|
||||
STACK_OF(SSL_CUSTOM_EXTENSION) *stack,
|
||||
unsigned *out_index, uint16_t value) {
|
||||
size_t i;
|
||||
for (i = 0; i < sk_SSL_CUSTOM_EXTENSION_num(stack); i++) {
|
||||
const SSL_CUSTOM_EXTENSION *ext = sk_SSL_CUSTOM_EXTENSION_value(stack, i);
|
||||
if (ext->value == value) {
|
||||
if (out_index != NULL) {
|
||||
*out_index = i;
|
||||
}
|
||||
return ext;
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* default_add_callback is used as the |add_callback| when the user doesn't
|
||||
* provide one. For servers, it does nothing while, for clients, it causes an
|
||||
* empty extension to be included. */
|
||||
static int default_add_callback(SSL *ssl, unsigned extension_value,
|
||||
const uint8_t **out, size_t *out_len,
|
||||
int *out_alert_value, void *add_arg) {
|
||||
if (ssl->server) {
|
||||
return 0;
|
||||
}
|
||||
*out_len = 0;
|
||||
return 1;
|
||||
}
|
||||
|
||||
static int custom_ext_add_hello(SSL *ssl, CBB *extensions) {
|
||||
STACK_OF(SSL_CUSTOM_EXTENSION) *stack = ssl->ctx->client_custom_extensions;
|
||||
if (ssl->server) {
|
||||
stack = ssl->ctx->server_custom_extensions;
|
||||
}
|
||||
|
||||
if (stack == NULL) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
size_t i;
|
||||
for (i = 0; i < sk_SSL_CUSTOM_EXTENSION_num(stack); i++) {
|
||||
const SSL_CUSTOM_EXTENSION *ext = sk_SSL_CUSTOM_EXTENSION_value(stack, i);
|
||||
|
||||
if (ssl->server &&
|
||||
!(ssl->s3->tmp.custom_extensions.received & (1u << i))) {
|
||||
/* Servers cannot echo extensions that the client didn't send. */
|
||||
continue;
|
||||
}
|
||||
|
||||
const uint8_t *contents;
|
||||
size_t contents_len;
|
||||
int alert = SSL_AD_DECODE_ERROR;
|
||||
CBB contents_cbb;
|
||||
|
||||
switch (ext->add_callback(ssl, ext->value, &contents, &contents_len, &alert,
|
||||
ext->add_arg)) {
|
||||
case 1:
|
||||
if (!CBB_add_u16(extensions, ext->value) ||
|
||||
!CBB_add_u16_length_prefixed(extensions, &contents_cbb) ||
|
||||
!CBB_add_bytes(&contents_cbb, contents, contents_len) ||
|
||||
!CBB_flush(extensions)) {
|
||||
OPENSSL_PUT_ERROR(SSL, ERR_R_INTERNAL_ERROR);
|
||||
ERR_add_error_dataf("extension: %u", (unsigned) ext->value);
|
||||
if (ext->free_callback && 0 < contents_len) {
|
||||
ext->free_callback(ssl, ext->value, contents, ext->add_arg);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (ext->free_callback && 0 < contents_len) {
|
||||
ext->free_callback(ssl, ext->value, contents, ext->add_arg);
|
||||
}
|
||||
|
||||
if (!ssl->server) {
|
||||
assert((ssl->s3->tmp.custom_extensions.sent & (1u << i)) == 0);
|
||||
ssl->s3->tmp.custom_extensions.sent |= (1u << i);
|
||||
}
|
||||
break;
|
||||
|
||||
case 0:
|
||||
break;
|
||||
|
||||
default:
|
||||
ssl3_send_alert(ssl, SSL3_AL_FATAL, alert);
|
||||
OPENSSL_PUT_ERROR(SSL, SSL_R_CUSTOM_EXTENSION_ERROR);
|
||||
ERR_add_error_dataf("extension: %u", (unsigned) ext->value);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int custom_ext_add_clienthello(SSL *ssl, CBB *extensions) {
|
||||
return custom_ext_add_hello(ssl, extensions);
|
||||
}
|
||||
|
||||
int custom_ext_parse_serverhello(SSL *ssl, int *out_alert, uint16_t value,
|
||||
const CBS *extension) {
|
||||
unsigned index;
|
||||
const SSL_CUSTOM_EXTENSION *ext =
|
||||
custom_ext_find(ssl->ctx->client_custom_extensions, &index, value);
|
||||
|
||||
if (/* Unknown extensions are not allowed in a ServerHello. */
|
||||
ext == NULL ||
|
||||
/* Also, if we didn't send the extension, that's also unacceptable. */
|
||||
!(ssl->s3->tmp.custom_extensions.sent & (1u << index))) {
|
||||
OPENSSL_PUT_ERROR(SSL, SSL_R_UNEXPECTED_EXTENSION);
|
||||
ERR_add_error_dataf("extension: %u", (unsigned)value);
|
||||
*out_alert = SSL_AD_DECODE_ERROR;
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (ext->parse_callback != NULL &&
|
||||
!ext->parse_callback(ssl, value, CBS_data(extension), CBS_len(extension),
|
||||
out_alert, ext->parse_arg)) {
|
||||
OPENSSL_PUT_ERROR(SSL, SSL_R_CUSTOM_EXTENSION_ERROR);
|
||||
ERR_add_error_dataf("extension: %u", (unsigned)ext->value);
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int custom_ext_parse_clienthello(SSL *ssl, int *out_alert, uint16_t value,
|
||||
const CBS *extension) {
|
||||
unsigned index;
|
||||
const SSL_CUSTOM_EXTENSION *ext =
|
||||
custom_ext_find(ssl->ctx->server_custom_extensions, &index, value);
|
||||
|
||||
if (ext == NULL) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
assert((ssl->s3->tmp.custom_extensions.received & (1u << index)) == 0);
|
||||
ssl->s3->tmp.custom_extensions.received |= (1u << index);
|
||||
|
||||
if (ext->parse_callback &&
|
||||
!ext->parse_callback(ssl, value, CBS_data(extension), CBS_len(extension),
|
||||
out_alert, ext->parse_arg)) {
|
||||
OPENSSL_PUT_ERROR(SSL, SSL_R_CUSTOM_EXTENSION_ERROR);
|
||||
ERR_add_error_dataf("extension: %u", (unsigned)ext->value);
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int custom_ext_add_serverhello(SSL *ssl, CBB *extensions) {
|
||||
return custom_ext_add_hello(ssl, extensions);
|
||||
}
|
||||
|
||||
/* MAX_NUM_CUSTOM_EXTENSIONS is the maximum number of custom extensions that
|
||||
* can be set on an |SSL_CTX|. It's determined by the size of the bitset used
|
||||
* to track when an extension has been sent. */
|
||||
#define MAX_NUM_CUSTOM_EXTENSIONS \
|
||||
(sizeof(((struct ssl3_state_st *)NULL)->tmp.custom_extensions.sent) * 8)
|
||||
|
||||
static int custom_ext_append(STACK_OF(SSL_CUSTOM_EXTENSION) **stack,
|
||||
unsigned extension_value,
|
||||
SSL_custom_ext_add_cb add_cb,
|
||||
SSL_custom_ext_free_cb free_cb, void *add_arg,
|
||||
SSL_custom_ext_parse_cb parse_cb,
|
||||
void *parse_arg) {
|
||||
if (add_cb == NULL ||
|
||||
0xffff < extension_value ||
|
||||
SSL_extension_supported(extension_value) ||
|
||||
/* Specifying a free callback without an add callback is nonsensical
|
||||
* and an error. */
|
||||
(*stack != NULL &&
|
||||
(MAX_NUM_CUSTOM_EXTENSIONS <= sk_SSL_CUSTOM_EXTENSION_num(*stack) ||
|
||||
custom_ext_find(*stack, NULL, extension_value) != NULL))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
SSL_CUSTOM_EXTENSION *ext = OPENSSL_malloc(sizeof(SSL_CUSTOM_EXTENSION));
|
||||
if (ext == NULL) {
|
||||
return 0;
|
||||
}
|
||||
ext->add_callback = add_cb;
|
||||
ext->add_arg = add_arg;
|
||||
ext->free_callback = free_cb;
|
||||
ext->parse_callback = parse_cb;
|
||||
ext->parse_arg = parse_arg;
|
||||
ext->value = extension_value;
|
||||
|
||||
if (*stack == NULL) {
|
||||
*stack = sk_SSL_CUSTOM_EXTENSION_new_null();
|
||||
if (*stack == NULL) {
|
||||
SSL_CUSTOM_EXTENSION_free(ext);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (!sk_SSL_CUSTOM_EXTENSION_push(*stack, ext)) {
|
||||
SSL_CUSTOM_EXTENSION_free(ext);
|
||||
if (sk_SSL_CUSTOM_EXTENSION_num(*stack) == 0) {
|
||||
sk_SSL_CUSTOM_EXTENSION_free(*stack);
|
||||
*stack = NULL;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
int SSL_CTX_add_client_custom_ext(SSL_CTX *ctx, unsigned extension_value,
|
||||
SSL_custom_ext_add_cb add_cb,
|
||||
SSL_custom_ext_free_cb free_cb, void *add_arg,
|
||||
SSL_custom_ext_parse_cb parse_cb,
|
||||
void *parse_arg) {
|
||||
return custom_ext_append(&ctx->client_custom_extensions, extension_value,
|
||||
add_cb ? add_cb : default_add_callback, free_cb,
|
||||
add_arg, parse_cb, parse_arg);
|
||||
}
|
||||
|
||||
int SSL_CTX_add_server_custom_ext(SSL_CTX *ctx, unsigned extension_value,
|
||||
SSL_custom_ext_add_cb add_cb,
|
||||
SSL_custom_ext_free_cb free_cb, void *add_arg,
|
||||
SSL_custom_ext_parse_cb parse_cb,
|
||||
void *parse_arg) {
|
||||
return custom_ext_append(&ctx->server_custom_extensions, extension_value,
|
||||
add_cb ? add_cb : default_add_callback, free_cb,
|
||||
add_arg, parse_cb, parse_arg);
|
||||
}
|
||||
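The two registration helpers above take the extension number plus add/free/parse callbacks. A hedged usage sketch for the client side follows, using only the callback shapes visible in this file; the extension number 26, the callback names, and the payload are invented for illustration. Check the return value, since custom_ext_append() rejects duplicates, values above 0xffff, and extensions the library already handles.

#include <stddef.h>
#include <stdint.h>
#include <openssl/ssl.h>

/* 26 is an illustrative extension number only; any value that the library
 * does not already claim via SSL_extension_supported() would do. */
#define DEMO_EXT_VALUE 26

/* add_cb: invoked while building the ClientHello; return 1 and set
 * (*out, *out_len) to include the extension, 0 to skip it. */
static int demo_add_cb(SSL *ssl, unsigned extension_value,
                       const uint8_t **out, size_t *out_len,
                       int *out_alert_value, void *add_arg) {
    static const uint8_t payload[] = {'h', 'i'};
    (void)ssl; (void)extension_value; (void)out_alert_value; (void)add_arg;
    *out = payload;
    *out_len = sizeof(payload);
    return 1;
}

/* parse_cb: invoked when the ServerHello echoes the extension back;
 * returning 0 (optionally setting *out_alert) aborts the handshake. */
static int demo_parse_cb(SSL *ssl, unsigned extension_value,
                         const uint8_t *contents, size_t contents_len,
                         int *out_alert, void *parse_arg) {
    (void)ssl; (void)extension_value; (void)contents;
    (void)out_alert; (void)parse_arg;
    return contents_len <= 64;        /* accept any short payload */
}

int demo_register(SSL_CTX *ctx) {
    return SSL_CTX_add_client_custom_ext(ctx, DEMO_EXT_VALUE,
                                         demo_add_cb,
                                         NULL /* free_cb */, NULL /* add_arg */,
                                         demo_parse_cb, NULL /* parse_arg */);
}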
samples/C/diff.c (deleted, 784 lines)
@@ -1,784 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
#include "common.h"
|
||||
#include "git2/diff.h"
|
||||
#include "diff.h"
|
||||
#include "fileops.h"
|
||||
#include "config.h"
|
||||
#include "attr_file.h"
|
||||
|
||||
static char *diff_prefix_from_pathspec(const git_strarray *pathspec)
|
||||
{
|
||||
git_buf prefix = GIT_BUF_INIT;
|
||||
const char *scan;
|
||||
|
||||
if (git_buf_common_prefix(&prefix, pathspec) < 0)
|
||||
return NULL;
|
||||
|
||||
/* diff prefix will only be leading non-wildcards */
|
||||
for (scan = prefix.ptr; *scan && !git__iswildcard(*scan); ++scan);
|
||||
git_buf_truncate(&prefix, scan - prefix.ptr);
|
||||
|
||||
if (prefix.size > 0)
|
||||
return git_buf_detach(&prefix);
|
||||
|
||||
git_buf_free(&prefix);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static bool diff_pathspec_is_interesting(const git_strarray *pathspec)
|
||||
{
|
||||
const char *str;
|
||||
|
||||
if (pathspec == NULL || pathspec->count == 0)
|
||||
return false;
|
||||
if (pathspec->count > 1)
|
||||
return true;
|
||||
|
||||
str = pathspec->strings[0];
|
||||
if (!str || !str[0] || (!str[1] && (str[0] == '*' || str[0] == '.')))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool diff_path_matches_pathspec(git_diff_list *diff, const char *path)
|
||||
{
|
||||
unsigned int i;
|
||||
git_attr_fnmatch *match;
|
||||
|
||||
if (!diff->pathspec.length)
|
||||
return true;
|
||||
|
||||
git_vector_foreach(&diff->pathspec, i, match) {
|
||||
int result = p_fnmatch(match->pattern, path, 0);
|
||||
|
||||
/* if we didn't match, look for exact dirname prefix match */
|
||||
if (result == FNM_NOMATCH &&
|
||||
(match->flags & GIT_ATTR_FNMATCH_HASWILD) == 0 &&
|
||||
strncmp(path, match->pattern, match->length) == 0 &&
|
||||
path[match->length] == '/')
|
||||
result = 0;
|
||||
|
||||
if (result == 0)
|
||||
return (match->flags & GIT_ATTR_FNMATCH_NEGATIVE) ? false : true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__alloc(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const char *path)
|
||||
{
|
||||
git_diff_delta *delta = git__calloc(1, sizeof(git_diff_delta));
|
||||
if (!delta)
|
||||
return NULL;
|
||||
|
||||
delta->old_file.path = git_pool_strdup(&diff->pool, path);
|
||||
if (delta->old_file.path == NULL) {
|
||||
git__free(delta);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
delta->new_file.path = delta->old_file.path;
|
||||
|
||||
if (diff->opts.flags & GIT_DIFF_REVERSE) {
|
||||
switch (status) {
|
||||
case GIT_DELTA_ADDED: status = GIT_DELTA_DELETED; break;
|
||||
case GIT_DELTA_DELETED: status = GIT_DELTA_ADDED; break;
|
||||
default: break; /* leave other status values alone */
|
||||
}
|
||||
}
|
||||
delta->status = status;
|
||||
|
||||
return delta;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__dup(
|
||||
const git_diff_delta *d, git_pool *pool)
|
||||
{
|
||||
git_diff_delta *delta = git__malloc(sizeof(git_diff_delta));
|
||||
if (!delta)
|
||||
return NULL;
|
||||
|
||||
memcpy(delta, d, sizeof(git_diff_delta));
|
||||
|
||||
delta->old_file.path = git_pool_strdup(pool, d->old_file.path);
|
||||
if (delta->old_file.path == NULL)
|
||||
goto fail;
|
||||
|
||||
if (d->new_file.path != d->old_file.path) {
|
||||
delta->new_file.path = git_pool_strdup(pool, d->new_file.path);
|
||||
if (delta->new_file.path == NULL)
|
||||
goto fail;
|
||||
} else {
|
||||
delta->new_file.path = delta->old_file.path;
|
||||
}
|
||||
|
||||
return delta;
|
||||
|
||||
fail:
|
||||
git__free(delta);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static git_diff_delta *diff_delta__merge_like_cgit(
|
||||
const git_diff_delta *a, const git_diff_delta *b, git_pool *pool)
|
||||
{
|
||||
git_diff_delta *dup = diff_delta__dup(a, pool);
|
||||
if (!dup)
|
||||
return NULL;
|
||||
|
||||
if (git_oid_cmp(&dup->new_file.oid, &b->new_file.oid) == 0)
|
||||
return dup;
|
||||
|
||||
git_oid_cpy(&dup->new_file.oid, &b->new_file.oid);
|
||||
|
||||
dup->new_file.mode = b->new_file.mode;
|
||||
dup->new_file.size = b->new_file.size;
|
||||
dup->new_file.flags = b->new_file.flags;
|
||||
|
||||
/* Emulate C git for merging two diffs (a la 'git diff <sha>').
|
||||
*
|
||||
* When C git does a diff between the work dir and a tree, it actually
|
||||
* diffs with the index but uses the workdir contents. This emulates
|
||||
* those choices so we can emulate the type of diff.
|
||||
*/
|
||||
if (git_oid_cmp(&dup->old_file.oid, &dup->new_file.oid) == 0) {
|
||||
if (dup->status == GIT_DELTA_DELETED)
|
||||
/* preserve pending delete info */;
|
||||
else if (b->status == GIT_DELTA_UNTRACKED ||
|
||||
b->status == GIT_DELTA_IGNORED)
|
||||
dup->status = b->status;
|
||||
else
|
||||
dup->status = GIT_DELTA_UNMODIFIED;
|
||||
}
|
||||
else if (dup->status == GIT_DELTA_UNMODIFIED ||
|
||||
b->status == GIT_DELTA_DELETED)
|
||||
dup->status = b->status;
|
||||
|
||||
return dup;
|
||||
}
|
||||
|
||||
static int diff_delta__from_one(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const git_index_entry *entry)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
|
||||
if (status == GIT_DELTA_IGNORED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_IGNORED) == 0)
|
||||
return 0;
|
||||
|
||||
if (status == GIT_DELTA_UNTRACKED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_UNTRACKED) == 0)
|
||||
return 0;
|
||||
|
||||
if (!diff_path_matches_pathspec(diff, entry->path))
|
||||
return 0;
|
||||
|
||||
delta = diff_delta__alloc(diff, status, entry->path);
|
||||
GITERR_CHECK_ALLOC(delta);
|
||||
|
||||
/* This fn is just for single-sided diffs */
|
||||
assert(status != GIT_DELTA_MODIFIED);
|
||||
|
||||
if (delta->status == GIT_DELTA_DELETED) {
|
||||
delta->old_file.mode = entry->mode;
|
||||
delta->old_file.size = entry->file_size;
|
||||
git_oid_cpy(&delta->old_file.oid, &entry->oid);
|
||||
} else /* ADDED, IGNORED, UNTRACKED */ {
|
||||
delta->new_file.mode = entry->mode;
|
||||
delta->new_file.size = entry->file_size;
|
||||
git_oid_cpy(&delta->new_file.oid, &entry->oid);
|
||||
}
|
||||
|
||||
delta->old_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
delta->new_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
if (git_vector_insert(&diff->deltas, delta) < 0) {
|
||||
git__free(delta);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int diff_delta__from_two(
|
||||
git_diff_list *diff,
|
||||
git_delta_t status,
|
||||
const git_index_entry *old_entry,
|
||||
const git_index_entry *new_entry,
|
||||
git_oid *new_oid)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
|
||||
if (status == GIT_DELTA_UNMODIFIED &&
|
||||
(diff->opts.flags & GIT_DIFF_INCLUDE_UNMODIFIED) == 0)
|
||||
return 0;
|
||||
|
||||
if ((diff->opts.flags & GIT_DIFF_REVERSE) != 0) {
|
||||
const git_index_entry *temp = old_entry;
|
||||
old_entry = new_entry;
|
||||
new_entry = temp;
|
||||
}
|
||||
|
||||
delta = diff_delta__alloc(diff, status, old_entry->path);
|
||||
GITERR_CHECK_ALLOC(delta);
|
||||
|
||||
delta->old_file.mode = old_entry->mode;
|
||||
git_oid_cpy(&delta->old_file.oid, &old_entry->oid);
|
||||
delta->old_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
delta->new_file.mode = new_entry->mode;
|
||||
git_oid_cpy(&delta->new_file.oid, new_oid ? new_oid : &new_entry->oid);
|
||||
if (new_oid || !git_oid_iszero(&new_entry->oid))
|
||||
delta->new_file.flags |= GIT_DIFF_FILE_VALID_OID;
|
||||
|
||||
if (git_vector_insert(&diff->deltas, delta) < 0) {
|
||||
git__free(delta);
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static char *diff_strdup_prefix(git_pool *pool, const char *prefix)
|
||||
{
|
||||
size_t len = strlen(prefix);
|
||||
|
||||
/* append '/' at end if needed */
|
||||
if (len > 0 && prefix[len - 1] != '/')
|
||||
return git_pool_strcat(pool, prefix, "/");
|
||||
else
|
||||
return git_pool_strndup(pool, prefix, len + 1);
|
||||
}
|
||||
|
||||
static int diff_delta__cmp(const void *a, const void *b)
|
||||
{
|
||||
const git_diff_delta *da = a, *db = b;
|
||||
int val = strcmp(da->old_file.path, db->old_file.path);
|
||||
return val ? val : ((int)da->status - (int)db->status);
|
||||
}
|
||||
|
||||
static int config_bool(git_config *cfg, const char *name, int defvalue)
|
||||
{
|
||||
int val = defvalue;
|
||||
|
||||
if (git_config_get_bool(&val, cfg, name) < 0)
|
||||
giterr_clear();
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
static git_diff_list *git_diff_list_alloc(
|
||||
git_repository *repo, const git_diff_options *opts)
|
||||
{
|
||||
git_config *cfg;
|
||||
size_t i;
|
||||
git_diff_list *diff = git__calloc(1, sizeof(git_diff_list));
|
||||
if (diff == NULL)
|
||||
return NULL;
|
||||
|
||||
diff->repo = repo;
|
||||
|
||||
if (git_vector_init(&diff->deltas, 0, diff_delta__cmp) < 0 ||
|
||||
git_pool_init(&diff->pool, 1, 0) < 0)
|
||||
goto fail;
|
||||
|
||||
/* load config values that affect diff behavior */
|
||||
if (git_repository_config__weakptr(&cfg, repo) < 0)
|
||||
goto fail;
|
||||
if (config_bool(cfg, "core.symlinks", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_HAS_SYMLINKS;
|
||||
if (config_bool(cfg, "core.ignorestat", 0))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_ASSUME_UNCHANGED;
|
||||
if (config_bool(cfg, "core.filemode", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_TRUST_EXEC_BIT;
|
||||
if (config_bool(cfg, "core.trustctime", 1))
|
||||
diff->diffcaps = diff->diffcaps | GIT_DIFFCAPS_TRUST_CTIME;
|
||||
/* Don't set GIT_DIFFCAPS_USE_DEV - compile time option in core git */
|
||||
|
||||
if (opts == NULL)
|
||||
return diff;
|
||||
|
||||
memcpy(&diff->opts, opts, sizeof(git_diff_options));
|
||||
memset(&diff->opts.pathspec, 0, sizeof(diff->opts.pathspec));
|
||||
|
||||
diff->opts.old_prefix = diff_strdup_prefix(&diff->pool,
|
||||
opts->old_prefix ? opts->old_prefix : DIFF_OLD_PREFIX_DEFAULT);
|
||||
diff->opts.new_prefix = diff_strdup_prefix(&diff->pool,
|
||||
opts->new_prefix ? opts->new_prefix : DIFF_NEW_PREFIX_DEFAULT);
|
||||
|
||||
if (!diff->opts.old_prefix || !diff->opts.new_prefix)
|
||||
goto fail;
|
||||
|
||||
if (diff->opts.flags & GIT_DIFF_REVERSE) {
|
||||
char *swap = diff->opts.old_prefix;
|
||||
diff->opts.old_prefix = diff->opts.new_prefix;
|
||||
diff->opts.new_prefix = swap;
|
||||
}
|
||||
|
||||
/* only copy pathspec if it is "interesting" so we can test
|
||||
* diff->pathspec.length > 0 to know if it is worth calling
|
||||
* fnmatch as we iterate.
|
||||
*/
|
||||
if (!diff_pathspec_is_interesting(&opts->pathspec))
|
||||
return diff;
|
||||
|
||||
if (git_vector_init(
|
||||
&diff->pathspec, (unsigned int)opts->pathspec.count, NULL) < 0)
|
||||
goto fail;
|
||||
|
||||
for (i = 0; i < opts->pathspec.count; ++i) {
|
||||
int ret;
|
||||
const char *pattern = opts->pathspec.strings[i];
|
||||
git_attr_fnmatch *match = git__calloc(1, sizeof(git_attr_fnmatch));
|
||||
if (!match)
|
||||
goto fail;
|
||||
match->flags = GIT_ATTR_FNMATCH_ALLOWSPACE;
|
||||
ret = git_attr_fnmatch__parse(match, &diff->pool, NULL, &pattern);
|
||||
if (ret == GIT_ENOTFOUND) {
|
||||
git__free(match);
|
||||
continue;
|
||||
} else if (ret < 0)
|
||||
goto fail;
|
||||
|
||||
if (git_vector_insert(&diff->pathspec, match) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
return diff;
|
||||
|
||||
fail:
|
||||
git_diff_list_free(diff);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void git_diff_list_free(git_diff_list *diff)
|
||||
{
|
||||
git_diff_delta *delta;
|
||||
git_attr_fnmatch *match;
|
||||
unsigned int i;
|
||||
|
||||
if (!diff)
|
||||
return;
|
||||
|
||||
git_vector_foreach(&diff->deltas, i, delta) {
|
||||
git__free(delta);
|
||||
diff->deltas.contents[i] = NULL;
|
||||
}
|
||||
git_vector_free(&diff->deltas);
|
||||
|
||||
git_vector_foreach(&diff->pathspec, i, match) {
|
||||
git__free(match);
|
||||
diff->pathspec.contents[i] = NULL;
|
||||
}
|
||||
git_vector_free(&diff->pathspec);
|
||||
|
||||
git_pool_clear(&diff->pool);
|
||||
git__free(diff);
|
||||
}
|
||||
|
||||
static int oid_for_workdir_item(
|
||||
git_repository *repo,
|
||||
const git_index_entry *item,
|
||||
git_oid *oid)
|
||||
{
|
||||
int result;
|
||||
git_buf full_path = GIT_BUF_INIT;
|
||||
|
||||
if (git_buf_joinpath(&full_path, git_repository_workdir(repo), item->path) < 0)
|
||||
return -1;
|
||||
|
||||
/* calculate OID for file if possible*/
|
||||
if (S_ISLNK(item->mode))
|
||||
result = git_odb__hashlink(oid, full_path.ptr);
|
||||
else if (!git__is_sizet(item->file_size)) {
|
||||
giterr_set(GITERR_OS, "File size overflow for 32-bit systems");
|
||||
result = -1;
|
||||
} else {
|
||||
int fd = git_futils_open_ro(full_path.ptr);
|
||||
if (fd < 0)
|
||||
result = fd;
|
||||
else {
|
||||
result = git_odb__hashfd(
|
||||
oid, fd, (size_t)item->file_size, GIT_OBJ_BLOB);
|
||||
p_close(fd);
|
||||
}
|
||||
}
|
||||
|
||||
git_buf_free(&full_path);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
#define EXEC_BIT_MASK 0000111
|
||||
|
||||
static int maybe_modified(
|
||||
git_iterator *old_iter,
|
||||
const git_index_entry *oitem,
|
||||
git_iterator *new_iter,
|
||||
const git_index_entry *nitem,
|
||||
git_diff_list *diff)
|
||||
{
|
||||
git_oid noid, *use_noid = NULL;
|
||||
git_delta_t status = GIT_DELTA_MODIFIED;
|
||||
unsigned int omode = oitem->mode;
|
||||
unsigned int nmode = nitem->mode;
|
||||
|
||||
GIT_UNUSED(old_iter);
|
||||
|
||||
if (!diff_path_matches_pathspec(diff, oitem->path))
|
||||
return 0;
|
||||
|
||||
/* on platforms with no symlinks, promote plain files to symlinks */
|
||||
if (S_ISLNK(omode) && S_ISREG(nmode) &&
|
||||
!(diff->diffcaps & GIT_DIFFCAPS_HAS_SYMLINKS))
|
||||
nmode = GIT_MODE_TYPE(omode) | (nmode & GIT_MODE_PERMS_MASK);
|
||||
|
||||
/* on platforms with no execmode, clear exec bit from comparisons */
|
||||
if (!(diff->diffcaps & GIT_DIFFCAPS_TRUST_EXEC_BIT)) {
|
||||
omode = omode & ~EXEC_BIT_MASK;
|
||||
nmode = nmode & ~EXEC_BIT_MASK;
|
||||
}
|
||||
|
||||
/* support "assume unchanged" (badly, b/c we still stat everything) */
|
||||
if ((diff->diffcaps & GIT_DIFFCAPS_ASSUME_UNCHANGED) != 0)
|
||||
status = (oitem->flags_extended & GIT_IDXENTRY_INTENT_TO_ADD) ?
|
||||
GIT_DELTA_MODIFIED : GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* support "skip worktree" index bit */
|
||||
else if ((oitem->flags_extended & GIT_IDXENTRY_SKIP_WORKTREE) != 0)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* if basic type of file changed, then split into delete and add */
|
||||
else if (GIT_MODE_TYPE(omode) != GIT_MODE_TYPE(nmode)) {
|
||||
if (diff_delta__from_one(diff, GIT_DELTA_DELETED, oitem) < 0 ||
|
||||
diff_delta__from_one(diff, GIT_DELTA_ADDED, nitem) < 0)
|
||||
return -1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* if oids and modes match, then file is unmodified */
|
||||
else if (git_oid_cmp(&oitem->oid, &nitem->oid) == 0 &&
|
||||
omode == nmode)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* if we have a workdir item with an unknown oid, check deeper */
|
||||
else if (git_oid_iszero(&nitem->oid) && new_iter->type == GIT_ITERATOR_WORKDIR) {
|
||||
/* TODO: add check against index file st_mtime to avoid racy-git */
|
||||
|
||||
/* if the files look exactly alike, then we'll assume the same */
|
||||
if (oitem->file_size == nitem->file_size &&
|
||||
(!(diff->diffcaps & GIT_DIFFCAPS_TRUST_CTIME) ||
|
||||
(oitem->ctime.seconds == nitem->ctime.seconds)) &&
|
||||
oitem->mtime.seconds == nitem->mtime.seconds &&
|
||||
(!(diff->diffcaps & GIT_DIFFCAPS_USE_DEV) ||
|
||||
(oitem->dev == nitem->dev)) &&
|
||||
oitem->ino == nitem->ino &&
|
||||
oitem->uid == nitem->uid &&
|
||||
oitem->gid == nitem->gid)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
else if (S_ISGITLINK(nmode)) {
|
||||
git_submodule *sub;
|
||||
|
||||
if ((diff->opts.flags & GIT_DIFF_IGNORE_SUBMODULES) != 0)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
else if (git_submodule_lookup(&sub, diff->repo, nitem->path) < 0)
|
||||
return -1;
|
||||
else if (sub->ignore == GIT_SUBMODULE_IGNORE_ALL)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
else {
|
||||
/* TODO: support other GIT_SUBMODULE_IGNORE values */
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
}
|
||||
}
|
||||
|
||||
/* TODO: check git attributes so we will not have to read the file
|
||||
* in if it is marked binary.
|
||||
*/
|
||||
|
||||
else if (oid_for_workdir_item(diff->repo, nitem, &noid) < 0)
|
||||
return -1;
|
||||
|
||||
else if (git_oid_cmp(&oitem->oid, &noid) == 0 &&
|
||||
omode == nmode)
|
||||
status = GIT_DELTA_UNMODIFIED;
|
||||
|
||||
/* store calculated oid so we don't have to recalc later */
|
||||
use_noid = &noid;
|
||||
}
|
||||
|
||||
return diff_delta__from_two(diff, status, oitem, nitem, use_noid);
|
||||
}
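
The capability checks at the top of maybe_modified normalize the two modes before they are compared: on a filesystem without symlinks the old type bits are kept with the new permission bits, and on a filesystem that cannot store the executable bit that bit is cleared from both sides. A small POSIX-only sketch of that normalization, using the standard S_IFMT mask as a stand-in for the internal GIT_MODE_* macros (the values here are illustrative):

/* sketch: split a mode into type and permission bits, then clear the
 * exec bits the way the capability checks above do */
#include <stdio.h>
#include <sys/stat.h>

#define EXEC_BITS 0000111

int main(void)
{
	unsigned int omode = S_IFLNK | 0644;   /* old entry: symlink   */
	unsigned int nmode = S_IFREG | 0755;   /* new entry: exec file */

	/* no symlink support: keep the old type, keep the new permissions */
	nmode = (omode & S_IFMT) | (nmode & 0777);

	/* untrusted exec bit: drop it from both sides before comparing */
	omode &= ~EXEC_BITS;
	nmode &= ~EXEC_BITS;

	printf("omode=%07o nmode=%07o same=%d\n", omode, nmode, omode == nmode);
	return 0;
}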
|
||||
|
||||
static int diff_from_iterators(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts, /**< can be NULL for defaults */
|
||||
git_iterator *old_iter,
|
||||
git_iterator *new_iter,
|
||||
git_diff_list **diff_ptr)
|
||||
{
|
||||
const git_index_entry *oitem, *nitem;
|
||||
git_buf ignore_prefix = GIT_BUF_INIT;
|
||||
git_diff_list *diff = git_diff_list_alloc(repo, opts);
|
||||
if (!diff)
|
||||
goto fail;
|
||||
|
||||
diff->old_src = old_iter->type;
|
||||
diff->new_src = new_iter->type;
|
||||
|
||||
if (git_iterator_current(old_iter, &oitem) < 0 ||
|
||||
git_iterator_current(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
|
||||
/* run iterators building diffs */
|
||||
while (oitem || nitem) {
|
||||
|
||||
/* create DELETED records for old items not matched in new */
|
||||
if (oitem && (!nitem || strcmp(oitem->path, nitem->path) < 0)) {
|
||||
if (diff_delta__from_one(diff, GIT_DELTA_DELETED, oitem) < 0 ||
|
||||
git_iterator_advance(old_iter, &oitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
/* create ADDED, TRACKED, or IGNORED records for new items not
|
||||
* matched in old (and/or descend into directories as needed)
|
||||
*/
|
||||
else if (nitem && (!oitem || strcmp(oitem->path, nitem->path) > 0)) {
|
||||
git_delta_t delta_type = GIT_DELTA_UNTRACKED;
|
||||
|
||||
/* check if contained in ignored parent directory */
|
||||
if (git_buf_len(&ignore_prefix) &&
|
||||
git__prefixcmp(nitem->path, git_buf_cstr(&ignore_prefix)) == 0)
|
||||
delta_type = GIT_DELTA_IGNORED;
|
||||
|
||||
if (S_ISDIR(nitem->mode)) {
|
||||
/* recurse into directory only if there are tracked items in
|
||||
* it or if the user requested the contents of untracked
|
||||
* directories and it is not under an ignored directory.
|
||||
*/
|
||||
if ((oitem && git__prefixcmp(oitem->path, nitem->path) == 0) ||
|
||||
(delta_type == GIT_DELTA_UNTRACKED &&
|
||||
(diff->opts.flags & GIT_DIFF_RECURSE_UNTRACKED_DIRS) != 0))
|
||||
{
|
||||
/* if this directory is ignored, remember it as the
|
||||
* "ignore_prefix" for processing contained items
|
||||
*/
|
||||
if (delta_type == GIT_DELTA_UNTRACKED &&
|
||||
git_iterator_current_is_ignored(new_iter))
|
||||
git_buf_sets(&ignore_prefix, nitem->path);
|
||||
|
||||
if (git_iterator_advance_into_directory(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
/* In core git, the next two "else if" clauses are effectively
|
||||
* reversed -- i.e. when an untracked file contained in an
|
||||
* ignored directory is individually ignored, it shows up as an
|
||||
* ignored file in the diff list, even though other untracked
|
||||
* files in the same directory are skipped completely.
|
||||
*
|
||||
* To me, this is odd. If the directory is ignored and the file
|
||||
* is untracked, we should skip it consistently, regardless of
|
||||
* whether it happens to match a pattern in the ignore file.
|
||||
*
|
||||
* To match the core git behavior, just reverse the following
|
||||
* two "else if" cases so that individual file ignores are
|
||||
* checked before container directory exclusions are used to
|
||||
* skip the file.
|
||||
*/
|
||||
else if (delta_type == GIT_DELTA_IGNORED) {
|
||||
if (git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
continue; /* ignored parent directory, so skip completely */
|
||||
}
|
||||
|
||||
else if (git_iterator_current_is_ignored(new_iter))
|
||||
delta_type = GIT_DELTA_IGNORED;
|
||||
|
||||
else if (new_iter->type != GIT_ITERATOR_WORKDIR)
|
||||
delta_type = GIT_DELTA_ADDED;
|
||||
|
||||
if (diff_delta__from_one(diff, delta_type, nitem) < 0 ||
|
||||
git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
|
||||
/* otherwise item paths match, so create MODIFIED record
|
||||
* (or ADDED and DELETED pair if type changed)
|
||||
*/
|
||||
else {
|
||||
assert(oitem && nitem && strcmp(oitem->path, nitem->path) == 0);
|
||||
|
||||
if (maybe_modified(old_iter, oitem, new_iter, nitem, diff) < 0 ||
|
||||
git_iterator_advance(old_iter, &oitem) < 0 ||
|
||||
git_iterator_advance(new_iter, &nitem) < 0)
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
git_iterator_free(old_iter);
|
||||
git_iterator_free(new_iter);
|
||||
git_buf_free(&ignore_prefix);
|
||||
|
||||
*diff_ptr = diff;
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
git_iterator_free(old_iter);
|
||||
git_iterator_free(new_iter);
|
||||
git_buf_free(&ignore_prefix);
|
||||
|
||||
git_diff_list_free(diff);
|
||||
*diff_ptr = NULL;
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
int git_diff_tree_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts, /**< can be NULL for defaults */
|
||||
git_tree *old_tree,
|
||||
git_tree *new_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && old_tree && new_tree && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_tree_range(&b, repo, new_tree, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_index_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_tree *old_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_index_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_workdir_to_index(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && diff);
|
||||
|
||||
if (git_iterator_for_index_range(&a, repo, prefix, prefix) < 0 ||
|
||||
git_iterator_for_workdir_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
|
||||
int git_diff_workdir_to_tree(
|
||||
git_repository *repo,
|
||||
const git_diff_options *opts,
|
||||
git_tree *old_tree,
|
||||
git_diff_list **diff)
|
||||
{
|
||||
git_iterator *a = NULL, *b = NULL;
|
||||
char *prefix = opts ? diff_prefix_from_pathspec(&opts->pathspec) : NULL;
|
||||
|
||||
assert(repo && old_tree && diff);
|
||||
|
||||
if (git_iterator_for_tree_range(&a, repo, old_tree, prefix, prefix) < 0 ||
|
||||
git_iterator_for_workdir_range(&b, repo, prefix, prefix) < 0)
|
||||
return -1;
|
||||
|
||||
git__free(prefix);
|
||||
|
||||
return diff_from_iterators(repo, opts, a, b, diff);
|
||||
}
|
||||
|
||||
int git_diff_merge(
|
||||
git_diff_list *onto,
|
||||
const git_diff_list *from)
|
||||
{
|
||||
int error = 0;
|
||||
git_pool onto_pool;
|
||||
git_vector onto_new;
|
||||
git_diff_delta *delta;
|
||||
unsigned int i, j;
|
||||
|
||||
assert(onto && from);
|
||||
|
||||
if (!from->deltas.length)
|
||||
return 0;
|
||||
|
||||
if (git_vector_init(&onto_new, onto->deltas.length, diff_delta__cmp) < 0 ||
|
||||
git_pool_init(&onto_pool, 1, 0) < 0)
|
||||
return -1;
|
||||
|
||||
for (i = 0, j = 0; i < onto->deltas.length || j < from->deltas.length; ) {
|
||||
git_diff_delta *o = GIT_VECTOR_GET(&onto->deltas, i);
|
||||
const git_diff_delta *f = GIT_VECTOR_GET(&from->deltas, j);
|
||||
int cmp = !f ? -1 : !o ? 1 : strcmp(o->old_file.path, f->old_file.path);
|
||||
|
||||
if (cmp < 0) {
|
||||
delta = diff_delta__dup(o, &onto_pool);
|
||||
i++;
|
||||
} else if (cmp > 0) {
|
||||
delta = diff_delta__dup(f, &onto_pool);
|
||||
j++;
|
||||
} else {
|
||||
delta = diff_delta__merge_like_cgit(o, f, &onto_pool);
|
||||
i++;
|
||||
j++;
|
||||
}
|
||||
|
||||
if ((error = !delta ? -1 : git_vector_insert(&onto_new, delta)) < 0)
|
||||
break;
|
||||
}
|
||||
|
||||
if (!error) {
|
||||
git_vector_swap(&onto->deltas, &onto_new);
|
||||
git_pool_swap(&onto->pool, &onto_pool);
|
||||
onto->new_src = from->new_src;
|
||||
}
|
||||
|
||||
git_vector_foreach(&onto_new, i, delta)
|
||||
git__free(delta);
|
||||
git_vector_free(&onto_new);
|
||||
git_pool_clear(&onto_pool);
|
||||
|
||||
return error;
|
||||
}
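
git_diff_merge walks the two path-sorted delta lists with a pair of cursors, copying whichever entry sorts first and merging when the paths collide. A self-contained sketch of that two-cursor merge, reduced to sorted string arrays (the names and the "merge by keeping one value" rule are illustrative only, not the library's behavior):

/* sketch: two-cursor merge of two sorted lists, as used above */
#include <stdio.h>
#include <string.h>

int main(void)
{
	const char *onto[] = { "a.c", "c.c", "d.c" };
	const char *from[] = { "b.c", "c.c" };
	size_t i = 0, j = 0, n_onto = 3, n_from = 2;

	while (i < n_onto || j < n_from) {
		const char *o = (i < n_onto) ? onto[i] : NULL;
		const char *f = (j < n_from) ? from[j] : NULL;
		int cmp = !f ? -1 : !o ? 1 : strcmp(o, f);

		if (cmp < 0) {
			printf("keep  %s\n", o);   /* only in onto */
			i++;
		} else if (cmp > 0) {
			printf("add   %s\n", f);   /* only in from */
			j++;
		} else {
			printf("merge %s\n", o);   /* present in both */
			i++;
			j++;
		}
	}
	return 0;
}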
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
/* ******************************************************************* */
|
||||
/* */
|
||||
/* Applied Type System */
|
||||
/* */
|
||||
/* ******************************************************************* */
|
||||
|
||||
/*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-20?? Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*/
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
/*
|
||||
(* Author: Hongwei Xi *)
|
||||
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
|
||||
(* Start time: March, 2013 *)
|
||||
*/
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#ifndef ATSHOME_LIBATS_DYNARRAY_CATS
|
||||
#define ATSHOME_LIBATS_DYNARRAY_CATS
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#include <string.h>
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#define atslib_dynarray_memcpy memcpy
|
||||
#define atslib_dynarray_memmove memmove
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
#endif // ifndef ATSHOME_LIBATS_DYNARRAY_CATS
|
||||
|
||||
/* ****** ****** */
|
||||
|
||||
/* end of [dynarray.cats] */
|
||||
samples/C/fudge_node.c (new file, 188 lines)
@@ -0,0 +1,188 @@
|
||||
/* Copyright (c) 2010 Jens Nyberg
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE. */
|
||||
|
||||
#include <fudge.h>
|
||||
#include <kernel.h>
|
||||
#include <modules/system/system.h>
|
||||
#include "pipe.h"
|
||||
|
||||
static struct system_node root;
|
||||
static struct system_node clone;
|
||||
|
||||
static unsigned int read(struct pipe_end *endself, struct pipe_end *endtarget, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
count = buffer_rcfifo(&endself->buffer, count, buffer);
|
||||
|
||||
if (!count && endtarget->node.refcount)
|
||||
{
|
||||
|
||||
list_add(&endself->readlinks, &state->link);
|
||||
task_setstatus(state->link.data, TASK_STATUS_BLOCKED);
|
||||
|
||||
}
|
||||
|
||||
system_wakeup(&endtarget->writelinks);
|
||||
|
||||
return count;
|
||||
|
||||
}
|
||||
|
||||
static unsigned int write(struct pipe_end *endself, struct pipe_end *endtarget, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
count = buffer_wcfifo(&endtarget->buffer, count, buffer);
|
||||
|
||||
if (!count)
|
||||
{
|
||||
|
||||
list_add(&endself->writelinks, &state->link);
|
||||
task_setstatus(state->link.data, TASK_STATUS_BLOCKED);
|
||||
|
||||
}
|
||||
|
||||
system_wakeup(&endtarget->readlinks);
|
||||
|
||||
return count;
|
||||
|
||||
}
|
||||
|
||||
static unsigned int end0_read(struct system_node *self, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
struct pipe *pipe = (struct pipe *)self->parent;
|
||||
|
||||
return read(&pipe->end0, &pipe->end1, state, count, buffer);
|
||||
|
||||
}
|
||||
|
||||
static unsigned int end0_write(struct system_node *self, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
struct pipe *pipe = (struct pipe *)self->parent;
|
||||
|
||||
return write(&pipe->end0, &pipe->end1, state, count, buffer);
|
||||
|
||||
}
|
||||
|
||||
static unsigned int end1_read(struct system_node *self, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
struct pipe *pipe = (struct pipe *)self->parent;
|
||||
|
||||
return read(&pipe->end1, &pipe->end0, state, count, buffer);
|
||||
|
||||
}
|
||||
|
||||
static unsigned int end1_write(struct system_node *self, struct service_state *state, unsigned int count, void *buffer)
|
||||
{
|
||||
|
||||
struct pipe *pipe = (struct pipe *)self->parent;
|
||||
|
||||
return write(&pipe->end1, &pipe->end0, state, count, buffer);
|
||||
|
||||
}
|
||||
|
||||
static unsigned int clone_child(struct system_node *self, unsigned int count, char *path)
|
||||
{
|
||||
|
||||
struct list_item *current;
|
||||
|
||||
for (current = root.children.head; current; current = current->next)
|
||||
{
|
||||
|
||||
struct system_node *node = current->data;
|
||||
struct pipe *pipe = current->data;
|
||||
|
||||
if (node == self)
|
||||
continue;
|
||||
|
||||
if (pipe->end0.node.refcount || pipe->end1.node.refcount)
|
||||
continue;
|
||||
|
||||
return node->child(node, count, path);
|
||||
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
||||
|
||||
void pipe_init(struct pipe *pipe)
|
||||
{
|
||||
|
||||
buffer_init(&pipe->end0.buffer, 4096, pipe->end0.data);
|
||||
buffer_init(&pipe->end1.buffer, 4096, pipe->end1.data);
|
||||
system_initnode(&pipe->end0.node, SYSTEM_NODETYPE_NORMAL, "0");
|
||||
system_initnode(&pipe->end1.node, SYSTEM_NODETYPE_NORMAL, "1");
|
||||
|
||||
pipe->end0.node.read = end0_read;
|
||||
pipe->end0.node.write = end0_write;
|
||||
pipe->end1.node.read = end1_read;
|
||||
pipe->end1.node.write = end1_write;
|
||||
|
||||
system_initnode(&pipe->root, SYSTEM_NODETYPE_GROUP | SYSTEM_NODETYPE_MULTI, "pipe");
|
||||
system_addchild(&pipe->root, &pipe->end0.node);
|
||||
system_addchild(&pipe->root, &pipe->end1.node);
|
||||
|
||||
}
|
||||
|
||||
void pipe_register(struct pipe *pipe)
|
||||
{
|
||||
|
||||
system_addchild(&root, &pipe->root);
|
||||
|
||||
}
|
||||
|
||||
void pipe_unregister(struct pipe *pipe)
|
||||
{
|
||||
|
||||
system_removechild(&root, &pipe->root);
|
||||
|
||||
}
|
||||
|
||||
void module_init(void)
|
||||
{
|
||||
|
||||
system_initnode(&root, SYSTEM_NODETYPE_GROUP, "pipe");
|
||||
system_initnode(&clone, SYSTEM_NODETYPE_GROUP, "clone");
|
||||
|
||||
clone.child = clone_child;
|
||||
|
||||
system_addchild(&root, &clone);
|
||||
|
||||
}
|
||||
|
||||
void module_register(void)
|
||||
{
|
||||
|
||||
system_registernode(&root);
|
||||
|
||||
}
|
||||
|
||||
void module_unregister(void)
|
||||
{
|
||||
|
||||
system_unregisternode(&root);
|
||||
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2009-2012 the libgit2 contributors
|
||||
*
|
||||
* This file is part of libgit2, distributed under the GNU GPL v2 with
|
||||
* a Linking Exception. For full terms see the included COPYING file.
|
||||
*/
|
||||
|
||||
#include "common.h"
|
||||
#include "hash.h"
|
||||
|
||||
#if defined(PPC_SHA1)
|
||||
# include "ppc/sha1.h"
|
||||
#else
|
||||
# include "sha1.h"
|
||||
#endif
|
||||
|
||||
struct git_hash_ctx {
|
||||
SHA_CTX c;
|
||||
};
|
||||
|
||||
git_hash_ctx *git_hash_new_ctx(void)
|
||||
{
|
||||
git_hash_ctx *ctx = git__malloc(sizeof(*ctx));
|
||||
|
||||
if (!ctx)
|
||||
return NULL;
|
||||
|
||||
SHA1_Init(&ctx->c);
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
void git_hash_free_ctx(git_hash_ctx *ctx)
|
||||
{
|
||||
git__free(ctx);
|
||||
}
|
||||
|
||||
void git_hash_init(git_hash_ctx *ctx)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Init(&ctx->c);
|
||||
}
|
||||
|
||||
void git_hash_update(git_hash_ctx *ctx, const void *data, size_t len)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Update(&ctx->c, data, len);
|
||||
}
|
||||
|
||||
void git_hash_final(git_oid *out, git_hash_ctx *ctx)
|
||||
{
|
||||
assert(ctx);
|
||||
SHA1_Final(out->id, &ctx->c);
|
||||
}
|
||||
|
||||
void git_hash_buf(git_oid *out, const void *data, size_t len)
|
||||
{
|
||||
SHA_CTX c;
|
||||
|
||||
SHA1_Init(&c);
|
||||
SHA1_Update(&c, data, len);
|
||||
SHA1_Final(out->id, &c);
|
||||
}
|
||||
|
||||
void git_hash_vec(git_oid *out, git_buf_vec *vec, size_t n)
|
||||
{
|
||||
SHA_CTX c;
|
||||
size_t i;
|
||||
|
||||
SHA1_Init(&c);
|
||||
for (i = 0; i < n; i++)
|
||||
SHA1_Update(&c, vec[i].data, vec[i].len);
|
||||
SHA1_Final(out->id, &c);
|
||||
}
|
||||
samples/C/main.c (new file, 397 lines)
@@ -0,0 +1,397 @@
|
||||
// Copyright 2014 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "src/runtime/runtime-utils.h"
|
||||
|
||||
#include "src/arguments.h"
|
||||
#include "src/compiler.h"
|
||||
#include "src/deoptimizer.h"
|
||||
#include "src/frames-inl.h"
|
||||
#include "src/full-codegen/full-codegen.h"
|
||||
#include "src/isolate-inl.h"
|
||||
#include "src/messages.h"
|
||||
#include "src/v8threads.h"
|
||||
#include "src/vm-state-inl.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileLazy) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
|
||||
#ifdef DEBUG
|
||||
if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
|
||||
PrintF("[unoptimized: ");
|
||||
function->PrintName();
|
||||
PrintF("]\n");
|
||||
}
|
||||
#endif
|
||||
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileBaseline(function)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileOptimized(function, Compiler::CONCURRENT)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileOptimized(function, Compiler::NOT_CONCURRENT)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 0);
|
||||
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
|
||||
DCHECK(AllowHeapAllocation::IsAllowed());
|
||||
delete deoptimizer;
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
|
||||
class ActivationsFinder : public ThreadVisitor {
|
||||
public:
|
||||
Code* code_;
|
||||
bool has_code_activations_;
|
||||
|
||||
explicit ActivationsFinder(Code* code)
|
||||
: code_(code), has_code_activations_(false) {}
|
||||
|
||||
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
|
||||
JavaScriptFrameIterator it(isolate, top);
|
||||
VisitFrames(&it);
|
||||
}
|
||||
|
||||
void VisitFrames(JavaScriptFrameIterator* it) {
|
||||
for (; !it->done(); it->Advance()) {
|
||||
JavaScriptFrame* frame = it->frame();
|
||||
if (code_->contains(frame->pc())) has_code_activations_ = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_SMI_ARG_CHECKED(type_arg, 0);
|
||||
Deoptimizer::BailoutType type =
|
||||
static_cast<Deoptimizer::BailoutType>(type_arg);
|
||||
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
|
||||
DCHECK(AllowHeapAllocation::IsAllowed());
|
||||
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
|
||||
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
|
||||
|
||||
Handle<JSFunction> function = deoptimizer->function();
|
||||
Handle<Code> optimized_code = deoptimizer->compiled_code();
|
||||
|
||||
DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
|
||||
DCHECK(type == deoptimizer->bailout_type());
|
||||
|
||||
// Make sure to materialize objects before causing any allocation.
|
||||
JavaScriptFrameIterator it(isolate);
|
||||
deoptimizer->MaterializeHeapObjects(&it);
|
||||
delete deoptimizer;
|
||||
|
||||
// Ensure the context register is updated for materialized objects.
|
||||
JavaScriptFrameIterator top_it(isolate);
|
||||
JavaScriptFrame* top_frame = top_it.frame();
|
||||
isolate->set_context(Context::cast(top_frame->context()));
|
||||
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
// Search for other activations of the same optimized code.
|
||||
// At this point {it} is at the topmost frame of all the frames materialized
|
||||
// by the deoptimizer. Note that this frame does not necessarily represent
|
||||
// an activation of {function} because of potential inlined tail-calls.
|
||||
ActivationsFinder activations_finder(*optimized_code);
|
||||
activations_finder.VisitFrames(&it);
|
||||
isolate->thread_manager()->IterateArchivedThreads(&activations_finder);
|
||||
|
||||
if (!activations_finder.has_code_activations_) {
|
||||
if (function->code() == *optimized_code) {
|
||||
if (FLAG_trace_deopt) {
|
||||
PrintF("[removing optimized code for: ");
|
||||
function->PrintName();
|
||||
PrintF("]\n");
|
||||
}
|
||||
function->ReplaceCode(function->shared()->code());
|
||||
}
|
||||
// Evict optimized code for this function from the cache so that it
|
||||
// doesn't get used for new closures.
|
||||
function->shared()->EvictFromOptimizedCodeMap(*optimized_code, "notify deoptimized");
|
||||
} else {
|
||||
// TODO(titzer): we should probably do DeoptimizeCodeList(code)
|
||||
// unconditionally if the code is not already marked for deoptimization.
|
||||
// If there is an index by shared function info, all the better.
|
||||
Deoptimizer::DeoptimizeFunction(*function);
|
||||
}
|
||||
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
|
||||
static bool IsSuitableForOnStackReplacement(
|
||||
Isolate* isolate,
|
||||
Handle<JSFunction> function
|
||||
) {
|
||||
// Keep track of whether we've succeeded in optimizing.
|
||||
if (function->shared()->optimization_disabled()) return false;
|
||||
// If we are trying to do OSR when there are already optimized
|
||||
// activations of the function, it means (a) the function is directly or
|
||||
// indirectly recursive and (b) an optimized invocation has been
|
||||
// deoptimized so that we are currently in an unoptimized activation.
|
||||
// Check for optimized activations of this function.
|
||||
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
|
||||
JavaScriptFrame* frame = it.frame();
|
||||
if (frame->is_optimized() && frame->function() == *function) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
Handle<Code> caller_code(function->shared()->code());
|
||||
|
||||
// We're not prepared to handle a function with arguments object.
|
||||
DCHECK(!function->shared()->uses_arguments());
|
||||
|
||||
RUNTIME_ASSERT(FLAG_use_osr);
|
||||
|
||||
// Passing the PC in the javascript frame from the caller directly is
|
||||
// not GC safe, so we walk the stack to get it.
|
||||
JavaScriptFrameIterator it(isolate);
|
||||
JavaScriptFrame* frame = it.frame();
|
||||
if (!caller_code->contains(frame->pc())) {
|
||||
// Code on the stack may not be the code object referenced by the shared
|
||||
// function info. It may have been replaced to include deoptimization data.
|
||||
caller_code = Handle<Code>(frame->LookupCode());
|
||||
}
|
||||
|
||||
uint32_t pc_offset =
|
||||
static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());
|
||||
|
||||
#ifdef DEBUG
|
||||
DCHECK_EQ(frame->function(), *function);
|
||||
DCHECK_EQ(frame->LookupCode(), *caller_code);
|
||||
DCHECK(caller_code->contains(frame->pc()));
|
||||
#endif // DEBUG
|
||||
|
||||
BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
|
||||
DCHECK(!ast_id.IsNone());
|
||||
|
||||
MaybeHandle<Code> maybe_result;
|
||||
if (IsSuitableForOnStackReplacement(isolate, function)) {
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Compiling: ");
|
||||
function->PrintName();
|
||||
PrintF(" at -*- scheme -*- %d]\n", ast_id.ToInt());
|
||||
}
|
||||
maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
|
||||
}
|
||||
|
||||
// Revert the patched back edge table, regardless of whether OSR succeeds.
|
||||
BackEdgeTable::Revert(isolate, *caller_code);
|
||||
|
||||
// Check whether we ended up with usable optimized code.
|
||||
Handle<Code> result;
|
||||
if (maybe_result.ToHandle(&result)
|
||||
&& result->kind() == Code::OPTIMIZED_FUNCTION) {
|
||||
DeoptimizationInputData* data =
|
||||
DeoptimizationInputData::cast(result->deoptimization_data());
|
||||
|
||||
if (data->OsrPcOffset()->value() >= 0) {
|
||||
DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
|
||||
ast_id.ToInt(), data->OsrPcOffset()->value());
|
||||
}
|
||||
// TODO(titzer): this is a massive hack to make the deopt counts
|
||||
// match. Fix heuristics for reenabling optimizations!
|
||||
function->shared()->increment_deopt_count();
|
||||
|
||||
if (result->is_turbofanned()) {
|
||||
// TurboFanned OSR code cannot be installed into the function.
|
||||
// But the function is obviously hot, so optimize it next time.
|
||||
function->ReplaceCode(
|
||||
isolate->builtins()->builtin(Builtins::kCompileOptimized));
|
||||
} else {
|
||||
// Crankshafted OSR code can be installed into the function.
|
||||
function->ReplaceCode(*result);
|
||||
}
|
||||
return *result;
|
||||
}
|
||||
}
|
||||
|
||||
// Failed.
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Failed: ");
|
||||
function->PrintName();
|
||||
PrintF(" at AST id %d]\n", ast_id.ToInt());
|
||||
}
|
||||
|
||||
if (!function->IsOptimized()) {
|
||||
function->ReplaceCode(function->shared()->code());
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
|
||||
// First check if this is a real stack overflow.
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed()) {
|
||||
SealHandleScope shs(isolate);
|
||||
return isolate->StackOverflow();
|
||||
}
|
||||
|
||||
isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
|
||||
return (function->IsOptimized())
|
||||
? function->code()
|
||||
: function->shared()->code();
|
||||
}
|
||||
|
||||
|
||||
bool CodeGenerationFromStringsAllowed(
|
||||
Isolate* isolate,
|
||||
Handle<Context> context
|
||||
){
|
||||
DCHECK(context->allow_code_gen_from_strings()->IsFalse());
|
||||
// Check with callback if set.
|
||||
AllowCodeGenerationFromStringsCallback callback =
|
||||
isolate->allow_code_gen_callback();
|
||||
if (callback == NULL) {
|
||||
// No callback set and code generation disallowed.
|
||||
return false;
|
||||
} else {
|
||||
// Callback set. Let it decide if code generation is allowed.
|
||||
VMState<EXTERNAL> state(isolate);
|
||||
return callback(v8::Utils::ToLocal(context));
|
||||
}
|
||||
}
|
||||
|
||||
static Object* CompileGlobalEval(
|
||||
Isolate* isolate,
|
||||
Handle<String> source,
|
||||
Handle<SharedFunctionInfo> outer_info,
|
||||
LanguageMode language_mode,
|
||||
int eval_scope_position,
|
||||
int eval_position
|
||||
){
|
||||
Handle<Context> context = Handle<Context>(isolate->context());
|
||||
Handle<Context> native_context = Handle<Context>(context->native_context());
|
||||
|
||||
// Check if native context allows code generation from
|
||||
// strings. Throw an exception if it doesn't.
|
||||
if (native_context->allow_code_gen_from_strings()->IsFalse() &&
|
||||
!CodeGenerationFromStringsAllowed(isolate, native_context)) {
|
||||
Handle<Object> error_message =
|
||||
native_context->ErrorMessageForCodeGenerationFromStrings();
|
||||
Handle<Object> error;
|
||||
MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
|
||||
MessageTemplate::kCodeGenFromStrings, error_message);
|
||||
if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
|
||||
// Deal with a normal eval call with a string argument. Compile it
|
||||
// and return the compiled function bound in the local context.
|
||||
static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
|
||||
Handle<JSFunction> compiled;
|
||||
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
|
||||
isolate, compiled,
|
||||
Compiler::GetFunctionFromEval(
|
||||
source, outer_info, context, language_mode,
|
||||
restriction, eval_scope_position, eval_position
|
||||
),
|
||||
isolate->heap()->exception()
|
||||
);
|
||||
return *compiled;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 6);
|
||||
|
||||
Handle<Object> callee = args.at<Object>(0);
|
||||
|
||||
// If "eval" didn't refer to the original GlobalEval, it's not a
|
||||
// direct call to eval.
|
||||
// (And even if it is, but the first argument isn't a string, just let
|
||||
// execution default to an indirect call to eval, which will also return
|
||||
// the first argument without doing anything).
|
||||
if (*callee != isolate->native_context()->global_eval_fun() || !args[1]->IsString()) {
|
||||
return *callee;
|
||||
}
|
||||
|
||||
DCHECK(args[3]->IsSmi());
|
||||
DCHECK(is_valid_language_mode(args.smi_at(3)));
|
||||
LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
|
||||
DCHECK(args[4]->IsSmi());
|
||||
Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(), isolate);
|
||||
return CompileGlobalEval(
|
||||
isolate,
|
||||
args.at<String>(1),
|
||||
outer_info,
|
||||
language_mode,
|
||||
args.smi_at(4),
|
||||
args.smi_at(5)
|
||||
);
|
||||
}
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
/* vim: set shiftwidth=4 softtabstop=0 cindent cinoptions={1s: */
|
||||
|
||||
samples/CSON/base.cson (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
'atom-text-editor':
|
||||
# Platform Bindings
|
||||
'home': 'editor:move-to-first-character-of-line'
|
||||
'end': 'editor:move-to-end-of-screen-line'
|
||||
'shift-home': 'editor:select-to-first-character-of-line'
|
||||
'shift-end': 'editor:select-to-end-of-line'
|
||||
|
||||
'atom-text-editor:not([mini])':
|
||||
# Atom Specific
|
||||
'ctrl-C': 'editor:copy-path'
|
||||
|
||||
# Sublime Parity
|
||||
'tab': 'editor:indent'
|
||||
'enter': 'editor:newline'
|
||||
'shift-tab': 'editor:outdent-selected-rows'
|
||||
'ctrl-K': 'editor:delete-line'
|
||||
|
||||
'.select-list atom-text-editor[mini]':
|
||||
'enter': 'core:confirm'
|
||||
|
||||
'.tool-panel.panel-left, .tool-panel.panel-right':
|
||||
'escape': 'tool-panel:unfocus'
|
||||
|
||||
'atom-text-editor !important, atom-text-editor[mini] !important':
|
||||
'escape': 'editor:consolidate-selections'
|
||||
|
||||
# allow standard input fields to work correctly
|
||||
'body .native-key-bindings':
|
||||
'tab': 'core:focus-next'
|
||||
'shift-tab': 'core:focus-previous'
|
||||
'enter': 'native!'
|
||||
'backspace': 'native!'
|
||||
'shift-backspace': 'native!'
|
||||
'delete': 'native!'
|
||||
'up': 'native!'
|
||||
'down': 'native!'
|
||||
'shift-up': 'native!'
|
||||
'shift-down': 'native!'
|
||||
'alt-up': 'native!'
|
||||
'alt-down': 'native!'
|
||||
'alt-shift-up': 'native!'
|
||||
'alt-shift-down': 'native!'
|
||||
'cmd-up': 'native!'
|
||||
'cmd-down': 'native!'
|
||||
'cmd-shift-up': 'native!'
|
||||
'cmd-shift-down': 'native!'
|
||||
'ctrl-up': 'native!'
|
||||
'ctrl-down': 'native!'
|
||||
'ctrl-shift-up': 'native!'
|
||||
'ctrl-shift-down': 'native!'
|
||||
'left': 'native!'
|
||||
'right': 'native!'
|
||||
'shift-left': 'native!'
|
||||
'shift-right': 'native!'
|
||||
'alt-left': 'native!'
|
||||
'alt-right': 'native!'
|
||||
'alt-shift-left': 'native!'
|
||||
'alt-shift-right': 'native!'
|
||||
'cmd-left': 'native!'
|
||||
'cmd-right': 'native!'
|
||||
'cmd-shift-left': 'native!'
|
||||
'cmd-shift-right': 'native!'
|
||||
'ctrl-left': 'native!'
|
||||
'ctrl-right': 'native!'
|
||||
'ctrl-shift-left': 'native!'
|
||||
'ctrl-shift-right': 'native!'
|
||||
'ctrl-b': 'native!'
|
||||
'ctrl-f': 'native!'
|
||||
'ctrl-F': 'native!'
|
||||
'ctrl-B': 'native!'
|
||||
'ctrl-h': 'native!'
|
||||
'ctrl-d': 'native!'
|
||||
samples/CSON/config.cson (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
directoryIcons:
|
||||
|
||||
Atom:
|
||||
icon: "atom"
|
||||
match: /^\.atom$/
|
||||
colour: "dark-green"
|
||||
|
||||
Bower:
|
||||
icon: "bower"
|
||||
match: /^bower[-_]components$/
|
||||
colour: "bower"
|
||||
|
||||
Dropbox:
|
||||
icon: "dropbox"
|
||||
match: /^(?:Dropbox|\.dropbox\.cache)$/
|
||||
colour: "medium-blue"
|
||||
|
||||
Git:
|
||||
icon: "git"
|
||||
match: /^\.git$/
|
||||
|
||||
GitHub:
|
||||
icon: "github"
|
||||
match: /^\.github$/
|
||||
|
||||
Meteor:
|
||||
icon: "meteor"
|
||||
match: /^\.meteor$/
|
||||
|
||||
NodeJS:
|
||||
icon: "node"
|
||||
match: /^node_modules$/
|
||||
colour: "medium-green"
|
||||
|
||||
Package:
|
||||
icon: "package"
|
||||
match: /^\.bundle$/i
|
||||
|
||||
TextMate:
|
||||
icon: "textmate"
|
||||
match: ".tmBundle"
|
||||
|
||||
|
||||
fileIcons:
|
||||
|
||||
ABAP:
|
||||
icon: "abap"
|
||||
scope: "abp"
|
||||
match: ".abap"
|
||||
colour: "medium-orange"
|
||||
|
||||
ActionScript: # Or Flash-related
|
||||
icon: "as"
|
||||
match: [
|
||||
[".swf", "medium-blue"]
|
||||
[".as", "medium-red", scope: /\.(?:flex-config|actionscript(?:\.\d+)?)$/i, alias: /ActionScript\s?3|as3/i]
|
||||
[".jsfl", "auto-yellow"]
|
||||
[".swc", "dark-red"]
|
||||
]
|
||||
samples/CSON/ff-sfd.cson (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
name: "Spline Font Database"
|
||||
scopeName: "text.sfd"
|
||||
fileTypes: ["sfd"]
|
||||
firstLineMatch: "^SplineFontDB: [\\d.]+"
|
||||
patterns: [include: "#main"]
|
||||
|
||||
repository:
|
||||
main:
|
||||
patterns: [
|
||||
{include: "#punctuation"}
|
||||
{include: "#private"}
|
||||
{include: "#image"}
|
||||
{include: "#pickleData"}
|
||||
{include: "#sections"}
|
||||
{include: "#copyright"}
|
||||
{include: "#property"}
|
||||
{include: "#control"}
|
||||
{include: "#address"}
|
||||
{include: "#encoding"}
|
||||
{include: "source.fontforge#shared"}
|
||||
{include: "#colour"}
|
||||
]
|
||||
|
||||
punctuation:
|
||||
patterns: [
|
||||
{match: "<|>", name: "punctuation.definition.brackets.angle.sfd"}
|
||||
{match: "[{}]", name: "punctuation.definition.brackets.curly.sfd"}
|
||||
]
|
||||
|
||||
private:
|
||||
name: "meta.section.private.sfd"
|
||||
begin: "^BeginPrivate(?=:)"
|
||||
end: "^EndPrivate\\b"
|
||||
beginCaptures: 0: name: "keyword.control.begin.private.sfd"
|
||||
endCaptures: 0: name: "keyword.control.end.private.sfd"
|
||||
patterns: [
|
||||
{match: "^\\S+", name: "entity.name.private.property.sfd"}
|
||||
{include: "$self"}
|
||||
]
|
||||
|
||||
image:
|
||||
name: "meta.image.sfd"
|
||||
begin: "^(Image)(?=:)(.+)$"
|
||||
end: "^(EndImage)\\b"
|
||||
contentName: "string.unquoted.raw.data.sfd"
|
||||
beginCaptures:
|
||||
1: name: "keyword.control.begin.image.sfd"
|
||||
2: patterns: [include: "$self"]
|
||||
endCaptures:
|
||||
1: name: "keyword.control.end.image.sfd"
|
||||
|
||||
pickleData:
|
||||
name: "meta.pickle-data.sfd"
|
||||
begin: "^(PickledData)(:)\\s*(\")"
|
||||
end: '"'
|
||||
beginCaptures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
3: name: "punctuation.definition.string.begin.sfd"
|
||||
endCaptures:
|
||||
0: name: "punctuation.definition.string.end.sfd"
|
||||
patterns: [match: "\\\\.", name: "constant.character.escape.sfd"]
|
||||
|
||||
sections:
|
||||
name: "meta.section.${2:/downcase}.sfd"
|
||||
begin: "^(Start|Begin)([A-Z]\\w+)(?=:)"
|
||||
end: "^(End\\2)\\b"
|
||||
beginCaptures: 0: name: "keyword.control.begin.${2:/downcase}.sfd"
|
||||
endCaptures: 0: name: "keyword.control.end.${2:/downcase}.sfd"
|
||||
patterns: [include: "$self"]
|
||||
|
||||
control:
|
||||
name: "keyword.control.${1:/downcase}.sfd"
|
||||
match: "\\b(Fore|Back|SplineSet|^End\\w+)\\b"
|
||||
|
||||
colour:
|
||||
name: "constant.other.hex.colour.sfd"
|
||||
match: "(#)[A-Fa-f0-9]{3,}|(?<=\\s)[A-Fa-f0-9]{6,8}"
|
||||
captures:
|
||||
1: name: "punctuation.definition.colour.sfd"
|
||||
|
||||
encoding:
|
||||
name: "constant.language.encoding.sfd"
|
||||
match: "(?i)\\b(ISO[-\\w]+)(?<=\\d)(?=\\s|$)"
|
||||
|
||||
# Don't highlight numbers in freeform strings (years/version strings)
|
||||
copyright:
|
||||
name: "meta.${1:/downcase}-string.sfd"
|
||||
begin: "^(Copyright|U?Comments?|\\w+Name)(:)"
|
||||
end: "$"
|
||||
beginCaptures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
patterns: [include: "source.fontforge#stringEscapes"]
|
||||
|
||||
# No idea what this is, but it looks distracting without a fix
|
||||
# Assuming it's referring to a memory register or something.
|
||||
address:
|
||||
match: "\\d+[xX][A-Fa-f0-9]+"
|
||||
name: "constant.numeric.hexadecimal.sfd"
|
||||
|
||||
property:
|
||||
match: "^([^:]+)(:)"
|
||||
name: "meta.dictionary.key-value.sfd"
|
||||
captures:
|
||||
1: name: "entity.name.property.sfd"
|
||||
2: name: "punctuation.separator.dictionary.key-value.sfd"
|
||||
|
||||
samples/CSON/wercker-status.cson (new file, 11 lines)
@@ -0,0 +1,11 @@
'menu': [
  {
    'label': 'Packages'
    'submenu': [
      'label': 'Wercker Status'
      'submenu': [
        { 'label': 'Check now!', 'command': 'wercker-status:checknow' }
      ]
    ]
  }
]
@@ -1,3 +1,3 @@
Year,Make,Model,Length
1997,Ford,E350,2.34
2000,Mercury,Cougar,2.38
2000,Mercury,Cougar,2.38
@@ -1,707 +0,0 @@
|
||||
Inductive day : Type :=
|
||||
| monday : day
|
||||
| tuesday : day
|
||||
| wednesday : day
|
||||
| thursday : day
|
||||
| friday : day
|
||||
| saturday : day
|
||||
| sunday : day.
|
||||
|
||||
Definition next_weekday (d:day) : day :=
|
||||
match d with
|
||||
| monday => tuesday
|
||||
| tuesday => wednesday
|
||||
| wednesday => thursday
|
||||
| thursday => friday
|
||||
| friday => monday
|
||||
| saturday => monday
|
||||
| sunday => monday
|
||||
end.
|
||||
|
||||
Example test_next_weekday:
|
||||
(next_weekday (next_weekday saturday)) = tuesday.
|
||||
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Inductive bool : Type :=
|
||||
| true : bool
|
||||
| false : bool.
|
||||
|
||||
Definition negb (b:bool) : bool :=
|
||||
match b with
|
||||
| true => false
|
||||
| false => true
|
||||
end.
|
||||
|
||||
Definition andb (b1:bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => b2
|
||||
| false => false
|
||||
end.
|
||||
|
||||
Definition orb (b1:bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => true
|
||||
| false => b2
|
||||
end.
|
||||
|
||||
Example test_orb1: (orb true false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb2: (orb false false) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb3: (orb false true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Example test_orb4: (orb true true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition nandb (b1: bool) (b2:bool) : bool :=
|
||||
match b1 with
|
||||
| true => match b2 with
|
||||
| false => true
|
||||
| true => false
|
||||
end
|
||||
| false => true
|
||||
end.
|
||||
|
||||
Example test_nandb1: (nandb true false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb2: (nandb false false) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb3: (nandb false true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_nandb4: (nandb true true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition andb3 (b1: bool) (b2:bool) (b3:bool) : bool :=
|
||||
match b1 with
|
||||
| false => false
|
||||
| true => match b2 with
|
||||
| false => false
|
||||
| true => b3
|
||||
end
|
||||
end.
|
||||
|
||||
Example test_andb31: (andb3 true true true) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb32: (andb3 false true true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb33: (andb3 true false true) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_andb34: (andb3 true true false) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Module Playground1.
|
||||
|
||||
Inductive nat : Type :=
|
||||
| O : nat
|
||||
| S : nat -> nat.
|
||||
|
||||
Definition pred (n : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => n'
|
||||
end.
|
||||
|
||||
Definition minustwo (n : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S O => O
|
||||
| S (S n') => n'
|
||||
end.
|
||||
|
||||
Fixpoint evenb (n : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S O => false
|
||||
| S (S n') => evenb n'
|
||||
end.
|
||||
|
||||
Definition oddb (n : nat) : bool := negb (evenb n).
|
||||
|
||||
Example test_oddb1: (oddb (S O)) = true.
|
||||
Proof. reflexivity. Qed.
|
||||
Example test_oddb2: (oddb (S (S (S (S O))))) = false.
|
||||
Proof. reflexivity. Qed.
|
||||
|
||||
Fixpoint plus (n : nat) (m : nat) : nat :=
|
||||
match n with
|
||||
| O => m
|
||||
| S n' => S (plus n' m)
|
||||
end.
|
||||
|
||||
Fixpoint mult (n m : nat) : nat :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => plus m (mult n' m)
|
||||
end.
|
||||
|
||||
Fixpoint minus (n m : nat) : nat :=
|
||||
match n, m with
|
||||
| O, _ => n
|
||||
| S n', O => S n'
|
||||
| S n', S m' => minus n' m'
|
||||
end.
|
||||
|
||||
Fixpoint exp (base power : nat) : nat :=
|
||||
match power with
|
||||
| O => S O
|
||||
| S p => mult base (exp base p)
|
||||
end.
|
||||
|
||||
Fixpoint factorial (n : nat) : nat :=
|
||||
match n with
|
||||
| O => S O
|
||||
| S n' => mult n (factorial n')
|
||||
end.
|
||||
|
||||
Example test_factorial1: (factorial (S (S (S O)))) = (S (S (S (S (S (S O)))))).
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Notation "x + y" := (plus x y) (at level 50, left associativity) : nat_scope.
|
||||
Notation "x - y" := (minus x y) (at level 50, left associativity) : nat_scope.
|
||||
Notation "x * y" := (mult x y) (at level 40, left associativity) : nat_scope.
|
||||
|
||||
Fixpoint beq_nat (n m : nat) : bool :=
|
||||
match n with
|
||||
| O => match m with
|
||||
| O => true
|
||||
| S m' => false
|
||||
end
|
||||
| S n' => match m with
|
||||
| O => false
|
||||
| S m' => beq_nat n' m'
|
||||
end
|
||||
end.
|
||||
|
||||
Fixpoint ble_nat (n m : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S n' =>
|
||||
match m with
|
||||
| O => false
|
||||
| S m' => ble_nat n' m'
|
||||
end
|
||||
end.
|
||||
|
||||
Example test_ble_nat1: (ble_nat (S (S O)) (S (S O))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_ble_nat2: (ble_nat (S (S O)) (S (S (S (S O))))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_ble_nat3: (ble_nat (S (S (S (S O)))) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Definition blt_nat (n m : nat) : bool :=
|
||||
(andb (negb (beq_nat n m)) (ble_nat n m)).
|
||||
|
||||
Example test_blt_nat1: (blt_nat (S (S O)) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_blt_nat3: (blt_nat (S (S (S (S O)))) (S (S O))) = false.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
Example test_blt_nat2 : (blt_nat (S (S O)) (S (S (S (S O))))) = true.
|
||||
Proof. simpl. reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
simpl. reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n' : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_O_n'' : forall n : nat, O + n = n.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem plus_1_1 : forall n : nat, (S O) + n = S n.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem mult_0_1: forall n : nat, O * n = O.
|
||||
Proof.
|
||||
intros n. reflexivity. Qed.
|
||||
|
||||
Theorem plus_id_example : forall n m:nat,
|
||||
n = m -> n + n = m + m.
|
||||
Proof.
|
||||
intros n m.
|
||||
intros H.
|
||||
rewrite -> H.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_id_exercise : forall n m o: nat,
|
||||
n = m -> m = o -> n + m = m + o.
|
||||
Proof.
|
||||
intros n m o.
|
||||
intros H.
|
||||
intros H'.
|
||||
rewrite -> H.
|
||||
rewrite <- H'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_0_plus : forall n m : nat,
|
||||
(O + n) * m = n * m.
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> plus_O_n.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem mult_1_plus : forall n m: nat,
|
||||
((S O) + n) * m = m + (n * m).
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> plus_1_1.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1 : forall n : nat,
|
||||
n * (S O) = n.
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_1_neq_0 : forall n : nat,
|
||||
beq_nat (n + (S O)) O = false.
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n as [| n'].
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem zero_nbeq_plus_1 : forall n : nat,
|
||||
beq_nat O (n + (S O)) = false.
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Require String. Open Scope string_scope.
|
||||
|
||||
Ltac move_to_top x :=
|
||||
match reverse goal with
|
||||
| H : _ |- _ => try move x after H
|
||||
end.
|
||||
|
||||
Tactic Notation "assert_eq" ident(x) constr(v) :=
|
||||
let H := fresh in
|
||||
assert (x = v) as H by reflexivity;
|
||||
clear H.
|
||||
|
||||
Tactic Notation "Case_aux" ident(x) constr(name) :=
|
||||
first [
|
||||
set (x := name); move_to_top x
|
||||
| assert_eq x name; move_to_top x
|
||||
| fail 1 "because we are working on a different case" ].
|
||||
|
||||
Ltac Case name := Case_aux Case name.
|
||||
Ltac SCase name := Case_aux SCase name.
|
||||
Ltac SSCase name := Case_aux SSCase name.
|
||||
Ltac SSSCase name := Case_aux SSSCase name.
|
||||
Ltac SSSSCase name := Case_aux SSSSCase name.
|
||||
Ltac SSSSSCase name := Case_aux SSSSSCase name.
|
||||
Ltac SSSSSSCase name := Case_aux SSSSSSCase name.
|
||||
Ltac SSSSSSSCase name := Case_aux SSSSSSSCase name.
|
||||
|
||||
Theorem andb_true_elim1 : forall b c : bool,
|
||||
andb b c = true -> b = true.
|
||||
Proof.
|
||||
intros b c H.
|
||||
destruct b.
|
||||
Case "b = true".
|
||||
reflexivity.
|
||||
Case "b = false".
|
||||
rewrite <- H. reflexivity. Qed.
|
||||
|
||||
Theorem plus_0_r : forall n : nat, n + O = n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0". reflexivity.
|
||||
Case "n = S n'". simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem minus_diag : forall n,
|
||||
minus n n = O.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl. reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
|
||||
Theorem mult_0_r : forall n:nat,
|
||||
n * O = O.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem plus_n_Sm : forall n m : nat,
|
||||
S (n + m) = n + (S m).
|
||||
Proof.
|
||||
intros n m. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem plus_assoc : forall n m p : nat,
|
||||
n + (m + p) = (n + m) + p.
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_distr : forall n m: nat, S (n + m) = n + (S m).
|
||||
Proof.
|
||||
intros n m. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'. reflexivity. Qed.
|
||||
|
||||
Theorem mult_distr : forall n m: nat, n * ((S O) + m) = n * (S m).
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n as [| n'].
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_comm : forall n m : nat,
|
||||
n + m = m + n.
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl.
|
||||
rewrite -> plus_0_r.
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl.
|
||||
rewrite -> IHn'.
|
||||
rewrite -> plus_distr.
|
||||
reflexivity. Qed.
|
||||
|
||||
Fixpoint double (n:nat) :=
|
||||
match n with
|
||||
| O => O
|
||||
| S n' => S (S (double n'))
|
||||
end.
|
||||
|
||||
Lemma double_plus : forall n, double n = n + n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl. rewrite -> IHn'.
|
||||
rewrite -> plus_distr. reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem beq_nat_refl : forall n : nat,
|
||||
true = beq_nat n n.
|
||||
Proof.
|
||||
intros n. induction n as [| n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n".
|
||||
simpl. rewrite <- IHn'.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem plus_rearrange: forall n m p q : nat,
|
||||
(n + m) + (p + q) = (m + n) + (p + q).
|
||||
Proof.
|
||||
intros n m p q.
|
||||
assert(H: n + m = m + n).
|
||||
Case "Proof by assertion".
|
||||
rewrite -> plus_comm. reflexivity.
|
||||
rewrite -> H. reflexivity. Qed.
|
||||
|
||||
Theorem plus_swap : forall n m p: nat,
|
||||
n + (m + p) = m + (n + p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
rewrite -> plus_assoc.
|
||||
assert(H: m + (n + p) = (m + n) + p).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H.
|
||||
assert(H2: m + n = n + m).
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
rewrite -> H2.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_swap' : forall n m p: nat,
|
||||
n + (m + p) = m + (n + p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
rewrite -> plus_assoc.
|
||||
assert(H: m + (n + p) = (m + n) + p).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H.
|
||||
replace (m + n) with (n + m).
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
rewrite -> plus_comm.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1_distr: forall m n: nat,
|
||||
n * ((S O) + m) = n * (S O) + n * m.
|
||||
Proof.
|
||||
intros n m.
|
||||
rewrite -> mult_1.
|
||||
rewrite -> plus_1_1.
|
||||
simpl.
|
||||
induction m as [|m'].
|
||||
simpl.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> plus_swap.
|
||||
rewrite <- IHm'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_comm: forall m n : nat,
|
||||
m * n = n * m.
|
||||
Proof.
|
||||
intros m n.
|
||||
induction n as [| n'].
|
||||
Case "n = 0".
|
||||
simpl.
|
||||
rewrite -> mult_0_r.
|
||||
reflexivity.
|
||||
Case "n = S n'".
|
||||
simpl.
|
||||
rewrite <- mult_distr.
|
||||
rewrite -> mult_1_distr.
|
||||
rewrite -> mult_1.
|
||||
rewrite -> IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem evenb_next : forall n : nat,
|
||||
evenb n = evenb (S (S n)).
|
||||
Proof.
|
||||
intros n.
|
||||
Admitted.
|
||||
|
||||
Theorem negb_negb : forall n : bool,
|
||||
n = negb (negb n).
|
||||
Proof.
|
||||
intros n.
|
||||
destruct n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem evenb_n_oddb_Sn : forall n : nat,
|
||||
evenb n = negb (evenb (S n)).
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [|n'].
|
||||
reflexivity.
|
||||
assert(H: evenb n' = evenb (S (S n'))).
|
||||
reflexivity.
|
||||
rewrite <- H.
|
||||
rewrite -> IHn'.
|
||||
rewrite <- negb_negb.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(*Fixpoint bad (n : nat) : bool :=
|
||||
match n with
|
||||
| O => true
|
||||
| S O => bad (S n)
|
||||
| S (S n') => bad n'
|
||||
end.*)
|
||||
|
||||
Theorem ble_nat_refl : forall n:nat,
|
||||
true = ble_nat n n.
|
||||
Proof.
|
||||
intros n.
|
||||
induction n as [|n'].
|
||||
Case "n = 0".
|
||||
reflexivity.
|
||||
Case "n = S n".
|
||||
simpl.
|
||||
rewrite <- IHn'.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem zero_nbeq_S : forall n: nat,
|
||||
beq_nat O (S n) = false.
|
||||
Proof.
|
||||
intros n.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem andb_false_r : forall b : bool,
|
||||
andb b false = false.
|
||||
Proof.
|
||||
intros b.
|
||||
destruct b.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem plus_ble_compat_1 : forall n m p : nat,
|
||||
ble_nat n m = true -> ble_nat (p + n) (p + m) = true.
|
||||
Proof.
|
||||
intros n m p.
|
||||
intros H.
|
||||
induction p.
|
||||
Case "p = 0".
|
||||
simpl.
|
||||
rewrite -> H.
|
||||
reflexivity.
|
||||
Case "p = S p'".
|
||||
simpl.
|
||||
rewrite -> IHp.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem S_nbeq_0 : forall n:nat,
|
||||
beq_nat (S n) O = false.
|
||||
Proof.
|
||||
intros n.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_1_1 : forall n:nat, (S O) * n = n.
|
||||
Proof.
|
||||
intros n.
|
||||
simpl.
|
||||
rewrite -> plus_0_r.
|
||||
reflexivity. Qed.
|
||||
|
||||
Theorem all3_spec : forall b c : bool,
|
||||
orb (andb b c)
|
||||
(orb (negb b)
|
||||
(negb c))
|
||||
= true.
|
||||
Proof.
|
||||
intros b c.
|
||||
destruct b.
|
||||
destruct c.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma mult_plus_1 : forall n m : nat,
|
||||
S(m + n) = m + (S n).
|
||||
Proof.
|
||||
intros n m.
|
||||
induction m.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHm.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_mult : forall n m : nat,
|
||||
n * (S m) = n * m + n.
|
||||
Proof.
|
||||
intros n m.
|
||||
induction n.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn.
|
||||
rewrite -> plus_assoc.
|
||||
rewrite -> mult_plus_1.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_plus_distr_r : forall n m p:nat,
|
||||
(n + m) * p = (n * p) + (m * p).
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction p.
|
||||
rewrite -> mult_0_r.
|
||||
rewrite -> mult_0_r.
|
||||
rewrite -> mult_0_r.
|
||||
reflexivity.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> mult_mult.
|
||||
rewrite -> IHp.
|
||||
assert(H1: ((n * p) + n) + (m * p + m) = (n * p) + (n + (m * p + m))).
|
||||
rewrite <- plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H1.
|
||||
assert(H2: (n + (m * p + m)) = (m * p + (n + m))).
|
||||
rewrite -> plus_swap.
|
||||
reflexivity.
|
||||
rewrite -> H2.
|
||||
assert(H3: (n * p) + (m * p + (n + m)) = ((n * p ) + (m * p)) + (n + m)).
|
||||
rewrite -> plus_assoc.
|
||||
reflexivity.
|
||||
rewrite -> H3.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Theorem mult_assoc : forall n m p : nat,
|
||||
n * (m * p) = (n * m) * p.
|
||||
Proof.
|
||||
intros n m p.
|
||||
induction n.
|
||||
simpl.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> mult_plus_distr_r.
|
||||
rewrite -> IHn.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Inductive bin : Type :=
|
||||
| BO : bin
|
||||
| D : bin -> bin
|
||||
| M : bin -> bin.
|
||||
|
||||
Fixpoint incbin (n : bin) : bin :=
|
||||
match n with
|
||||
| BO => M (BO)
|
||||
| D n' => M n'
|
||||
| M n' => D (incbin n')
|
||||
end.
|
||||
|
||||
Fixpoint bin2un (n : bin) : nat :=
|
||||
match n with
|
||||
| BO => O
|
||||
| D n' => double (bin2un n')
|
||||
| M n' => S (double (bin2un n'))
|
||||
end.
|
||||
|
||||
Theorem bin_comm : forall n : bin,
|
||||
bin2un(incbin n) = S (bin2un n).
|
||||
Proof.
|
||||
intros n.
|
||||
induction n.
|
||||
reflexivity.
|
||||
reflexivity.
|
||||
simpl.
|
||||
rewrite -> IHn.
|
||||
reflexivity.
|
||||
Qed.
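(* A quick check (added, not part of the original exercise): the binary
   numeral M (M BO) stands for 2 * (2 * 0 + 1) + 1, i.e. three. *)
Example bin2un_three : bin2un (M (M BO)) = S (S (S O)).
Proof. reflexivity. Qed.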
|
||||
|
||||
End Playground1.
|
||||
85 samples/Coq/Computation.v Normal file
@@ -0,0 +1,85 @@
|
||||
(** The definition of computations, used to represent interactive programs. *)
|
||||
Require Import Coq.NArith.NArith.
|
||||
Require Import ListString.All.
|
||||
|
||||
Local Open Scope type.
|
||||
|
||||
(** System calls. *)
|
||||
Module Command.
|
||||
Inductive t :=
|
||||
| AskCard
|
||||
| AskPIN
|
||||
| CheckPIN (pin : N)
|
||||
| AskAmount
|
||||
| CheckAmount (amount : N)
|
||||
| GiveCard
|
||||
| GiveAmount (amount : N)
|
||||
| ShowError (message : LString.t).
|
||||
|
||||
(** The type of an answer for a command depends on the value of the command. *)
|
||||
Definition answer (command : t) : Type :=
|
||||
match command with
|
||||
| AskCard => bool (* If the given card seems valid. *)
|
||||
| AskPIN => option N (* A number or cancellation. *)
|
||||
| CheckPIN _ => bool (* If the PIN number is valid. *)
|
||||
| AskAmount => option N (* A number or cancellation. *)
|
||||
| CheckAmount _ => bool (* If the amount can be withdrawn. *)
|
||||
| GiveCard => bool (* If the card was given. *)
|
||||
| GiveAmount _ => bool (* If the money was given. *)
|
||||
| ShowError _ => unit (* Show an error message. *)
|
||||
end.
|
||||
End Command.
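(* A small illustration (added, not in the original file) of the dependent
   answer type: the answer expected for [AskPIN] computes to [option N]. *)
Example answer_AskPIN : Command.answer Command.AskPIN = option N.
Proof. reflexivity. Qed.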
|
||||
|
||||
(** Computations with I/Os. *)
|
||||
Module C.
|
||||
(** A computation either does nothing, or does a system call and waits
for the answer in order to run another computation. *)
|
||||
Inductive t : Type :=
|
||||
| Ret : t
|
||||
| Call : forall (command : Command.t), (Command.answer command -> t) -> t.
|
||||
Arguments Ret.
|
||||
Arguments Call _ _.
|
||||
|
||||
(** Some optional notations. *)
|
||||
Module Notations.
|
||||
(** A nicer notation for `Ret`. *)
|
||||
Definition ret : t :=
|
||||
Ret.
|
||||
|
||||
(** We define an explicit apply function so that Coq does not try to expand
|
||||
the notations everywhere. *)
|
||||
Definition apply {A B} (f : A -> B) (x : A) := f x.
|
||||
|
||||
(** System call. *)
|
||||
Notation "'call!' answer ':=' command 'in' X" :=
|
||||
(Call command (fun answer => X))
|
||||
(at level 200, answer ident, command at level 100, X at level 200).
|
||||
|
||||
(** System call with typed answer. *)
|
||||
Notation "'call!' answer : A ':=' command 'in' X" :=
|
||||
(Call command (fun (answer : A) => X))
|
||||
(at level 200, answer ident, command at level 100, A at level 200, X at level 200).
|
||||
|
||||
(** System call ignoring the answer. *)
|
||||
Notation "'do_call!' command 'in' X" :=
|
||||
(Call command (fun _ => X))
|
||||
(at level 200, command at level 100, X at level 200).
|
||||
|
||||
(** This notation is useful to compose computations which wait for a
continuation. We do not use an explicit bind operator, in order to keep the
language and the proofs simple. *)
|
||||
Notation "'let!' x ':=' X 'in' Y" :=
|
||||
(apply X (fun x => Y))
|
||||
(at level 200, x ident, X at level 100, Y at level 200).
|
||||
|
||||
(** Let with a typed answer. *)
|
||||
Notation "'let!' x : A ':=' X 'in' Y" :=
|
||||
(apply X (fun (x : A) => Y))
|
||||
(at level 200, x ident, X at level 100, A at level 200, Y at level 200).
|
||||
|
||||
(** Let ignoring the answer. *)
|
||||
Notation "'do!' X 'in' Y" :=
|
||||
(apply X (fun _ => Y))
|
||||
(at level 200, X at level 100, Y at level 200).
|
||||
End Notations.
|
||||
End C.
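(* A minimal usage sketch (added, not in the original file), in the style of
   the [Main.v] sample later in this diff: ask for a card, ignore the answer,
   and terminate. *)
Import C.Notations.

Definition example_ask_and_stop : C.t :=
  do_call! Command.AskCard in
  ret.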
|
||||
@@ -1,290 +0,0 @@
|
||||
(** A development of Treesort on Heap trees. It has an average
|
||||
complexity of O(n.log n) but of O(n²) in the worst case (e.g. if
|
||||
the list is already sorted) *)
|
||||
|
||||
(* G. Huet 1-9-95 uses Multiset *)
|
||||
|
||||
Require Import List Multiset PermutSetoid Relations Sorting.
|
||||
|
||||
Section defs.
|
||||
|
||||
(** * Trees and heap trees *)
|
||||
|
||||
(** ** Definition of trees over an ordered set *)
|
||||
|
||||
Variable A : Type.
|
||||
Variable leA : relation A.
|
||||
Variable eqA : relation A.
|
||||
|
||||
Let gtA (x y:A) := ~ leA x y.
|
||||
|
||||
Hypothesis leA_dec : forall x y:A, {leA x y} + {leA y x}.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis leA_refl : forall x y:A, eqA x y -> leA x y.
|
||||
Hypothesis leA_trans : forall x y z:A, leA x y -> leA y z -> leA x z.
|
||||
Hypothesis leA_antisym : forall x y:A, leA x y -> leA y x -> eqA x y.
|
||||
|
||||
Hint Resolve leA_refl.
|
||||
Hint Immediate eqA_dec leA_dec leA_antisym.
|
||||
|
||||
Let emptyBag := EmptyBag A.
|
||||
Let singletonBag := SingletonBag _ eqA_dec.
|
||||
|
||||
Inductive Tree :=
|
||||
| Tree_Leaf : Tree
|
||||
| Tree_Node : A -> Tree -> Tree -> Tree.
|
||||
|
||||
(** [a] is lower than a Tree [T] if [T] is a Leaf
|
||||
or [T] is a Node holding [b>a] *)
|
||||
|
||||
Definition leA_Tree (a:A) (t:Tree) :=
|
||||
match t with
|
||||
| Tree_Leaf => True
|
||||
| Tree_Node b T1 T2 => leA a b
|
||||
end.
|
||||
|
||||
Lemma leA_Tree_Leaf : forall a:A, leA_Tree a Tree_Leaf.
|
||||
Proof.
|
||||
simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma leA_Tree_Node :
|
||||
forall (a b:A) (G D:Tree), leA a b -> leA_Tree a (Tree_Node b G D).
|
||||
Proof.
|
||||
simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** The heap property *)
|
||||
|
||||
Inductive is_heap : Tree -> Prop :=
|
||||
| nil_is_heap : is_heap Tree_Leaf
|
||||
| node_is_heap :
|
||||
forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> is_heap T2 -> is_heap (Tree_Node a T1 T2).
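(* A small sanity check (added, not in the original development): any
   single-node tree is a heap, since a Leaf imposes no constraint. *)
Remark single_node_is_heap :
  forall a:A, is_heap (Tree_Node a Tree_Leaf Tree_Leaf).
Proof.
  intro a; auto using node_is_heap, nil_is_heap, leA_Tree_Leaf.
Qed.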
|
||||
|
||||
Lemma invert_heap :
|
||||
forall (a:A) (T1 T2:Tree),
|
||||
is_heap (Tree_Node a T1 T2) ->
|
||||
leA_Tree a T1 /\ leA_Tree a T2 /\ is_heap T1 /\ is_heap T2.
|
||||
Proof.
|
||||
intros; inversion H; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(* This lemma ought to be generated automatically by the Inversion tools *)
|
||||
Lemma is_heap_rect :
|
||||
forall P:Tree -> Type,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros a G PG D PD PN.
|
||||
elim (invert_heap a G D); auto with datatypes.
|
||||
intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
|
||||
apply X0; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(* This lemma ought to be generated automatically by the Inversion tools *)
|
||||
Lemma is_heap_rec :
|
||||
forall P:Tree -> Set,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros a G PG D PD PN.
|
||||
elim (invert_heap a G D); auto with datatypes.
|
||||
intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
|
||||
apply X; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma low_trans :
|
||||
forall (T:Tree) (a b:A), leA a b -> leA_Tree b T -> leA_Tree a T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
intros; simpl; apply leA_trans with b; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(** ** Merging two sorted lists *)
|
||||
|
||||
Inductive merge_lem (l1 l2:list A) : Type :=
|
||||
merge_exist :
|
||||
forall l:list A,
|
||||
Sorted leA l ->
|
||||
meq (list_contents _ eqA_dec l)
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2)) ->
|
||||
(forall a, HdRel leA a l1 -> HdRel leA a l2 -> HdRel leA a l) ->
|
||||
merge_lem l1 l2.
|
||||
Require Import Morphisms.
|
||||
|
||||
Instance: Equivalence (@meq A).
|
||||
Proof. constructor; auto with datatypes. red. apply meq_trans. Defined.
|
||||
|
||||
Instance: Proper (@meq A ++> @meq _ ++> @meq _) (@munion A).
|
||||
Proof. intros x y H x' y' H'. now apply meq_congr. Qed.
|
||||
|
||||
Lemma merge :
|
||||
forall l1:list A, Sorted leA l1 ->
|
||||
forall l2:list A, Sorted leA l2 -> merge_lem l1 l2.
|
||||
Proof.
|
||||
fix 1; intros; destruct l1.
|
||||
apply merge_exist with l2; auto with datatypes.
|
||||
rename l1 into l.
|
||||
revert l2 H0. fix 1. intros.
|
||||
destruct l2 as [|a0 l0].
|
||||
apply merge_exist with (a :: l); simpl; auto with datatypes.
|
||||
elim (leA_dec a a0); intros.
|
||||
|
||||
(* 1 (leA a a0) *)
|
||||
apply Sorted_inv in H. destruct H.
|
||||
destruct (merge l H (a0 :: l0) H0).
|
||||
apply merge_exist with (a :: l1). clear merge merge0.
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl. rewrite m. now rewrite munion_ass.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l; trivial with datatypes.
|
||||
|
||||
(* 2 (leA a0 a) *)
|
||||
apply Sorted_inv in H0. destruct H0.
|
||||
destruct (merge0 l0 H0). clear merge merge0.
|
||||
apply merge_exist with (a0 :: l1);
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl; rewrite m. simpl. setoid_rewrite munion_ass at 1. rewrite munion_comm.
|
||||
repeat rewrite munion_ass. setoid_rewrite munion_comm at 3. reflexivity.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l0; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
(** ** From trees to multisets *)
|
||||
|
||||
(** contents of a tree as a multiset *)
|
||||
|
||||
(** Nota Bene: in what follows the definition of SingletonBag
is not used. Actually, we could just take as a postulate:
[Parameter SingletonBag : A->multiset]. *)
|
||||
|
||||
Fixpoint contents (t:Tree) : multiset A :=
|
||||
match t with
|
||||
| Tree_Leaf => emptyBag
|
||||
| Tree_Node a t1 t2 =>
|
||||
munion (contents t1) (munion (contents t2) (singletonBag a))
|
||||
end.
|
||||
|
||||
|
||||
(** equivalence of two trees is equality of corresponding multisets *)
|
||||
Definition equiv_Tree (t1 t2:Tree) := meq (contents t1) (contents t2).
|
||||
|
||||
|
||||
|
||||
(** * From lists to sorted lists *)
|
||||
|
||||
(** ** Specification of heap insertion *)
|
||||
|
||||
Inductive insert_spec (a:A) (T:Tree) : Type :=
|
||||
insert_exist :
|
||||
forall T1:Tree,
|
||||
is_heap T1 ->
|
||||
meq (contents T1) (munion (contents T) (singletonBag a)) ->
|
||||
(forall b:A, leA b a -> leA_Tree b T -> leA_Tree b T1) ->
|
||||
insert_spec a T.
|
||||
|
||||
|
||||
Lemma insert : forall T:Tree, is_heap T -> forall a:A, insert_spec a T.
|
||||
Proof.
|
||||
simple induction 1; intros.
|
||||
apply insert_exist with (Tree_Node a Tree_Leaf Tree_Leaf);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
simpl; unfold meq, munion; auto using node_is_heap with datatypes.
|
||||
elim (leA_dec a a0); intros.
|
||||
elim (X a0); intros.
|
||||
apply insert_exist with (Tree_Node a T2 T0);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
simpl; apply treesort_twist1; trivial with datatypes.
|
||||
elim (X a); intros T3 HeapT3 ConT3 LeA.
|
||||
apply insert_exist with (Tree_Node a0 T2 T3);
|
||||
auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
apply node_is_heap; auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
|
||||
apply low_trans with a; auto with datatypes.
|
||||
apply LeA; auto with datatypes.
|
||||
apply low_trans with a; auto with datatypes.
|
||||
simpl; apply treesort_twist2; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** Building a heap from a list *)
|
||||
|
||||
Inductive build_heap (l:list A) : Type :=
|
||||
heap_exist :
|
||||
forall T:Tree,
|
||||
is_heap T ->
|
||||
meq (list_contents _ eqA_dec l) (contents T) -> build_heap l.
|
||||
|
||||
Lemma list_to_heap : forall l:list A, build_heap l.
|
||||
Proof.
|
||||
simple induction l.
|
||||
apply (heap_exist nil Tree_Leaf); auto with datatypes.
|
||||
simpl; unfold meq; exact nil_is_heap.
|
||||
simple induction 1.
|
||||
intros T i m; elim (insert T i a).
|
||||
intros; apply heap_exist with T1; simpl; auto with datatypes.
|
||||
apply meq_trans with (munion (contents T) (singletonBag a)).
|
||||
apply meq_trans with (munion (singletonBag a) (contents T)).
|
||||
apply meq_right; trivial with datatypes.
|
||||
apply munion_comm.
|
||||
apply meq_sym; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** ** Building the sorted list *)
|
||||
|
||||
Inductive flat_spec (T:Tree) : Type :=
|
||||
flat_exist :
|
||||
forall l:list A,
|
||||
Sorted leA l ->
|
||||
(forall a:A, leA_Tree a T -> HdRel leA a l) ->
|
||||
meq (contents T) (list_contents _ eqA_dec l) -> flat_spec T.
|
||||
|
||||
Lemma heap_to_list : forall T:Tree, is_heap T -> flat_spec T.
|
||||
Proof.
|
||||
intros T h; elim h; intros.
|
||||
apply flat_exist with (nil (A:=A)); auto with datatypes.
|
||||
elim X; intros l1 s1 i1 m1; elim X0; intros l2 s2 i2 m2.
|
||||
elim (merge _ s1 _ s2); intros.
|
||||
apply flat_exist with (a :: l); simpl; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (list_contents _ eqA_dec l1)
|
||||
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
|
||||
apply meq_congr; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (singletonBag a)
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
|
||||
apply munion_rotate.
|
||||
apply meq_right; apply meq_sym; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
|
||||
(** * Specification of treesort *)
|
||||
|
||||
Theorem treesort :
|
||||
forall l:list A,
|
||||
{m : list A | Sorted leA m & permutation _ eqA_dec l m}.
|
||||
Proof.
|
||||
intro l; unfold permutation.
|
||||
elim (list_to_heap l).
|
||||
intros.
|
||||
elim (heap_to_list T); auto with datatypes.
|
||||
intros.
|
||||
exists l0; auto with datatypes.
|
||||
apply meq_trans with (contents T); trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
End defs.
|
||||
5944 samples/Coq/JsCorrectness.v Normal file
File diff suppressed because it is too large
249 samples/Coq/JsInterpreterExtraction.v Normal file
@@ -0,0 +1,249 @@
|
||||
Set Implicit Arguments.
|
||||
Require Import JsSyntax JsInterpreterMonads JsInterpreter JsInit.
|
||||
Require Import LibFix LibList.
|
||||
|
||||
Require Export Shared.
|
||||
Require Export LibTactics LibLogic LibReflect LibList
|
||||
LibOperation LibStruct LibNat LibEpsilon LibFunc LibHeap.
|
||||
Require Flocq.Appli.Fappli_IEEE Flocq.Appli.Fappli_IEEE_bits.
|
||||
|
||||
|
||||
|
||||
(* Here stand some commands to extract the interpreter relatively correctly to OCaml. *)
|
||||
Extraction Language Ocaml.
|
||||
|
||||
Require Import ExtrOcamlBasic.
|
||||
Require Import ExtrOcamlNatInt.
|
||||
Require Import ExtrOcamlString.
|
||||
|
||||
(* Optimal fixpoint. *)
|
||||
Extraction Inline FixFun3 FixFun3Mod FixFun4 FixFun4Mod FixFunMod curry3 uncurry3 curry4 uncurry4.
|
||||
(* As classical logic statements are now unused, they should not be extracted
(otherwise, useless errors would be raised). *)
|
||||
Extraction Inline epsilon epsilon_def classicT arbitrary indefinite_description Inhab_witness Fix isTrue.
|
||||
|
||||
(**************************************************************)
|
||||
(** ** Numerical values *)
|
||||
|
||||
(* number *)
|
||||
|
||||
Extract Inductive positive => float
|
||||
[ "(fun p -> 1. +. (2. *. p))"
|
||||
"(fun p -> 2. *. p)"
|
||||
"1." ]
|
||||
"(fun f2p1 f2p f1 p ->
|
||||
if p <= 1. then f1 () else if mod_float p 2. = 0. then f2p (floor (p /. 2.)) else f2p1 (floor (p /. 2.)))".
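(* Worked example (comment added for clarity): the positive 5 = xI (xO xH) is
   extracted to 1. +. 2. *. (2. *. 1.) = 5.; conversely, the matcher above sees
   that 5. is odd and greater than 1, so it calls f2p1 (floor (5. /. 2.)) = f2p1 2. *)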
|
||||
|
||||
Extract Inductive Z => float [ "0." "" "(~-.)" ]
|
||||
"(fun f0 fp fn z -> if z=0. then f0 () else if z>0. then fp z else fn (~-. z))".
|
||||
|
||||
Extract Inductive N => float [ "0." "" ]
|
||||
"(fun f0 fp n -> if n=0. then f0 () else fp n)".
|
||||
|
||||
Extract Constant Z.add => "(+.)".
|
||||
Extract Constant Z.succ => "(+.) 1.".
|
||||
Extract Constant Z.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant Z.sub => "(-.)".
|
||||
Extract Constant Z.mul => "( *. )".
|
||||
Extract Constant Z.opp => "(~-.)".
|
||||
Extract Constant Z.abs => "abs_float".
|
||||
Extract Constant Z.min => "min".
|
||||
Extract Constant Z.max => "max".
|
||||
Extract Constant Z.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
|
||||
Extract Constant Pos.add => "(+.)".
|
||||
Extract Constant Pos.succ => "(+.) 1.".
|
||||
Extract Constant Pos.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant Pos.sub => "(-.)".
|
||||
Extract Constant Pos.mul => "( *. )".
|
||||
Extract Constant Pos.min => "min".
|
||||
Extract Constant Pos.max => "max".
|
||||
Extract Constant Pos.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
Extract Constant Pos.compare_cont =>
|
||||
"fun x y c -> if x=y then c else if x<y then Lt else Gt".
|
||||
|
||||
Extract Constant N.add => "(+.)".
|
||||
Extract Constant N.succ => "(+.) 1.".
|
||||
Extract Constant N.pred => "(fun x -> x -. 1.)".
|
||||
Extract Constant N.sub => "(-.)".
|
||||
Extract Constant N.mul => "( *. )".
|
||||
Extract Constant N.min => "min".
|
||||
Extract Constant N.max => "max".
|
||||
Extract Constant N.div => "(fun x y -> if x = 0. then 0. else floor (x /. y))".
|
||||
Extract Constant N.modulo => "mod_float".
|
||||
Extract Constant N.compare =>
|
||||
"fun x y -> if x=y then Eq else if x<y then Lt else Gt".
|
||||
|
||||
Extract Inductive Fappli_IEEE.binary_float => float [
|
||||
"(fun s -> if s then (0.) else (-0.))"
|
||||
"(fun s -> if s then infinity else neg_infinity)"
|
||||
"nan"
|
||||
"(fun (s, m, e) -> failwith ""FIXME: No extraction from binary float allowed yet."")"
|
||||
].
|
||||
|
||||
Extract Constant JsNumber.of_int => "fun x -> x".
|
||||
|
||||
Extract Constant JsNumber.nan => "nan".
|
||||
Extract Constant JsNumber.zero => "0.".
|
||||
Extract Constant JsNumber.neg_zero => "(-0.)".
|
||||
Extract Constant JsNumber.one => "1.".
|
||||
Extract Constant JsNumber.infinity => "infinity".
|
||||
Extract Constant JsNumber.neg_infinity => "neg_infinity".
|
||||
Extract Constant JsNumber.max_value => "max_float".
|
||||
Extract Constant JsNumber.min_value => "(Int64.float_of_bits Int64.one)".
|
||||
Extract Constant JsNumber.pi => "(4. *. atan 1.)".
|
||||
Extract Constant JsNumber.e => "(exp 1.)".
|
||||
Extract Constant JsNumber.ln2 => "(log 2.)".
|
||||
Extract Constant JsNumber.floor => "floor".
|
||||
Extract Constant JsNumber.absolute => "abs_float".
|
||||
|
||||
Extract Constant JsNumber.from_string =>
|
||||
"(fun s ->
|
||||
try
|
||||
let s = (String.concat """" (List.map (String.make 1) s)) in
|
||||
if s = """" then 0. else float_of_string s
|
||||
with Failure ""float_of_string"" -> nan)
|
||||
(* Note that we're using `float_of_string' there, which does not have the same
behavior as JavaScript. For instance it will read ""022"" as 22 instead of
18, which should be the JavaScript result for it. *)".
|
||||
|
||||
Extract Constant JsNumber.to_string =>
|
||||
"(fun f ->
|
||||
prerr_string (""Warning: JsNumber.to_string called. This might be responsible for errors. Argument value: "" ^ string_of_float f ^ ""."");
|
||||
prerr_newline();
|
||||
let string_of_number n =
|
||||
let sfn = string_of_float n in
|
||||
(if (sfn = ""inf"") then ""Infinity"" else
|
||||
if (sfn = ""-inf"") then ""-Infinity"" else
|
||||
if (sfn = ""nan"") then ""NaN"" else
|
||||
let inum = int_of_float n in
|
||||
if (float_of_int inum = n) then (string_of_int inum) else (string_of_float n)) in
|
||||
let ret = ref [] in (* Ugly, but the API for OCaml string is not very functional... *)
|
||||
String.iter (fun c -> ret := c :: !ret) (string_of_number f);
|
||||
List.rev !ret)
|
||||
(* Note that this is ugly, we should use the spec of JsNumber.to_string here (9.8.1). *)".
|
||||
|
||||
Extract Constant JsNumber.add => "(+.)".
|
||||
Extract Constant JsNumber.sub => "(-.)".
|
||||
Extract Constant JsNumber.mult => "( *. )".
|
||||
Extract Constant JsNumber.div => "(/.)".
|
||||
Extract Constant JsNumber.fmod => "mod_float".
|
||||
Extract Constant JsNumber.neg => "(~-.)".
|
||||
Extract Constant JsNumber.sign => "(fun f -> float_of_int (compare f 0.))".
|
||||
Extract Constant JsNumber.number_comparable => "(fun n1 n2 -> 0 = compare n1 n2)".
|
||||
Extract Constant JsNumber.lt_bool => "(<)".
|
||||
|
||||
Extract Constant JsNumber.to_int32 =>
|
||||
"fun n ->
|
||||
match classify_float n with
|
||||
| FP_normal | FP_subnormal ->
|
||||
let i32 = 2. ** 32. in
|
||||
let i31 = 2. ** 31. in
|
||||
let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
|
||||
let int32bit =
|
||||
let smod = mod_float posint i32 in
|
||||
if smod < 0. then smod +. i32 else smod
|
||||
in
|
||||
(if int32bit >= i31 then int32bit -. i32 else int32bit)
|
||||
| _ -> 0.". (* LATER: do in Coq. Spec is 9.5, p. 47.*)
|
||||
|
||||
Extract Constant JsNumber.to_uint32 =>
|
||||
"fun n ->
|
||||
match classify_float n with
|
||||
| FP_normal | FP_subnormal ->
|
||||
let i32 = 2. ** 32. in
|
||||
let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
|
||||
let int32bit =
|
||||
let smod = mod_float posint i32 in
|
||||
if smod < 0. then smod +. i32 else smod
|
||||
in
|
||||
int32bit
|
||||
| _ -> 0.". (* LAER: do in Coq. Spec is 9.6, p47.*)
|
||||
|
||||
Extract Constant JsNumber.modulo_32 => "(fun x -> let r = mod_float x 32. in if x < 0. then r +. 32. else r)".
|
||||
Extract Constant JsNumber.int32_bitwise_not => "fun x -> Int32.to_float (Int32.lognot (Int32.of_float x))".
|
||||
Extract Constant JsNumber.int32_bitwise_and => "fun x y -> Int32.to_float (Int32.logand (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_bitwise_or => "fun x y -> Int32.to_float (Int32.logor (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_bitwise_xor => "fun x y -> Int32.to_float (Int32.logxor (Int32.of_float x) (Int32.of_float y))".
|
||||
Extract Constant JsNumber.int32_left_shift => "(fun x y -> Int32.to_float (Int32.shift_left (Int32.of_float x) (int_of_float y)))".
|
||||
Extract Constant JsNumber.int32_right_shift => "(fun x y -> Int32.to_float (Int32.shift_right (Int32.of_float x) (int_of_float y)))".
|
||||
Extract Constant JsNumber.uint32_right_shift =>
|
||||
"(fun x y ->
|
||||
let i31 = 2. ** 31. in
|
||||
let i32 = 2. ** 32. in
|
||||
let newx = if x >= i31 then x -. i32 else x in
|
||||
let r = Int32.to_float (Int32.shift_right_logical (Int32.of_float newx) (int_of_float y)) in
|
||||
if r < 0. then r +. i32 else r)".
|
||||
|
||||
Extract Constant int_of_char => "(fun c -> float_of_int (int_of_char c))".
|
||||
|
||||
Extract Constant ascii_comparable => "(=)".
|
||||
Extract Constant lt_int_decidable => "(<)".
|
||||
Extract Constant le_int_decidable => "(<=)".
|
||||
Extract Constant ge_nat_decidable => "(>=)".
|
||||
|
||||
(* TODO ARTHUR: This TLC lemma does not extract to something computable... whereas it should! *)
|
||||
Extract Constant prop_eq_decidable => "(=)".
|
||||
|
||||
Extract Constant env_loc_global_env_record => "0".
|
||||
|
||||
(* The following functions pattern match on floats and must therefore be inlined away at extraction. *)
|
||||
Extraction Inline Fappli_IEEE.Bplus Fappli_IEEE.binary_normalize Fappli_IEEE_bits.b64_plus.
|
||||
Extraction Inline Fappli_IEEE.Bmult Fappli_IEEE.Bmult_FF Fappli_IEEE_bits.b64_mult.
|
||||
Extraction Inline Fappli_IEEE.Bdiv Fappli_IEEE_bits.b64_div.
|
||||
|
||||
(* New options for the interpreter to work in Coq 8.4 *)
|
||||
Set Extraction AccessOpaque.
|
||||
|
||||
(* These parameters are implementation-dependent according to the spec.
I've chosen some very simple values, but we could pick other values for them. *)
|
||||
Extract Constant object_prealloc_global_proto => "(Coq_value_prim Coq_prim_null)".
|
||||
Extract Constant object_prealloc_global_class => "(
|
||||
let rec aux s = function
|
||||
| 0 -> []
|
||||
| n -> let n' = n - 1 in
|
||||
s.[n'] :: aux s n'
|
||||
in let aux2 s =
|
||||
List.rev (aux s (String.length s))
|
||||
in aux2 ""GlobalClass"")".
|
||||
|
||||
|
||||
(* Parsing *)
|
||||
Extract Constant parse_pickable => "(fun s strict ->
|
||||
let str = String.concat """" (List.map (String.make 1) s) in
|
||||
try
|
||||
let parserExp = Parser_main.exp_from_string ~force_strict:strict str in
|
||||
Some (JsSyntaxInfos.add_infos_prog strict
|
||||
(Translate_syntax.exp_to_prog parserExp))
|
||||
with
|
||||
(* | Translate_syntax.CoqSyntaxDoesNotSupport _ -> assert false (* Temporary *) *)
|
||||
| Parser.ParserFailure _
|
||||
| Parser.InvalidArgument ->
|
||||
prerr_string (""Warning: Parser error on eval. Input string: \"""" ^ str ^ ""\""\n"");
|
||||
None
|
||||
)".
|
||||
|
||||
|
||||
(* Debugging *)
|
||||
Extract Inlined Constant not_yet_implemented_because => "(fun s ->
|
||||
print_endline (__LOC__ ^ "": Not implemented because: "" ^ Prheap.string_of_char_list s) ;
|
||||
Coq_result_not_yet_implemented)".
|
||||
Extract Inlined Constant impossible_because => "(fun s ->
|
||||
print_endline (__LOC__ ^ "": Stuck because: "" ^ Prheap.string_of_char_list s) ;
|
||||
Coq_result_impossible)".
|
||||
Extract Inlined Constant impossible_with_heap_because => "(fun s message ->
|
||||
print_endline (__LOC__ ^ "": Stuck!\nState: "" ^ Prheap.prstate true s
|
||||
^ ""\nMessage:\t"" ^ Prheap.string_of_char_list message) ;
|
||||
Coq_result_impossible)".
|
||||
|
||||
|
||||
(* Final Extraction *)
|
||||
Extraction Blacklist string list bool.
|
||||
Separate Extraction runs run_javascript.
|
||||
|
||||
|
||||
|
||||
(* -- LATER: extract inequality_test_string in a more efficient way *)
|
||||
|
||||
1051 samples/Coq/JsNumber.v Normal file
File diff suppressed because it is too large
1766 samples/Coq/JsPrettyInterm.v Normal file
File diff suppressed because it is too large
42 samples/Coq/Main.v Normal file
@@ -0,0 +1,42 @@
|
||||
Require Import FunctionNinjas.All.
|
||||
Require Import ListString.All.
|
||||
Require Import Computation.
|
||||
|
||||
Import C.Notations.
|
||||
|
||||
Definition error (message : LString.t) : C.t :=
|
||||
do_call! Command.ShowError message in
|
||||
ret.
|
||||
|
||||
Definition main : C.t :=
|
||||
call! card_is_valid := Command.AskCard in
|
||||
if card_is_valid then
|
||||
call! pin := Command.AskPIN in
|
||||
match pin with
|
||||
| None => error @@ LString.s "No PIN given."
|
||||
| Some pin =>
|
||||
call! pin_is_valid := Command.CheckPIN pin in
|
||||
if pin_is_valid then
|
||||
call! ask_amount := Command.AskAmount in
|
||||
match ask_amount with
|
||||
| None => error @@ LString.s "No amount given."
|
||||
| Some amount =>
|
||||
call! amount_is_valid := Command.CheckAmount amount in
|
||||
if amount_is_valid then
|
||||
call! card_is_given := Command.GiveCard in
|
||||
if card_is_given then
|
||||
call! amount_is_given := Command.GiveAmount amount in
|
||||
if amount_is_given then
|
||||
ret
|
||||
else
|
||||
error @@ LString.s "Cannot give you the amount. Please contact your bank."
|
||||
else
|
||||
error @@ LString.s "Cannot give you back the card. Please contact your bank."
|
||||
else
|
||||
error @@ LString.s "Invalid amount."
|
||||
end
|
||||
else
|
||||
error @@ LString.s "Invalid PIN."
|
||||
end
|
||||
else
|
||||
error @@ LString.s "Invalid card.".
|
||||
@@ -1,539 +0,0 @@
|
||||
Require Import Omega Relations Multiset SetoidList.
|
||||
|
||||
(** This file is deprecated, use [Permutation.v] instead.
|
||||
|
||||
Indeed, this file defines a notion of permutation based on
|
||||
multisets (there exists a permutation between two lists iff every
element has the same multiplicity in the two lists) which
|
||||
requires a more complex apparatus (the equipment of the domain
|
||||
with a decidable equality) than [Permutation] in [Permutation.v].
|
||||
|
||||
The relation between the two notions is stated in lemma
|
||||
[permutation_Permutation].
|
||||
|
||||
File [Permutation] concerns Leibniz equality : it shows in particular
|
||||
that [List.Permutation] and [permutation] are equivalent in this context.
|
||||
*)
|
||||
|
||||
Set Implicit Arguments.
|
||||
|
||||
Local Notation "[ ]" := nil.
|
||||
Local Notation "[ a ; .. ; b ]" := (a :: .. (b :: []) ..).
|
||||
|
||||
Section Permut.
|
||||
|
||||
(** * From lists to multisets *)
|
||||
|
||||
Variable A : Type.
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
|
||||
Let emptyBag := EmptyBag A.
|
||||
Let singletonBag := SingletonBag _ eqA_dec.
|
||||
|
||||
(** contents of a list *)
|
||||
|
||||
Fixpoint list_contents (l:list A) : multiset A :=
|
||||
match l with
|
||||
| [] => emptyBag
|
||||
| a :: l => munion (singletonBag a) (list_contents l)
|
||||
end.
|
||||
|
||||
Lemma list_contents_app :
|
||||
forall l m:list A,
|
||||
meq (list_contents (l ++ m)) (munion (list_contents l) (list_contents m)).
|
||||
Proof.
|
||||
simple induction l; simpl; auto with datatypes.
|
||||
intros.
|
||||
apply meq_trans with
|
||||
(munion (singletonBag a) (munion (list_contents l0) (list_contents m)));
|
||||
auto with datatypes.
|
||||
Qed.
|
||||
|
||||
(** * [permutation]: definition and basic properties *)
|
||||
|
||||
Definition permutation (l m:list A) := meq (list_contents l) (list_contents m).
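(* A tiny sketch (added, not in the original file): swapping the two elements
   of a pair is a permutation, directly from the multiset definition. *)
Remark permutation_swap_pair :
  forall a b:A, permutation [a; b] [b; a].
Proof.
  intros a b; unfold permutation, meq; intro c; simpl;
  destruct (eqA_dec a c); destruct (eqA_dec b c); reflexivity.
Qed.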
|
||||
|
||||
Lemma permut_refl : forall l:list A, permutation l l.
|
||||
Proof.
|
||||
unfold permutation; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_sym :
|
||||
forall l1 l2 : list A, permutation l1 l2 -> permutation l2 l1.
|
||||
Proof.
|
||||
unfold permutation, meq; intros; symmetry; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_trans :
|
||||
forall l m n:list A, permutation l m -> permutation m n -> permutation l n.
|
||||
Proof.
|
||||
unfold permutation; intros.
|
||||
apply meq_trans with (list_contents m); auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons_eq :
|
||||
forall l m:list A,
|
||||
permutation l m -> forall a a', eqA a a' -> permutation (a :: l) (a' :: m).
|
||||
Proof.
|
||||
unfold permutation; simpl; intros.
|
||||
apply meq_trans with (munion (singletonBag a') (list_contents l)).
|
||||
apply meq_left, meq_singleton; auto.
|
||||
auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons :
|
||||
forall l m:list A,
|
||||
permutation l m -> forall a:A, permutation (a :: l) (a :: m).
|
||||
Proof.
|
||||
unfold permutation; simpl; auto with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app :
|
||||
forall l l' m m':list A,
|
||||
permutation l l' -> permutation m m' -> permutation (l ++ m) (l' ++ m').
|
||||
Proof.
|
||||
unfold permutation; intros.
|
||||
apply meq_trans with (munion (list_contents l) (list_contents m));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
apply meq_trans with (munion (list_contents l') (list_contents m'));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
apply meq_trans with (munion (list_contents l') (list_contents m));
|
||||
auto using permut_cons, list_contents_app with datatypes.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_inside_eq :
|
||||
forall a a' l1 l2 l3 l4, eqA a a' ->
|
||||
permutation (l1 ++ l2) (l3 ++ l4) ->
|
||||
permutation (l1 ++ a :: l2) (l3 ++ a' :: l4).
|
||||
Proof.
|
||||
unfold permutation, meq in *; intros.
|
||||
specialize H0 with a0.
|
||||
repeat rewrite list_contents_app in *; simpl in *.
|
||||
destruct (eqA_dec a a0) as [Ha|Ha]; rewrite H in Ha;
|
||||
decide (eqA_dec a' a0) with Ha; simpl; auto with arith.
|
||||
do 2 rewrite <- plus_n_Sm; f_equal; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_inside :
|
||||
forall a l1 l2 l3 l4,
|
||||
permutation (l1 ++ l2) (l3 ++ l4) ->
|
||||
permutation (l1 ++ a :: l2) (l3 ++ a :: l4).
|
||||
Proof.
|
||||
unfold permutation, meq in *; intros.
|
||||
generalize (H a0); clear H.
|
||||
do 4 rewrite list_contents_app.
|
||||
simpl.
|
||||
destruct (eqA_dec a a0); simpl; auto with arith.
|
||||
do 2 rewrite <- plus_n_Sm; f_equal; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_cons_inside_eq :
|
||||
forall a a' l l1 l2, eqA a a' ->
|
||||
permutation l (l1 ++ l2) ->
|
||||
permutation (a :: l) (l1 ++ a' :: l2).
|
||||
Proof.
|
||||
intros;
|
||||
replace (a :: l) with ([] ++ a :: l); trivial;
|
||||
apply permut_add_inside_eq; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_add_cons_inside :
|
||||
forall a l l1 l2,
|
||||
permutation l (l1 ++ l2) ->
|
||||
permutation (a :: l) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
intros;
|
||||
replace (a :: l) with ([] ++ a :: l); trivial;
|
||||
apply permut_add_inside; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_middle :
|
||||
forall (l m:list A) (a:A), permutation (a :: l ++ m) (l ++ a :: m).
|
||||
Proof.
|
||||
intros; apply permut_add_cons_inside; auto using permut_sym, permut_refl.
|
||||
Qed.
|
||||
|
||||
Lemma permut_sym_app :
|
||||
forall l1 l2, permutation (l1 ++ l2) (l2 ++ l1).
|
||||
Proof.
|
||||
intros l1 l2;
|
||||
unfold permutation, meq;
|
||||
intro a; do 2 rewrite list_contents_app; simpl;
|
||||
auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma permut_rev :
|
||||
forall l, permutation l (rev l).
|
||||
Proof.
|
||||
induction l.
|
||||
simpl; trivial using permut_refl.
|
||||
simpl.
|
||||
apply permut_add_cons_inside.
|
||||
rewrite <- app_nil_end. trivial.
|
||||
Qed.
|
||||
|
||||
(** * Some inversion results. *)
|
||||
Lemma permut_conv_inv :
|
||||
forall e l1 l2, permutation (e :: l1) (e :: l2) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros e l1 l2; unfold permutation, meq; simpl; intros H a;
|
||||
generalize (H a); apply plus_reg_l.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app_inv1 :
|
||||
forall l l1 l2, permutation (l1 ++ l) (l2 ++ l) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros l l1 l2; unfold permutation, meq; simpl;
|
||||
intros H a; generalize (H a); clear H.
|
||||
do 2 rewrite list_contents_app.
|
||||
simpl.
|
||||
intros; apply plus_reg_l with (multiplicity (list_contents l) a).
|
||||
rewrite plus_comm; rewrite H; rewrite plus_comm.
|
||||
trivial.
|
||||
Qed.
|
||||
|
||||
(** we can use [multiplicity] to define [InA] and [NoDupA]. *)
|
||||
|
||||
Fact if_eqA_then : forall a a' (B:Type)(b b':B),
|
||||
eqA a a' -> (if eqA_dec a a' then b else b') = b.
|
||||
Proof.
|
||||
intros. destruct eqA_dec as [_|NEQ]; auto.
|
||||
contradict NEQ; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_app_inv2 :
|
||||
forall l l1 l2, permutation (l ++ l1) (l ++ l2) -> permutation l1 l2.
|
||||
Proof.
|
||||
intros l l1 l2; unfold permutation, meq; simpl;
|
||||
intros H a; generalize (H a); clear H.
|
||||
do 2 rewrite list_contents_app.
|
||||
simpl.
|
||||
intros; apply plus_reg_l with (multiplicity (list_contents l) a).
|
||||
trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_remove_hd_eq :
|
||||
forall l l1 l2 a b, eqA a b ->
|
||||
permutation (a :: l) (l1 ++ b :: l2) -> permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
unfold permutation, meq; simpl; intros l l1 l2 a b Heq H a0.
|
||||
specialize H with a0.
|
||||
rewrite list_contents_app in *; simpl in *.
|
||||
apply plus_reg_l with (if eqA_dec a a0 then 1 else 0).
|
||||
rewrite H; clear H.
|
||||
symmetry; rewrite plus_comm, <- ! plus_assoc; f_equal.
|
||||
rewrite plus_comm.
|
||||
destruct (eqA_dec a a0) as [Ha|Ha]; rewrite Heq in Ha;
|
||||
decide (eqA_dec b a0) with Ha; reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma permut_remove_hd :
|
||||
forall l l1 l2 a,
|
||||
permutation (a :: l) (l1 ++ a :: l2) -> permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
eauto using permut_remove_hd_eq, Equivalence_Reflexive.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_else : forall a a' (B:Type)(b b':B),
|
||||
~eqA a a' -> (if eqA_dec a a' then b else b') = b'.
|
||||
Proof.
|
||||
intros. decide (eqA_dec a a') with H; auto.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_refl : forall a (B:Type)(b b':B),
|
||||
(if eqA_dec a a then b else b') = b.
|
||||
Proof.
|
||||
intros; apply (decide_left (eqA_dec a a)); auto with *.
|
||||
Qed.
|
||||
|
||||
(** PL: Not usable in a rewrite without a preliminary [change]. *)
|
||||
|
||||
Global Instance if_eqA (B:Type)(b b':B) :
|
||||
Proper (eqA==>eqA==>@eq _) (fun x y => if eqA_dec x y then b else b').
|
||||
Proof.
|
||||
intros x x' Hxx' y y' Hyy'.
|
||||
intros; destruct (eqA_dec x y) as [H|H];
|
||||
destruct (eqA_dec x' y') as [H'|H']; auto.
|
||||
contradict H'; transitivity x; auto with *; transitivity y; auto with *.
|
||||
contradict H; transitivity x'; auto with *; transitivity y'; auto with *.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_rewrite_l : forall a1 a1' a2 (B:Type)(b b':B),
|
||||
eqA a1 a1' -> (if eqA_dec a1 a2 then b else b') =
|
||||
(if eqA_dec a1' a2 then b else b').
|
||||
Proof.
|
||||
intros; destruct (eqA_dec a1 a2) as [A1|A1];
|
||||
destruct (eqA_dec a1' a2) as [A1'|A1']; auto.
|
||||
contradict A1'; transitivity a1; eauto with *.
|
||||
contradict A1; transitivity a1'; eauto with *.
|
||||
Qed.
|
||||
|
||||
Fact if_eqA_rewrite_r : forall a1 a2 a2' (B:Type)(b b':B),
|
||||
eqA a2 a2' -> (if eqA_dec a1 a2 then b else b') =
|
||||
(if eqA_dec a1 a2' then b else b').
|
||||
Proof.
|
||||
intros; destruct (eqA_dec a1 a2) as [A2|A2];
|
||||
destruct (eqA_dec a1 a2') as [A2'|A2']; auto.
|
||||
contradict A2'; transitivity a2; eauto with *.
|
||||
contradict A2; transitivity a2'; eauto with *.
|
||||
Qed.
|
||||
|
||||
|
||||
Global Instance multiplicity_eqA (l:list A) :
|
||||
Proper (eqA==>@eq _) (multiplicity (list_contents l)).
|
||||
Proof.
|
||||
intros x x' Hxx'.
|
||||
induction l as [|y l Hl]; simpl; auto.
|
||||
rewrite (@if_eqA_rewrite_r y x x'); auto.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA :
|
||||
forall l a, InA eqA a l <-> 0 < multiplicity (list_contents l) a.
|
||||
Proof.
|
||||
induction l.
|
||||
simpl.
|
||||
split; inversion 1.
|
||||
simpl.
|
||||
intros a'; split; intros H. inversion_clear H.
|
||||
apply (decide_left (eqA_dec a a')); auto with *.
|
||||
destruct (eqA_dec a a'); auto with *. simpl; rewrite <- IHl; auto.
|
||||
destruct (eqA_dec a a'); auto with *. right. rewrite IHl; auto.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA_O :
|
||||
forall l a, ~ InA eqA a l -> multiplicity (list_contents l) a = 0.
|
||||
Proof.
|
||||
intros l a; rewrite multiplicity_InA;
|
||||
destruct (multiplicity (list_contents l) a); auto with arith.
|
||||
destruct 1; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_InA_S :
|
||||
forall l a, InA eqA a l -> multiplicity (list_contents l) a >= 1.
|
||||
Proof.
|
||||
intros l a; rewrite multiplicity_InA; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma multiplicity_NoDupA : forall l,
|
||||
NoDupA eqA l <-> (forall a, multiplicity (list_contents l) a <= 1).
|
||||
Proof.
|
||||
induction l.
|
||||
simpl.
|
||||
split; auto with arith.
|
||||
split; simpl.
|
||||
inversion_clear 1.
|
||||
rewrite IHl in H1.
|
||||
intros; destruct (eqA_dec a a0) as [EQ|NEQ]; simpl; auto with *.
|
||||
rewrite <- EQ.
|
||||
rewrite multiplicity_InA_O; auto.
|
||||
intros; constructor.
|
||||
rewrite multiplicity_InA.
|
||||
specialize (H a).
|
||||
rewrite if_eqA_refl in H.
|
||||
clear IHl; omega.
|
||||
rewrite IHl; intros.
|
||||
specialize (H a0). omega.
|
||||
Qed.
|
||||
|
||||
(** Permutation is compatible with InA. *)
|
||||
Lemma permut_InA_InA :
|
||||
forall l1 l2 e, permutation l1 l2 -> InA eqA e l1 -> InA eqA e l2.
|
||||
Proof.
|
||||
intros l1 l2 e.
|
||||
do 2 rewrite multiplicity_InA.
|
||||
unfold permutation, meq.
|
||||
intros H;rewrite H; auto.
|
||||
Qed.
|
||||
|
||||
Lemma permut_cons_InA :
|
||||
forall l1 l2 e, permutation (e :: l1) l2 -> InA eqA e l2.
|
||||
Proof.
|
||||
intros; apply (permut_InA_InA (e:=e) H); auto with *.
|
||||
Qed.
|
||||
|
||||
(** Permutation of an empty list. *)
|
||||
Lemma permut_nil :
|
||||
forall l, permutation l [] -> l = [].
|
||||
Proof.
|
||||
intro l; destruct l as [ | e l ]; trivial.
|
||||
assert (InA eqA e (e::l)) by (auto with *).
|
||||
intro Abs; generalize (permut_InA_InA Abs H).
|
||||
inversion 1.
|
||||
Qed.
|
||||
|
||||
(** Permutation for short lists. *)
|
||||
|
||||
Lemma permut_length_1:
|
||||
forall a b, permutation [a] [b] -> eqA a b.
|
||||
Proof.
|
||||
intros a b; unfold permutation, meq.
|
||||
intro P; specialize (P b); simpl in *.
|
||||
rewrite if_eqA_refl in *.
|
||||
destruct (eqA_dec a b); simpl; auto; discriminate.
|
||||
Qed.
|
||||
|
||||
Lemma permut_length_2 :
|
||||
forall a1 b1 a2 b2, permutation [a1; b1] [a2; b2] ->
|
||||
(eqA a1 a2) /\ (eqA b1 b2) \/ (eqA a1 b2) /\ (eqA a2 b1).
|
||||
Proof.
|
||||
intros a1 b1 a2 b2 P.
|
||||
assert (H:=permut_cons_InA P).
|
||||
inversion_clear H.
|
||||
left; split; auto.
|
||||
apply permut_length_1.
|
||||
red; red; intros.
|
||||
specialize (P a). simpl in *.
|
||||
rewrite (@if_eqA_rewrite_l a1 a2 a) in P by auto. omega.
|
||||
right.
|
||||
inversion_clear H0; [|inversion H].
|
||||
split; auto.
|
||||
apply permut_length_1.
|
||||
red; red; intros.
|
||||
specialize (P a); simpl in *.
|
||||
rewrite (@if_eqA_rewrite_l a1 b2 a) in P by auto. omega.
|
||||
Qed.
|
||||
|
||||
(** Permutation is compatible with length. *)
|
||||
Lemma permut_length :
|
||||
forall l1 l2, permutation l1 l2 -> length l1 = length l2.
|
||||
Proof.
|
||||
induction l1; intros l2 H.
|
||||
rewrite (permut_nil (permut_sym H)); auto.
|
||||
assert (H0:=permut_cons_InA H).
|
||||
destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
|
||||
subst l2.
|
||||
rewrite app_length.
|
||||
simpl; rewrite <- plus_n_Sm; f_equal.
|
||||
rewrite <- app_length.
|
||||
apply IHl1.
|
||||
apply permut_remove_hd with b.
|
||||
apply permut_trans with (a::l1); auto.
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
rewrite (@if_eqA_rewrite_l a b a0); auto.
|
||||
Qed.
|
||||
|
||||
Lemma NoDupA_equivlistA_permut :
|
||||
forall l l', NoDupA eqA l -> NoDupA eqA l' ->
|
||||
equivlistA eqA l l' -> permutation l l'.
|
||||
Proof.
|
||||
intros.
|
||||
red; unfold meq; intros.
|
||||
rewrite multiplicity_NoDupA in H, H0.
|
||||
generalize (H a) (H0 a) (H1 a); clear H H0 H1.
|
||||
do 2 rewrite multiplicity_InA.
|
||||
destruct 3; omega.
|
||||
Qed.
|
||||
|
||||
End Permut.
|
||||
|
||||
Section Permut_map.
|
||||
|
||||
Variables A B : Type.
|
||||
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
|
||||
Variable eqB : B->B->Prop.
|
||||
Hypothesis eqB_dec : forall x y:B, { eqB x y }+{ ~eqB x y }.
|
||||
Hypothesis eqB_trans : Transitive eqB.
|
||||
|
||||
(** Permutation is compatible with map. *)
|
||||
|
||||
Lemma permut_map :
|
||||
forall f,
|
||||
(Proper (eqA==>eqB) f) ->
|
||||
forall l1 l2, permutation _ eqA_dec l1 l2 ->
|
||||
permutation _ eqB_dec (map f l1) (map f l2).
|
||||
Proof.
|
||||
intros f; induction l1.
|
||||
intros l2 P; rewrite (permut_nil eqA_equiv (permut_sym P)); apply permut_refl.
|
||||
intros l2 P.
|
||||
simpl.
|
||||
assert (H0:=permut_cons_InA eqA_equiv P).
|
||||
destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
|
||||
subst l2.
|
||||
rewrite map_app.
|
||||
simpl.
|
||||
apply permut_trans with (f b :: map f l1).
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
destruct (eqB_dec (f b) a0) as [H2|H2];
|
||||
destruct (eqB_dec (f a) a0) as [H3|H3]; auto.
|
||||
destruct H3; transitivity (f b); auto with *.
|
||||
destruct H2; transitivity (f a); auto with *.
|
||||
apply permut_add_cons_inside.
|
||||
rewrite <- map_app.
|
||||
apply IHl1; auto.
|
||||
apply permut_remove_hd with b; trivial.
|
||||
apply permut_trans with (a::l1); auto.
|
||||
revert H1; unfold permutation, meq; simpl.
|
||||
intros; f_equal; auto.
|
||||
rewrite (@if_eqA_rewrite_l _ _ eqA_equiv eqA_dec a b a0); auto.
|
||||
Qed.
|
||||
|
||||
End Permut_map.
|
||||
|
||||
Require Import Permutation.
|
||||
|
||||
Section Permut_permut.
|
||||
|
||||
Variable A : Type.
|
||||
|
||||
Variable eqA : relation A.
|
||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
|
||||
Hypothesis eqA_equiv : Equivalence eqA.
|
||||
|
||||
Lemma Permutation_impl_permutation : forall l l',
|
||||
Permutation l l' -> permutation _ eqA_dec l l'.
|
||||
Proof.
|
||||
induction 1.
|
||||
apply permut_refl.
|
||||
apply permut_cons; auto using Equivalence_Reflexive.
|
||||
change (x :: y :: l) with ([x] ++ y :: l);
|
||||
apply permut_add_cons_inside; simpl;
|
||||
apply permut_cons_eq; auto using Equivalence_Reflexive, permut_refl.
|
||||
apply permut_trans with l'; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_eqA : forall l l', Forall2 eqA l l' -> permutation _ eqA_dec l l'.
|
||||
Proof.
|
||||
induction 1.
|
||||
apply permut_refl.
|
||||
apply permut_cons_eq; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permutation_Permutation : forall l l',
|
||||
permutation _ eqA_dec l l' <->
|
||||
exists l'', Permutation l l'' /\ Forall2 eqA l'' l'.
|
||||
Proof.
|
||||
split; intro H.
|
||||
(* -> *)
|
||||
induction l in l', H |- *.
|
||||
exists []; apply permut_sym, permut_nil in H as ->; auto using Forall2.
|
||||
pose proof H as H'.
|
||||
apply permut_cons_InA, InA_split in H
|
||||
as (l1 & y & l2 & Heq & ->); trivial.
|
||||
apply permut_remove_hd_eq, IHl in H'
|
||||
as (l'' & IHP & IHA); clear IHl; trivial.
|
||||
apply Forall2_app_inv_r in IHA as (l1'' & l2'' & Hl1 & Hl2 & ->).
|
||||
exists (l1'' ++ a :: l2''); split.
|
||||
apply Permutation_cons_app; trivial.
|
||||
apply Forall2_app, Forall2_cons; trivial.
|
||||
(* <- *)
|
||||
destruct H as (l'' & H & Heq).
|
||||
apply permut_trans with l''.
|
||||
apply Permutation_impl_permutation; trivial.
|
||||
apply permut_eqA; trivial.
|
||||
Qed.
|
||||
|
||||
End Permut_permut.
|
||||
|
||||
(* begin hide *)
|
||||
(** For compatibility *)
|
||||
Notation permut_right := permut_cons (only parsing).
|
||||
Notation permut_tran := permut_trans (only parsing).
|
||||
(* end hide *)
|
||||
@@ -1,632 +0,0 @@
|
||||
(* Adapted in May 2006 by Jean-Marc Notin from initial contents by
|
||||
Laurent Thery (Huffmann contribution, October 2003) *)
|
||||
|
||||
Require Import List Setoid Compare_dec Morphisms.
|
||||
Import ListNotations. (* For notations [] and [a;b;c] *)
|
||||
Set Implicit Arguments.
|
||||
|
||||
Section Permutation.
|
||||
|
||||
Variable A:Type.
|
||||
|
||||
Inductive Permutation : list A -> list A -> Prop :=
|
||||
| perm_nil: Permutation [] []
|
||||
| perm_skip x l l' : Permutation l l' -> Permutation (x::l) (x::l')
|
||||
| perm_swap x y l : Permutation (y::x::l) (x::y::l)
|
||||
| perm_trans l l' l'' :
|
||||
Permutation l l' -> Permutation l' l'' -> Permutation l l''.
|
||||
|
||||
Local Hint Constructors Permutation.
|
||||
|
||||
(** Some facts about [Permutation] *)
|
||||
|
||||
Theorem Permutation_nil : forall (l : list A), Permutation [] l -> l = [].
|
||||
Proof.
|
||||
intros l HF.
|
||||
remember (@nil A) as m in HF.
|
||||
induction HF; discriminate || auto.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_nil_cons : forall (l : list A) (x : A),
|
||||
~ Permutation nil (x::l).
|
||||
Proof.
|
||||
intros l x HF.
|
||||
apply Permutation_nil in HF; discriminate.
|
||||
Qed.
|
||||
|
||||
(** Permutation over lists is an equivalence relation *)
|
||||
|
||||
Theorem Permutation_refl : forall l : list A, Permutation l l.
|
||||
Proof.
|
||||
induction l; constructor. exact IHl.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_sym : forall l l' : list A,
|
||||
Permutation l l' -> Permutation l' l.
|
||||
Proof.
|
||||
intros l l' Hperm; induction Hperm; auto.
|
||||
apply perm_trans with (l':=l'); assumption.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_trans : forall l l' l'' : list A,
|
||||
Permutation l l' -> Permutation l' l'' -> Permutation l l''.
|
||||
Proof.
|
||||
exact perm_trans.
|
||||
Qed.
|
||||
|
||||
End Permutation.
|
||||
|
||||
Hint Resolve Permutation_refl perm_nil perm_skip.
|
||||
|
||||
(* These hints do not reduce the size of the problem to solve and they
must be used with care to avoid combinatorial explosions *)
|
||||
|
||||
Local Hint Resolve perm_swap perm_trans.
|
||||
Local Hint Resolve Permutation_sym Permutation_trans.
|
||||
|
||||
(* This provides reflexivity, symmetry and transitivity, as well as rewriting
on the morphisms to come *)
|
||||
|
||||
Instance Permutation_Equivalence A : Equivalence (@Permutation A) | 10 := {
|
||||
Equivalence_Reflexive := @Permutation_refl A ;
|
||||
Equivalence_Symmetric := @Permutation_sym A ;
|
||||
Equivalence_Transitive := @Permutation_trans A }.
|
||||
|
||||
Instance Permutation_cons A :
|
||||
Proper (Logic.eq ==> @Permutation A ==> @Permutation A) (@cons A) | 10.
|
||||
Proof.
|
||||
repeat intro; subst; auto using perm_skip.
|
||||
Qed.
|
||||
|
||||
Section Permutation_properties.
|
||||
|
||||
Variable A:Type.
|
||||
|
||||
Implicit Types a b : A.
|
||||
Implicit Types l m : list A.
|
||||
|
||||
(** Compatibility with others operations on lists *)
|
||||
|
||||
Theorem Permutation_in : forall (l l' : list A) (x : A),
|
||||
Permutation l l' -> In x l -> In x l'.
|
||||
Proof.
|
||||
intros l l' x Hperm; induction Hperm; simpl; tauto.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_in' :
|
||||
Proper (Logic.eq ==> @Permutation A ==> iff) (@In A) | 10.
|
||||
Proof.
|
||||
repeat red; intros; subst; eauto using Permutation_in.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_app_tail : forall (l l' tl : list A),
|
||||
Permutation l l' -> Permutation (l++tl) (l'++tl).
|
||||
Proof.
|
||||
intros l l' tl Hperm; induction Hperm as [|x l l'|x y l|l l' l'']; simpl; auto.
|
||||
eapply Permutation_trans with (l':=l'++tl); trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_app_head : forall (l tl tl' : list A),
|
||||
Permutation tl tl' -> Permutation (l++tl) (l++tl').
|
||||
Proof.
|
||||
intros l tl tl' Hperm; induction l;
|
||||
[trivial | repeat rewrite <- app_comm_cons; constructor; assumption].
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app : forall (l m l' m' : list A),
|
||||
Permutation l l' -> Permutation m m' -> Permutation (l++m) (l'++m').
|
||||
Proof.
|
||||
intros l m l' m' Hpermll' Hpermmm';
|
||||
induction Hpermll' as [|x l l'|x y l|l l' l''];
|
||||
repeat rewrite <- app_comm_cons; auto.
|
||||
apply Permutation_trans with (l' := (x :: y :: l ++ m));
|
||||
[idtac | repeat rewrite app_comm_cons; apply Permutation_app_head]; trivial.
|
||||
apply Permutation_trans with (l' := (l' ++ m')); try assumption.
|
||||
apply Permutation_app_tail; assumption.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_app' :
|
||||
Proper (@Permutation A ==> @Permutation A ==> @Permutation A) (@app A) | 10.
|
||||
Proof.
|
||||
repeat intro; now apply Permutation_app.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_add_inside : forall a (l l' tl tl' : list A),
|
||||
Permutation l l' -> Permutation tl tl' ->
|
||||
Permutation (l ++ a :: tl) (l' ++ a :: tl').
|
||||
Proof.
|
||||
intros; apply Permutation_app; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_cons_append : forall (l : list A) x,
|
||||
Permutation (x :: l) (l ++ x :: nil).
|
||||
Proof. induction l; intros; auto. simpl. rewrite <- IHl; auto. Qed.
|
||||
Local Hint Resolve Permutation_cons_append.
|
||||
|
||||
Theorem Permutation_app_comm : forall (l l' : list A),
|
||||
Permutation (l ++ l') (l' ++ l).
|
||||
Proof.
|
||||
induction l as [|x l]; simpl; intro l'.
|
||||
rewrite app_nil_r; trivial. rewrite IHl.
|
||||
rewrite app_comm_cons, Permutation_cons_append.
|
||||
now rewrite <- app_assoc.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_app_comm.
|
||||
|
||||
Theorem Permutation_cons_app : forall (l l1 l2:list A) a,
|
||||
Permutation l (l1 ++ l2) -> Permutation (a :: l) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
intros l l1 l2 a H. rewrite H.
|
||||
rewrite app_comm_cons, Permutation_cons_append.
|
||||
now rewrite <- app_assoc.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_cons_app.
|
||||
|
||||
Theorem Permutation_middle : forall (l1 l2:list A) a,
|
||||
Permutation (a :: l1 ++ l2) (l1 ++ a :: l2).
|
||||
Proof.
|
||||
auto.
|
||||
Qed.
|
||||
Local Hint Resolve Permutation_middle.
|
||||
|
||||
Theorem Permutation_rev : forall (l : list A), Permutation l (rev l).
|
||||
Proof.
|
||||
induction l as [| x l]; simpl; trivial. now rewrite IHl at 1.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_rev' :
|
||||
Proper (@Permutation A ==> @Permutation A) (@rev A) | 10.
|
||||
Proof.
|
||||
repeat intro; now rewrite <- 2 Permutation_rev.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_length : forall (l l' : list A),
|
||||
Permutation l l' -> length l = length l'.
|
||||
Proof.
|
||||
intros l l' Hperm; induction Hperm; simpl; auto. now transitivity (length l').
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_length' :
|
||||
Proper (@Permutation A ==> Logic.eq) (@length A) | 10.
|
||||
Proof.
|
||||
exact Permutation_length.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_ind_bis :
|
||||
forall P : list A -> list A -> Prop,
|
||||
P [] [] ->
|
||||
(forall x l l', Permutation l l' -> P l l' -> P (x :: l) (x :: l')) ->
|
||||
(forall x y l l', Permutation l l' -> P l l' -> P (y :: x :: l) (x :: y :: l')) ->
|
||||
(forall l l' l'', Permutation l l' -> P l l' -> Permutation l' l'' -> P l' l'' -> P l l'') ->
|
||||
forall l l', Permutation l l' -> P l l'.
|
||||
Proof.
|
||||
intros P Hnil Hskip Hswap Htrans.
|
||||
induction 1; auto.
|
||||
apply Htrans with (x::y::l); auto.
|
||||
apply Hswap; auto.
|
||||
induction l; auto.
|
||||
apply Hskip; auto.
|
||||
apply Hskip; auto.
|
||||
induction l; auto.
|
||||
eauto.
|
||||
Qed.
|
||||
|
||||
Ltac break_list l x l' H :=
|
||||
destruct l as [|x l']; simpl in *;
|
||||
injection H; intros; subst; clear H.
|
||||
|
||||
Theorem Permutation_nil_app_cons : forall (l l' : list A) (x : A),
|
||||
~ Permutation nil (l++x::l').
|
||||
Proof.
|
||||
intros l l' x HF.
|
||||
apply Permutation_nil in HF. destruct l; discriminate.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv : forall (l1 l2 l3 l4:list A) a,
|
||||
Permutation (l1++a::l2) (l3++a::l4) -> Permutation (l1++l2) (l3 ++ l4).
|
||||
Proof.
|
||||
intros l1 l2 l3 l4 a; revert l1 l2 l3 l4.
|
||||
set (P l l' :=
|
||||
forall l1 l2 l3 l4, l=l1++a::l2 -> l'=l3++a::l4 ->
|
||||
Permutation (l1++l2) (l3++l4)).
|
||||
cut (forall l l', Permutation l l' -> P l l').
|
||||
intros H; intros; eapply H; eauto.
|
||||
apply (Permutation_ind_bis P); unfold P; clear P.
|
||||
- (* nil *)
|
||||
intros; now destruct l1.
|
||||
- (* skip *)
|
||||
intros x l l' H IH; intros.
|
||||
break_list l1 b l1' H0; break_list l3 c l3' H1.
|
||||
auto.
|
||||
now rewrite H.
|
||||
now rewrite <- H.
|
||||
now rewrite (IH _ _ _ _ eq_refl eq_refl).
|
||||
- (* swap *)
|
||||
intros x y l l' Hp IH; intros.
|
||||
break_list l1 b l1' H; break_list l3 c l3' H0.
|
||||
auto.
|
||||
break_list l3' b l3'' H.
|
||||
auto.
|
||||
constructor. now rewrite Permutation_middle.
|
||||
break_list l1' c l1'' H1.
|
||||
auto.
|
||||
constructor. now rewrite Permutation_middle.
|
||||
break_list l3' d l3'' H; break_list l1' e l1'' H1.
|
||||
auto.
|
||||
rewrite perm_swap. constructor. now rewrite Permutation_middle.
|
||||
rewrite perm_swap. constructor. now rewrite Permutation_middle.
|
||||
now rewrite perm_swap, (IH _ _ _ _ eq_refl eq_refl).
|
||||
- (*trans*)
|
||||
intros.
|
||||
destruct (In_split a l') as (l'1,(l'2,H6)).
|
||||
rewrite <- H.
|
||||
subst l.
|
||||
apply in_or_app; right; red; auto.
|
||||
apply perm_trans with (l'1++l'2).
|
||||
apply (H0 _ _ _ _ H3 H6).
|
||||
apply (H2 _ _ _ _ H6 H4).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_cons_inv l l' a :
|
||||
Permutation (a::l) (a::l') -> Permutation l l'.
|
||||
Proof.
|
||||
intro H; exact (Permutation_app_inv [] l [] l' a H).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_cons_app_inv l l1 l2 a :
|
||||
Permutation (a :: l) (l1 ++ a :: l2) -> Permutation l (l1 ++ l2).
|
||||
Proof.
|
||||
intro H; exact (Permutation_app_inv [] l l1 l2 a H).
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv_l : forall l l1 l2,
|
||||
Permutation (l ++ l1) (l ++ l2) -> Permutation l1 l2.
|
||||
Proof.
|
||||
induction l; simpl; auto.
|
||||
intros.
|
||||
apply IHl.
|
||||
apply Permutation_cons_inv with a; auto.
|
||||
Qed.
|
||||
|
||||
Theorem Permutation_app_inv_r : forall l l1 l2,
|
||||
Permutation (l1 ++ l) (l2 ++ l) -> Permutation l1 l2.
|
||||
Proof.
|
||||
induction l.
|
||||
intros l1 l2; do 2 rewrite app_nil_r; auto.
|
||||
intros.
|
||||
apply IHl.
|
||||
apply Permutation_app_inv with a; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_1_inv: forall a l, Permutation [a] l -> l = [a].
|
||||
Proof.
|
||||
intros a l H; remember [a] as m in H.
|
||||
induction H; try (injection Heqm as -> ->; clear Heqm);
|
||||
discriminate || auto.
|
||||
apply Permutation_nil in H as ->; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_1: forall a b, Permutation [a] [b] -> a = b.
|
||||
Proof.
|
||||
intros a b H.
|
||||
apply Permutation_length_1_inv in H; injection H as ->; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_2_inv :
|
||||
forall a1 a2 l, Permutation [a1;a2] l -> l = [a1;a2] \/ l = [a2;a1].
|
||||
Proof.
|
||||
intros a1 a2 l H; remember [a1;a2] as m in H.
|
||||
revert a1 a2 Heqm.
|
||||
induction H; intros; try (injection Heqm; intros; subst; clear Heqm);
|
||||
discriminate || (try tauto).
|
||||
apply Permutation_length_1_inv in H as ->; left; auto.
|
||||
apply IHPermutation1 in Heqm as [H1|H1]; apply IHPermutation2 in H1 as ();
|
||||
auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_length_2 :
|
||||
forall a1 a2 b1 b2, Permutation [a1;a2] [b1;b2] ->
|
||||
a1 = b1 /\ a2 = b2 \/ a1 = b2 /\ a2 = b1.
|
||||
Proof.
|
||||
intros a1 b1 a2 b2 H.
|
||||
apply Permutation_length_2_inv in H as [H|H]; injection H as -> ->; auto.
|
||||
Qed.
|
||||
|
||||
Let in_middle l l1 l2 (a:A) : l = l1 ++ a :: l2 ->
|
||||
forall x, In x l <-> a = x \/ In x (l1++l2).
|
||||
Proof.
|
||||
intros; subst; rewrite !in_app_iff; simpl. tauto.
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_cardinal_incl (l l' : list A) : NoDup l -> NoDup l' ->
|
||||
length l = length l' -> incl l l' -> incl l' l.
|
||||
Proof.
|
||||
intros N. revert l'. induction N as [|a l Hal Hl IH].
|
||||
- destruct l'; now auto.
|
||||
- intros l' Hl' E H x Hx.
|
||||
assert (Ha : In a l') by (apply H; simpl; auto).
|
||||
destruct (in_split _ _ Ha) as (l1 & l2 & H12). clear Ha.
|
||||
rewrite in_middle in Hx; eauto.
|
||||
destruct Hx as [Hx|Hx]; [left|right]; auto.
|
||||
apply (IH (l1++l2)); auto.
|
||||
* apply NoDup_remove_1 with a; rewrite <- H12; auto.
|
||||
* apply eq_add_S.
|
||||
simpl in E; rewrite E, H12, !app_length; simpl; auto with arith.
|
||||
* intros y Hy. assert (Hy' : In y l') by (apply H; simpl; auto).
|
||||
rewrite in_middle in Hy'; eauto.
|
||||
destruct Hy'; auto. subst y; intuition.
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_Permutation l l' : NoDup l -> NoDup l' ->
|
||||
(forall x:A, In x l <-> In x l') -> Permutation l l'.
|
||||
Proof.
|
||||
intros N. revert l'. induction N as [|a l Hal Hl IH].
|
||||
- destruct l'; simpl; auto.
|
||||
intros Hl' H. exfalso. rewrite (H a); auto.
|
||||
- intros l' Hl' H.
|
||||
assert (Ha : In a l') by (apply H; simpl; auto).
|
||||
destruct (In_split _ _ Ha) as (l1 & l2 & H12).
|
||||
rewrite H12.
|
||||
apply Permutation_cons_app.
|
||||
apply IH; auto.
|
||||
* apply NoDup_remove_1 with a; rewrite <- H12; auto.
|
||||
* intro x. split; intros Hx.
|
||||
+ assert (Hx' : In x l') by (apply H; simpl; auto).
|
||||
rewrite in_middle in Hx'; eauto.
|
||||
destruct Hx'; auto. subst; intuition.
|
||||
+ assert (Hx' : In x l') by (rewrite (in_middle l1 l2 a); eauto).
|
||||
rewrite <- H in Hx'. destruct Hx'; auto.
|
||||
subst. destruct (NoDup_remove_2 _ _ _ Hl' Hx).
|
||||
Qed.
|
||||
|
||||
Lemma NoDup_Permutation_bis l l' : NoDup l -> NoDup l' ->
|
||||
length l = length l' -> incl l l' -> Permutation l l'.
|
||||
Proof.
|
||||
intros. apply NoDup_Permutation; auto.
|
||||
split; auto. apply NoDup_cardinal_incl; auto.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_NoDup l l' : Permutation l l' -> NoDup l -> NoDup l'.
|
||||
Proof.
|
||||
induction 1; auto.
|
||||
* inversion_clear 1; constructor; eauto using Permutation_in.
|
||||
* inversion_clear 1 as [|? ? H1 H2]. inversion_clear H2; simpl in *.
|
||||
constructor. simpl; intuition. constructor; intuition.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_NoDup' :
|
||||
Proper (@Permutation A ==> iff) (@NoDup A) | 10.
|
||||
Proof.
|
||||
repeat red; eauto using Permutation_NoDup.
|
||||
Qed.
|
||||
|
||||
End Permutation_properties.
|
||||
|
||||
Section Permutation_map.
|
||||
|
||||
Variable A B : Type.
|
||||
Variable f : A -> B.
|
||||
|
||||
Lemma Permutation_map l l' :
|
||||
Permutation l l' -> Permutation (map f l) (map f l').
|
||||
Proof.
|
||||
induction 1; simpl; eauto.
|
||||
Qed.
|
||||
|
||||
Global Instance Permutation_map' :
|
||||
Proper (@Permutation A ==> @Permutation B) (map f) | 10.
|
||||
Proof.
|
||||
exact Permutation_map.
|
||||
Qed.
|
||||
|
||||
End Permutation_map.
|
||||
|
||||
Section Injection.
|
||||
|
||||
Definition injective {A B} (f : A->B) :=
|
||||
forall x y, f x = f y -> x = y.
|
||||
|
||||
Lemma injective_map_NoDup {A B} (f:A->B) (l:list A) :
|
||||
injective f -> NoDup l -> NoDup (map f l).
|
||||
Proof.
|
||||
intros Hf. induction 1 as [|x l Hx Hl IH]; simpl; constructor; trivial.
|
||||
rewrite in_map_iff. intros (y & Hy & Hy'). apply Hf in Hy. now subst.
|
||||
Qed.
|
||||
|
||||
Lemma injective_bounded_surjective n f :
|
||||
injective f ->
|
||||
(forall x, x < n -> f x < n) ->
|
||||
(forall y, y < n -> exists x, x < n /\ f x = y).
|
||||
Proof.
|
||||
intros Hf H.
|
||||
set (l := seq 0 n).
|
||||
assert (P : incl (map f l) l).
|
||||
{ intros x. rewrite in_map_iff. intros (y & <- & Hy').
|
||||
unfold l in *. rewrite in_seq in *. simpl in *.
|
||||
destruct Hy' as (_,Hy'). auto with arith. }
|
||||
assert (P' : incl l (map f l)).
|
||||
{ unfold l.
|
||||
apply NoDup_cardinal_incl; auto using injective_map_NoDup, seq_NoDup.
|
||||
now rewrite map_length. }
|
||||
intros x Hx.
|
||||
assert (Hx' : In x l) by (unfold l; rewrite in_seq; auto with arith).
|
||||
apply P' in Hx'.
|
||||
rewrite in_map_iff in Hx'. destruct Hx' as (y & Hy & Hy').
|
||||
exists y; split; auto. unfold l in *; rewrite in_seq in Hy'.
|
||||
destruct Hy'; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma nat_bijection_Permutation n f :
|
||||
injective f -> (forall x, x < n -> f x < n) ->
|
||||
let l := seq 0 n in Permutation (map f l) l.
|
||||
Proof.
|
||||
intros Hf BD.
|
||||
apply NoDup_Permutation_bis; auto using injective_map_NoDup, seq_NoDup.
|
||||
* now rewrite map_length.
|
||||
* intros x. rewrite in_map_iff. intros (y & <- & Hy').
|
||||
rewrite in_seq in *. simpl in *.
|
||||
destruct Hy' as (_,Hy'). auto with arith.
|
||||
Qed.
|
||||
|
||||
End Injection.
|
||||
|
||||
Section Permutation_alt.
|
||||
Variable A:Type.
|
||||
Implicit Type a : A.
|
||||
Implicit Type l : list A.
|
||||
|
||||
(** Alternative characterization of permutation
|
||||
via [nth_error] and [nth] *)
|
||||
|
||||
Let adapt f n :=
|
||||
let m := f (S n) in if le_lt_dec m (f 0) then m else pred m.
|
||||
|
||||
Let adapt_injective f : injective f -> injective (adapt f).
|
||||
Proof.
|
||||
unfold adapt. intros Hf x y EQ.
|
||||
destruct le_lt_dec as [LE|LT]; destruct le_lt_dec as [LE'|LT'].
|
||||
- now apply eq_add_S, Hf.
|
||||
- apply Lt.le_lt_or_eq in LE.
|
||||
destruct LE as [LT|EQ']; [|now apply Hf in EQ'].
|
||||
unfold lt in LT. rewrite EQ in LT.
|
||||
rewrite <- (Lt.S_pred _ _ LT') in LT.
|
||||
elim (Lt.lt_not_le _ _ LT' LT).
|
||||
- apply Lt.le_lt_or_eq in LE'.
|
||||
destruct LE' as [LT'|EQ']; [|now apply Hf in EQ'].
|
||||
unfold lt in LT'. rewrite <- EQ in LT'.
|
||||
rewrite <- (Lt.S_pred _ _ LT) in LT'.
|
||||
elim (Lt.lt_not_le _ _ LT LT').
|
||||
- apply eq_add_S, Hf.
|
||||
now rewrite (Lt.S_pred _ _ LT), (Lt.S_pred _ _ LT'), EQ.
|
||||
Qed.
|
||||
|
||||
Let adapt_ok a l1 l2 f : injective f -> length l1 = f 0 ->
|
||||
forall n, nth_error (l1++a::l2) (f (S n)) = nth_error (l1++l2) (adapt f n).
|
||||
Proof.
|
||||
unfold adapt. intros Hf E n.
|
||||
destruct le_lt_dec as [LE|LT].
|
||||
- apply Lt.le_lt_or_eq in LE.
|
||||
destruct LE as [LT|EQ]; [|now apply Hf in EQ].
|
||||
rewrite <- E in LT.
|
||||
rewrite 2 nth_error_app1; auto.
|
||||
- rewrite (Lt.S_pred _ _ LT) at 1.
|
||||
rewrite <- E, (Lt.S_pred _ _ LT) in LT.
|
||||
rewrite 2 nth_error_app2; auto with arith.
|
||||
rewrite <- Minus.minus_Sn_m; auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth_error l l' :
|
||||
Permutation l l' <->
|
||||
(length l = length l' /\
|
||||
exists f:nat->nat,
|
||||
injective f /\ forall n, nth_error l' n = nth_error l (f n)).
|
||||
Proof.
|
||||
split.
|
||||
{ intros P.
|
||||
split; [now apply Permutation_length|].
|
||||
induction P.
|
||||
- exists (fun n => n).
|
||||
split; try red; auto.
|
||||
- destruct IHP as (f & Hf & Hf').
|
||||
exists (fun n => match n with O => O | S n => S (f n) end).
|
||||
split; try red.
|
||||
* intros [|y] [|z]; simpl; now auto.
|
||||
* intros [|n]; simpl; auto.
|
||||
- exists (fun n => match n with 0 => 1 | 1 => 0 | n => n end).
|
||||
split; try red.
|
||||
* intros [|[|z]] [|[|t]]; simpl; now auto.
|
||||
* intros [|[|n]]; simpl; auto.
|
||||
- destruct IHP1 as (f & Hf & Hf').
|
||||
destruct IHP2 as (g & Hg & Hg').
|
||||
exists (fun n => f (g n)).
|
||||
split; try red.
|
||||
* auto.
|
||||
* intros n. rewrite <- Hf'; auto. }
|
||||
{ revert l. induction l'.
|
||||
- intros [|l] (E & _); now auto.
|
||||
- intros l (E & f & Hf & Hf').
|
||||
simpl in E.
|
||||
assert (Ha : nth_error l (f 0) = Some a)
|
||||
by (symmetry; apply (Hf' 0)).
|
||||
destruct (nth_error_split l (f 0) Ha) as (l1 & l2 & L12 & L1).
|
||||
rewrite L12. rewrite <- Permutation_middle. constructor.
|
||||
apply IHl'; split; [|exists (adapt f); split].
|
||||
* revert E. rewrite L12, !app_length. simpl.
|
||||
rewrite <- plus_n_Sm. now injection 1.
|
||||
* now apply adapt_injective.
|
||||
* intro n. rewrite <- (adapt_ok a), <- L12; auto.
|
||||
apply (Hf' (S n)). }
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth_error_bis l l' :
|
||||
Permutation l l' <->
|
||||
exists f:nat->nat,
|
||||
injective f /\
|
||||
(forall n, n < length l -> f n < length l) /\
|
||||
(forall n, nth_error l' n = nth_error l (f n)).
|
||||
Proof.
|
||||
rewrite Permutation_nth_error; split.
|
||||
- intros (E & f & Hf & Hf').
|
||||
exists f. do 2 (split; trivial).
|
||||
intros n Hn.
|
||||
destruct (Lt.le_or_lt (length l) (f n)) as [LE|LT]; trivial.
|
||||
rewrite <- nth_error_None, <- Hf', nth_error_None, <- E in LE.
|
||||
elim (Lt.lt_not_le _ _ Hn LE).
|
||||
- intros (f & Hf & Hf2 & Hf3); split; [|exists f; auto].
|
||||
assert (H : length l' <= length l') by auto with arith.
|
||||
rewrite <- nth_error_None, Hf3, nth_error_None in H.
|
||||
destruct (Lt.le_or_lt (length l) (length l')) as [LE|LT];
|
||||
[|apply Hf2 in LT; elim (Lt.lt_not_le _ _ LT H)].
|
||||
apply Lt.le_lt_or_eq in LE. destruct LE as [LT|EQ]; trivial.
|
||||
rewrite <- nth_error_Some, Hf3, nth_error_Some in LT.
|
||||
destruct (injective_bounded_surjective Hf Hf2 LT) as (y & Hy & Hy').
|
||||
apply Hf in Hy'. subst y. elim (Lt.lt_irrefl _ Hy).
|
||||
Qed.
|
||||
|
||||
Lemma Permutation_nth l l' d :
|
||||
Permutation l l' <->
|
||||
(let n := length l in
|
||||
length l' = n /\
|
||||
exists f:nat->nat,
|
||||
(forall x, x < n -> f x < n) /\
|
||||
(forall x y, x < n -> y < n -> f x = f y -> x = y) /\
|
||||
(forall x, x < n -> nth x l' d = nth (f x) l d)).
|
||||
Proof.
|
||||
split.
|
||||
- intros H.
|
||||
assert (E := Permutation_length H).
|
||||
split; auto.
|
||||
apply Permutation_nth_error_bis in H.
|
||||
destruct H as (f & Hf & Hf2 & Hf3).
|
||||
exists f. split; [|split]; auto.
|
||||
intros n Hn. rewrite <- 2 nth_default_eq. unfold nth_default.
|
||||
now rewrite Hf3.
|
||||
- intros (E & f & Hf1 & Hf2 & Hf3).
|
||||
rewrite Permutation_nth_error.
|
||||
split; auto.
|
||||
exists (fun n => if le_lt_dec (length l) n then n else f n).
|
||||
split.
|
||||
* intros x y.
|
||||
destruct le_lt_dec as [LE|LT];
|
||||
destruct le_lt_dec as [LE'|LT']; auto.
|
||||
+ apply Hf1 in LT'. intros ->.
|
||||
elim (Lt.lt_irrefl (f y)). eapply Lt.lt_le_trans; eauto.
|
||||
+ apply Hf1 in LT. intros <-.
|
||||
elim (Lt.lt_irrefl (f x)). eapply Lt.lt_le_trans; eauto.
|
||||
* intros n.
|
||||
destruct le_lt_dec as [LE|LT].
|
||||
+ assert (LE' : length l' <= n) by (now rewrite E).
|
||||
rewrite <- nth_error_None in LE, LE'. congruence.
|
||||
+ assert (LT' : n < length l') by (now rewrite E).
|
||||
specialize (Hf3 n LT). rewrite <- 2 nth_default_eq in Hf3.
|
||||
unfold nth_default in Hf3.
|
||||
apply Hf1 in LT.
|
||||
rewrite <- nth_error_Some in LT, LT'.
|
||||
do 2 destruct nth_error; congruence.
|
||||
Qed.
|
||||
|
||||
End Permutation_alt.
|
||||
|
||||
(* begin hide *)
|
||||
Notation Permutation_app_swap := Permutation_app_comm (only parsing).
|
||||
(* end hide *)
|
||||
@@ -1,2 +0,0 @@
|
||||
Require Export Sorted.
|
||||
Require Export Mergesort.
|
||||
62
samples/Coq/Spec.v
Normal file
62
samples/Coq/Spec.v
Normal file
@@ -0,0 +1,62 @@
|
||||
(** Specifications. *)
|
||||
Require Import Coq.Lists.List.
|
||||
Require Import Coq.Strings.Ascii.
|
||||
Require Import FunctionNinjas.All.
|
||||
Require Import ListString.All.
|
||||
Require Import Computation.
|
||||
|
||||
Import ListNotations.
|
||||
Local Open Scope char.
|
||||
|
||||
(** A run is an execution of the program with explicit answers for the
|
||||
system calls. *)
|
||||
Module Run.
|
||||
(** We define a run by induction on the structure of a computation. *)
|
||||
Inductive t : C.t -> Type :=
|
||||
| Ret : t C.Ret
|
||||
| Call : forall (command : Command.t) (answer : Command.answer command)
|
||||
{handler : Command.answer command -> C.t}, t (handler answer) ->
|
||||
t (C.Call command handler).
|
||||
|
||||
(** The trace of a run. *)
|
||||
Fixpoint trace {x : C.t} (run : t x)
|
||||
: list {command : Command.t & Command.answer command} :=
|
||||
match run with
|
||||
| Ret => []
|
||||
| Call command answer _ run => existT _ command answer :: trace run
|
||||
end.
|
||||
End Run.
|
||||
|
||||
Module Temporal.
|
||||
Module All.
|
||||
Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
|
||||
| Ret : t P C.Ret
|
||||
| Call : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P c -> (forall a, t P (h a)) ->
|
||||
t P (C.Call c h).
|
||||
End All.
|
||||
|
||||
Module One.
|
||||
Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
|
||||
| CallThis : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P c ->
|
||||
t P (C.Call c h)
|
||||
| CallOther : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
(forall a, t P (h a)) ->
|
||||
t P (C.Call c h).
|
||||
End One.
|
||||
|
||||
Module Then.
|
||||
Inductive t (P1 P2 : Command.t -> Prop) : C.t -> Prop :=
|
||||
| Ret : t P1 P2 C.Ret
|
||||
| Call : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
(forall a, t P1 P2 (h a)) ->
|
||||
t P1 P2 (C.Call c h)
|
||||
| CallThen : forall (c : Command.t) (h : Command.answer c -> C.t),
|
||||
P1 c -> (forall a, One.t P2 (h a)) ->
|
||||
t P1 P2 (C.Call c h).
|
||||
End Then.
|
||||
End Temporal.
|
||||
|
||||
Module CardBeforeMoney.
|
||||
End CardBeforeMoney.
|
||||
@@ -1,419 +0,0 @@
|
||||
(** Sketch of the proof of {p:nat|p<=n} = {p:nat|p<=m} -> n=m
|
||||
|
||||
- preliminary results on the irrelevance of boundedness proofs
|
||||
- introduce the notion of finite cardinal |A|
|
||||
- prove that |{p:nat|p<=n}| = n
|
||||
- prove that |A| = n /\ |A| = m -> n = m if equality is decidable on A
|
||||
- prove that equality is decidable on A
|
||||
- conclude
|
||||
*)
|
||||
|
||||
(** * Preliminary results on [nat] and [le] *)
|
||||
|
||||
(** Proving axiom K on [nat] *)
|
||||
|
||||
Require Import Eqdep_dec.
|
||||
Require Import Arith.
|
||||
|
||||
Theorem eq_rect_eq_nat :
|
||||
forall (p:nat) (Q:nat->Type) (x:Q p) (h:p=p), x = eq_rect p Q x p h.
|
||||
Proof.
|
||||
intros.
|
||||
apply K_dec_set with (p := h).
|
||||
apply eq_nat_dec.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Proving unicity of proofs of [(n<=m)%nat] *)
|
||||
|
||||
Scheme le_ind' := Induction for le Sort Prop.
|
||||
|
||||
Theorem le_uniqueness_proof : forall (n m : nat) (p q : n <= m), p = q.
|
||||
Proof.
|
||||
induction p using le_ind'; intro q.
|
||||
replace (le_n n) with
|
||||
(eq_rect _ (fun n0 => n <= n0) (le_n n) _ (refl_equal n)).
|
||||
2:reflexivity.
|
||||
generalize (refl_equal n).
|
||||
pattern n at 2 4 6 10, q; case q; [intro | intros m l e].
|
||||
rewrite <- eq_rect_eq_nat; trivial.
|
||||
contradiction (le_Sn_n m); rewrite <- e; assumption.
|
||||
replace (le_S n m p) with
|
||||
(eq_rect _ (fun n0 => n <= n0) (le_S n m p) _ (refl_equal (S m))).
|
||||
2:reflexivity.
|
||||
generalize (refl_equal (S m)).
|
||||
pattern (S m) at 1 3 4 6, q; case q; [intro Heq | intros m0 l HeqS].
|
||||
contradiction (le_Sn_n m); rewrite Heq; assumption.
|
||||
injection HeqS; intro Heq; generalize l HeqS.
|
||||
rewrite <- Heq; intros; rewrite <- eq_rect_eq_nat.
|
||||
rewrite (IHp l0); reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Proving irrelevance of boundedness proofs while building
|
||||
elements of interval *)
|
||||
|
||||
Lemma dep_pair_intro :
|
||||
forall (n x y:nat) (Hx : x<=n) (Hy : y<=n), x=y ->
|
||||
exist (fun x => x <= n) x Hx = exist (fun x => x <= n) y Hy.
|
||||
Proof.
|
||||
intros n x y Hx Hy Heq.
|
||||
generalize Hy.
|
||||
rewrite <- Heq.
|
||||
intros.
|
||||
rewrite (le_uniqueness_proof x n Hx Hy0).
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** * Proving that {p:nat|p<=n} = {p:nat|p<=m} -> n=m *)
|
||||
|
||||
(** Definition of having finite cardinality [n+1] for a set [A] *)
|
||||
|
||||
Definition card (A:Set) n :=
|
||||
exists f,
|
||||
(forall x:A, f x <= n) /\
|
||||
(forall x y:A, f x = f y -> x = y) /\
|
||||
(forall m, m <= n -> exists x:A, f x = m).
|
||||
|
||||
Require Import Arith.
|
||||
|
||||
(** Showing that the interval [0;n] has cardinality [n+1] *)
|
||||
|
||||
Theorem card_interval : forall n, card {x:nat|x<=n} n.
|
||||
Proof.
|
||||
intro n.
|
||||
exists (fun x:{x:nat|x<=n} => proj1_sig x).
|
||||
split.
|
||||
(* bounded *)
|
||||
intro x; apply (proj2_sig x).
|
||||
split.
|
||||
(* injectivity *)
|
||||
intros (p,Hp) (q,Hq).
|
||||
simpl.
|
||||
intro Hpq.
|
||||
apply dep_pair_intro; assumption.
|
||||
(* surjectivity *)
|
||||
intros m Hmn.
|
||||
exists (exist (fun x : nat => x <= n) m Hmn).
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
(** Showing that equality on the interval [0;n] is decidable *)
|
||||
|
||||
Lemma interval_dec :
|
||||
forall n (x y : {m:nat|m<=n}), {x=y}+{x<>y}.
|
||||
Proof.
|
||||
intros n (p,Hp).
|
||||
induction p; intros ([|q],Hq).
|
||||
left.
|
||||
apply dep_pair_intro.
|
||||
reflexivity.
|
||||
right.
|
||||
intro H; discriminate H.
|
||||
right.
|
||||
intro H; discriminate H.
|
||||
assert (Hp' : p <= n).
|
||||
apply le_Sn_le; assumption.
|
||||
assert (Hq' : q <= n).
|
||||
apply le_Sn_le; assumption.
|
||||
destruct (IHp Hp' (exist (fun m => m <= n) q Hq'))
|
||||
as [Heq|Hneq].
|
||||
left.
|
||||
injection Heq; intro Heq'.
|
||||
apply dep_pair_intro.
|
||||
apply eq_S.
|
||||
assumption.
|
||||
right.
|
||||
intro HeqS.
|
||||
injection HeqS; intro Heq.
|
||||
apply Hneq.
|
||||
apply dep_pair_intro.
|
||||
assumption.
|
||||
Qed.
|
||||
|
||||
(** Showing that the cardinality relation is functional on decidable sets *)
|
||||
|
||||
Lemma card_inj_aux :
|
||||
forall (A:Type) f g n,
|
||||
(forall x:A, f x <= 0) ->
|
||||
(forall x y:A, f x = f y -> x = y) ->
|
||||
(forall m, m <= S n -> exists x:A, g x = m)
|
||||
-> False.
|
||||
Proof.
|
||||
intros A f g n Hfbound Hfinj Hgsurj.
|
||||
destruct (Hgsurj (S n) (le_n _)) as (x,Hx).
|
||||
destruct (Hgsurj n (le_S _ _ (le_n _))) as (x',Hx').
|
||||
assert (Hfx : 0 = f x).
|
||||
apply le_n_O_eq.
|
||||
apply Hfbound.
|
||||
assert (Hfx' : 0 = f x').
|
||||
apply le_n_O_eq.
|
||||
apply Hfbound.
|
||||
assert (x=x').
|
||||
apply Hfinj.
|
||||
rewrite <- Hfx.
|
||||
rewrite <- Hfx'.
|
||||
reflexivity.
|
||||
rewrite H in Hx.
|
||||
rewrite Hx' in Hx.
|
||||
apply (n_Sn _ Hx).
|
||||
Qed.
|
||||
|
||||
(** For [dec_restrict], we use a lemma on the negation of equality
|
||||
that requires proof-irrelevance. It should be possible to avoid this
|
||||
lemma by generalizing over a first-order definition of [x<>y], say
|
||||
[neq] such that [{x=y}+{neq x y}] and [~(x=y /\ neq x y)]; for such
|
||||
[neq], unicity of proofs could be proven *)
|
||||
|
||||
Require Import Classical.
|
||||
Lemma neq_dep_intro :
|
||||
forall (A:Set) (z x y:A) (p:x<>z) (q:y<>z), x=y ->
|
||||
exist (fun x => x <> z) x p = exist (fun x => x <> z) y q.
|
||||
Proof.
|
||||
intros A z x y p q Heq.
|
||||
generalize q; clear q; rewrite <- Heq; intro q.
|
||||
rewrite (proof_irrelevance _ p q); reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma dec_restrict :
|
||||
forall (A:Set),
|
||||
(forall x y :A, {x=y}+{x<>y}) ->
|
||||
forall z (x y :{a:A|a<>z}), {x=y}+{x<>y}.
|
||||
Proof.
|
||||
intros A Hdec z (x,Hx) (y,Hy).
|
||||
destruct (Hdec x y) as [Heq|Hneq].
|
||||
left; apply neq_dep_intro; assumption.
|
||||
right; intro Heq; injection Heq; exact Hneq.
|
||||
Qed.
|
||||
|
||||
Lemma pred_inj : forall n m,
|
||||
0 <> n -> 0 <> m -> pred m = pred n -> m = n.
|
||||
Proof.
|
||||
destruct n.
|
||||
intros m H; destruct H; reflexivity.
|
||||
destruct m.
|
||||
intros _ H; destruct H; reflexivity.
|
||||
simpl; intros _ _ H.
|
||||
rewrite H.
|
||||
reflexivity.
|
||||
Qed.
|
||||
|
||||
Lemma le_neq_lt : forall n m, n <= m -> n<>m -> n < m.
|
||||
Proof.
|
||||
intros n m Hle Hneq.
|
||||
destruct (le_lt_eq_dec n m Hle).
|
||||
assumption.
|
||||
contradiction.
|
||||
Qed.
|
||||
|
||||
Lemma inj_restrict :
|
||||
forall (A:Set) (f:A->nat) x y z,
|
||||
(forall x y : A, f x = f y -> x = y)
|
||||
-> x <> z -> f y < f z -> f z <= f x
|
||||
-> pred (f x) = f y
|
||||
-> False.
|
||||
|
||||
(* Search error sans le type de f !! *)
|
||||
Proof.
|
||||
intros A f x y z Hfinj Hneqx Hfy Hfx Heq.
|
||||
assert (f z <> f x).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqx.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (f x = S (f y)).
|
||||
assert (0 < f x).
|
||||
apply le_lt_trans with (f z).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
apply pred_inj.
|
||||
apply O_S.
|
||||
apply lt_O_neq; assumption.
|
||||
exact Heq.
|
||||
assert (f z <= f y).
|
||||
destruct (le_lt_or_eq _ _ Hfx).
|
||||
apply lt_n_Sm_le.
|
||||
rewrite <- H0.
|
||||
assumption.
|
||||
contradiction Hneqx.
|
||||
symmetry.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
contradiction (lt_not_le (f y) (f z)).
|
||||
Qed.
|
||||
|
||||
Theorem card_inj : forall m n (A:Set),
|
||||
(forall x y :A, {x=y}+{x<>y}) ->
|
||||
card A m -> card A n -> m = n.
|
||||
Proof.
|
||||
induction m; destruct n;
|
||||
intros A Hdec
|
||||
(f,(Hfbound,(Hfinj,Hfsurj)))
|
||||
(g,(Hgbound,(Hginj,Hgsurj))).
|
||||
(* 0/0 *)
|
||||
reflexivity.
|
||||
(* 0/Sm *)
|
||||
destruct (card_inj_aux _ _ _ _ Hfbound Hfinj Hgsurj).
|
||||
(* Sn/0 *)
|
||||
destruct (card_inj_aux _ _ _ _ Hgbound Hginj Hfsurj).
|
||||
(* Sn/Sm *)
|
||||
destruct (Hgsurj (S n) (le_n _)) as (xSn,HSnx).
|
||||
rewrite IHm with (n:=n) (A := {x:A|x<>xSn}).
|
||||
reflexivity.
|
||||
(* decidability of eq on {x:A|x<>xSm} *)
|
||||
apply dec_restrict.
|
||||
assumption.
|
||||
(* cardinality of {x:A|x<>xSn} is m *)
|
||||
pose (f' := fun x' : {x:A|x<>xSn} =>
|
||||
let (x,Hneq) := x' in
|
||||
if le_lt_dec (f xSn) (f x)
|
||||
then pred (f x)
|
||||
else f x).
|
||||
exists f'.
|
||||
split.
|
||||
(* f' is bounded *)
|
||||
unfold f'.
|
||||
intros (x,_).
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle|Hge].
|
||||
change m with (pred (S m)).
|
||||
apply le_pred.
|
||||
apply Hfbound.
|
||||
apply le_S_n.
|
||||
apply le_trans with (f xSn).
|
||||
exact Hge.
|
||||
apply Hfbound.
|
||||
split.
|
||||
(* f' is injective *)
|
||||
unfold f'.
|
||||
intros (x,Hneqx) (y,Hneqy) Heqf'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hlefx|Hgefx];
|
||||
destruct (le_lt_dec (f xSn) (f y)) as [Hlefy|Hgefy].
|
||||
(* f xSn <= f x et f xSn <= f y *)
|
||||
assert (Heq : x = y).
|
||||
apply Hfinj.
|
||||
assert (f xSn <> f y).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqy.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (0 < f y).
|
||||
apply le_lt_trans with (f xSn).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
assert (f xSn <> f x).
|
||||
apply sym_not_eq.
|
||||
intro Heqf.
|
||||
apply Hneqx.
|
||||
apply Hfinj.
|
||||
assumption.
|
||||
assert (0 < f x).
|
||||
apply le_lt_trans with (f xSn).
|
||||
apply le_O_n.
|
||||
apply le_neq_lt; assumption.
|
||||
apply pred_inj.
|
||||
apply lt_O_neq; assumption.
|
||||
apply lt_O_neq; assumption.
|
||||
assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* f y < f xSn <= f x *)
|
||||
destruct (inj_restrict A f x y xSn); assumption.
|
||||
(* f x < f xSn <= f y *)
|
||||
symmetry in Heqf'.
|
||||
destruct (inj_restrict A f y x xSn); assumption.
|
||||
(* f x < f xSn et f y < f xSn *)
|
||||
assert (Heq : x=y).
|
||||
apply Hfinj; assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* f' is surjective *)
|
||||
intros p Hlep.
|
||||
destruct (le_lt_dec (f xSn) p) as [Hle|Hlt].
|
||||
(* case f xSn <= p *)
|
||||
destruct (Hfsurj (S p) (le_n_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x <> xSn).
|
||||
intro Heqx.
|
||||
rewrite Heqx in Hx.
|
||||
rewrite Hx in Hle.
|
||||
apply le_Sn_n with p; assumption.
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
unfold f'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
|
||||
rewrite Hx; reflexivity.
|
||||
rewrite Hx in Hlt'.
|
||||
contradiction (le_not_lt (f xSn) p).
|
||||
apply lt_trans with (S p).
|
||||
apply lt_n_Sn.
|
||||
assumption.
|
||||
(* case p < f xSn *)
|
||||
destruct (Hfsurj p (le_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x <> xSn).
|
||||
intro Heqx.
|
||||
rewrite Heqx in Hx.
|
||||
rewrite Hx in Hlt.
|
||||
apply (lt_irrefl p).
|
||||
assumption.
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
unfold f'.
|
||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
|
||||
rewrite Hx in Hle'.
|
||||
contradiction (lt_irrefl p).
|
||||
apply lt_le_trans with (f xSn); assumption.
|
||||
assumption.
|
||||
(* cardinality of {x:A|x<>xSn} is n *)
|
||||
pose (g' := fun x' : {x:A|x<>xSn} =>
|
||||
let (x,Hneq) := x' in
|
||||
if Hdec x xSn then 0 else g x).
|
||||
exists g'.
|
||||
split.
|
||||
(* g is bounded *)
|
||||
unfold g'.
|
||||
intros (x,_).
|
||||
destruct (Hdec x xSn) as [_|Hneq].
|
||||
apply le_O_n.
|
||||
assert (Hle_gx:=Hgbound x).
|
||||
destruct (le_lt_or_eq _ _ Hle_gx).
|
||||
apply lt_n_Sm_le.
|
||||
assumption.
|
||||
contradiction Hneq.
|
||||
apply Hginj.
|
||||
rewrite HSnx.
|
||||
assumption.
|
||||
split.
|
||||
(* g is injective *)
|
||||
unfold g'.
|
||||
intros (x,Hneqx) (y,Hneqy) Heqg'.
|
||||
destruct (Hdec x xSn) as [Heqx|_].
|
||||
contradiction Hneqx.
|
||||
destruct (Hdec y xSn) as [Heqy|_].
|
||||
contradiction Hneqy.
|
||||
assert (Heq : x=y).
|
||||
apply Hginj; assumption.
|
||||
apply neq_dep_intro; assumption.
|
||||
(* g is surjective *)
|
||||
intros p Hlep.
|
||||
destruct (Hgsurj p (le_S _ _ Hlep)) as (x,Hx).
|
||||
assert (Hneq : x<>xSn).
|
||||
intro Heq.
|
||||
rewrite Heq in Hx.
|
||||
rewrite Hx in HSnx.
|
||||
rewrite HSnx in Hlep.
|
||||
contradiction (le_Sn_n _ Hlep).
|
||||
exists (exist (fun a => a<>xSn) x Hneq).
|
||||
simpl.
|
||||
destruct (Hdec x xSn) as [Heqx|_].
|
||||
contradiction Hneq.
|
||||
assumption.
|
||||
Qed.
|
||||
|
||||
(** Conclusion *)
|
||||
|
||||
Theorem interval_discr :
|
||||
forall n m, {p:nat|p<=n} = {p:nat|p<=m} -> n=m.
|
||||
Proof.
|
||||
intros n m Heq.
|
||||
apply card_inj with (A := {p:nat|p<=n}).
|
||||
apply interval_dec.
|
||||
apply card_interval.
|
||||
rewrite Heq.
|
||||
apply card_interval.
|
||||
Qed.
|
||||
21
samples/Csound Document/allglass.csd
Executable file
21
samples/Csound Document/allglass.csd
Executable file
@@ -0,0 +1,21 @@
|
||||
<CsoundSynthesizer>
|
||||
<CsInstruments>
|
||||
sr = 44100
|
||||
kr = 44100
|
||||
ksmps = 1
|
||||
nchnls = 2
|
||||
|
||||
; pvanal -n 512 -w 8 allglass1-L.wav allglass1-L.pvc
|
||||
; pvanal -n 512 -w 8 allglass1-R.wav allglass1-R.pvc
|
||||
instr 1
|
||||
ktime line 0, p3, 17.5018
|
||||
arL pvoc ktime, 1, "allglass1-L.pvc"
|
||||
arR pvoc ktime, 1, "allglass1-R.pvc"
|
||||
out arL, arR
|
||||
endin
|
||||
</CsInstruments>
|
||||
<CsScore>
|
||||
i 1 0 70.0073
|
||||
e
|
||||
</CsScore>
|
||||
</CsoundSynthesizer>
|
||||
34
samples/Csound Document/interp.csd
Executable file
34
samples/Csound Document/interp.csd
Executable file
@@ -0,0 +1,34 @@
|
||||
<CsoundSynthesizer>
|
||||
<CsInstruments>
|
||||
sr = 44100
|
||||
kr = 44100
|
||||
ksmps = 1
|
||||
nchnls = 2
|
||||
|
||||
; pvanal -n 1024 -w 2 partA-L.wav partA-L.pvc
|
||||
; pvanal -n 1024 -w 2 partA-R.wav partA-R.pvc
|
||||
; pvanal -n 1024 -w 2 partB.wav partB.pvc
|
||||
instr 1
|
||||
iscale = 1
|
||||
|
||||
ktimpnt1 line 0, iscale*(82196/44100), 82196/44100
|
||||
ktimpnt2 linseg 0, iscale*1.25, 0, iscale*(103518/44100), 103518/44100
|
||||
kfreqscale linseg 1, iscale*0.5, 1, iscale*1.6, 0.8
|
||||
kfreqinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
|
||||
kampinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
|
||||
kfreqinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1
|
||||
kampinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1
|
||||
|
||||
pvbufread ktimpnt1, "partB.pvc"
|
||||
apvcL pvinterp ktimpnt2, 1, "partA-L.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpL, 1-kampinterpL
|
||||
pvbufread ktimpnt1, "partB.pvc"
|
||||
apvcR pvinterp ktimpnt2, 1, "partA-R.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpR, 1-kampinterpR
|
||||
|
||||
outs apvcL*0.8, apvcR*0.8
|
||||
endin
|
||||
</CsInstruments>
|
||||
<CsScore>
|
||||
i 1 0 7
|
||||
e
|
||||
</CsScore>
|
||||
</CsoundSynthesizer>
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user