Mirror of https://github.com/KevinMidboe/linguist.git (synced 2025-10-29 17:50:22 +00:00)

Compare commits: 764 commits
.gitmodules (252 changed lines, vendored)

@@ -7,9 +7,6 @@
[submodule "vendor/grammars/sublime-cirru"]
path = vendor/grammars/sublime-cirru
url = https://github.com/Cirru/sublime-cirru
[submodule "vendor/grammars/Sublime-Logos"]
path = vendor/grammars/Sublime-Logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/SublimeBrainfuck"]
path = vendor/grammars/SublimeBrainfuck
url = https://github.com/Drako/SublimeBrainfuck
@@ -25,18 +22,9 @@
[submodule "vendor/grammars/Sublime-REBOL"]
path = vendor/grammars/Sublime-REBOL
url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-Inform"]
path = vendor/grammars/Sublime-Inform
url = https://github.com/PogiNate/Sublime-Inform
[submodule "vendor/grammars/autoitv3-tmbundle"]
path = vendor/grammars/autoitv3-tmbundle
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
[submodule "vendor/grammars/Sublime-VimL"]
path = vendor/grammars/Sublime-VimL
url = https://github.com/SalGnt/Sublime-VimL
[submodule "vendor/grammars/boo-sublime"]
path = vendor/grammars/boo-sublime
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/ColdFusion"]
path = vendor/grammars/ColdFusion
url = https://github.com/SublimeText/ColdFusion
@@ -85,12 +73,12 @@
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-supercollider"]
path = vendor/grammars/language-supercollider
url = https://github.com/supercollider/language-supercollider
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
[submodule "vendor/grammars/sublime-sourcepawn"]
path = vendor/grammars/sublime-sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/Sublime-Lasso"]
path = vendor/grammars/Sublime-Lasso
url = https://github.com/bfad/Sublime-Lasso
@@ -109,9 +97,6 @@
[submodule "vendor/grammars/sublime-MuPAD"]
path = vendor/grammars/sublime-MuPAD
url = https://github.com/ccreutzig/sublime-MuPAD
[submodule "vendor/grammars/nesC.tmbundle"]
path = vendor/grammars/nesC.tmbundle
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/haxe-sublime-bundle"]
path = vendor/grammars/haxe-sublime-bundle
url = https://github.com/clemos/haxe-sublime-bundle
@@ -133,9 +118,6 @@
[submodule "vendor/grammars/fancy-tmbundle"]
path = vendor/grammars/fancy-tmbundle
url = https://github.com/fancy-lang/fancy-tmbundle
[submodule "vendor/grammars/monkey.tmbundle"]
path = vendor/grammars/monkey.tmbundle
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/dart-sublime-bundle"]
path = vendor/grammars/dart-sublime-bundle
url = https://github.com/guillermooo/dart-sublime-bundle
@@ -166,21 +148,12 @@
[submodule "vendor/grammars/fish-tmbundle"]
path = vendor/grammars/fish-tmbundle
url = https://github.com/l15n/fish-tmbundle
[submodule "vendor/grammars/sublime-idris"]
path = vendor/grammars/sublime-idris
url = https://github.com/laughedelic/sublime-idris
[submodule "vendor/grammars/sublime-better-typescript"]
path = vendor/grammars/sublime-better-typescript
url = https://github.com/lavrton/sublime-better-typescript
[submodule "vendor/grammars/moonscript-tmbundle"]
path = vendor/grammars/moonscript-tmbundle
url = https://github.com/leafo/moonscript-tmbundle
[submodule "vendor/grammars/Isabelle.tmbundle"]
path = vendor/grammars/Isabelle.tmbundle
url = https://github.com/lsf37/Isabelle.tmbundle
[submodule "vendor/grammars/x86-assembly-textmate-bundle"]
path = vendor/grammars/x86-assembly-textmate-bundle
url = https://github.com/lunixbochs/x86-assembly-textmate-bundle
[submodule "vendor/grammars/Alloy.tmbundle"]
path = vendor/grammars/Alloy.tmbundle
url = https://github.com/macekond/Alloy.tmbundle
@@ -211,9 +184,6 @@
[submodule "vendor/grammars/Julia.tmbundle"]
path = vendor/grammars/Julia.tmbundle
url = https://github.com/nanoant/Julia.tmbundle
[submodule "vendor/grammars/assembly.tmbundle"]
path = vendor/grammars/assembly.tmbundle
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/ooc.tmbundle"]
path = vendor/grammars/ooc.tmbundle
url = https://github.com/nilium/ooc.tmbundle
@@ -223,9 +193,6 @@
[submodule "vendor/grammars/sublime-tea"]
path = vendor/grammars/sublime-tea
url = https://github.com/pferruggiaro/sublime-tea
[submodule "vendor/grammars/puppet-textmate-bundle"]
path = vendor/grammars/puppet-textmate-bundle
url = https://github.com/puppet-textmate-bundle/puppet-textmate-bundle
[submodule "vendor/grammars/abap.tmbundle"]
path = vendor/grammars/abap.tmbundle
url = https://github.com/pvl/abap.tmbundle
@@ -256,9 +223,6 @@
[submodule "vendor/grammars/SublimeXtend"]
path = vendor/grammars/SublimeXtend
url = https://github.com/staltz/SublimeXtend
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/statatmbundle/Stata.tmbundle
[submodule "vendor/grammars/Vala-TMBundle"]
path = vendor/grammars/Vala-TMBundle
url = https://github.com/technosophos/Vala-TMBundle
@@ -346,9 +310,6 @@
[submodule "vendor/grammars/latex.tmbundle"]
path = vendor/grammars/latex.tmbundle
url = https://github.com/textmate/latex.tmbundle
[submodule "vendor/grammars/less.tmbundle"]
path = vendor/grammars/less.tmbundle
url = https://github.com/textmate/less.tmbundle
[submodule "vendor/grammars/lilypond.tmbundle"]
path = vendor/grammars/lilypond.tmbundle
url = https://github.com/textmate/lilypond.tmbundle
@@ -397,9 +358,6 @@
[submodule "vendor/grammars/processing.tmbundle"]
path = vendor/grammars/processing.tmbundle
url = https://github.com/textmate/processing.tmbundle
[submodule "vendor/grammars/prolog.tmbundle"]
path = vendor/grammars/prolog.tmbundle
url = https://github.com/textmate/prolog.tmbundle
[submodule "vendor/grammars/python-django.tmbundle"]
path = vendor/grammars/python-django.tmbundle
url = https://github.com/textmate/python-django.tmbundle
@@ -460,9 +418,6 @@
[submodule "vendor/grammars/llvm.tmbundle"]
path = vendor/grammars/llvm.tmbundle
url = https://github.com/whitequark/llvm.tmbundle
[submodule "vendor/grammars/sublime-nix"]
path = vendor/grammars/sublime-nix
url = https://github.com/wmertens/sublime-nix
[submodule "vendor/grammars/oz-tmbundle"]
path = vendor/grammars/oz-tmbundle
url = https://github.com/eregon/oz-tmbundle
@@ -491,9 +446,6 @@
[submodule "vendor/grammars/Scalate.tmbundle"]
path = vendor/grammars/Scalate.tmbundle
url = https://github.com/scalate/Scalate.tmbundle
[submodule "vendor/grammars/Elm.tmLanguage"]
path = vendor/grammars/Elm.tmLanguage
url = https://github.com/deadfoxygrandpa/Elm.tmLanguage
[submodule "vendor/grammars/sublime-bsv"]
path = vendor/grammars/sublime-bsv
url = https://github.com/thotypous/sublime-bsv
@@ -521,24 +473,9 @@
[submodule "vendor/grammars/liquid.tmbundle"]
path = vendor/grammars/liquid.tmbundle
url = https://github.com/bastilian/validcode-textmate-bundles
[submodule "vendor/grammars/ats.sublime"]
path = vendor/grammars/ats.sublime
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/Modelica"]
path = vendor/grammars/Modelica
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
[submodule "vendor/grammars/sublime-apl"]
path = vendor/grammars/sublime-apl
url = https://github.com/StoneCypher/sublime-apl
[submodule "vendor/grammars/CLIPS-sublime"]
path = vendor/grammars/CLIPS-sublime
url = https://github.com/psicomante/CLIPS-sublime
[submodule "vendor/grammars/Creole"]
path = vendor/grammars/Creole
url = https://github.com/Siddley/Creole
[submodule "vendor/grammars/GDScript-sublime"]
path = vendor/grammars/GDScript-sublime
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/sublime-golo"]
path = vendor/grammars/sublime-golo
url = https://github.com/TypeUnsafe/sublime-golo
@@ -551,9 +488,6 @@
[submodule "vendor/grammars/G-Code"]
path = vendor/grammars/G-Code
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
[submodule "vendor/grammars/grace-tmbundle"]
path = vendor/grammars/grace-tmbundle
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/sublime-text-ox"]
path = vendor/grammars/sublime-text-ox
url = https://github.com/andreashetland/sublime-text-ox
@@ -563,9 +497,6 @@
[submodule "vendor/grammars/ec.tmbundle"]
path = vendor/grammars/ec.tmbundle
url = https://github.com/ecere/ec.tmbundle
[submodule "vendor/grammars/InnoSetup"]
path = vendor/grammars/InnoSetup
url = https://github.com/idleberg/InnoSetup-Sublime-Text
[submodule "vendor/grammars/gap-tmbundle"]
path = vendor/grammars/gap-tmbundle
url = https://github.com/dhowden/gap-tmbundle
@@ -587,9 +518,6 @@
[submodule "vendor/grammars/SublimeClarion"]
path = vendor/grammars/SublimeClarion
url = https://github.com/fushnisoft/SublimeClarion
[submodule "vendor/grammars/oracle.tmbundle"]
path = vendor/grammars/oracle.tmbundle
url = https://github.com/mulander/oracle.tmbundle.git
[submodule "vendor/grammars/BrightScript.tmbundle"]
path = vendor/grammars/BrightScript.tmbundle
url = https://github.com/cmink/BrightScript.tmbundle
@@ -599,18 +527,12 @@
[submodule "vendor/grammars/asciidoc.tmbundle"]
path = vendor/grammars/asciidoc.tmbundle
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
[submodule "vendor/grammars/sublime-text-pig-latin"]
path = vendor/grammars/sublime-text-pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/Lean.tmbundle"]
path = vendor/grammars/Lean.tmbundle
url = https://github.com/leanprover/Lean.tmbundle
[submodule "vendor/grammars/ampl"]
path = vendor/grammars/ampl
url = https://github.com/ampl/sublime-ampl
[submodule "vendor/grammars/openscad.tmbundle"]
path = vendor/grammars/openscad.tmbundle
url = https://github.com/tbuser/openscad.tmbundle
[submodule "vendor/grammars/sublime-varnish"]
path = vendor/grammars/sublime-varnish
url = https://github.com/brandonwamboldt/sublime-varnish
@@ -629,6 +551,9 @@
[submodule "vendor/grammars/jflex.tmbundle"]
path = vendor/grammars/jflex.tmbundle
url = https://github.com/jflex-de/jflex.tmbundle.git
[submodule "vendor/grammars/Sublime-Modula-2"]
path = vendor/grammars/Sublime-Modula-2
url = https://github.com/harogaston/Sublime-Modula-2
[submodule "vendor/grammars/ada.tmbundle"]
path = vendor/grammars/ada.tmbundle
url = https://github.com/textmate/ada.tmbundle
@@ -647,3 +572,168 @@
[submodule "vendor/grammars/atom-fsharp"]
path = vendor/grammars/atom-fsharp
url = https://github.com/fsprojects/atom-fsharp
[submodule "vendor/grammars/SMT.tmbundle"]
path = vendor/grammars/SMT.tmbundle
url = https://github.com/SRI-CSL/SMT.tmbundle.git
[submodule "vendor/grammars/language-crystal"]
path = vendor/grammars/language-crystal
url = https://github.com/atom-crystal/language-crystal
[submodule "vendor/grammars/language-xbase"]
path = vendor/grammars/language-xbase
url = https://github.com/hernad/atom-language-harbour
[submodule "vendor/grammars/language-ncl"]
path = vendor/grammars/language-ncl
url = https://github.com/rpavlick/language-ncl.git
[submodule "vendor/grammars/pawn-sublime-language"]
path = vendor/grammars/pawn-sublime-language
url = https://github.com/Southclaw/pawn-sublime-language.git
[submodule "vendor/grammars/atom-language-purescript"]
path = vendor/grammars/atom-language-purescript
url = https://github.com/purescript-contrib/atom-language-purescript
[submodule "vendor/grammars/vue-syntax-highlight"]
path = vendor/grammars/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight
[submodule "vendor/grammars/st2-zonefile"]
path = vendor/grammars/st2-zonefile
url = https://github.com/sixty4k/st2-zonefile
[submodule "vendor/grammars/sublimeprolog"]
path = vendor/grammars/sublimeprolog
url = https://github.com/alnkpa/sublimeprolog
[submodule "vendor/grammars/sublime-aspectj"]
path = vendor/grammars/sublime-aspectj
url = https://github.com/pchaigno/sublime-aspectj
[submodule "vendor/grammars/sublime-typescript"]
path = vendor/grammars/sublime-typescript
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
path = vendor/grammars/sublime-pony
url = https://github.com/CausalityLtd/sublime-pony
[submodule "vendor/grammars/X10"]
path = vendor/grammars/X10
url = https://github.com/x10-lang/x10-highlighting
[submodule "vendor/grammars/language-babel"]
path = vendor/grammars/language-babel
url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
path = vendor/grammars/UrWeb-Language-Definition
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/pschumm/Stata.tmbundle
[submodule "vendor/grammars/FreeMarker.tmbundle"]
path = vendor/grammars/FreeMarker.tmbundle
url = https://github.com/freemarker/FreeMarker.tmbundle
[submodule "vendor/grammars/MagicPython"]
path = vendor/grammars/MagicPython
url = https://github.com/MagicStack/MagicPython
[submodule "vendor/grammars/language-click"]
path = vendor/grammars/language-click
url = https://github.com/stenverbois/language-click.git
[submodule "vendor/grammars/language-maxscript"]
path = vendor/grammars/language-maxscript
url = https://github.com/Alhadis/language-maxscript
[submodule "vendor/grammars/language-renpy"]
path = vendor/grammars/language-renpy
url = https://github.com/williamd1k0/language-renpy.git
[submodule "vendor/grammars/language-inform7"]
path = vendor/grammars/language-inform7
url = https://github.com/erkyrath/language-inform7
[submodule "vendor/grammars/atom-language-stan"]
path = vendor/grammars/atom-language-stan
url = https://github.com/jrnold/atom-language-stan
[submodule "vendor/grammars/language-yang"]
path = vendor/grammars/language-yang
url = https://github.com/DzonyKalafut/language-yang.git
[submodule "vendor/grammars/perl6fe"]
path = vendor/grammars/perl6fe
url = https://github.com/MadcapJake/language-perl6fe.git
[submodule "vendor/grammars/language-less"]
path = vendor/grammars/language-less
url = https://github.com/atom/language-less.git
[submodule "vendor/grammars/language-povray"]
path = vendor/grammars/language-povray
url = https://github.com/c-lipka/language-povray
[submodule "vendor/grammars/sublime-terra"]
path = vendor/grammars/sublime-terra
url = https://github.com/pyk/sublime-terra
[submodule "vendor/grammars/SublimePuppet"]
path = vendor/grammars/SublimePuppet
url = https://github.com/russCloak/SublimePuppet
[submodule "vendor/grammars/sublimeassembly"]
path = vendor/grammars/sublimeassembly
url = https://github.com/Nessphoro/sublimeassembly
[submodule "vendor/grammars/monkey"]
path = vendor/grammars/monkey
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/assembly"]
path = vendor/grammars/assembly
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/boo"]
path = vendor/grammars/boo
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/logos"]
path = vendor/grammars/logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/pig-latin"]
path = vendor/grammars/pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/sourcepawn"]
path = vendor/grammars/sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/gdscript"]
path = vendor/grammars/gdscript
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/nesC"]
path = vendor/grammars/nesC
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/ats"]
path = vendor/grammars/ats
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/grace"]
path = vendor/grammars/grace
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/ejs-tmbundle"]
path = vendor/grammars/ejs-tmbundle
url = https://github.com/gregory-m/ejs-tmbundle
[submodule "vendor/grammars/nix"]
path = vendor/grammars/nix
url = https://github.com/wmertens/sublime-nix
[submodule "vendor/grammars/idris"]
path = vendor/grammars/idris
url = https://github.com/idris-hackers/idris-sublime.git
[submodule "vendor/grammars/atomic-dreams"]
path = vendor/grammars/atomic-dreams
url = https://github.com/PJB3005/atomic-dreams
[submodule "vendor/grammars/language-apl"]
path = vendor/grammars/language-apl
url = https://github.com/Alhadis/language-apl.git
[submodule "vendor/grammars/language-graphql"]
path = vendor/grammars/language-graphql
url = https://github.com/rmosolgo/language-graphql
[submodule "vendor/grammars/language-toc-wow"]
path = vendor/grammars/language-toc-wow
url = https://github.com/nebularg/language-toc-wow
[submodule "vendor/grammars/sublime-autoit"]
path = vendor/grammars/sublime-autoit
url = https://github.com/AutoIt/SublimeAutoItScript
[submodule "vendor/grammars/TLA"]
path = vendor/grammars/TLA
url = https://github.com/agentultra/TLAGrammar
[submodule "vendor/grammars/sublime-clips"]
path = vendor/grammars/sublime-clips
url = https://github.com/psicomante/CLIPS-sublime
[submodule "vendor/grammars/creole"]
path = vendor/grammars/creole
url = https://github.com/Siddley/Creole
[submodule "vendor/grammars/language-csound"]
path = vendor/grammars/language-csound
url = https://github.com/nwhetsell/language-csound
[submodule "vendor/grammars/language-wavefront"]
path = vendor/grammars/language-wavefront
url = https://github.com/Alhadis/language-wavefront
[submodule "vendor/grammars/nu.tmbundle"]
path = vendor/grammars/nu.tmbundle
url = https://github.com/jsallis/nu.tmbundle
[submodule "vendor/grammars/Elm"]
path = vendor/grammars/Elm
url = https://github.com/elm-community/Elm.tmLanguage
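Since the changes above add, remove, and rename a large number of grammar submodules, a checkout of this revision needs its submodules re-synced. A minimal sketch using standard git commands (not part of the diff):

```
# Re-read .gitmodules, then fetch each grammar at its recorded commit
git submodule sync
git submodule update --init
```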
.travis.yml (11 changed lines)

@@ -1,6 +1,15 @@
language: ruby
sudo: false
addons:
  apt:
    packages:
      - libicu-dev
      - libicu48
before_install: script/travis/before_install
script:
  - bundle exec rake
  - script/licensed verify
rvm:
  - 1.9.3
  - 2.0.0
  - 2.1
  - 2.2
@@ -1,10 +1,29 @@
# Contributing

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. The majority of contributions won't need to touch any Ruby code at all.
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. This project adheres to the [Contributor Covenant Code of Conduct](http://contributor-covenant.org/). By participating, you are expected to uphold this code.

The majority of contributions won't need to touch any Ruby code at all.

## Adding an extension to a language

We try only to add new extensions once they have some usage on GitHub. In most cases we prefer that extensions be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new extension:

0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:

0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.


## Adding a language

We try only to add languages once they have some usage on GitHub. In most cases we prefer that languages be in use in hundreds of repositories before supporting them in Linguist.
We try only to add languages once they have some usage on GitHub. In most cases we prefer that each new file extension be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new language:

@@ -12,6 +31,7 @@ To add support for a new language:
0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution.
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
0. Download the license for the grammar: `script/licensed`. Be careful to only commit the file for the new grammar, as this script may update licenses for other grammars as well.
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

@@ -23,19 +43,21 @@ In addition, if your new language defines an extension that's already listed in

Remember, the goal here is to try and avoid false positives!


## Fixing a misclassified language

Most languages are detected by their file extension defined in [languages.yml][languages]. For disambiguating between files with common extensions, linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.


## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [languages.yml][languages] is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).
You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](https://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, we'll pick it up for GitHub in the next release of Linguist.

@@ -51,9 +73,10 @@ To run the tests:

bundle exec rake test

Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](https://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Here's our current build status: [](https://travis-ci.org/github/linguist)

Here's our current build status: [](http://travis-ci.org/github/linguist)

## Releasing

Gemfile (1 changed line)

@@ -1,4 +1,3 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gemspec :name => "github-linguist-grammars"
gem 'byebug' if RUBY_VERSION >= '2.0'
LICENSE (2 changed lines)

@@ -1,4 +1,4 @@
Copyright (c) 2011-2015 GitHub, Inc.
Copyright (c) 2011-2016 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md (20 changed lines)

@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md



The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

## Overrides
@@ -33,9 +33,9 @@ $ cat .gitattributes
*.rb linguist-language=Java
```

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.

```
$ cat .gitattributes
@@ -59,6 +59,9 @@ Alternatively, you can use Vim or Emacs style modelines to set the language for

##### Vim
```
# Some examples of various styles:
vim: syntax=java
vim: set syntax=ruby:
vim: set filetype=prolog:
vim: set ft=cpp:
```
@@ -111,4 +114,9 @@ lib/linguist.rb

Please check out our [contributing guidelines](CONTRIBUTING.md).

##
## License

The language grammars included in this gem are covered by their repositories'
respective licenses. `grammars.yml` specifies the repository for each grammar.

All other files are covered by the MIT license, see `LICENSE`.
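For reference, the [List Languages](https://developer.github.com/v3/repos/#list-languages) API mentioned in the README changes above can be queried directly. A minimal sketch (OWNER/REPO is a placeholder and the byte counts in the sample output are illustrative):

```
# Per-language byte counts that the stats bar percentages are derived from
curl -s https://api.github.com/repos/OWNER/REPO/languages
# => {"Ruby": 1191249, "Shell": 4063}
```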
Rakefile (10 changed lines)

@@ -40,18 +40,14 @@ task :samples do
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", Yajl.dump(languages))
  `gem build github-linguist.gemspec`
  File.delete("lib/linguist/languages.json")
end

task :build_grammars_gem do
  rm_rf "grammars"
  sh "script/convert-grammars"
  sh "gem", "build", "github-linguist-grammars.gemspec"
end

namespace :benchmark do
  benchmark_path = "benchmark/results"

@@ -62,7 +58,7 @@ namespace :benchmark do

  corpus = File.expand_path(ENV["CORPUS"] || "samples")

  require 'linguist/language'
  require 'linguist'

  results = Hash.new
  Dir.glob("#{corpus}/**/*").each do |file|
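The Rakefile changes above split gem building into two tasks. Assuming the development dependencies are installed, they would be invoked roughly like this (a sketch, not part of the diff):

```
# Build the main gem (runs the :samples prerequisite and regenerates languages.json)
bundle exec rake build_gem

# Build the grammars gem (re-runs script/convert-grammars first)
bundle exec rake build_grammars_gem
```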
bin/git-linguist (138 changed lines, executable file)

@@ -0,0 +1,138 @@
#!/usr/bin/env ruby

require 'linguist'
require 'rugged'
require 'optparse'
require 'json'
require 'tmpdir'
require 'zlib'

class GitLinguist
  def initialize(path, commit_oid, incremental = true)
    @repo_path = path
    @commit_oid = commit_oid
    @incremental = incremental
  end

  def linguist
    if @commit_oid.nil?
      raise "git-linguist must be called with a specific commit OID to perform language computation"
    end
    repo = Linguist::Repository.new(rugged, @commit_oid)

    if @incremental && stats = load_language_stats
      old_commit_oid, old_stats = stats

      # A cache with NULL oid means that we want to froze
      # these language stats in place and stop computing
      # them (for performance reasons)
      return old_stats if old_commit_oid == NULL_OID
      repo.load_existing_stats(old_commit_oid, old_stats)
    end

    result = yield repo

    save_language_stats(@commit_oid, repo.cache)
    result
  end

  def load_language_stats
    version, oid, stats = load_cache
    if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
      [oid, stats]
    end
  end

  def save_language_stats(oid, stats)
    cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
    write_cache(cache)
  end

  def clear_language_stats
    File.unlink(cache_file)
  rescue Errno::ENOENT
  end

  def disable_language_stats
    save_language_stats(NULL_OID, {})
  end

  protected
  NULL_OID = ("0" * 40).freeze

  LANGUAGE_STATS_CACHE = 'language-stats.cache'
  LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"

  def rugged
    @rugged ||= Rugged::Repository.bare(@repo_path)
  end

  def cache_file
    File.join(@repo_path, LANGUAGE_STATS_CACHE)
  end

  def write_cache(object)
    return unless File.directory? @repo_path

    begin
      tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
      File.open(tmp_path, "wb") do |f|
        marshal = Marshal.dump(object)
        f.write(Zlib::Deflate.deflate(marshal))
      end

      File.rename(tmp_path, cache_file)
    rescue => e
      (File.unlink(tmp_path) rescue nil)
      raise e
    end
  end

  def load_cache
    marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
    Marshal.load(marshal)
  rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
    nil
  end
end


def git_linguist(args)
  incremental = true
  commit = nil

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"

    opts.on("-f", "--force", "Force a full rescan") { incremental = false }
    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
  end

  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be ran in a Git repository (#{Dir.pwd})" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
  when "stats"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.languages)
    end
  when "breakdown"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.breakdown_by_file)
    end
  when "dump-cache"
    puts JSON.dump(wrapper.load_language_stats)
  when "clear"
    wrapper.clear_language_stats
  when "disable"
    wrapper.disable_language_stats
  else
    $stderr.print(parser.help)
    exit 1
  end
end

git_linguist(ARGV)
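Based on the option parser and subcommands defined in the script above, usage looks roughly like the following (a sketch; it assumes bin/git-linguist is on your PATH and is run from inside a Git repository):

```
# Compute language statistics for a commit, reusing the incremental cache
git-linguist --commit=$(git rev-parse HEAD) stats

# Force a full rescan and print the per-file breakdown
git-linguist --force --commit=$(git rev-parse HEAD) breakdown

# Inspect, clear, or freeze the language-stats.cache file
git-linguist dump-cache
git-linguist clear
git-linguist disable
```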
@@ -1,14 +0,0 @@
require File.expand_path('../lib/linguist/version', __FILE__)

Gem::Specification.new do |s|
  s.name = 'github-linguist-grammars'
  s.version = Linguist::VERSION
  s.summary = "Language grammars for use with github-linguist"

  s.authors = "GitHub"
  s.homepage = "https://github.com/github/linguist"

  s.files = ['lib/linguist/grammars.rb'] + Dir['grammars/*']

  s.add_development_dependency 'plist', '~>3.1'
end

@@ -10,8 +10,8 @@ Gem::Specification.new do |s|
  s.homepage = "https://github.com/github/linguist"
  s.license = "MIT"

  s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
  s.executables << 'linguist'
  s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
  s.executables = ['linguist', 'git-linguist']

  s.add_dependency 'charlock_holmes', '~> 0.7.3'
  s.add_dependency 'escape_utils', '~> 1.1.0'

@@ -20,8 +20,11 @@ Gem::Specification.new do |s|

  s.add_development_dependency 'minitest', '>= 5.0'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'plist', '~>3.1'
  s.add_development_dependency 'pry'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'yajl-ruby'
  s.add_development_dependency 'color-proximity', '~> 0.2.1'
  s.add_development_dependency 'licensed'

end
grammars.yml (177 changed lines, mode changed: normal file → executable file)

@@ -29,33 +29,27 @@ vendor/grammars/AutoHotkey/:
vendor/grammars/BrightScript.tmbundle/:
- source.brightauthorproject
- source.brightscript
vendor/grammars/CLIPS-sublime:
- source.clips
vendor/grammars/ColdFusion:
- source.cfscript
- source.cfscript.cfc
- text.cfml.basic
- text.html.cfm
vendor/grammars/Creole:
- text.html.creole
vendor/grammars/Docker.tmbundle:
- source.dockerfile
vendor/grammars/Elm.tmLanguage:
vendor/grammars/Elm/:
- source.elm
vendor/grammars/FreeMarker.tmbundle:
- text.html.ftl
vendor/grammars/G-Code/:
- source.LS
- source.MCPOST
- source.MOD
- source.apt
- source.gcode
vendor/grammars/GDScript-sublime/:
- source.gdscript
vendor/grammars/Handlebars:
- text.html.handlebars
vendor/grammars/IDL-Syntax:
- source.webidl
vendor/grammars/InnoSetup/:
- source.inno
vendor/grammars/Isabelle.tmbundle:
- source.isabelle.root
- source.isabelle.theory
@@ -67,6 +61,9 @@ vendor/grammars/Lean.tmbundle:
- source.lean
vendor/grammars/LiveScript.tmbundle:
- source.livescript
vendor/grammars/MagicPython:
- source.python
- source.regexp.python
vendor/grammars/Modelica/:
- source.modelica
vendor/grammars/NSIS:
@@ -85,12 +82,14 @@ vendor/grammars/Racket:
- source.racket
vendor/grammars/SCSS.tmbundle:
- source.scss
vendor/grammars/SMT.tmbundle:
- source.smt
vendor/grammars/Scalate.tmbundle:
- source.scaml
- text.html.ssp
vendor/grammars/Slash.tmbundle:
- text.html.slash
vendor/grammars/Stata.tmbundle:
vendor/grammars/Stata.tmbundle/:
- source.mata
- source.stata
vendor/grammars/Stylus/:
@@ -99,14 +98,12 @@ vendor/grammars/Sublime-Coq:
- source.coq
vendor/grammars/Sublime-HTTP:
- source.httpspec
vendor/grammars/Sublime-Inform:
- source.Inform7
vendor/grammars/Sublime-Lasso:
- file.lasso
vendor/grammars/Sublime-Logos:
- source.logos
vendor/grammars/Sublime-Loom:
- source.loomscript
vendor/grammars/Sublime-Modula-2/:
- source.modula2
vendor/grammars/Sublime-Nit:
- source.nit
vendor/grammars/Sublime-QML:
@@ -119,6 +116,7 @@ vendor/grammars/Sublime-SQF-Language:
- source.sqf
vendor/grammars/Sublime-Text-2-OpenEdge-ABL:
- source.abl
- text.html.abl
vendor/grammars/Sublime-VimL:
- source.viml
vendor/grammars/SublimeBrainfuck:
@@ -126,19 +124,25 @@ vendor/grammars/SublimeBrainfuck:
vendor/grammars/SublimeClarion/:
- source.clarion
vendor/grammars/SublimePapyrus/:
- source.compiled-papyrus
- source.papyrus
- source.papyrus-assembly
- source.papyrus.skyrim
vendor/grammars/SublimePuppet/:
- source.puppet
vendor/grammars/SublimeXtend:
- source.xtend
vendor/grammars/TLA:
- source.tla
vendor/grammars/TXL/:
- source.txl
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
- source.vbnet
vendor/grammars/Vala-TMBundle:
- source.vala
vendor/grammars/X10:
- source.x10
vendor/grammars/abap.tmbundle:
- source.abap
vendor/grammars/actionscript3-tmbundle:
@@ -166,23 +170,31 @@ vendor/grammars/asciidoc.tmbundle/:
vendor/grammars/asp.tmbundle:
- source.asp
- text.html.asp
vendor/grammars/assembly.tmbundle:
vendor/grammars/assembly/:
- objdump.x86asm
- source.x86asm
vendor/grammars/atom-fsharp/:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
- source.fsharp.fsx
vendor/grammars/atom-language-purescript/:
- source.purescript
vendor/grammars/atom-language-stan/:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
vendor/grammars/ats.sublime:
vendor/grammars/atomic-dreams/:
- source.dm
- source.dmf
vendor/grammars/ats:
- source.ats
vendor/grammars/autoitv3-tmbundle:
- source.autoit.3
vendor/grammars/awk-sublime:
- source.awk
vendor/grammars/bison.tmbundle:
- source.bison
vendor/grammars/boo-sublime:
vendor/grammars/boo/:
- source.boo
vendor/grammars/bro-sublime:
- source.bro
@@ -207,6 +219,8 @@ vendor/grammars/cool-tmbundle:
vendor/grammars/cpp-qt.tmbundle:
- source.c++.qt
- source.qmake
vendor/grammars/creole/:
- text.html.creole
vendor/grammars/css.tmbundle:
- source.css
vendor/grammars/cucumber-tmbundle:
@@ -233,6 +247,8 @@ vendor/grammars/ec.tmbundle/:
- source.c.ec
vendor/grammars/eiffel.tmbundle:
- source.eiffel
vendor/grammars/ejs-tmbundle:
- text.html.js
vendor/grammars/elixir-tmbundle:
- source.elixir
- text.elixir
@@ -252,13 +268,15 @@ vendor/grammars/fortran.tmbundle:
- source.fortran.modern
vendor/grammars/gap-tmbundle/:
- source.gap
vendor/grammars/gdscript/:
- source.gdscript
vendor/grammars/gettext.tmbundle:
- source.po
vendor/grammars/gnuplot-tmbundle:
- source.gnuplot
vendor/grammars/go-tmbundle:
- source.go
vendor/grammars/grace-tmbundle/:
vendor/grammars/grace:
- source.grace
vendor/grammars/gradle.tmbundle:
- source.groovy.gradle
|
||||
@@ -281,6 +299,8 @@ vendor/grammars/idl.tmbundle:
|
||||
- source.idl
|
||||
- source.idl-dlm
|
||||
- text.idl-idldoc
|
||||
vendor/grammars/idris/:
|
||||
- source.idris
|
||||
vendor/grammars/ini.tmbundle:
|
||||
- source.ini
|
||||
vendor/grammars/io.tmbundle:
|
||||
@@ -288,8 +308,8 @@ vendor/grammars/io.tmbundle:
|
||||
vendor/grammars/ioke-outdated:
|
||||
- source.ioke
|
||||
vendor/grammars/jade-tmbundle:
|
||||
- source.jade
|
||||
- source.pyjade
|
||||
- text.jade
|
||||
vendor/grammars/jasmin-sublime:
|
||||
- source.jasmin
|
||||
vendor/grammars/java.tmbundle:
|
||||
@@ -305,35 +325,74 @@ vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-apl:
|
||||
- source.apl
|
||||
vendor/grammars/language-babel/:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-click/:
|
||||
- source.click
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
vendor/grammars/language-coffee-script:
|
||||
- source.coffee
|
||||
- source.litcoffee
|
||||
vendor/grammars/language-crystal:
|
||||
- source.crystal
|
||||
- text.html.ecr
|
||||
vendor/grammars/language-csharp:
|
||||
- source.cake
|
||||
- source.cs
|
||||
- source.csx
|
||||
- source.nant-build
|
||||
vendor/grammars/language-csound:
|
||||
- source.csound
|
||||
- source.csound-document
|
||||
- source.csound-score
|
||||
vendor/grammars/language-gfm:
|
||||
- source.gfm
|
||||
vendor/grammars/language-graphql:
|
||||
- source.graphql
|
||||
vendor/grammars/language-hy:
|
||||
- source.hy
|
||||
vendor/grammars/language-inform7:
|
||||
- source.inform7
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
- source.js.regexp.replacement
|
||||
vendor/grammars/language-jsoniq/:
|
||||
- source.jq
|
||||
- source.xq
|
||||
vendor/grammars/language-less/:
|
||||
- source.css.less
|
||||
vendor/grammars/language-maxscript:
|
||||
- source.maxscript
|
||||
vendor/grammars/language-ncl:
|
||||
- source.ncl
|
||||
vendor/grammars/language-povray:
|
||||
- source.pov-ray sdl
|
||||
vendor/grammars/language-python:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-renpy:
|
||||
- source.renpy
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-supercollider:
|
||||
- source.supercollider
|
||||
vendor/grammars/language-toc-wow:
|
||||
- source.toc
|
||||
vendor/grammars/language-wavefront:
|
||||
- source.wavefront.mtl
|
||||
- source.wavefront.obj
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/language-yang/:
|
||||
- source.yang
|
||||
vendor/grammars/latex.tmbundle:
|
||||
- text.bibtex
|
||||
- text.log.latex
|
||||
@@ -341,8 +400,6 @@ vendor/grammars/latex.tmbundle:
|
||||
- text.tex.latex
|
||||
- text.tex.latex.beamer
|
||||
- text.tex.latex.memoir
|
||||
vendor/grammars/less.tmbundle:
|
||||
- source.css.less
|
||||
vendor/grammars/lilypond.tmbundle:
|
||||
- source.lilypond
|
||||
vendor/grammars/liquid.tmbundle:
|
||||
@@ -351,6 +408,8 @@ vendor/grammars/lisp.tmbundle:
|
||||
- source.lisp
|
||||
vendor/grammars/llvm.tmbundle:
|
||||
- source.llvm
|
||||
vendor/grammars/logos:
|
||||
- source.logos
|
||||
vendor/grammars/logtalk.tmbundle:
|
||||
- source.logtalk
|
||||
vendor/grammars/lua.tmbundle:
|
||||
@@ -370,16 +429,20 @@ vendor/grammars/mediawiki.tmbundle/:
|
||||
- text.html.mediawiki
|
||||
vendor/grammars/mercury-tmlanguage:
|
||||
- source.mercury
|
||||
vendor/grammars/monkey.tmbundle:
|
||||
vendor/grammars/monkey/:
|
||||
- source.monkey
|
||||
vendor/grammars/moonscript-tmbundle:
|
||||
- source.moonscript
|
||||
vendor/grammars/nemerle.tmbundle:
|
||||
- source.nemerle
|
||||
vendor/grammars/nesC.tmbundle:
|
||||
vendor/grammars/nesC:
|
||||
- source.nesc
|
||||
vendor/grammars/ninja.tmbundle:
|
||||
- source.ninja
|
||||
vendor/grammars/nix:
|
||||
- source.nix
|
||||
vendor/grammars/nu.tmbundle:
|
||||
- source.nu
|
||||
vendor/grammars/objective-c.tmbundle:
|
||||
- source.objc
|
||||
- source.objc++
|
||||
@@ -394,21 +457,25 @@ vendor/grammars/ooc.tmbundle:
|
||||
- source.ooc
|
||||
vendor/grammars/opa.tmbundle:
|
||||
- source.opa
|
||||
vendor/grammars/openscad.tmbundle/:
|
||||
- source.scad
|
||||
vendor/grammars/oracle.tmbundle:
|
||||
- source.plsql.oracle
|
||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
|
||||
- source.oz
|
||||
vendor/grammars/pascal.tmbundle:
|
||||
- source.pascal
|
||||
vendor/grammars/pawn-sublime-language/:
|
||||
- source.pawn
|
||||
vendor/grammars/perl.tmbundle/:
|
||||
- source.perl
|
||||
- source.perl.6
|
||||
vendor/grammars/perl6fe:
|
||||
- source.meta-info
|
||||
- source.perl6fe
|
||||
- source.regexp.perl6fe
|
||||
vendor/grammars/php-smarty.tmbundle:
|
||||
- text.html.smarty
|
||||
vendor/grammars/php.tmbundle:
|
||||
- text.html.php
|
||||
vendor/grammars/pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/pike-textmate:
|
||||
- source.pike
|
||||
vendor/grammars/postscript.tmbundle:
|
||||
@@ -417,12 +484,8 @@ vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
vendor/grammars/prolog.tmbundle:
|
||||
- source.prolog
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
- source.puppet
|
||||
vendor/grammars/python-django.tmbundle:
|
||||
- source.python.django
|
||||
- text.html.django
|
||||
@@ -454,30 +517,34 @@ vendor/grammars/smali-sublime/smali.tmLanguage:
|
||||
- source.smali
|
||||
vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sourcepawn/:
|
||||
- source.sp
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/st2-zonefile:
|
||||
- text.zone_file
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
- source.cm
|
||||
- source.ml
|
||||
vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-aspectj/:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-autoit/:
|
||||
- source.autoit
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
- source.ts
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
- source.cirru
|
||||
vendor/grammars/sublime-clips/:
|
||||
- source.clips
|
||||
vendor/grammars/sublime-glsl:
|
||||
- source.essl
|
||||
- source.glsl
|
||||
vendor/grammars/sublime-golo/:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-idris:
|
||||
- source.idris
|
||||
vendor/grammars/sublime-mask:
|
||||
- source.mask
|
||||
vendor/grammars/sublime-netlinx:
|
||||
@@ -485,26 +552,27 @@ vendor/grammars/sublime-netlinx:
|
||||
- source.netlinx.erb
|
||||
vendor/grammars/sublime-nginx:
|
||||
- source.nginx
|
||||
vendor/grammars/sublime-nix:
|
||||
- source.nix
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-pony:
|
||||
- source.pony
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
- source.rust
|
||||
vendor/grammars/sublime-sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sublime-spintools/:
|
||||
- source.regexp.spin
|
||||
- source.spin
|
||||
vendor/grammars/sublime-tea:
|
||||
- source.tea
|
||||
vendor/grammars/sublime-terra:
|
||||
- source.terra
|
||||
vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime-typescript/:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -515,6 +583,11 @@ vendor/grammars/sublime_cobol:
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimeassembly/:
|
||||
- source.assembly
|
||||
vendor/grammars/sublimeprolog/:
|
||||
- source.prolog
|
||||
- source.prolog.eclipse
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
- source.cuda-c++
|
||||
vendor/grammars/swift.tmbundle:
|
||||
@@ -531,8 +604,8 @@ vendor/grammars/turtle.tmbundle:
|
||||
- source.turtle
|
||||
vendor/grammars/verilog.tmbundle:
|
||||
- source.verilog
|
||||
vendor/grammars/x86-assembly-textmate-bundle:
|
||||
- source.asm.x86
|
||||
vendor/grammars/vue-syntax-highlight:
|
||||
- text.html.vue
|
||||
vendor/grammars/xc.tmbundle/:
|
||||
- source.xc
|
||||
vendor/grammars/xml.tmbundle:
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/generated'
|
||||
require 'linguist/grammars'
|
||||
require 'linguist/heuristics'
|
||||
require 'linguist/language'
|
||||
require 'linguist/repository'
|
||||
@@ -8,13 +9,91 @@ require 'linguist/shebang'
|
||||
require 'linguist/version'
|
||||
|
||||
class << Linguist
|
||||
# Public: Detects the Language of the blob.
|
||||
#
|
||||
# blob - an object that includes the Linguist `BlobHelper` interface;
|
||||
# see Linguist::LazyBlob and Linguist::FileBlob for examples
|
||||
#
|
||||
# Returns Language or nil.
|
||||
def detect(blob)
|
||||
# Bail early if the blob is binary or empty.
|
||||
return nil if blob.likely_binary? || blob.binary? || blob.empty?
|
||||
|
||||
Linguist.instrument("linguist.detection", :blob => blob) do
|
||||
# Call each strategy until one candidate is returned.
|
||||
languages = []
|
||||
returning_strategy = nil
|
||||
|
||||
STRATEGIES.each do |strategy|
|
||||
returning_strategy = strategy
|
||||
candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
|
||||
strategy.call(blob, languages)
|
||||
end
|
||||
if candidates.size == 1
|
||||
languages = candidates
|
||||
break
|
||||
elsif candidates.size > 1
|
||||
# More than one candidate was found, pass them to the next strategy.
|
||||
languages = candidates
|
||||
else
|
||||
# No candidates, try the next strategy
|
||||
end
|
||||
end
|
||||
|
||||
Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)
|
||||
|
||||
languages.first
|
||||
end
|
||||
end
|
||||
|
||||
# Internal: The strategies used to detect the language of a file.
|
||||
#
|
||||
# A strategy is an object that has a `.call` method that takes two arguments:
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
# languages - An Array of candidate Language objects that were returned by the
|
||||
# previous strategy.
|
||||
#
|
||||
# A strategy should return an Array of Language candidates.
|
||||
#
|
||||
# Strategies are called in turn until a single Language is returned.
|
||||
STRATEGIES = [
|
||||
Linguist::Strategy::Modeline,
|
||||
Linguist::Shebang,
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Heuristics,
|
||||
Linguist::Classifier
|
||||
]
|
||||
|
||||
# Public: Set an instrumenter.
|
||||
#
|
||||
# class CustomInstrumenter
|
||||
# def instrument(name, payload = {})
|
||||
# warn "Instrumenting #{name}: #{payload[:blob]}"
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# Linguist.instrumenter = CustomInstrumenter
|
||||
#
|
||||
# The instrumenter must conform to the `ActiveSupport::Notifications`
|
||||
# interface, which defines `#instrument` and accepts:
|
||||
#
|
||||
# name - the String name of the event (e.g. "linguist.detected")
|
||||
# payload - a Hash of the exception context.
|
||||
attr_accessor :instrumenter
|
||||
|
||||
# Internal: Perform instrumentation on a block
|
||||
#
|
||||
# Linguist.instrument("linguist.dosomething", :blob => blob) do
|
||||
# # logic to instrument here.
|
||||
# end
|
||||
#
|
||||
def instrument(*args, &bk)
|
||||
if instrumenter
|
||||
instrumenter.instrument(*args, &bk)
|
||||
else
|
||||
yield if block_given?
|
||||
elsif block_given?
|
||||
yield
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
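The hunk above introduces the top-level detection entry point and the pluggable instrumenter. A minimal usage sketch, assuming the gem from this branch is installed and run from a checkout so lib/linguist.rb exists; note that an instance (rather than the bare class shown in the comment) is assigned so the instance method is found:

    require 'linguist'

    # Instrumenter conforming to the #instrument(name, payload) { ... } interface
    # described above; it must run the block and return the block's result.
    class VerboseInstrumenter
      def instrument(name, payload = {})
        warn "instrumenting #{name}"
        yield if block_given?
      end
    end

    Linguist.instrumenter = VerboseInstrumenter.new

    blob = Linguist::FileBlob.new("lib/linguist.rb")
    language = Linguist.detect(blob)
    puts language && language.name   # prints "Ruby" for this file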
lib/linguist/blob.rb (73 lines, new file)
@@ -0,0 +1,73 @@
|
||||
require 'linguist/blob_helper'
|
||||
|
||||
module Linguist
|
||||
# A Blob is a wrapper around the content of a file to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new Blob.
|
||||
#
|
||||
# path - A path String (does not necessarily exist on the file system).
|
||||
# content - Content of the file.
|
||||
#
|
||||
# Returns a Blob.
|
||||
def initialize(path, content)
|
||||
@path = path
|
||||
@content = content
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Blob.new("/path/to/linguist/lib/linguist.rb", "").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@path)
|
||||
end
|
||||
|
||||
# Public: File contents.
|
||||
#
|
||||
# Returns a String.
|
||||
def data
|
||||
@content
|
||||
end
|
||||
|
||||
# Public: Get byte size
|
||||
#
|
||||
# Returns an Integer.
|
||||
def size
|
||||
@content.bytesize
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::Blob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
_, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
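For reference, a short sketch of how the new in-memory Blob behaves; the path and content below are invented and never touch the file system:

    require 'linguist'

    blob = Linguist::Blob.new("app/views/things/index.html.erb", "<h1><%= @thing.name %></h1>\n")
    blob.name        # => "index.html.erb"
    blob.size        # => 28, the byte size of the content string
    blob.extensions  # => [".html.erb", ".erb"]
    blob.extension   # => ".erb"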
@@ -6,7 +6,7 @@ require 'yaml'
|
||||
|
||||
module Linguist
|
||||
# DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
|
||||
# like `Language.detect` over `Blob#language`. Functions are much easier to
|
||||
# like `Linguist.detect` over `Blob#language`. Functions are much easier to
|
||||
# cache and compose.
|
||||
#
|
||||
# Avoid adding additional bloat to this module.
|
||||
@@ -325,7 +325,7 @@ module Linguist
|
||||
#
|
||||
# Returns a Language or nil if none is detected
|
||||
def language
|
||||
@language ||= Language.detect(self)
|
||||
@language ||= Linguist.detect(self)
|
||||
end
|
||||
|
||||
# Internal: Get the TextMate compatible scope for the blob
|
||||
|
||||
@@ -13,11 +13,18 @@
|
||||
- (^|/)[Dd]ocumentation/
|
||||
- (^|/)javadoc/
|
||||
- ^man/
|
||||
- ^[Ee]xamples/
|
||||
|
||||
## Documentation files ##
|
||||
|
||||
- (^|/)CHANGE(S|LOG)?(\.|$)
|
||||
- (^|/)CONTRIBUTING(\.|$)
|
||||
- (^|/)COPYING(\.|$)
|
||||
- (^|/)INSTALL(\.|$)
|
||||
- (^|/)LICEN[CS]E(\.|$)
|
||||
- (^|/)[Ll]icen[cs]e(\.|$)
|
||||
- (^|/)README(\.|$)
|
||||
- (^|/)[Rr]eadme(\.|$)
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
|
||||
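Documentation detection is purely path-based, so the effect of the new entries can be checked without real files; these paths are made up:

    require 'linguist'

    Linguist::FileBlob.new("CONTRIBUTING.md").documentation?       # => true (new rule)
    Linguist::FileBlob.new("documentation/api.md").documentation?  # => true (existing rule)
    Linguist::FileBlob.new("lib/linguist.rb").documentation?       # => false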
@@ -1,10 +1,11 @@
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/blob'
|
||||
|
||||
module Linguist
|
||||
# A FileBlob is a wrapper around a File object to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class FileBlob
|
||||
class FileBlob < Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new FileBlob from a path
|
||||
@@ -18,20 +19,6 @@ module Linguist
|
||||
@path = base_path ? path.sub("#{base_path}/", '') : path
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb",
|
||||
# "/path/to/linguist").path
|
||||
# # => "lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: Read file permissions
|
||||
#
|
||||
# Returns a String like '100644'
|
||||
@@ -39,13 +26,6 @@ module Linguist
|
||||
File.stat(@fullpath).mode.to_s(8)
|
||||
end
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Read file contents.
|
||||
#
|
||||
# Returns a String.
|
||||
@@ -59,26 +39,5 @@ module Linguist
|
||||
def size
|
||||
File.size(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -59,6 +59,7 @@ module Linguist
|
||||
godeps? ||
|
||||
generated_by_zephir? ||
|
||||
minified_files? ||
|
||||
has_source_map? ||
|
||||
source_map? ||
|
||||
compiled_coffeescript? ||
|
||||
generated_parser? ||
|
||||
@@ -69,8 +70,12 @@ module Linguist
|
||||
generated_protocol_buffer? ||
|
||||
generated_apache_thrift? ||
|
||||
generated_jni_header? ||
|
||||
vcr_cassette? ||
|
||||
generated_module? ||
|
||||
generated_unity3d_meta? ||
|
||||
vcr_cassette?
|
||||
generated_racc? ||
|
||||
generated_jflex? ||
|
||||
generated_grammarkit?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
@@ -100,6 +105,21 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
# Internal: Does the blob contain a source map reference?
|
||||
#
|
||||
# We assume that if one of the last 2 lines starts with a source map
|
||||
# reference, then the current file was generated from other files.
|
||||
#
|
||||
# We use the last 2 lines because the last line might be empty.
|
||||
#
|
||||
# We only handle JavaScript, no CSS support yet.
|
||||
#
|
||||
# Returns true or false.
|
||||
def has_source_map?
|
||||
return false unless extname.downcase == '.js'
|
||||
lines.last(2).any? { |line| line.start_with?('//# sourceMappingURL') }
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated source map?
|
||||
#
|
||||
# Source Maps usually have .css.map or .js.map extensions. In case they
|
||||
@@ -240,22 +260,26 @@ module Linguist
|
||||
return lines[0].include?("Code generated by")
|
||||
end
|
||||
|
||||
PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob a C++, Java or Python source file generated by the
|
||||
# Protocol Buffer compiler?
|
||||
#
|
||||
# Returns true or false.
|
||||
def generated_protocol_buffer?
|
||||
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless PROTOBUF_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob generated by Apache Thrift compiler?
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_apache_thrift?
|
||||
return false unless ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
|
||||
@@ -324,6 +348,24 @@ module Linguist
|
||||
return lines[0].include?("Generated by Cython")
|
||||
end
|
||||
|
||||
# Internal: Is it a KiCAD or GFortran module file?
|
||||
#
|
||||
# KiCAD module files contain:
|
||||
# PCBNEW-LibModule-V1 yyyy-mm-dd h:mm:ss XM
|
||||
# on the first line.
|
||||
#
|
||||
# GFortran module files contain:
|
||||
# GFORTRAN module version 'x' created from
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_module?
|
||||
return false unless extname == '.mod'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("PCBNEW-LibModule-V") ||
|
||||
lines[0].include?("GFORTRAN module version '")
|
||||
end
|
||||
|
||||
# Internal: Is this a metadata file from Unity3D?
|
||||
#
|
||||
# Unity3D Meta files start with:
|
||||
@@ -336,5 +378,45 @@ module Linguist
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("fileFormatVersion: ")
|
||||
end
|
||||
|
||||
# Internal: Is this a Racc-generated file?
|
||||
#
|
||||
# A Racc-generated file contains:
|
||||
# # This file is automatically generated by Racc x.y.z
|
||||
# on the third line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_racc?
|
||||
return false unless extname == '.rb'
|
||||
return false unless lines.count > 2
|
||||
return lines[2].start_with?("# This file is automatically generated by Racc")
|
||||
end
|
||||
|
||||
# Internal: Is this a JFlex-generated file?
|
||||
#
|
||||
# A JFlex-generated file contains:
|
||||
# /* The following code was generated by JFlex x.y.z on d/at/e ti:me */
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_jflex?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("/* The following code was generated by JFlex ")
|
||||
end
|
||||
|
||||
# Internal: Is this a GrammarKit-generated file?
|
||||
#
|
||||
# A GrammarKit-generated file typically contains:
|
||||
# // This is a generated file. Not intended for manual editing.
|
||||
# on the first line. This is not always the case, as it's possible to
|
||||
# customize the class header.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_grammarkit?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
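A small sketch of what two of the new generated-file checks catch; the file names and contents below are invented for illustration:

    require 'linguist'

    js = Linguist::Blob.new("dist/app.js",
                            "var x=1;\n//# sourceMappingURL=app.js.map\n")
    js.generated?    # => true, picked up by the new source-map check

    lexer = Linguist::Blob.new("Lexer.java",
                               "/* The following code was generated by JFlex 1.6.1 on 1/2/16 10:00 */\nclass Lexer {}\n")
    lexer.generated? # => true, picked up by generated_jflex?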
@@ -1,6 +1,3 @@
# Note: This file is included in the github-linguist-grammars gem, not the
# github-linguist gem.

module Linguist
module Grammars
# Get the path to the directory containing the language grammar JSON files.

@@ -13,11 +13,14 @@ module Linguist
|
||||
# ])
|
||||
#
|
||||
# Returns an Array of languages, or empty if none matched or were inconclusive.
|
||||
def self.call(blob, languages)
|
||||
def self.call(blob, candidates)
|
||||
data = blob.data
|
||||
|
||||
@heuristics.each do |heuristic|
|
||||
return Array(heuristic.call(data)) if heuristic.matches?(languages)
|
||||
if heuristic.matches?(blob.name)
|
||||
languages = Array(heuristic.call(data))
|
||||
return languages if languages.any? || languages.all? { |l| candidates.include?(l) }
|
||||
end
|
||||
end
|
||||
|
||||
[] # No heuristics matched
|
||||
@@ -30,7 +33,7 @@ module Linguist
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# disambiguate "Perl", "Prolog" do |data|
|
||||
# disambiguate ".pm" do |data|
|
||||
# if data.include?("use strict")
|
||||
# Language["Perl"]
|
||||
# elsif /^[^#]+:-/.match(data)
|
||||
@@ -38,22 +41,23 @@ module Linguist
|
||||
# end
|
||||
# end
|
||||
#
|
||||
def self.disambiguate(*languages, &heuristic)
|
||||
@heuristics << new(languages, &heuristic)
|
||||
def self.disambiguate(*extensions, &heuristic)
|
||||
@heuristics << new(extensions, &heuristic)
|
||||
end
|
||||
|
||||
# Internal: Array of defined heuristics
|
||||
@heuristics = []
|
||||
|
||||
# Internal
|
||||
def initialize(languages, &heuristic)
|
||||
@languages = languages
|
||||
def initialize(extensions, &heuristic)
|
||||
@extensions = extensions
|
||||
@heuristic = heuristic
|
||||
end
|
||||
|
||||
# Internal: Check if this heuristic matches the candidate languages.
|
||||
def matches?(candidates)
|
||||
candidates.any? && candidates.all? { |l| @languages.include?(l.name) }
|
||||
def matches?(filename)
|
||||
filename = filename.downcase
|
||||
@extensions.any? { |ext| filename.end_with?(ext) }
|
||||
end
|
||||
|
||||
# Internal: Perform the heuristic
|
||||
@@ -62,99 +66,9 @@ module Linguist
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
|
||||
|
||||
disambiguate "BitBake", "BlitzBasic" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
elsif /^\s*(# |include|require)\b/.match(data)
|
||||
Language["BitBake"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "C#", "Smalltalk" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Objective-C", "C++", "C" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Perl", "Perl6", "Prolog" do |data|
|
||||
if data.include?("use v6")
|
||||
Language["Perl6"]
|
||||
elsif data.match(/use strict|use\s+v?5\./)
|
||||
Language["Perl"]
|
||||
elsif /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "ECL", "Prolog" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "IDL", "Prolog", "INI", "QMake" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "GAP", "Scilab" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "OpenCL", "Cool" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^class/x.match(data)
|
||||
Language["Cool"]
|
||||
elsif /\/\* |\/\/ |^\}/.match(data)
|
||||
Language["OpenCL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Hack", "PHP" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Scala", "SuperCollider" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "AsciiDoc", "AGS Script", "Public Key" do |data|
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
@@ -164,15 +78,91 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "FORTRAN", "Forth", "Formatted" do |data|
|
||||
disambiguate ".bb" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
elsif /^\s*(# |include|require)\b/.match(data)
|
||||
Language["BitBake"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".builds" do |data|
|
||||
if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
|
||||
Language["XML"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cl" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^class/x.match(data)
|
||||
Language["Cool"]
|
||||
elsif /\/\* |\/\/ |^\}/.match(data)
|
||||
Language["OpenCL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["ECLiPSe"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".es" do |data|
|
||||
if /^\s*(?:%%|main\s*\(.*?\)\s*->)/.match(data)
|
||||
Language["Erlang"]
|
||||
elsif /(?:\/\/|("|')use strict\1|export\s+default\s|\/\*.*?\*\/)/m.match(data)
|
||||
Language["JavaScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for", ".f" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "F#", "Forth", "GLSL", "Filterscript" do |data|
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fs" do |data|
|
||||
if /^(: |new-device)/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(#light|import|let|module|namespace|open|type)/.match(data)
|
||||
@@ -184,7 +174,54 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Limbo", "M", "MUF", "Mathematica", "Matlab", "Mercury", "Objective-C" do |data|
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".inc" do |data|
|
||||
if /^<\?(?:php)?/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".m" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif data.include?(":- module")
|
||||
@@ -202,46 +239,144 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Gosu", "JavaScript" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate "LoomScript", "LiveScript" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "NewLisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "TypeScript", "XML" do |data|
|
||||
if data.include?("<TS ")
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Frege", "Forth", "Text" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
|
||||
Language["GAS"]
|
||||
else
|
||||
Language["MAXScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "PLSQL", "SQLPL", "PLpgSQL", "SQL" do |data|
|
||||
if /^\\i\b|AS \$\$|LANGUAGE '+plpgsql'+/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm", ".t" do |data|
|
||||
if /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pod" do |data|
|
||||
if /^=\w+$/.match(data)
|
||||
Language["Pod"]
|
||||
else
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".props" do |data|
|
||||
if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
|
||||
Language["XML"]
|
||||
elsif /\w+\s*=\s*/i.match(data)
|
||||
Language["INI"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rpy" do |data|
|
||||
if /(^(import|from|class|def)\s)/m.match(data)
|
||||
Language["Python"]
|
||||
else
|
||||
Language["Ren'Py"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sql" do |data|
|
||||
if /^\\i\b|AS \$\$|LANGUAGE '?plpgsql'?/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
|
||||
#Postgres
|
||||
Language["PLpgSQL"]
|
||||
elsif /(alter module)|(language sql)|(begin( NOT)+ atomic)/i.match(data) || /signal SQLSTATE '[0-9]+'/i.match(data)
|
||||
@@ -255,66 +390,29 @@ module Linguist
|
||||
Language["SQL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "D", "DTrace", "Makefile" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
|
||||
disambiguate ".toc" do |data|
|
||||
if /^## |@no-lib-strip@/.match(data)
|
||||
Language["World of Warcraft Addon Data"]
|
||||
elsif /^\\(contentsline|defcounter|beamer|boolfalse)/.match(data)
|
||||
Language["TeX"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "OCaml", "Standard ML" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "NL", "NewLisp" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
Language["TypeScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Rust", "RenderScript" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "Lex", "Groff", "PicoLisp" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Groff", "Nemerle" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "GAS", "Groff" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
|
||||
Language["GAS"]
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
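The heuristics are now keyed on file extensions instead of candidate language names. A rough sketch of one of the new-style rules in action; the blob contents here are invented:

    require 'linguist'

    blob = Linguist::Blob.new("rules.pl", "likes(alice, prolog) :- true.\n")
    Linguist::Heuristics.call(blob, []).map(&:name)   # => ["Prolog"]

    blob = Linguist::Blob.new("hello.pl", "use strict;\nprint \"hi\\n\";\n")
    Linguist::Heuristics.call(blob, []).map(&:name)   # => ["Perl"]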
@@ -87,14 +87,6 @@ module Linguist
|
||||
language
|
||||
end
|
||||
|
||||
STRATEGIES = [
|
||||
Linguist::Strategy::Modeline,
|
||||
Linguist::Shebang,
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Heuristics,
|
||||
Linguist::Classifier
|
||||
]
|
||||
|
||||
# Public: Detects the Language of the blob.
|
||||
#
|
||||
# blob - an object that includes the Linguist `BlobHelper` interface;
|
||||
@@ -102,34 +94,8 @@ module Linguist
|
||||
#
|
||||
# Returns Language or nil.
|
||||
def self.detect(blob)
|
||||
# Bail early if the blob is binary or empty.
|
||||
return nil if blob.likely_binary? || blob.binary? || blob.empty?
|
||||
|
||||
Linguist.instrument("linguist.detection", :blob => blob) do
|
||||
# Call each strategy until one candidate is returned.
|
||||
languages = []
|
||||
returning_strategy = nil
|
||||
|
||||
STRATEGIES.each do |strategy|
|
||||
returning_strategy = strategy
|
||||
candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
|
||||
strategy.call(blob, languages)
|
||||
end
|
||||
if candidates.size == 1
|
||||
languages = candidates
|
||||
break
|
||||
elsif candidates.size > 1
|
||||
# More than one candidate was found, pass them to the next strategy.
|
||||
languages = candidates
|
||||
else
|
||||
# No candidates, try the next strategy
|
||||
end
|
||||
end
|
||||
|
||||
Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)
|
||||
|
||||
languages.first
|
||||
end
|
||||
warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
|
||||
Linguist.detect(blob)
|
||||
end
|
||||
|
||||
# Public: Get all Languages
|
||||
@@ -150,7 +116,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_name(name)
|
||||
name && @name_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Language by one of its aliases.
|
||||
@@ -164,7 +131,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
name && @alias_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Languages by filename.
|
||||
@@ -240,7 +208,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.[](name)
|
||||
name && @index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@index[name.downcase] || @index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: A List of popular languages
|
||||
|
||||
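The lookup changes guard against empty names and fall back to the first comma-separated token. A sketch of the expected effect on this branch:

    require 'linguist'

    Linguist::Language[""]                         # => nil, empty names short-circuit
    Linguist::Language["Ruby"].name                # => "Ruby"
    Linguist::Language["C, C++"].name              # => "C", falls back to the first token
    Linguist::Language.find_by_alias("objc").name  # => "Objective-C"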
lib/linguist/languages.yml (620 lines changed, Normal file → Executable file); file diff suppressed because it is too large
@@ -4,7 +4,11 @@ require 'rugged'
|
||||
|
||||
module Linguist
|
||||
class LazyBlob
|
||||
GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
|
||||
GIT_ATTR = ['linguist-documentation',
|
||||
'linguist-language',
|
||||
'linguist-vendored',
|
||||
'linguist-generated']
|
||||
|
||||
GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
|
||||
GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
|
||||
|
||||
@@ -24,6 +28,7 @@ module Linguist
|
||||
@oid = oid
|
||||
@path = path
|
||||
@mode = mode
|
||||
@data = nil
|
||||
end
|
||||
|
||||
def git_attributes
|
||||
@@ -31,14 +36,6 @@ module Linguist
|
||||
name, GIT_ATTR, GIT_ATTR_FLAGS)
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
return super
|
||||
end
|
||||
end
|
||||
|
||||
def documentation?
|
||||
if attr = git_attributes['linguist-documentation']
|
||||
boolean_attribute(attr)
|
||||
@@ -47,6 +44,22 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def generated?
|
||||
if attr = git_attributes['linguist-generated']
|
||||
boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def language
|
||||
return @language if defined?(@language)
|
||||
|
||||
@@ -67,11 +80,15 @@ module Linguist
|
||||
@size
|
||||
end
|
||||
|
||||
def cleanup!
|
||||
@data.clear if @data
|
||||
end
|
||||
|
||||
protected
|
||||
|
||||
# Returns true if the attribute is present and not the string "false".
|
||||
def boolean_attribute(attr)
|
||||
attr != "false"
|
||||
def boolean_attribute(attribute)
|
||||
attribute != "false"
|
||||
end
|
||||
|
||||
def load_blob!
|
||||
|
||||
@@ -30,6 +30,9 @@ module Linguist
|
||||
@repository = repo
|
||||
@commit_oid = commit_oid
|
||||
|
||||
@old_commit_oid = nil
|
||||
@old_stats = nil
|
||||
|
||||
raise TypeError, 'commit_oid must be a commit SHA1' unless commit_oid.is_a?(String)
|
||||
end
|
||||
|
||||
@@ -126,12 +129,13 @@ module Linguist
|
||||
end
|
||||
|
||||
protected
|
||||
MAX_TREE_SIZE = 100_000
|
||||
|
||||
def compute_stats(old_commit_oid, cache = nil)
|
||||
return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE
|
||||
|
||||
old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree
|
||||
|
||||
read_index
|
||||
|
||||
diff = Rugged::Tree.diff(repository, old_tree, current_tree)
|
||||
|
||||
# Clear file map and fetch full diff if any .gitattributes files are changed
|
||||
@@ -150,14 +154,18 @@ module Linguist
|
||||
next if delta.binary
|
||||
|
||||
if [:added, :modified].include? delta.status
|
||||
# Skip submodules
|
||||
# Skip submodules and symlinks
|
||||
mode = delta.new_file[:mode]
|
||||
next if (mode & 040000) != 0
|
||||
mode_format = (mode & 0170000)
|
||||
next if mode_format == 0120000 || mode_format == 040000 || mode_format == 0160000
|
||||
|
||||
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
|
||||
|
||||
next unless blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
if blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
end
|
||||
|
||||
blob.cleanup!
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
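The new mode test masks the file-type bits out of the git tree-entry mode before comparing. The octal arithmetic, spelled out on the standard mode values (this snippet is standalone, not part of the diff):

    # 0120000 = symlink, 0040000 = tree/submodule dir, 0160000 = gitlink
    %w[100644 100755 120000 160000 040000].each do |mode|
      mode_format = mode.to_i(8) & 0170000
      skip = [0120000, 0040000, 0160000].include?(mode_format)
      puts "#{mode} -> #{format('%07o', mode_format)} skip=#{skip}"
    end
    # regular files (100644, 100755) are kept; symlinks, trees and gitlinks are skipped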
@@ -42,10 +42,10 @@ module Linguist
return unless script

# "python2.6" -> "python2"
script.sub! /(\.\d+)$/, ''
script.sub!(/(\.\d+)$/, '')

# #! perl -> perl
script.sub! /^#!\s*/, ''
script.sub!(/^#!\s*/, '')

# Check for multiline shebang hacks that call `exec`
if script == 'sh' &&

@@ -1,8 +1,23 @@
|
||||
module Linguist
|
||||
module Strategy
|
||||
class Modeline
|
||||
EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
|
||||
VimModeline = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
|
||||
EMACS_MODELINE = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
|
||||
|
||||
# First form vim modeline
|
||||
# [text]{white}{vi:|vim:|ex:}[white]{options}
|
||||
# ex: 'vim: syntax=ruby'
|
||||
VIM_MODELINE_1 = /(?:vim|vi|ex):\s*(?:ft|filetype|syntax)=(\w+)\s?/i
|
||||
|
||||
# Second form vim modeline (compatible with some versions of Vi)
|
||||
# [text]{white}{vi:|vim:|Vim:|ex:}[white]se[t] {options}:[text]
|
||||
# ex: 'vim set syntax=ruby:'
|
||||
VIM_MODELINE_2 = /(?:vim|vi|Vim|ex):\s*se(?:t)?.*\s(?:ft|filetype|syntax)=(\w+)\s?.*:/i
|
||||
|
||||
MODELINES = [EMACS_MODELINE, VIM_MODELINE_1, VIM_MODELINE_2]
|
||||
|
||||
# Scope of the search for modelines
|
||||
# Number of lines to check at the beginning and at the end of the file
|
||||
SEARCH_SCOPE = 5
|
||||
|
||||
# Public: Detects language based on Vim and Emacs modelines
|
||||
#
|
||||
@@ -15,14 +30,16 @@ module Linguist
|
||||
# Returns an Array with one Language if the blob has a Vim or Emacs modeline
|
||||
# that matches a Language name or alias. Returns an empty array if no match.
|
||||
def self.call(blob, _ = nil)
|
||||
Array(Language.find_by_alias(modeline(blob.data)))
|
||||
header = blob.lines.first(SEARCH_SCOPE).join("\n")
|
||||
footer = blob.lines.last(SEARCH_SCOPE).join("\n")
|
||||
Array(Language.find_by_alias(modeline(header + footer)))
|
||||
end
|
||||
|
||||
# Public: Get the modeline from the first n-lines of the file
|
||||
#
|
||||
# Returns a String or nil
|
||||
def self.modeline(data)
|
||||
match = data.match(EmacsModeline) || data.match(VimModeline)
|
||||
match = MODELINES.map { |regex| data.match(regex) }.reject(&:nil?).first
|
||||
match[1] if match
|
||||
end
|
||||
end
|
||||
|
||||
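A sketch of the reworked modeline strategy on an in-memory blob; only the first and last five lines are scanned now, and the content here is invented:

    require 'linguist'

    data = "#!/bin/sh\n" + "echo hi\n" * 20 + "# vim: set filetype=ruby:\n"
    blob = Linguist::Blob.new("script", data)
    Linguist::Strategy::Modeline.call(blob).map(&:name)   # => ["Ruby"]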
@@ -86,17 +86,17 @@ module Linguist
|
||||
if s.peek(1) == "\""
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/[^\\]"/)
|
||||
s.skip_until(/(?<!\\)"/)
|
||||
end
|
||||
elsif s.scan(/'/)
|
||||
if s.peek(1) == "'"
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/[^\\]'/)
|
||||
s.skip_until(/(?<!\\)'/)
|
||||
end
|
||||
|
||||
# Skip number literals
|
||||
elsif s.scan(/(0x)?\d(\d|\.)*/)
|
||||
elsif s.scan(/(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/)
|
||||
|
||||
# SGML style brackets
|
||||
elsif token = s.scan(/<[^\s<>][^<>]*>/)
|
||||
|
||||
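The widened number-literal pattern now also consumes hex digits, exponents and type suffixes. A standalone check of just that regex, copied out of the hunk above for illustration:

    NUMBER = /(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/
    %w[0xDEADbeef 1.5e-3f 42UL 3.14].each do |literal|
      puts "#{literal} -> #{NUMBER.match(literal)[0]}"
    end
    # each literal is now consumed as a single token rather than being split apart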
@@ -20,10 +20,17 @@
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)configure.ac$
- (^|/)config.guess$
- (^|/)config.sub$

# stuff autogenerated by autoconf - still C deps
- (^|/)aclocal.m4
- (^|/)libtool.m4
- (^|/)ltoptions.m4
- (^|/)ltsugar.m4
- (^|/)ltversion.m4
- (^|/)lt~obsolete.m4

# Linters
- cpplint.py

@@ -70,6 +77,7 @@
- 3rd[-_]?party/
- vendors?/
- extern(al)?/
- (^|/)[Vv]+endor/

# Debian packaging
- ^debian/
@@ -77,6 +85,9 @@
# Haxelib projects often contain a neko bytecode file named run.n
- run.n$

# Bootstrap Datepicker
- bootstrap-datepicker/

## Commonly Bundled JavaScript frameworks ##

# jQuery
@@ -87,6 +98,34 @@
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?\.(js|css)$
- (^|/)jquery\.(ui|effects)\.([^.]*)\.(js|css)$

# jQuery Gantt
- jquery.fn.gantt.js

# jQuery fancyBox
- jquery.fancybox.(js|css)

# Fuel UX
- fuelux.js

# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# Slick
- (^|/)slick\.\w+.js$

# Leaflet plugins
- (^|/)Leaflet\.Coordinates-\d+\.\d+\.\d+\.src\.js$
- leaflet.draw-src.js
- leaflet.draw.css
- Control.FullScreen.css
- Control.FullScreen.js
- leaflet.spin.js
- wicket-leaflet.js

# Sublime Text workspace files
- .sublime-project
- .sublime-workspace

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
@@ -121,7 +160,7 @@
- (^|/)Chart\.js$

# Codemirror
- (^|/)[Cc]ode[Mm]irror/(lib|mode|theme|addon|keymap)
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
- (^|/)shBrush([^.]*)\.js$
@@ -151,6 +190,7 @@

# django
- (^|/)admin_media/
- (^|/)env/

# Fabric
- ^fabfile\.py$
@@ -163,6 +203,11 @@

## Obj-C ##

# Xcode

- \.xctemplate/
- \.imageset/

# Carthage
- ^Carthage/

@@ -172,6 +217,17 @@
# Sparkle
- (^|/)Sparkle/

# Crashlytics
- Crashlytics.framework/

# Fabric
- Fabric.framework/

# git config files
- gitattributes$
- gitignore$
- gitmodules$

## Groovy ##

# Gradle
@@ -216,21 +272,9 @@
# Html5shiv
- (^|/)html5shiv\.js$

# Samples folders
- ^[Ss]amples/

# LICENSE, README, git config files
- ^COPYING$
- LICENSE$
- License$
- gitattributes$
- gitignore$
- gitmodules$
- ^README$
- ^readme$

# Test fixtures
- ^[Tt]ests?/fixtures/
- ^[Ss]pecs?/fixtures/

# PhoneGap/Cordova
- (^|/)cordova([^.]*)\.js$
@@ -266,3 +310,6 @@

# Android Google APIs
- (^|/)\.google_apis/

# Jenkins Pipeline
- ^Jenkinsfile$
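The vendor.yml entries above are regular expressions matched against file paths. A minimal sketch, not Linguist's actual loader, of how such a list can be applied, using only patterns copied from this hunk:

# A minimal sketch, not Linguist's actual implementation: combine a few of
# the path patterns shown above into one regexp and test candidate paths.
patterns = [
  '^Carthage/',
  '(^|/)html5shiv\.js$',
  '(^|/)\.google_apis/',
  '^Jenkinsfile$'
]

vendored = Regexp.union(patterns.map { |p| Regexp.new(p) })

puts vendored.match?('Carthage/Build/iOS/Alamofire.framework') # => true
puts vendored.match?('src/Jenkinsfile.groovy')                 # => false
puts vendored.match?('lib/linguist/language.rb')               # => false

Anchored patterns such as ^Jenkinsfile$ only hit a file at the repository root, while (^|/)-prefixed ones match at any directory depth.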
@@ -1,3 +1,3 @@
module Linguist
VERSION = "4.5.7"
VERSION = "4.8.5"
end
@@ -2,5 +2,6 @@
"repository": "https://github.com/github/linguist",
"dependencies": {
"season": "~>5.0"
}
},
"license": "MIT"
}
58  samples/AMPL/CT2.mod  Normal file
@@ -0,0 +1,58 @@
|
||||
param num_beams; # number of beams
|
||||
|
||||
param num_rows >= 1, integer; # number of rows
|
||||
param num_cols >= 1, integer; # number of columns
|
||||
|
||||
set BEAMS := 1 .. num_beams; # set of beams
|
||||
|
||||
set ROWS := 1 .. num_rows; # set of rows
|
||||
set COLUMNS := 1 .. num_cols; # set of columns
|
||||
|
||||
# values for entries of each beam
|
||||
param beam_values {BEAMS, ROWS, COLUMNS} >= 0;
|
||||
|
||||
# values of tumor
|
||||
param tumor_values {ROWS, COLUMNS} >= 0;
|
||||
|
||||
# values of critical area
|
||||
param critical_values {ROWS, COLUMNS} >= 0;
|
||||
|
||||
# critical maximum dosage requirement
|
||||
param critical_max;
|
||||
|
||||
# tumor minimum dosage requirement
|
||||
param tumor_min;
|
||||
|
||||
# dosage scalar of each beam
|
||||
var X {i in BEAMS} >= 0;
|
||||
|
||||
|
||||
# define the tumor area which includes the locations where tumor exists
|
||||
set tumor_area := {k in ROWS, h in COLUMNS: tumor_values[k,h] > 0};
|
||||
|
||||
# define critical area
|
||||
set critical_area := {k in ROWS, h in COLUMNS: critical_values[k,h] > 0};
|
||||
|
||||
var S {(k,h) in tumor_area} >= 0;
|
||||
var T {(k,h) in critical_area} >= 0;
|
||||
|
||||
# maximize total dosage in tumor area
|
||||
maximize total_tumor_dosage: sum {i in BEAMS} sum {(k,h) in tumor_area} X[i] * beam_values[i,k,h];
|
||||
|
||||
# minimize total dosage in critical area
|
||||
minimize total_critical_dosage: sum {i in BEAMS} sum {(k,h) in critical_area} X[i] * beam_values[i,k,h];
|
||||
|
||||
# minimize total tumor slack
|
||||
minimize total_tumor_slack: sum {(k,h) in tumor_area} S[k,h];
|
||||
|
||||
# minimize total critical area slack
|
||||
minimize total_critical_slack: sum {(k,h) in critical_area} T[k,h];
|
||||
|
||||
# total dosage at each tumor location [k,h] should be >= min tumor dosage with slack variable
|
||||
subject to tumor_limit {(k,h) in tumor_area} : sum {i in BEAMS} X[i] * beam_values[i,k,h] == tumor_min - S[k,h];
|
||||
|
||||
# total dosage at each critical location [k,h] should be = max critical dosage with slack variable
|
||||
subject to critical_limit {(k,h) in critical_area} : sum {i in BEAMS} X[i] * beam_values[i,k,h] == critical_max + T[k,h];
|
||||
|
||||
|
||||
|
||||
7  samples/APL/hashbang  Executable file
@@ -0,0 +1,7 @@
#!/usr/local/bin/apl --script
NEWLINE ← ⎕UCS 10
HEADERS ← 'Content-Type: text/plain', NEWLINE
HEADERS
⍝ ⎕←HEADERS
⍝ ⍕⎕TS
)OFF
70  samples/Alpine Abuild/filenames/APKBUILD  Normal file
@@ -0,0 +1,70 @@
|
||||
# Contributor: Natanael Copa <ncopa@alpinelinux.org>
|
||||
# Maintainer: Natanael Copa <ncopa@alpinelinux.org>
|
||||
pkgname=abuild
|
||||
pkgver=2.27.0
|
||||
_ver=${pkgver%_git*}
|
||||
pkgrel=0
|
||||
pkgdesc="Script to build Alpine Packages"
|
||||
url="http://git.alpinelinux.org/cgit/abuild/"
|
||||
arch="all"
|
||||
license="GPL2"
|
||||
depends="fakeroot sudo pax-utils openssl apk-tools>=2.0.7-r1 libc-utils
|
||||
attr tar pkgconf patch"
|
||||
if [ "$CBUILD" = "$CHOST" ]; then
|
||||
depends="$depends curl"
|
||||
fi
|
||||
makedepends_build="pkgconfig"
|
||||
makedepends_host="openssl-dev"
|
||||
makedepends="$makedepends_host $makedepends_build"
|
||||
install="$pkgname.pre-install $pkgname.pre-upgrade"
|
||||
subpackages="apkbuild-cpan:cpan apkbuild-gem-resolver:gems"
|
||||
options="suid"
|
||||
pkggroups="abuild"
|
||||
source="http://dev.alpinelinux.org/archive/abuild/abuild-$_ver.tar.xz
|
||||
"
|
||||
|
||||
_builddir="$srcdir/$pkgname-$_ver"
|
||||
prepare() {
|
||||
cd "$_builddir"
|
||||
for i in $source; do
|
||||
case $i in
|
||||
*.patch)
|
||||
msg "Applying $i"
|
||||
patch -p1 -i "$srcdir"/$i || return 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
sed -i -e "/^CHOST=/s/=.*/=$CHOST/" abuild.conf
|
||||
}
|
||||
|
||||
build() {
|
||||
cd "$_builddir"
|
||||
make || return 1
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "$_builddir"
|
||||
make install DESTDIR="$pkgdir" || return 1
|
||||
install -m 644 abuild.conf "$pkgdir"/etc/abuild.conf || return 1
|
||||
install -d -m 775 -g abuild "$pkgdir"/var/cache/distfiles || return 1
|
||||
}
|
||||
|
||||
cpan() {
|
||||
pkgdesc="Script to generate perl APKBUILD from CPAN"
|
||||
depends="perl perl-libwww perl-json"
|
||||
arch="noarch"
|
||||
mkdir -p "$subpkgdir"/usr/bin
|
||||
mv "$pkgdir"/usr/bin/apkbuild-cpan "$subpkgdir"/usr/bin/
|
||||
}
|
||||
|
||||
gems() {
|
||||
pkgdesc="APKBUILD dependency resolver for RubyGems"
|
||||
depends="ruby ruby-augeas"
|
||||
arch="noarch"
|
||||
mkdir -p "$subpkgdir"/usr/bin
|
||||
mv "$pkgdir"/usr/bin/apkbuild-gem-resolver "$subpkgdir"/usr/bin/
|
||||
}
|
||||
|
||||
md5sums="c67e4c971c54b4d550e16db3ba331f96 abuild-2.27.0.tar.xz"
|
||||
sha256sums="c8db017e3dd168edb20ceeb91971535cf66b8c95f29d3288f88ac755bffc60e5 abuild-2.27.0.tar.xz"
|
||||
sha512sums="98e1da4e47f3ab68700b3bc992c83e103f770f3196e433788ee74145f57cd33e5239c87f0a7a15f7266840d5bad893fc8c0d4c826d663df53deaee2678c56984 abuild-2.27.0.tar.xz"
|
||||
245  samples/Assembly/fp_sqr32_160_comba.inc  Normal file
@@ -0,0 +1,245 @@
|
||||
|
||||
push r2
|
||||
dint
|
||||
nop
|
||||
bis #MPYDLYWRTEN,&MPY32CTL0
|
||||
bic #MPYDLY32,&MPY32CTL0
|
||||
mov #SUMEXT,r13
|
||||
clr r12
|
||||
|
||||
mov @r15+,r4
|
||||
mov @r15+,r5
|
||||
|
||||
mov @r15+,r6
|
||||
mov @r15+,r7
|
||||
|
||||
mov @r15+,r8
|
||||
mov @r15+,r9
|
||||
|
||||
mov @r15+,r10
|
||||
mov @r15+,r11
|
||||
|
||||
sub #2*8,r15
|
||||
|
||||
/* SELF_STEP_FIRST */
|
||||
mov r4,&MPY32L
|
||||
mov r5,&MPY32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*0(r14)
|
||||
mov &RES1,2*(0+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*2(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(2+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*4(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(4+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*6(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(6+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov 2*8(r15),&OP2L
|
||||
mov 2*9(r15),&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov 2*8(r15),&OP2L
|
||||
mov 2*9(r15),&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
add @r13,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*8(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(8+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
mov 2*8(r15),r4
|
||||
mov 2*(8+1)(r15),r5
|
||||
|
||||
/* STEP_1 */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*10(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(10+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r10,&MAC32L
|
||||
mov r11,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*12(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(12+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r10,&MAC32L
|
||||
mov r11,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*14(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(14+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* SELF_STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*16(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(16+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* END */
|
||||
mov &RES0,2*18(r14)
|
||||
mov &RES1,2*(18+1)(r14)
|
||||
pop r2
|
||||
eint
|
||||
170  samples/Assembly/lib.inc  Normal file
@@ -0,0 +1,170 @@
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; 显示 AL 中的数字
|
||||
; ------------------------------------------------------------------------
|
||||
DispAL:
|
||||
push ecx
|
||||
push edx
|
||||
push edi
|
||||
|
||||
mov edi, [dwDispPos]
|
||||
|
||||
mov ah, 0Fh ; 0000b: 黑底 1111b: 白字
|
||||
mov dl, al
|
||||
shr al, 4
|
||||
mov ecx, 2
|
||||
.begin:
|
||||
and al, 01111b
|
||||
cmp al, 9
|
||||
ja .1
|
||||
add al, '0'
|
||||
jmp .2
|
||||
.1:
|
||||
sub al, 0Ah
|
||||
add al, 'A'
|
||||
.2:
|
||||
mov [gs:edi], ax
|
||||
add edi, 2
|
||||
|
||||
mov al, dl
|
||||
loop .begin
|
||||
;add edi, 2
|
||||
|
||||
mov [dwDispPos], edi
|
||||
|
||||
pop edi
|
||||
pop edx
|
||||
pop ecx
|
||||
|
||||
ret
|
||||
; DispAL 结束-------------------------------------------------------------
|
||||
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; 显示一个整形数
|
||||
; ------------------------------------------------------------------------
|
||||
DispInt:
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 24
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 16
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 8
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
call DispAL
|
||||
|
||||
mov ah, 07h ; 0000b: 黑底 0111b: 灰字
|
||||
mov al, 'h'
|
||||
push edi
|
||||
mov edi, [dwDispPos]
|
||||
mov [gs:edi], ax
|
||||
add edi, 4
|
||||
mov [dwDispPos], edi
|
||||
pop edi
|
||||
|
||||
ret
|
||||
; DispInt 结束------------------------------------------------------------
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; 显示一个字符串
|
||||
; ------------------------------------------------------------------------
|
||||
DispStr:
|
||||
push ebp
|
||||
mov ebp, esp
|
||||
push ebx
|
||||
push esi
|
||||
push edi
|
||||
|
||||
mov esi, [ebp + 8] ; pszInfo
|
||||
mov edi, [dwDispPos]
|
||||
mov ah, 0Fh
|
||||
.1:
|
||||
lodsb
|
||||
test al, al
|
||||
jz .2
|
||||
cmp al, 0Ah ; 是回车吗?
|
||||
jnz .3
|
||||
push eax
|
||||
mov eax, edi
|
||||
mov bl, 160
|
||||
div bl
|
||||
and eax, 0FFh
|
||||
inc eax
|
||||
mov bl, 160
|
||||
mul bl
|
||||
mov edi, eax
|
||||
pop eax
|
||||
jmp .1
|
||||
.3:
|
||||
mov [gs:edi], ax
|
||||
add edi, 2
|
||||
jmp .1
|
||||
|
||||
.2:
|
||||
mov [dwDispPos], edi
|
||||
|
||||
pop edi
|
||||
pop esi
|
||||
pop ebx
|
||||
pop ebp
|
||||
ret
|
||||
; DispStr 结束------------------------------------------------------------
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; 换行
|
||||
; ------------------------------------------------------------------------
|
||||
DispReturn:
|
||||
push szReturn
|
||||
call DispStr ;printf("\n");
|
||||
add esp, 4
|
||||
|
||||
ret
|
||||
; DispReturn 结束---------------------------------------------------------
|
||||
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; 内存拷贝,仿 memcpy
|
||||
; ------------------------------------------------------------------------
|
||||
; void* MemCpy(void* es:pDest, void* ds:pSrc, int iSize);
|
||||
; ------------------------------------------------------------------------
|
||||
MemCpy:
|
||||
push ebp
|
||||
mov ebp, esp
|
||||
|
||||
push esi
|
||||
push edi
|
||||
push ecx
|
||||
|
||||
mov edi, [ebp + 8] ; Destination
|
||||
mov esi, [ebp + 12] ; Source
|
||||
mov ecx, [ebp + 16] ; Counter
|
||||
.1:
|
||||
cmp ecx, 0 ; 判断计数器
|
||||
jz .2 ; 计数器为零时跳出
|
||||
|
||||
mov al, [ds:esi] ; ┓
|
||||
inc esi ; ┃
|
||||
; ┣ 逐字节移动
|
||||
mov byte [es:edi], al ; ┃
|
||||
inc edi ; ┛
|
||||
|
||||
dec ecx ; 计数器减一
|
||||
jmp .1 ; 循环
|
||||
.2:
|
||||
mov eax, [ebp + 8] ; 返回值
|
||||
|
||||
pop ecx
|
||||
pop edi
|
||||
pop esi
|
||||
mov esp, ebp
|
||||
pop ebp
|
||||
|
||||
ret ; 函数结束,返回
|
||||
; MemCpy 结束-------------------------------------------------------------
|
||||
|
||||
321  samples/Assembly/macros.inc  Normal file
@@ -0,0 +1,321 @@
|
||||
BLARGG_MACROS_INCLUDED = 1
|
||||
|
||||
; Allows extra error checking with modified version
|
||||
; of ca65. Otherwise acts like a constant of 0.
|
||||
ADDR = 0
|
||||
|
||||
; Switches to Segment and places Line there.
|
||||
; Line can be an .align directive, .res, .byte, etc.
|
||||
; Examples:
|
||||
; seg_data BSS, .align 256
|
||||
; seg_data RODATA, {message: .byte "Test",0}
|
||||
.macro seg_data Segment, Line
|
||||
.pushseg
|
||||
.segment .string(Segment)
|
||||
Line
|
||||
.popseg
|
||||
.endmacro
|
||||
|
||||
; Reserves Size bytes in Segment for Name.
|
||||
; If Size is omitted, reserves one byte.
|
||||
.macro seg_res Segment, Name, Size
|
||||
.ifblank Size
|
||||
seg_data Segment, Name: .res 1
|
||||
.else
|
||||
seg_data Segment, Name: .res Size
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Shortcuts for zeropage, bss, and stack
|
||||
.define zp_res seg_res ZEROPAGE,
|
||||
.define nv_res seg_res NVRAM,
|
||||
.define bss_res seg_res BSS,
|
||||
.define sp_res seg_res STACK,
|
||||
.define zp_byte zp_res
|
||||
|
||||
; Copies byte from Src to Addr. If Src begins with #,
|
||||
; it sets Addr to the immediate value.
|
||||
; Out: A = byte copied
|
||||
; Preserved: X, Y
|
||||
.macro mov Addr, Src
|
||||
lda Src
|
||||
sta Addr
|
||||
.endmacro
|
||||
|
||||
; Copies word from Src to Addr. If Src begins with #,
|
||||
; it sets Addr the immediate value.
|
||||
; Out: A = high byte of word
|
||||
; Preserved: X, Y
|
||||
.macro movw Addr, Src
|
||||
.if .match( .left( 1, {Src} ), # )
|
||||
lda #<(.right( .tcount( {Src} )-1, {Src} ))
|
||||
sta Addr
|
||||
lda #>(.right( .tcount( {Src} )-1, {Src} ))
|
||||
sta 1+(Addr)
|
||||
.else
|
||||
lda Src
|
||||
sta Addr
|
||||
lda 1+(Src)
|
||||
sta 1+(Addr)
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Increments 16-bit value at Addr.
|
||||
; Out: EQ/NE based on resulting 16-bit value
|
||||
; Preserved: A, X, Y
|
||||
.macro incw Addr
|
||||
.local @Skip
|
||||
inc Addr
|
||||
bne @Skip
|
||||
inc 1+(Addr)
|
||||
@Skip:
|
||||
.endmacro
|
||||
|
||||
; Adds Src to word at Addr.
|
||||
; Out: A = high byte of result, carry set appropriately
|
||||
; Preserved: X, Y
|
||||
.macro addw Addr, Src
|
||||
.if .match( .left( 1, {Src} ), # )
|
||||
addw_ Addr,(.right( .tcount( {Src} )-1, {Src} ))
|
||||
.else
|
||||
lda Addr
|
||||
clc
|
||||
adc Src
|
||||
sta Addr
|
||||
|
||||
lda 1+(Addr)
|
||||
adc 1+(Src)
|
||||
sta 1+(Addr)
|
||||
.endif
|
||||
.endmacro
|
||||
.macro addw_ Addr, Imm
|
||||
lda Addr
|
||||
clc
|
||||
adc #<Imm
|
||||
sta Addr
|
||||
|
||||
;.if (Imm >> 8) <> 0
|
||||
lda 1+(Addr)
|
||||
adc #>Imm
|
||||
sta 1+(Addr)
|
||||
;.else
|
||||
; .local @Skip
|
||||
; bcc @Skip
|
||||
; inc 1+(Addr)
|
||||
;@Skip:
|
||||
;.endif
|
||||
.endmacro
|
||||
|
||||
; Splits list of words into tables of low and high bytes
|
||||
; Example: split_words foo, {$1234, $5678}
|
||||
; expands to:
|
||||
; foo_l: $34, $78
|
||||
; foo_h: $12, $56
|
||||
; foo_count = 2
|
||||
.macro split_words Label, Words
|
||||
.ident (.concat (.string(Label), "_l")): .lobytes Words
|
||||
.ident (.concat (.string(Label), "_h")): .hibytes Words
|
||||
.ident (.concat (.string(Label), "_count")) = * - .ident (.concat (.string(Label), "_h"))
|
||||
.endmacro
|
||||
|
||||
.macro SELECT Bool, True, False, Extra
|
||||
.ifndef Bool
|
||||
False Extra
|
||||
.elseif Bool <> 0
|
||||
True Extra
|
||||
.else
|
||||
False Extra
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
.macro DEFAULT Name, Value
|
||||
.ifndef Name
|
||||
Name = Value
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
.ifp02
|
||||
; 6502 doesn't define these alternate names
|
||||
.define blt bcc
|
||||
.define bge bcs
|
||||
.endif
|
||||
.define jlt jcc
|
||||
.define jge jcs
|
||||
|
||||
; Jxx Target = Bxx Target, except it can go farther than
|
||||
; 128 bytes. Implemented via branch around a JMP.
|
||||
|
||||
; Don't use ca65's longbranch, because they fail for @labels
|
||||
;.macpack longbranch
|
||||
|
||||
.macro jeq Target
|
||||
bne *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jne Target
|
||||
beq *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jmi Target
|
||||
bpl *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jpl Target
|
||||
bmi *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jcs Target
|
||||
bcc *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jcc Target
|
||||
bcs *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jvs Target
|
||||
bvc *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jvc Target
|
||||
bvs *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
|
||||
; Passes constant data to routine in addr
|
||||
; Preserved: A, X, Y
|
||||
.macro jsr_with_addr routine,data
|
||||
.local Addr
|
||||
pha
|
||||
lda #<Addr
|
||||
sta addr
|
||||
lda #>Addr
|
||||
sta addr+1
|
||||
pla
|
||||
jsr routine
|
||||
seg_data RODATA,{Addr: data}
|
||||
.endmacro
|
||||
|
||||
; Calls routine multiple times, with A having the
|
||||
; value 'start' the first time, 'start+step' the
|
||||
; second time, up to 'end' for the last time.
|
||||
.macro for_loop routine,start,end,step
|
||||
.local @for_loop
|
||||
lda #start
|
||||
@for_loop:
|
||||
pha
|
||||
jsr routine
|
||||
pla
|
||||
clc
|
||||
adc #step
|
||||
cmp #<((end)+(step))
|
||||
bne @for_loop
|
||||
.endmacro
|
||||
|
||||
; Calls routine n times. The value of A in the routine
|
||||
; counts from 0 to n-1.
|
||||
.macro loop_n_times routine,n
|
||||
for_loop routine,0,n-1,+1
|
||||
.endmacro
|
||||
|
||||
; Same as for_loop, except uses 16-bit value in YX.
|
||||
; -256 <= step <= 255
|
||||
.macro for_loop16 routine,start,end,step
|
||||
.if (step) < -256 || (step) > 255
|
||||
.error "Step must be within -256 to 255"
|
||||
.endif
|
||||
.local @for_loop_skip
|
||||
.local @for_loop
|
||||
ldy #>(start)
|
||||
lda #<(start)
|
||||
@for_loop:
|
||||
tax
|
||||
pha
|
||||
tya
|
||||
pha
|
||||
jsr routine
|
||||
pla
|
||||
tay
|
||||
pla
|
||||
clc
|
||||
adc #step
|
||||
.if (step) > 0
|
||||
bcc @for_loop_skip
|
||||
iny
|
||||
.else
|
||||
bcs @for_loop_skip
|
||||
dey
|
||||
.endif
|
||||
@for_loop_skip:
|
||||
cmp #<((end)+(step))
|
||||
bne @for_loop
|
||||
cpy #>((end)+(step))
|
||||
bne @for_loop
|
||||
.endmacro
|
||||
|
||||
; Stores byte at addr
|
||||
; Preserved: X, Y
|
||||
.macro setb addr, byte
|
||||
lda #byte
|
||||
sta addr
|
||||
.endmacro
|
||||
|
||||
; Stores word at addr
|
||||
; Preserved: X, Y
|
||||
.macro setw addr, word
|
||||
lda #<(word)
|
||||
sta addr
|
||||
lda #>(word)
|
||||
sta addr+1
|
||||
.endmacro
|
||||
|
||||
; Loads XY with 16-bit immediate or value at address
|
||||
.macro ldxy Arg
|
||||
.if .match( .left( 1, {Arg} ), # )
|
||||
ldy #<(.right( .tcount( {Arg} )-1, {Arg} ))
|
||||
ldx #>(.right( .tcount( {Arg} )-1, {Arg} ))
|
||||
.else
|
||||
ldy (Arg)
|
||||
ldx (Arg)+1
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Increments XY as 16-bit register, in CONSTANT time.
|
||||
; Z flag set based on entire result.
|
||||
; Preserved: A
|
||||
; Time: 7 clocks
|
||||
.macro inxy
|
||||
iny ; 2
|
||||
beq *+4 ; 3
|
||||
; -1
|
||||
bne *+3 ; 3
|
||||
; -1
|
||||
inx ; 2
|
||||
.endmacro
|
||||
|
||||
; Negates A and adds it to operand
|
||||
.macro subaf Operand
|
||||
eor #$FF
|
||||
sec
|
||||
adc Operand
|
||||
.endmacro
|
||||
|
||||
; Initializes CPU registers to reasonable values
|
||||
; Preserved: A, Y
|
||||
.macro init_cpu_regs
|
||||
sei
|
||||
cld ; unnecessary on NES, but might help on clone
|
||||
ldx #$FF
|
||||
txs
|
||||
.ifndef BUILD_NSF
|
||||
inx
|
||||
stx PPUCTRL
|
||||
.endif
|
||||
.endmacro
|
||||
86  samples/C#/build.cake  Normal file
@@ -0,0 +1,86 @@
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// ARGUMENTS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var target = Argument<string>("target", "Default");
|
||||
var configuration = Argument<string>("configuration", "Release");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// GLOBAL VARIABLES
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var solutions = GetFiles("./**/*.sln");
|
||||
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// SETUP / TEARDOWN
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Setup(() =>
|
||||
{
|
||||
// Executed BEFORE the first task.
|
||||
Information("Running tasks...");
|
||||
});
|
||||
|
||||
Teardown(() =>
|
||||
{
|
||||
// Executed AFTER the last task.
|
||||
Information("Finished running tasks.");
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TASK DEFINITIONS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
// Clean solution directories.
|
||||
foreach(var path in solutionPaths)
|
||||
{
|
||||
Information("Cleaning {0}", path);
|
||||
CleanDirectories(path + "/**/bin/" + configuration);
|
||||
CleanDirectories(path + "/**/obj/" + configuration);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Restore all NuGet packages.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Restoring {0}...", solution);
|
||||
NuGetRestore(solution);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Build all solutions.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Building {0}", solution);
|
||||
MSBuild(solution, settings =>
|
||||
settings.SetPlatformTarget(PlatformTarget.MSIL)
|
||||
.WithProperty("TreatWarningsAsErrors","true")
|
||||
.WithTarget("Build")
|
||||
.SetConfiguration(configuration));
|
||||
}
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TARGETS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// EXECUTION
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
RunTarget(target);
|
||||
57  samples/C++/ClasspathVMSystemProperties.inc  Normal file
@@ -0,0 +1,57 @@
|
||||
//===- ClasspathVMSystem/Properties.cpp -----------------------------------===//
|
||||
//===--------------------- GNU classpath gnu/classpath/VMSystemProperties -===//
|
||||
//
|
||||
// The VMKit project
|
||||
//
|
||||
// This file is distributed under the University of Illinois Open Source
|
||||
// License. See LICENSE.TXT for details.
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include <sys/utsname.h>
|
||||
|
||||
#include "types.h"
|
||||
|
||||
#include "Classpath.h"
|
||||
#include "JavaArray.h"
|
||||
#include "JavaClass.h"
|
||||
#include "JavaObject.h"
|
||||
#include "JavaThread.h"
|
||||
#include "JavaUpcalls.h"
|
||||
#include "Jnjvm.h"
|
||||
|
||||
#include "SetProperties.inc"
|
||||
|
||||
using namespace j3;
|
||||
|
||||
extern "C" {
|
||||
|
||||
|
||||
JNIEXPORT void JNICALL Java_gnu_classpath_VMSystemProperties_preInit(
|
||||
#ifdef NATIVE_JNI
|
||||
JNIEnv *env,
|
||||
jclass clazz,
|
||||
#endif
|
||||
JavaObject* prop) {
|
||||
|
||||
llvm_gcroot(prop, 0);
|
||||
|
||||
BEGIN_NATIVE_EXCEPTION(0)
|
||||
|
||||
setProperties(prop);
|
||||
|
||||
END_NATIVE_EXCEPTION
|
||||
}
|
||||
|
||||
extern "C" void Java_gnu_classpath_VMSystemProperties_postInit__Ljava_util_Properties_2(JavaObject* prop) {
|
||||
|
||||
llvm_gcroot(prop, 0);
|
||||
|
||||
BEGIN_NATIVE_EXCEPTION(0)
|
||||
|
||||
setCommandLineProperties(prop);
|
||||
|
||||
END_NATIVE_EXCEPTION
|
||||
}
|
||||
|
||||
}
|
||||
2764  samples/C++/initClasses.inc  Normal file  (diff suppressed because it is too large)
34  samples/C++/instances.inc  Normal file
@@ -0,0 +1,34 @@
|
||||
#include "QPBO.h"
|
||||
|
||||
#ifdef _MSC_VER
|
||||
#pragma warning(disable: 4661)
|
||||
#endif
|
||||
|
||||
// Instantiations
|
||||
|
||||
template class QPBO<int>;
|
||||
template class QPBO<float>;
|
||||
template class QPBO<double>;
|
||||
|
||||
template <>
|
||||
inline void QPBO<int>::get_type_information(char*& type_name, char*& type_format)
|
||||
{
|
||||
type_name = "int";
|
||||
type_format = "d";
|
||||
}
|
||||
|
||||
template <>
|
||||
inline void QPBO<float>::get_type_information(char*& type_name, char*& type_format)
|
||||
{
|
||||
type_name = "float";
|
||||
type_format = "f";
|
||||
}
|
||||
|
||||
template <>
|
||||
inline void QPBO<double>::get_type_information(char*& type_name, char*& type_format)
|
||||
{
|
||||
type_name = "double";
|
||||
type_format = "Lf";
|
||||
}
|
||||
|
||||
|
||||
397  samples/C/main.c  Normal file
@@ -0,0 +1,397 @@
|
||||
// Copyright 2014 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "src/runtime/runtime-utils.h"
|
||||
|
||||
#include "src/arguments.h"
|
||||
#include "src/compiler.h"
|
||||
#include "src/deoptimizer.h"
|
||||
#include "src/frames-inl.h"
|
||||
#include "src/full-codegen/full-codegen.h"
|
||||
#include "src/isolate-inl.h"
|
||||
#include "src/messages.h"
|
||||
#include "src/v8threads.h"
|
||||
#include "src/vm-state-inl.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileLazy) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
|
||||
#ifdef DEBUG
|
||||
if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
|
||||
PrintF("[unoptimized: ");
|
||||
function->PrintName();
|
||||
PrintF("]\n");
|
||||
}
|
||||
#endif
|
||||
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileBaseline(function)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileOptimized(function, Compiler::CONCURRENT)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK_EQ(1, args.length());
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();
|
||||
if (!Compiler::CompileOptimized(function, Compiler::NOT_CONCURRENT)) {
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
DCHECK(function->is_compiled());
|
||||
return function->code();
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 0);
|
||||
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
|
||||
DCHECK(AllowHeapAllocation::IsAllowed());
|
||||
delete deoptimizer;
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
|
||||
class ActivationsFinder : public ThreadVisitor {
|
||||
public:
|
||||
Code* code_;
|
||||
bool has_code_activations_;
|
||||
|
||||
explicit ActivationsFinder(Code* code)
|
||||
: code_(code), has_code_activations_(false) {}
|
||||
|
||||
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
|
||||
JavaScriptFrameIterator it(isolate, top);
|
||||
VisitFrames(&it);
|
||||
}
|
||||
|
||||
void VisitFrames(JavaScriptFrameIterator* it) {
|
||||
for (; !it->done(); it->Advance()) {
|
||||
JavaScriptFrame* frame = it->frame();
|
||||
if (code_->contains(frame->pc())) has_code_activations_ = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_SMI_ARG_CHECKED(type_arg, 0);
|
||||
Deoptimizer::BailoutType type =
|
||||
static_cast<Deoptimizer::BailoutType>(type_arg);
|
||||
Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
|
||||
DCHECK(AllowHeapAllocation::IsAllowed());
|
||||
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
|
||||
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
|
||||
|
||||
Handle<JSFunction> function = deoptimizer->function();
|
||||
Handle<Code> optimized_code = deoptimizer->compiled_code();
|
||||
|
||||
DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
|
||||
DCHECK(type == deoptimizer->bailout_type());
|
||||
|
||||
// Make sure to materialize objects before causing any allocation.
|
||||
JavaScriptFrameIterator it(isolate);
|
||||
deoptimizer->MaterializeHeapObjects(&it);
|
||||
delete deoptimizer;
|
||||
|
||||
// Ensure the context register is updated for materialized objects.
|
||||
JavaScriptFrameIterator top_it(isolate);
|
||||
JavaScriptFrame* top_frame = top_it.frame();
|
||||
isolate->set_context(Context::cast(top_frame->context()));
|
||||
|
||||
if (type == Deoptimizer::LAZY) {
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
// Search for other activations of the same optimized code.
|
||||
// At this point {it} is at the topmost frame of all the frames materialized
|
||||
// by the deoptimizer. Note that this frame does not necessarily represent
|
||||
// an activation of {function} because of potential inlined tail-calls.
|
||||
ActivationsFinder activations_finder(*optimized_code);
|
||||
activations_finder.VisitFrames(&it);
|
||||
isolate->thread_manager()->IterateArchivedThreads(&activations_finder);
|
||||
|
||||
if (!activations_finder.has_code_activations_) {
|
||||
if (function->code() == *optimized_code) {
|
||||
if (FLAG_trace_deopt) {
|
||||
PrintF("[removing optimized code for: ");
|
||||
function->PrintName();
|
||||
PrintF("]\n");
|
||||
}
|
||||
function->ReplaceCode(function->shared()->code());
|
||||
}
|
||||
// Evict optimized code for this function from the cache so that it
|
||||
// doesn't get used for new closures.
|
||||
function->shared()->EvictFromOptimizedCodeMap(*optimized_code, "notify deoptimized");
|
||||
} else {
|
||||
// TODO(titzer): we should probably do DeoptimizeCodeList(code)
|
||||
// unconditionally if the code is not already marked for deoptimization.
|
||||
// If there is an index by shared function info, all the better.
|
||||
Deoptimizer::DeoptimizeFunction(*function);
|
||||
}
|
||||
|
||||
return isolate->heap()->undefined_value();
|
||||
}
|
||||
|
||||
|
||||
static bool IsSuitableForOnStackReplacement(
|
||||
Isolate* isolate,
|
||||
Handle<JSFunction> function
|
||||
) {
|
||||
// Keep track of whether we've succeeded in optimizing.
|
||||
if (function->shared()->optimization_disabled()) return false;
|
||||
// If we are trying to do OSR when there are already optimized
|
||||
// activations of the function, it means (a) the function is directly or
|
||||
// indirectly recursive and (b) an optimized invocation has been
|
||||
// deoptimized so that we are currently in an unoptimized activation.
|
||||
// Check for optimized activations of this function.
|
||||
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
|
||||
JavaScriptFrame* frame = it.frame();
|
||||
if (frame->is_optimized() && frame->function() == *function) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
Handle<Code> caller_code(function->shared()->code());
|
||||
|
||||
// We're not prepared to handle a function with arguments object.
|
||||
DCHECK(!function->shared()->uses_arguments());
|
||||
|
||||
RUNTIME_ASSERT(FLAG_use_osr);
|
||||
|
||||
// Passing the PC in the javascript frame from the caller directly is
|
||||
// not GC safe, so we walk the stack to get it.
|
||||
JavaScriptFrameIterator it(isolate);
|
||||
JavaScriptFrame* frame = it.frame();
|
||||
if (!caller_code->contains(frame->pc())) {
|
||||
// Code on the stack may not be the code object referenced by the shared
|
||||
// function info. It may have been replaced to include deoptimization data.
|
||||
caller_code = Handle<Code>(frame->LookupCode());
|
||||
}
|
||||
|
||||
uint32_t pc_offset =
|
||||
static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());
|
||||
|
||||
#ifdef DEBUG
|
||||
DCHECK_EQ(frame->function(), *function);
|
||||
DCHECK_EQ(frame->LookupCode(), *caller_code);
|
||||
DCHECK(caller_code->contains(frame->pc()));
|
||||
#endif // DEBUG
|
||||
|
||||
BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
|
||||
DCHECK(!ast_id.IsNone());
|
||||
|
||||
MaybeHandle<Code> maybe_result;
|
||||
if (IsSuitableForOnStackReplacement(isolate, function)) {
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Compiling: ");
|
||||
function->PrintName();
|
||||
PrintF(" at -*- scheme -*- %d]\n", ast_id.ToInt());
|
||||
}
|
||||
maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
|
||||
}
|
||||
|
||||
// Revert the patched back edge table, regardless of whether OSR succeeds.
|
||||
BackEdgeTable::Revert(isolate, *caller_code);
|
||||
|
||||
// Check whether we ended up with usable optimized code.
|
||||
Handle<Code> result;
|
||||
if (maybe_result.ToHandle(&result)
|
||||
&& result->kind() == Code::OPTIMIZED_FUNCTION) {
|
||||
DeoptimizationInputData* data =
|
||||
DeoptimizationInputData::cast(result->deoptimization_data());
|
||||
|
||||
if (data->OsrPcOffset()->value() >= 0) {
|
||||
DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
|
||||
ast_id.ToInt(), data->OsrPcOffset()->value());
|
||||
}
|
||||
// TODO(titzer): this is a massive hack to make the deopt counts
|
||||
// match. Fix heuristics for reenabling optimizations!
|
||||
function->shared()->increment_deopt_count();
|
||||
|
||||
if (result->is_turbofanned()) {
|
||||
// TurboFanned OSR code cannot be installed into the function.
|
||||
// But the function is obviously hot, so optimize it next time.
|
||||
function->ReplaceCode(
|
||||
isolate->builtins()->builtin(Builtins::kCompileOptimized));
|
||||
} else {
|
||||
// Crankshafted OSR code can be installed into the function.
|
||||
function->ReplaceCode(*result);
|
||||
}
|
||||
return *result;
|
||||
}
|
||||
}
|
||||
|
||||
// Failed.
|
||||
if (FLAG_trace_osr) {
|
||||
PrintF("[OSR - Failed: ");
|
||||
function->PrintName();
|
||||
PrintF(" at AST id %d]\n", ast_id.ToInt());
|
||||
}
|
||||
|
||||
if (!function->IsOptimized()) {
|
||||
function->ReplaceCode(function->shared()->code());
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 1);
|
||||
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
|
||||
|
||||
// First check if this is a real stack overflow.
|
||||
StackLimitCheck check(isolate);
|
||||
if (check.JsHasOverflowed()) {
|
||||
SealHandleScope shs(isolate);
|
||||
return isolate->StackOverflow();
|
||||
}
|
||||
|
||||
isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
|
||||
return (function->IsOptimized())
|
||||
? function->code()
|
||||
: function->shared()->code();
|
||||
}
|
||||
|
||||
|
||||
bool CodeGenerationFromStringsAllowed(
|
||||
Isolate* isolate,
|
||||
Handle<Context> context
|
||||
){
|
||||
DCHECK(context->allow_code_gen_from_strings()->IsFalse());
|
||||
// Check with callback if set.
|
||||
AllowCodeGenerationFromStringsCallback callback =
|
||||
isolate->allow_code_gen_callback();
|
||||
if (callback == NULL) {
|
||||
// No callback set and code generation disallowed.
|
||||
return false;
|
||||
} else {
|
||||
// Callback set. Let it decide if code generation is allowed.
|
||||
VMState<EXTERNAL> state(isolate);
|
||||
return callback(v8::Utils::ToLocal(context));
|
||||
}
|
||||
}
|
||||
|
||||
static Object* CompileGlobalEval(
|
||||
Isolate* isolate,
|
||||
Handle<String> source,
|
||||
Handle<SharedFunctionInfo> outer_info,
|
||||
LanguageMode language_mode,
|
||||
int eval_scope_position,
|
||||
int eval_position
|
||||
){
|
||||
Handle<Context> context = Handle<Context>(isolate->context());
|
||||
Handle<Context> native_context = Handle<Context>(context->native_context());
|
||||
|
||||
// Check if native context allows code generation from
|
||||
// strings. Throw an exception if it doesn't.
|
||||
if (native_context->allow_code_gen_from_strings()->IsFalse() &&
|
||||
!CodeGenerationFromStringsAllowed(isolate, native_context)) {
|
||||
Handle<Object> error_message =
|
||||
native_context->ErrorMessageForCodeGenerationFromStrings();
|
||||
Handle<Object> error;
|
||||
MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
|
||||
MessageTemplate::kCodeGenFromStrings, error_message);
|
||||
if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
|
||||
return isolate->heap()->exception();
|
||||
}
|
||||
|
||||
// Deal with a normal eval call with a string argument. Compile it
|
||||
// and return the compiled function bound in the local context.
|
||||
static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
|
||||
Handle<JSFunction> compiled;
|
||||
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
|
||||
isolate, compiled,
|
||||
Compiler::GetFunctionFromEval(
|
||||
source, outer_info, context, language_mode,
|
||||
restriction, eval_scope_position, eval_position
|
||||
),
|
||||
isolate->heap()->exception()
|
||||
);
|
||||
return *compiled;
|
||||
}
|
||||
|
||||
|
||||
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
|
||||
HandleScope scope(isolate);
|
||||
DCHECK(args.length() == 6);
|
||||
|
||||
Handle<Object> callee = args.at<Object>(0);
|
||||
|
||||
// If "eval" didn't refer to the original GlobalEval, it's not a
|
||||
// direct call to eval.
|
||||
// (And even if it is, but the first argument isn't a string, just let
|
||||
// execution default to an indirect call to eval, which will also return
|
||||
// the first argument without doing anything).
|
||||
if (*callee != isolate->native_context()->global_eval_fun() || !args[1]->IsString()) {
|
||||
return *callee;
|
||||
}
|
||||
|
||||
DCHECK(args[3]->IsSmi());
|
||||
DCHECK(is_valid_language_mode(args.smi_at(3)));
|
||||
LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
|
||||
DCHECK(args[4]->IsSmi());
|
||||
Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(), isolate);
|
||||
return CompileGlobalEval(
|
||||
isolate,
|
||||
args.at<String>(1),
|
||||
outer_info,
|
||||
language_mode,
|
||||
args.smi_at(4),
|
||||
args.smi_at(5)
|
||||
);
|
||||
}
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
/* vim: set shiftwidth=4 softtabstop=0 cindent cinoptions={1s: */
|
||||
|
||||
@@ -1,3 +1,3 @@
Year,Make,Model,Length
1997,Ford,E350,2.34
2000,Mercury,Cougar,2.38
2000,Mercury,Cougar,2.38
6  samples/Charity/example.ch  Normal file
@@ -0,0 +1,6 @@
%
% Some very badly written Charity
%

data LA(A) -> D = ss: A -> D
| ff: -> D.
133  samples/Click/sr2.click  Normal file
@@ -0,0 +1,133 @@
|
||||
rates :: AvailableRates
|
||||
elementclass sr2 {
|
||||
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
|
||||
|
||||
|
||||
arp :: ARPTable();
|
||||
lt :: LinkTable(IP $sr2_ip);
|
||||
|
||||
|
||||
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
PERIOD 15,
|
||||
GW $gateway);
|
||||
|
||||
|
||||
gw -> SR2SetChecksum -> [0] output;
|
||||
|
||||
set_gw :: SR2SetGateway(SEL gw);
|
||||
|
||||
|
||||
es :: SR2ETTStat(ETHTYPE 0x0641,
|
||||
ETH $wireless_mac,
|
||||
IP $sr2_ip,
|
||||
PERIOD 30000,
|
||||
TAU 300000,
|
||||
ARP arp,
|
||||
PROBES $probes,
|
||||
ETT metric,
|
||||
RT rates);
|
||||
|
||||
|
||||
metric :: SR2ETTMetric(LT lt);
|
||||
|
||||
|
||||
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
ARP arp,
|
||||
LT lt);
|
||||
|
||||
|
||||
querier :: SR2Querier(ETH $wireless_mac,
|
||||
SR forwarder,
|
||||
LT lt,
|
||||
ROUTE_DAMPENING true,
|
||||
TIME_BEFORE_SWITCH 5,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG false);
|
||||
|
||||
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_responder -> SR2SetChecksum -> [0] output;
|
||||
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
|
||||
query_forwarder [1] -> query_responder;
|
||||
|
||||
data_ck :: SR2SetChecksum()
|
||||
|
||||
input [1]
|
||||
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> querier
|
||||
-> data_ck;
|
||||
|
||||
|
||||
host_cl [1] -> [0] set_gw [0] -> querier;
|
||||
|
||||
forwarder[0]
|
||||
-> dt ::DecIPTTL
|
||||
-> data_ck
|
||||
-> [2] output;
|
||||
|
||||
|
||||
dt[1]
|
||||
-> Print(ttl-error)
|
||||
-> ICMPError($sr2_ip, timeexceeded, 0)
|
||||
-> querier;
|
||||
|
||||
|
||||
// queries
|
||||
querier [1] -> [1] query_forwarder;
|
||||
es -> SetTimestamp() -> [1] output;
|
||||
|
||||
|
||||
forwarder[1] //ip packets to me
|
||||
-> SR2StripHeader()
|
||||
-> CheckIPHeader()
|
||||
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> [3] output;
|
||||
|
||||
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
|
||||
|
||||
input [0]
|
||||
-> ncl :: Classifier(
|
||||
12/0643 , //sr2_forwarder
|
||||
12/0644 , //sr2
|
||||
12/0645 , //replies
|
||||
12/0641 , //sr2_es
|
||||
12/062c , //sr2_gw
|
||||
);
|
||||
|
||||
|
||||
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
|
||||
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
|
||||
ncl[2] -> SR2CheckHeader() -> query_responder;
|
||||
ncl[3] -> es;
|
||||
ncl[4] -> SR2CheckHeader() -> gw;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
|
||||
Idle -> [1] s;
|
||||
s[1] -> Discard;
|
||||
s[2] -> Discard;
|
||||
s[3] -> Discard;
|
||||
142  samples/Click/thomer-nat.click  Normal file
@@ -0,0 +1,142 @@
|
||||
// This Click configuration implements a firewall and NAT, roughly based on the
|
||||
// mazu-nat.click example.
|
||||
//
|
||||
// This example assumes there is one interface that is IP-aliased. In this
|
||||
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
|
||||
// respectively. There is a local network, 192.168.1.0/24, and an upstream
|
||||
// gateway, 66.58.65.89. Traffic from the local network is NATed.
|
||||
//
|
||||
// Connections can be initiated from the NAT box itself, also.
|
||||
//
|
||||
// For bugs, suggestions, and, corrections, please email me.
|
||||
//
|
||||
// Author: Thomer M. Gil (click@thomer.com)
|
||||
|
||||
AddressInfo(
|
||||
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
|
||||
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
|
||||
gw-addr 66.58.65.89 00:20:6f:14:54:c2
|
||||
);
|
||||
|
||||
|
||||
elementclass SniffGatewayDevice {
|
||||
$device |
|
||||
from :: FromDevice($device)
|
||||
-> t1 :: Tee
|
||||
-> output;
|
||||
input -> q :: Queue(1024)
|
||||
-> t2 :: PullTee
|
||||
-> to :: ToDevice($device);
|
||||
t1[1] -> ToHostSniffers;
|
||||
t2[1] -> ToHostSniffers($device);
|
||||
ScheduleInfo(from .1, to 1);
|
||||
}
|
||||
|
||||
|
||||
device :: SniffGatewayDevice(eth0);
|
||||
arpq_in :: ARPQuerier(eth0-in) -> device;
|
||||
ip_to_extern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> EtherEncap(0x800, eth0-ex, gw-addr)
|
||||
-> device;
|
||||
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
|
||||
-> ToHost;
|
||||
ip_to_intern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> arpq_in;
|
||||
|
||||
|
||||
arp_class :: Classifier(
|
||||
12/0806 20/0001, // [0] ARP requests
|
||||
12/0806 20/0002, // [1] ARP replies to host
|
||||
12/0800); // [2] IP packets
|
||||
|
||||
device -> arp_class;
|
||||
|
||||
// ARP crap
|
||||
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
|
||||
arp_class[1] -> arp_t :: Tee;
|
||||
arp_t[0] -> ToHost;
|
||||
arp_t[1] -> [1]arpq_in;
|
||||
|
||||
|
||||
// IP packets
|
||||
arp_class[2] -> Strip(14)
|
||||
-> CheckIPHeader
|
||||
-> ipclass :: IPClassifier(dst host eth0-ex,
|
||||
dst host eth0-in,
|
||||
src net eth0-in);
|
||||
|
||||
// Define pattern NAT
|
||||
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
|
||||
|
||||
// Rewriting rules for UDP/TCP packets
|
||||
// output[0] rewritten to go into the wild
|
||||
// output[1] rewritten to come back from the wild or no match
|
||||
rw :: IPRewriter(pattern NAT 0 1,
|
||||
pass 1);
|
||||
|
||||
// Rewriting rules for ICMP packets
|
||||
irw :: ICMPPingRewriter(eth0-ex, -);
|
||||
irw[0] -> ip_to_extern;
|
||||
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
|
||||
icmp_me_or_intern[0] -> ip_to_host;
|
||||
icmp_me_or_intern[1] -> ip_to_intern;
|
||||
|
||||
// Rewriting rules for ICMP error packets
|
||||
ierw :: ICMPRewriter(rw irw);
|
||||
ierw[0] -> icmp_me_or_intern;
|
||||
ierw[1] -> icmp_me_or_intern;
|
||||
|
||||
|
||||
// Packets directed at eth0-ex.
|
||||
// Send it through IPRewriter(pass). If there was a mapping, it will be
|
||||
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
|
||||
// eth0-ex.
|
||||
ipclass[0] -> [1]rw;
|
||||
|
||||
// packets that were rewritten, heading into the wild world.
|
||||
rw[0] -> ip_to_extern;
|
||||
|
||||
// packets that come back from the wild or are not part of an established
|
||||
// connection.
|
||||
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
|
||||
dst net eth0-in);
|
||||
|
||||
// not established yet or returning packets for a connection that was
|
||||
// established from this host itself.
|
||||
established_class[0] ->
|
||||
firewall :: IPClassifier(dst tcp port ssh,
|
||||
dst tcp port smtp,
|
||||
dst tcp port domain,
|
||||
dst udp port domain,
|
||||
icmp type echo-reply,
|
||||
proto icmp,
|
||||
port > 4095,
|
||||
-);
|
||||
|
||||
firewall[0] -> ip_to_host; // ssh
|
||||
firewall[1] -> ip_to_host; // smtp
|
||||
firewall[2] -> ip_to_host; // domain (t)
|
||||
firewall[3] -> ip_to_host; // domain (u)
|
||||
firewall[4] -> [0]irw; // icmp reply
|
||||
firewall[5] -> [0]ierw; // other icmp
|
||||
firewall[6] -> ip_to_host; // port > 4095, probably for connection
|
||||
// originating from host itself
|
||||
firewall[7] -> Discard; // don't allow incoming for port <= 4095
|
||||
|
||||
// established connection
|
||||
established_class[1] -> ip_to_intern;
|
||||
|
||||
// To eth0-in. Only accept from inside network.
|
||||
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
|
||||
|
||||
// Packets from eth0-in:net either stay on local network or go to the wild.
|
||||
// Those that go into the wild need to go through the appropriate rewriting
|
||||
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
|
||||
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
|
||||
inter_class[0] -> ip_to_intern;
|
||||
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
|
||||
icmp type echo);
|
||||
ip_udp_class[0] -> [0]rw;
|
||||
ip_udp_class[1] -> [0]irw;
|
||||
17  samples/CoffeeScript/build.cake  Normal file
@@ -0,0 +1,17 @@
fs = require 'fs'

{print} = require 'sys'
{spawn} = require 'child_process'

build = (callback) ->
  coffee = spawn 'coffee', ['-c', '-o', '.', '.']
  coffee.stderr.on 'data', (data) ->
    process.stderr.write data.toString()
  coffee.stdout.on 'data', (data) ->
    print data.toString()
  coffee.on 'exit', (code) ->
    callback?() if code is 0

task 'build', 'Build from source', ->
  build()
2  samples/Common Lisp/config.sexp  Normal file
@@ -0,0 +1,2 @@
((exe_name hello)
(link_order (world hello)))
103  samples/Common Lisp/rss.sexp  Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
(:TURTLE
|
||||
|
||||
(:@PREFIX "rdf:" "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>")
|
||||
(:@PREFIX "owl:" "<http://www.w3.org/2002/07/owl#>")
|
||||
(:@PREFIX "dc:" "<http://purl.org/dc/elements/1.1/>")
|
||||
(:@PREFIX "xsd:" "<http://www.w3.org/2001/XMLSchema#>")
|
||||
(:@PREFIX "rdfs:" "<http://www.w3.org/2000/01/rdf-schema#>")
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/channel>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1="<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")
|
||||
(:OBJECTS
|
||||
(:QNAME "rdfs:Class")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "An RSS information channel.")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS
|
||||
(:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:label")
|
||||
(:OBJECTS
|
||||
(:STRING "Channel"))))
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/description>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1#)
|
||||
(:OBJECTS
|
||||
(:QNAME "rdf:Property")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A short text description of the subject.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Description")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:description"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/image>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS image.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Image"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/item>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS item.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Item"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/items>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "Points to a list of rss:item elements that are members of the subject channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Items"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/link>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The URL to which an HTML rendering of the subject will link.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Link")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/name>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The text input field's (variable) name.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Name"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/textinput>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS text input.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Text Input"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/title>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A descriptive title for the channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Title")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:title"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/url>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING
|
||||
"The URL of the image to used in the 'src' attribute of the channel's image tag when rendered as HTML.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "URL")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier")))))
|
||||
21
samples/Csound Document/allglass.csd
Executable file
@@ -0,0 +1,21 @@
<CsoundSynthesizer>
<CsInstruments>
sr = 44100
kr = 44100
ksmps = 1
nchnls = 2

; pvanal -n 512 -w 8 allglass1-L.wav allglass1-L.pvc
; pvanal -n 512 -w 8 allglass1-R.wav allglass1-R.pvc
instr 1
ktime line 0, p3, 17.5018
arL pvoc ktime, 1, "allglass1-L.pvc"
arR pvoc ktime, 1, "allglass1-R.pvc"
out arL, arR
endin
</CsInstruments>
<CsScore>
i 1 0 70.0073
e
</CsScore>
</CsoundSynthesizer>
34
samples/Csound Document/interp.csd
Executable file
@@ -0,0 +1,34 @@
<CsoundSynthesizer>
<CsInstruments>
sr = 44100
kr = 44100
ksmps = 1
nchnls = 2

; pvanal -n 1024 -w 2 partA-L.wav partA-L.pvc
; pvanal -n 1024 -w 2 partA-R.wav partA-R.pvc
; pvanal -n 1024 -w 2 partB.wav partB.pvc
instr 1
iscale = 1

ktimpnt1 line 0, iscale*(82196/44100), 82196/44100
ktimpnt2 linseg 0, iscale*1.25, 0, iscale*(103518/44100), 103518/44100
kfreqscale linseg 1, iscale*0.5, 1, iscale*1.6, 0.8
kfreqinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
kampinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
kfreqinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1
kampinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1

pvbufread ktimpnt1, "partB.pvc"
apvcL pvinterp ktimpnt2, 1, "partA-L.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpL, 1-kampinterpL
pvbufread ktimpnt1, "partB.pvc"
apvcR pvinterp ktimpnt2, 1, "partA-R.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpR, 1-kampinterpR

outs apvcL*0.8, apvcR*0.8
endin
</CsInstruments>
<CsScore>
i 1 0 7
e
</CsScore>
</CsoundSynthesizer>
253
samples/Csound Document/test.csd
Normal file
@@ -0,0 +1,253 @@
|
||||
<CsoundSynthesizer>
|
||||
<CsInstruments>
|
||||
// Csound single-line comments can be preceded by a pair of forward slashes...
|
||||
; ...or a semicolon.
|
||||
|
||||
/* Block comments begin with /* and end with */
|
||||
|
||||
// Orchestras begin with a header of audio parameters.
|
||||
nchnls = 1
|
||||
nchnls_i = 1
|
||||
sr = 44100
|
||||
0dbfs = 1
|
||||
ksmps = 10
|
||||
|
||||
// The control rate kr = sr / ksmps can be omitted when the number of audio
|
||||
// samples in a control period (ksmps) is set, but kr may appear in older
|
||||
// orchestras.
|
||||
kr = 4410
|
||||
|
||||
// Orchestras contain instruments. These begin with the keyword instr followed
|
||||
// by a comma-separated list of numbers or names of the instrument. Instruments
|
||||
// end at the endin keyword and cannot be nested.
|
||||
instr 1, N_a_M_e_, +Name
|
||||
// Instruments contain statements. Here is a typical statement:
|
||||
aSignal oscil 0dbfs, 440, 1
|
||||
// Statements are terminated with a newline (possibly preceded by a comment).
|
||||
// To write a statement on several lines, precede the newline with a
|
||||
// backslash.
|
||||
prints \
|
||||
"hello, world\n";comment
|
||||
|
||||
// Csound 6 introduced function syntax for opcodes with one or zero outputs.
|
||||
// The oscil statement above is the same as
|
||||
aSignal = oscil(0dbfs, 440, 1)
|
||||
|
||||
// Instruments can contain control structures.
|
||||
kNote = p3
|
||||
if (kNote == 0) then
|
||||
kFrequency = 220
|
||||
elseif kNote == 1 then // Parentheses around binary expressions are optional.
|
||||
kFrequency = 440
|
||||
endif
|
||||
|
||||
// Csound 6 introduced looping structures.
|
||||
iIndex = 0
|
||||
while iIndex < 5 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
od
|
||||
iIndex = 0
|
||||
until iIndex >= 5 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
enduntil
|
||||
// Both kinds of loops can be terminated by either od or enduntil.
|
||||
|
||||
// Single-line strings are enclosed in double-quotes.
|
||||
prints "string\\\r\n\t\""
|
||||
// Multi-line strings are enclosed in pairs of curly braces.
|
||||
prints {{
|
||||
hello,
|
||||
|
||||
world
|
||||
}}
|
||||
|
||||
// Instruments often end with a statement containing an output opcode.
|
||||
outc aSignal
|
||||
endin
|
||||
|
||||
// Orchestras can also contain user-defined opcodes (UDOs). Here is an
|
||||
// oscillator with one audio-rate output and two control-rate inputs:
|
||||
opcode anOscillator, a, kk
|
||||
kAmplitude, kFrequency xin
|
||||
aSignal vco2 kAmplitude, kFrequency
|
||||
xout aSignal
|
||||
endop
|
||||
instr TestOscillator
|
||||
outc(anOscillator(0dbfs, 110))
|
||||
endin
|
||||
|
||||
// Python can be executed in Csound
|
||||
// <https://csound.github.io/docs/manual/pyrun.html>. So can Lua
|
||||
// <https://csound.github.io/docs/manual/lua.html>.
|
||||
pyruni {{
|
||||
import random
|
||||
|
||||
pool = [(1 + i / 10.0) ** 1.2 for i in range(100)]
|
||||
|
||||
def get_number_from_pool(n, p):
|
||||
if random.random() < p:
|
||||
i = int(random.random() * len(pool))
|
||||
pool[i] = n;
|
||||
return random.choice(pool)
|
||||
}}
|
||||
|
||||
// The Csound preprocessor supports conditional compilation and including files.
|
||||
#ifdef DEBUG
|
||||
#undef DEBUG
|
||||
#include "filename.orc"
|
||||
#endif
|
||||
|
||||
// The preprocessor also supports object- and function-like macros. This is an
|
||||
// object-like macro that defines a number:
|
||||
#define A_HZ #440#
|
||||
|
||||
// This is a function-like macro:
|
||||
#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
|
||||
// Bodies of macros are enclosed in # and can contain newlines. The arguments of
|
||||
// function-like macros are separated by single-quotes. Uses of macros are
|
||||
// prefixed with a dollar sign.
|
||||
instr TestMacro
|
||||
aSignal $OSCIL_MACRO(1'$A_HZ'1)
|
||||
// Not unlike PHP, macros expand in double-quoted strings.
|
||||
prints "The frequency of the oscillator is $A_HZ Hz.\n"
|
||||
out aSignal
|
||||
endin
|
||||
|
||||
// Here are other things to note about Csound.
|
||||
|
||||
// There are two bitwise NOT operators, ~ and ¬ (U+00AC). The latter is common
|
||||
// on keyboards in the United Kingdom
|
||||
// <https://en.wikipedia.org/wiki/British_and_American_keyboards>.
|
||||
instr TestBitwiseNOT
|
||||
print ~42
|
||||
print ¬42
|
||||
endin
|
||||
|
||||
// Csound uses # for bitwise XOR, which the Csound manual calls bitwise
|
||||
// non-equivalence <https://csound.github.io/docs/manual/opnonequiv.html>.
|
||||
instr TestBitwiseXOR
|
||||
print 0 # 0
|
||||
print 0 # 1
|
||||
print 1 # 0
|
||||
print 1 # 1
|
||||
endin
|
||||
|
||||
// Loops and if-then statements are relatively recent additions to Csound. There
|
||||
// are many flow-control opcodes that involve goto and labels.
|
||||
instr TestGoto
|
||||
// This...
|
||||
if p3 > 0 goto if_label
|
||||
goto else_label
|
||||
if_label:
|
||||
prints "if branch\n"
|
||||
goto endif_label
|
||||
else_label:
|
||||
prints "else branch\n"
|
||||
endif_label:
|
||||
|
||||
// ...is the same as this.
|
||||
if p3 > 0 then
|
||||
prints "if branch\n"
|
||||
else
|
||||
prints "else branch\n"
|
||||
endif
|
||||
|
||||
// This...
|
||||
iIndex = 0
|
||||
loop_label:
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
if iIndex < 10 goto loop_label
|
||||
|
||||
// ...is the same as this...
|
||||
iIndex = 0
|
||||
loop_lt_label:
|
||||
print iIndex
|
||||
loop_lt iIndex, 1, 10, loop_lt_label
|
||||
|
||||
// ...and this.
|
||||
iIndex = 0
|
||||
while iIndex < 10 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
od
|
||||
endin
|
||||
|
||||
// The prints and printks opcodes
|
||||
// <https://github.com/csound/csound/blob/develop/OOps/ugrw1.c#L831>, arguably
|
||||
// the primary methods of logging output, treat certain sequences of characters
|
||||
// different from printf in C.
|
||||
instr TestPrints
|
||||
// ^ prints an ESCAPE character (U+001B), not a CIRCUMFLEX ACCENT character
|
||||
// (U+005E). ^^ prints a CIRCUMFLEX ACCENT.
|
||||
prints "^^\n"
|
||||
// ~ prints an ESCAPE character (U+001B) followed by a [, not a TILDE
|
||||
// character (U+007E). ~~ prints a TILDE.
|
||||
prints "~~\n"
|
||||
// \A, \B, \N, \R, and \T correspond to the escaped lowercase characters (that
|
||||
// is, BELL (U+0007), BACKSPACE (U+0008), new line (U+000A), CARRIAGE RETURN
|
||||
// (U+000D), and tab (U+0009)).
|
||||
prints "\T\R\N"
|
||||
// %n, %r, and %t are the same as \n, \r, and \t, as are %N, %R, and %T.
|
||||
prints "%t%r%n"
|
||||
// %! prints a semicolon. This is a hold-over from old versions of Csound that
|
||||
// allowed comments to begin in strings.
|
||||
prints "; %!\n"
|
||||
endin
|
||||
|
||||
// The arguments of function-like macros can be separated by # instead of '.
|
||||
// These two lines define the same macro.
|
||||
#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
#define OSCIL_MACRO(VOLUME#FREQUENCY#TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
|
||||
// Uses of macros can optionally be suffixed with a period.
|
||||
instr TestMacroPeriodSuffix
|
||||
aSignal $OSCIL_MACRO.(1'$A_HZ'1)
|
||||
prints "The frequency of the oscillator is $A_HZ.Hz.\n"
|
||||
out aSignal
|
||||
endin
|
||||
|
||||
// Csound has @ and @@ operator-like macros that, when followed by a literal
|
||||
// non-negative integer, expand to the next power of 2 and the next power of 2
|
||||
// plus 1:
|
||||
// @x = 2^(ceil(log2(x + 1))), x >= 0
|
||||
// @@0 = 2
|
||||
// @@x = 2^(ceil(log2(x))) + 1, x > 0
|
||||
instr TestAt
|
||||
prints "%d %2d %2d\n", 0, @0, @@0
|
||||
prints "%d %2d %2d\n", 1, @1, @@1
|
||||
prints "%d %2d %2d\n", 2, @2, @@2
|
||||
prints "%d %2d %2d\n", 3, @3, @@3
|
||||
prints "%d %2d %2d\n", 4, @4, @@4
|
||||
prints "%d %2d %2d\n", 5, @5, @@5
|
||||
prints "%d %2d %2d\n", 6, @6, @@6
|
||||
prints "%d %2d %2d\n", 7, @7, @@7
|
||||
prints "%d %2d %2d\n", 8, @8, @@8
|
||||
prints "%d %2d %2d\n", 9, @9, @@9
|
||||
endin
|
||||
|
||||
// Including newlines in macros can lead to confusing code.
|
||||
instr MacroAbuse
|
||||
if 1 == 1 then
|
||||
prints "on\n"
|
||||
#define FOO#
|
||||
BAR
|
||||
#endif // This ends the if block. It is not a preprocessor directive.
|
||||
endin
|
||||
</CsInstruments>
|
||||
<CsScore>
|
||||
f 1 0 16384 10 1
|
||||
i "N_a_M_e_" 0 2
|
||||
i "TestOscillator" 2 2
|
||||
i "TestBitwiseNOT" 0 1
|
||||
i "TestBitwiseXOR" 0 1
|
||||
i "TestGoto" 0 1
|
||||
i "TestMacroPeriodSuffix" 4 1
|
||||
i "TestAt" 0 1
|
||||
i "MacroAbuse" 0 1
|
||||
e
|
||||
</CsScore>
|
||||
</CsoundSynthesizer>
|
||||
2
samples/Csound Score/allglass.sco
Executable file
@@ -0,0 +1,2 @@
i 1 0 70.0073
e
2
samples/Csound Score/interp.sco
Executable file
@@ -0,0 +1,2 @@
i 1 0 7
e
10
samples/Csound Score/test.sco
Normal file
@@ -0,0 +1,10 @@
f 1 0 16384 10 1
i "N_a_M_e_" 0 2
i "TestOscillator" 2 2
i "TestBitwiseNOT" 0 1
i "TestBitwiseXOR" 0 1
i "TestGoto" 0 1
i "TestMacroPeriodSuffix" 4 1
i "TestAt" 0 1
i "MacroAbuse" 0 1
e
13
samples/Csound/allglass.orc
Executable file
@@ -0,0 +1,13 @@
sr = 44100
kr = 44100
ksmps = 1
nchnls = 2

; pvanal -n 512 -w 8 allglass1-L.wav allglass1-L.pvc
; pvanal -n 512 -w 8 allglass1-R.wav allglass1-R.pvc
instr 1
ktime line 0, p3, 17.5018
arL pvoc ktime, 1, "allglass1-L.pvc"
arR pvoc ktime, 1, "allglass1-R.pvc"
out arL, arR
endin
26
samples/Csound/interp.orc
Executable file
@@ -0,0 +1,26 @@
sr = 44100
kr = 44100
ksmps = 1
nchnls = 2

; pvanal -n 1024 -w 2 partA-L.wav partA-L.pvc
; pvanal -n 1024 -w 2 partA-R.wav partA-R.pvc
; pvanal -n 1024 -w 2 partB.wav partB.pvc
instr 1
iscale = 1

ktimpnt1 line 0, iscale*(82196/44100), 82196/44100
ktimpnt2 linseg 0, iscale*1.25, 0, iscale*(103518/44100), 103518/44100
kfreqscale linseg 1, iscale*0.5, 1, iscale*1.6, 0.8
kfreqinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
kampinterpL linseg 0, iscale*0.25, 0, iscale*1.6, 1
kfreqinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1
kampinterpR linseg 0, iscale*0.5, 0, iscale*1.2, 1

pvbufread ktimpnt1, "partB.pvc"
apvcL pvinterp ktimpnt2, 1, "partA-L.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpL, 1-kampinterpL
pvbufread ktimpnt1, "partB.pvc"
apvcR pvinterp ktimpnt2, 1, "partA-R.pvc", kfreqscale, 1, 1, 1, 1-kfreqinterpR, 1-kampinterpR

outs apvcL*0.8, apvcR*0.8
endin
250
samples/Csound/test.orc
Normal file
@@ -0,0 +1,250 @@
|
||||
// Csound single-line comments can be preceded by a pair of forward slashes...
|
||||
; ...or a semicolon.
|
||||
|
||||
/* Block comments begin with /* and end with */
|
||||
|
||||
// Orchestras begin with a header of audio parameters.
|
||||
nchnls = 1
|
||||
nchnls_i = 1
|
||||
sr = 44100
|
||||
0dbfs = 1
|
||||
ksmps = 10
|
||||
|
||||
// The control rate kr = sr / ksmps can be omitted when the number of audio
|
||||
// samples in a control period (ksmps) is set, but kr may appear in older
|
||||
// orchestras.
|
||||
kr = 4410
|
||||
|
||||
// Orchestras contain instruments. These begin with the keyword instr followed
|
||||
// by a comma-separated list of numbers or names of the instrument. Instruments
|
||||
// end at the endin keyword and cannot be nested.
|
||||
instr 1, N_a_M_e_, +Name
|
||||
// Instruments contain statements. Here is a typical statement:
|
||||
aSignal oscil 0dbfs, 440, 1
|
||||
// Statements are terminated with a newline (possibly preceded by a comment).
|
||||
// To write a statement on several lines, precede the newline with a
|
||||
// backslash.
|
||||
prints \
|
||||
"hello, world\n";comment
|
||||
|
||||
// Csound 6 introduced function syntax for opcodes with one or zero outputs.
|
||||
// The oscil statement above is the same as
|
||||
aSignal = oscil(0dbfs, 440, 1)
|
||||
|
||||
// Instruments can contain control structures.
|
||||
kNote = p3
|
||||
if (kNote == 0) then
|
||||
kFrequency = 220
|
||||
elseif kNote == 1 then // Parentheses around binary expressions are optional.
|
||||
kFrequency = 440
|
||||
endif
|
||||
|
||||
// Csound 6 introduced looping structures.
|
||||
iIndex = 0
|
||||
while iIndex < 5 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
od
|
||||
iIndex = 0
|
||||
until iIndex >= 5 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
enduntil
|
||||
// Both kinds of loops can be terminated by either od or enduntil.
|
||||
|
||||
// Single-line strings are enclosed in double-quotes.
|
||||
prints "string\\\r\n\t\""
|
||||
// Multi-line strings are enclosed in pairs of curly braces.
|
||||
prints {{
|
||||
hello,
|
||||
|
||||
world
|
||||
}}
|
||||
|
||||
// Instruments often end with a statement containing an output opcode.
|
||||
outc aSignal
|
||||
endin
|
||||
|
||||
// Orchestras can also contain user-defined opcodes (UDOs). Here is an
|
||||
// oscillator with one audio-rate output and two control-rate inputs:
|
||||
opcode anOscillator, a, kk
|
||||
kAmplitude, kFrequency xin
|
||||
aSignal vco2 kAmplitude, kFrequency
|
||||
xout aSignal
|
||||
endop
|
||||
instr TestOscillator
|
||||
outc(anOscillator(0dbfs, 110))
|
||||
endin
|
||||
|
||||
// Python can be executed in Csound
|
||||
// <https://csound.github.io/docs/manual/pyrun.html>. So can Lua
|
||||
// <https://csound.github.io/docs/manual/lua.html>.
|
||||
pyruni {{
|
||||
import random
|
||||
|
||||
pool = [(1 + i / 10.0) ** 1.2 for i in range(100)]
|
||||
|
||||
def get_number_from_pool(n, p):
|
||||
if random.random() < p:
|
||||
i = int(random.random() * len(pool))
|
||||
pool[i] = n;
|
||||
return random.choice(pool)
|
||||
}}
|
||||
|
||||
// The Csound preprocessor supports conditional compilation and including files.
|
||||
#ifdef DEBUG
|
||||
#undef DEBUG
|
||||
#include "filename.orc"
|
||||
#endif
|
||||
|
||||
// The preprocessor also supports object- and function-like macros. This is an
|
||||
// object-like macro that defines a number:
|
||||
#define A_HZ #440#
|
||||
|
||||
// This is a function-like macro:
|
||||
#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
|
||||
// Bodies of macros are enclosed in # and can contain newlines. The arguments of
|
||||
// function-like macros are separated by single-quotes. Uses of macros are
|
||||
// prefixed with a dollar sign.
|
||||
instr TestMacro
|
||||
aSignal $OSCIL_MACRO(1'$A_HZ'1)
|
||||
// Not unlike PHP, macros expand in double-quoted strings.
|
||||
prints "The frequency of the oscillator is $A_HZ Hz.\n"
|
||||
out aSignal
|
||||
endin
|
||||
|
||||
// Here are other things to note about Csound.
|
||||
|
||||
// There are two bitwise NOT operators, ~ and ¬ (U+00AC). The latter is common
|
||||
// on keyboards in the United Kingdom
|
||||
// <https://en.wikipedia.org/wiki/British_and_American_keyboards>.
|
||||
instr TestBitwiseNOT
|
||||
print ~42
|
||||
print ¬42
|
||||
endin
|
||||
|
||||
// Csound uses # for bitwise XOR, which the Csound manual calls bitwise
|
||||
// non-equivalence <https://csound.github.io/docs/manual/opnonequiv.html>.
|
||||
instr TestBitwiseXOR
|
||||
print 0 # 0
|
||||
print 0 # 1
|
||||
print 1 # 0
|
||||
print 1 # 1
|
||||
endin
|
||||
|
||||
// Loops and if-then statements are relatively recent additions to Csound. There
|
||||
// are many flow-control opcodes that involve goto and labels.
|
||||
instr TestGoto
|
||||
// This...
|
||||
if p3 > 0 goto if_label
|
||||
goto else_label
|
||||
if_label:
|
||||
prints "if branch\n"
|
||||
goto endif_label
|
||||
else_label:
|
||||
prints "else branch\n"
|
||||
endif_label:
|
||||
|
||||
// ...is the same as this.
|
||||
if p3 > 0 then
|
||||
prints "if branch\n"
|
||||
else
|
||||
prints "else branch\n"
|
||||
endif
|
||||
|
||||
// This...
|
||||
iIndex = 0
|
||||
loop_label:
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
if iIndex < 10 goto loop_label
|
||||
|
||||
// ...is the same as this...
|
||||
iIndex = 0
|
||||
loop_lt_label:
|
||||
print iIndex
|
||||
loop_lt iIndex, 1, 10, loop_lt_label
|
||||
|
||||
// ...and this.
|
||||
iIndex = 0
|
||||
while iIndex < 10 do
|
||||
print iIndex
|
||||
iIndex += 1
|
||||
od
|
||||
endin
|
||||
|
||||
// The prints and printks opcodes
|
||||
// <https://github.com/csound/csound/blob/develop/OOps/ugrw1.c#L831>, arguably
|
||||
// the primary methods of logging output, treat certain sequences of characters
|
||||
// different from printf in C.
|
||||
instr TestPrints
|
||||
// ^ prints an ESCAPE character (U+001B), not a CIRCUMFLEX ACCENT character
|
||||
// (U+005E). ^^ prints a CIRCUMFLEX ACCENT.
|
||||
prints "^^\n"
|
||||
// ~ prints an ESCAPE character (U+001B) followed by a [, not a TILDE
|
||||
// character (U+007E). ~~ prints a TILDE.
|
||||
prints "~~\n"
|
||||
// \A, \B, \N, \R, and \T correspond to the escaped lowercase characters (that
|
||||
// is, BELL (U+0007), BACKSPACE (U+0008), new line (U+000A), CARRIAGE RETURN
|
||||
// (U+000D), and tab (U+0009)).
|
||||
prints "\T\R\N"
|
||||
// %n, %r, and %t are the same as \n, \r, and \t, as are %N, %R, and %T.
|
||||
prints "%t%r%n"
|
||||
// %! prints a semicolon. This is a hold-over from old versions of Csound that
|
||||
// allowed comments to begin in strings.
|
||||
prints "; %!\n"
|
||||
endin
|
||||
|
||||
// The arguments of function-like macros can be separated by # instead of '.
|
||||
// These two lines define the same macro.
|
||||
#define OSCIL_MACRO(VOLUME'FREQUENCY'TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
#define OSCIL_MACRO(VOLUME#FREQUENCY#TABLE) #oscil $VOLUME, $FREQUENCY, $TABLE#
|
||||
|
||||
// Uses of macros can optionally be suffixed with a period.
|
||||
instr TestMacroPeriodSuffix
|
||||
aSignal $OSCIL_MACRO.(1'$A_HZ'1)
|
||||
prints "The frequency of the oscillator is $A_HZ.Hz.\n"
|
||||
out aSignal
|
||||
endin
|
||||
|
||||
// Csound has @ and @@ operator-like macros that, when followed by a literal
|
||||
// non-negative integer, expand to the next power of 2 and the next power of 2
|
||||
// plus 1:
|
||||
// @x = 2^(ceil(log2(x + 1))), x >= 0
|
||||
// @@0 = 2
|
||||
// @@x = 2^(ceil(log2(x))) + 1, x > 0
|
||||
instr TestAt
|
||||
prints "%d %2d %2d\n", 0, @0, @@0
|
||||
prints "%d %2d %2d\n", 1, @1, @@1
|
||||
prints "%d %2d %2d\n", 2, @2, @@2
|
||||
prints "%d %2d %2d\n", 3, @3, @@3
|
||||
prints "%d %2d %2d\n", 4, @4, @@4
|
||||
prints "%d %2d %2d\n", 5, @5, @@5
|
||||
prints "%d %2d %2d\n", 6, @6, @@6
|
||||
prints "%d %2d %2d\n", 7, @7, @@7
|
||||
prints "%d %2d %2d\n", 8, @8, @@8
|
||||
prints "%d %2d %2d\n", 9, @9, @@9
|
||||
endin
|
||||
|
||||
// Including newlines in macros can lead to confusing code.
|
||||
instr MacroAbuse
|
||||
if 1 == 1 then
|
||||
prints "on\n"
|
||||
#define FOO#
|
||||
BAR
|
||||
#endif // This ends the if block. It is not a preprocessor directive.
|
||||
endin
|
||||
|
||||
scoreline_i {{
|
||||
f 1 0 16384 10 1
|
||||
i "N_a_M_e_" 0 2
|
||||
i "TestOscillator" 2 2
|
||||
i "TestBitwiseNOT" 0 1
|
||||
i "TestBitwiseXOR" 0 1
|
||||
i "TestGoto" 0 1
|
||||
i "TestMacroPeriodSuffix" 4 1
|
||||
i "TestAt" 0 1
|
||||
i "MacroAbuse" 0 1
|
||||
e
|
||||
}}
|
||||
13
samples/DNS Zone/sample.arpa
Normal file
@@ -0,0 +1,13 @@
$ORIGIN 0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3.ip6.arpa.
$TTL 60
@ IN SOA ns root (
2002042901 ; SERIAL
7200 ; REFRESH
600 ; RETRY
36000000 ; EXPIRE
120 ; MINIMUM
)

NS ns.example.com.

c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0 PTR sip01.example.com.
12
samples/DNS Zone/sneaky.net.zone
Normal file
@@ -0,0 +1,12 @@
$TTL 3d
@ IN SOA root.localhost. root.sneaky.net. (
2015042907 ; serial
3d ; refresh
1h ; retry
12d ; expire
2h ; negative response TTL
)
IN NS root.localhost.
IN NS localhost. ; secondary name server is preferably externally maintained

www IN A 3.141.59.26
13
samples/EJS/dash.ejs
Normal file
@@ -0,0 +1,13 @@
<% include parts/depend %>

<div class="row">
<% if (user.primaryAccount == "teacher") { %>
<% include teacher/sidebar %>
<% include teacher/dashboard %>
<% } else if (user.primaryAccount == "student") { %>
<% include student/sidebar %>
<% include student/dashboard %>
<% } else { %>
<center><h2>There seems to be a problem</h2></center>
<% } %>
</div>
102
samples/EJS/page.ejs
Normal file
@@ -0,0 +1,102 @@
|
||||
<% include ../parts/depend %>
|
||||
|
||||
<div class="row">
|
||||
<% include sidebar %>
|
||||
<div class="col-lg-offset-3 col-lg-9 main-content">
|
||||
<div class="page-title">
|
||||
<h1>Pieces</h1>
|
||||
<% if (pieces.length == 1) { %>
|
||||
<p>You have
|
||||
<strong>1</strong>
|
||||
piece to practice</p>
|
||||
<% } else { %>
|
||||
<p>You have
|
||||
<strong><%= pieces.length %></strong>
|
||||
pieces to practice</p>
|
||||
<% } %>
|
||||
</div>
|
||||
<div class="row">
|
||||
<% if (pieces == undefined || pieces.length == 0) { %>
|
||||
<div class="error-main">
|
||||
<h1>No Pieces</h1>
|
||||
<p>You have no
|
||||
<strong>Pieces</strong>
|
||||
assigned.</p>
|
||||
</div>
|
||||
<% } else { %>
|
||||
<div class="col-lg-12 section-title">
|
||||
<div style="margin-top: 10px; margin-bottom: 10px;" class="btn-group" role="group">
|
||||
<button id="inProgressButton" type="button" class="btn btn-md btn-super-round btn-focus-off btn-primary btn-primary-active">In Progress</button>
|
||||
<button id="completedButton" type="button" class="btn btn-md btn-super-round btn-focus-off btn-purple">Completed</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="inProgressPieces">
|
||||
<% if (inProgressPieces == undefined || inProgressPieces.length == 0) { %>
|
||||
<center>
|
||||
<h2>No Pieces in Progress</h2>
|
||||
</center>
|
||||
<% } else { %>
|
||||
<% for (var i = 0; i < inProgressPieces.length; i++) { %>
|
||||
<div class="col-lg-6">
|
||||
<div class="box">
|
||||
<div class="title">
|
||||
<h1>
|
||||
<a href="/pieces/practice/<%= inProgressPieces[i].id %>"><%= inProgressPieces[i].title %></a>
|
||||
</h1>
|
||||
<p>By
|
||||
<strong><%= inProgressPieces[i].author %></strong>
|
||||
</p>
|
||||
<p>
|
||||
Teacher:
|
||||
<strong><%= inProgressPieces[i].teacherName %></strong>
|
||||
</p>
|
||||
<p>Average Practice Time:
|
||||
<strong><%= inProgressPieces[i].averagePracticeTime %>
|
||||
mins</strong>
|
||||
</p>
|
||||
<a href="/pieces/practice/<%= inProgressPieces[i].id %>" class="btn btn-success btn-block">Practice
|
||||
<%= inProgressPieces[i].title %></a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<% } %>
|
||||
<% } %>
|
||||
</div>
|
||||
|
||||
<div id="completedPieces" style="display: none;">
|
||||
<% if (completedPieces == undefined || completedPieces.length == 0) { %>
|
||||
<center>
|
||||
<h2>No Completed Pieces</h2>
|
||||
</center>
|
||||
<% } else { %>
|
||||
<% for (var i = 0; i < completedPieces.length; i++) { %>
|
||||
<div class="col-lg-6">
|
||||
<div class="box">
|
||||
<div class="title">
|
||||
<h1>
|
||||
<a href="/pieces/practice/<%= completedPieces[i].id %>"><%= completedPieces[i].title %></a>
|
||||
</h1>
|
||||
<p>By
|
||||
<strong><%= completedPieces[i].author %></strong>
|
||||
</p>
|
||||
<p>
|
||||
Teacher:
|
||||
<strong><%= completedPieces[i].teacherName %></strong>
|
||||
</p>
|
||||
<p>Average Practice Time:
|
||||
<strong><%= completedPieces[i].averagePracticeTime %>
|
||||
mins</strong>
|
||||
</p>
|
||||
<a href="/pieces/practice/<%= completedPieces[i].id %>" class="btn btn-success btn-block">Practice
|
||||
<%= completedPieces[i].title %></a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<% } %>
|
||||
<% } %>
|
||||
</div>
|
||||
<% } %>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
26
samples/Erlang/170-os-daemons.es
Executable file
@@ -0,0 +1,26 @@
#! /usr/bin/env escript

% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.

loop() ->
loop(io:read("")).

loop({ok, _}) ->
loop(io:read(""));
loop(eof) ->
stop;
loop({error, Reason}) ->
throw({error, Reason}).

main([]) ->
loop().
856
samples/Erlang/elixir_parser.yrl
Normal file
@@ -0,0 +1,856 @@
|
||||
Nonterminals
|
||||
grammar expr_list
|
||||
expr container_expr block_expr access_expr
|
||||
no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr
|
||||
bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr
|
||||
unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr
|
||||
comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol
|
||||
add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol
|
||||
arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol
|
||||
type_op_eol rel_op_eol
|
||||
open_paren close_paren empty_paren eoe
|
||||
list list_args open_bracket close_bracket
|
||||
tuple open_curly close_curly
|
||||
bit_string open_bit close_bit
|
||||
map map_op map_close map_args map_expr struct_op
|
||||
assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc
|
||||
container_args_base container_args
|
||||
call_args_parens_expr call_args_parens_base call_args_parens parens_call
|
||||
call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr
|
||||
call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many
|
||||
call_args_no_parens_many_strict
|
||||
stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many
|
||||
kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw
|
||||
dot_op dot_alias dot_alias_container
|
||||
dot_identifier dot_op_identifier dot_do_identifier
|
||||
dot_paren_identifier dot_bracket_identifier
|
||||
do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list
|
||||
.
|
||||
|
||||
Terminals
|
||||
identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier
|
||||
paren_identifier do_identifier block_identifier
|
||||
fn 'end' aliases
|
||||
number atom atom_safe atom_unsafe bin_string list_string sigil
|
||||
dot_call_op op_identifier
|
||||
comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op
|
||||
type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op
|
||||
capture_op rel_op
|
||||
'true' 'false' 'nil' 'do' eol ';' ',' '.'
|
||||
'(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%'
|
||||
.
|
||||
|
||||
Rootsymbol grammar.
|
||||
|
||||
%% Two shift/reduce conflicts coming from call_args_parens.
|
||||
Expect 2.
|
||||
|
||||
%% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex
|
||||
%% Note though the operator => in practice has lower precedence than all others,
|
||||
%% its entry in the table is only to support the %{user | foo => bar} syntax.
|
||||
Left 5 do.
|
||||
Right 10 stab_op_eol. %% ->
|
||||
Left 20 ','.
|
||||
Nonassoc 30 capture_op_eol. %% &
|
||||
Left 40 in_match_op_eol. %% <-, \\ (allowed in matches along =)
|
||||
Right 50 when_op_eol. %% when
|
||||
Right 60 type_op_eol. %% ::
|
||||
Right 70 pipe_op_eol. %% |
|
||||
Right 80 assoc_op_eol. %% =>
|
||||
Right 90 match_op_eol. %% =
|
||||
Left 130 or_op_eol. %% ||, |||, or
|
||||
Left 140 and_op_eol. %% &&, &&&, and
|
||||
Left 150 comp_op_eol. %% ==, !=, =~, ===, !==
|
||||
Left 160 rel_op_eol. %% <, >, <=, >=
|
||||
Left 170 arrow_op_eol. %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|>
|
||||
Left 180 in_op_eol. %% in
|
||||
Left 190 three_op_eol. %% ^^^
|
||||
Right 200 two_op_eol. %% ++, --, .., <>
|
||||
Left 210 add_op_eol. %% +, -
|
||||
Left 220 mult_op_eol. %% *, /
|
||||
Nonassoc 300 unary_op_eol. %% +, -, !, ^, not, ~~~
|
||||
Left 310 dot_call_op.
|
||||
Left 310 dot_op. %% .
|
||||
Nonassoc 320 at_op_eol. %% @
|
||||
Nonassoc 330 dot_identifier.
|
||||
|
||||
%%% MAIN FLOW OF EXPRESSIONS
|
||||
|
||||
grammar -> eoe : nil.
|
||||
grammar -> expr_list : to_block('$1').
|
||||
grammar -> eoe expr_list : to_block('$2').
|
||||
grammar -> expr_list eoe : to_block('$1').
|
||||
grammar -> eoe expr_list eoe : to_block('$2').
|
||||
grammar -> '$empty' : nil.
|
||||
|
||||
% Note expressions are on reverse order
|
||||
expr_list -> expr : ['$1'].
|
||||
expr_list -> expr_list eoe expr : ['$3'|'$1'].
|
||||
|
||||
expr -> matched_expr : '$1'.
|
||||
expr -> no_parens_expr : '$1'.
|
||||
expr -> unmatched_expr : '$1'.
|
||||
|
||||
%% In Elixir we have three main call syntaxes: with parentheses,
|
||||
%% without parentheses and with do blocks. They are represented
|
||||
%% in the AST as matched, no_parens and unmatched.
|
||||
%%
|
||||
%% Calls without parentheses are further divided according to how
|
||||
%% problematic they are:
|
||||
%%
|
||||
%% (a) no_parens_one: a call with one unproblematic argument
|
||||
%% (e.g. `f a` or `f g a` and similar) (includes unary operators)
|
||||
%%
|
||||
%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`)
|
||||
%%
|
||||
%% (c) no_parens_one_ambig: a call with one argument which is
|
||||
%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b`
|
||||
%% or `f g h a, b` and similar)
|
||||
%%
|
||||
%% Note, in particular, that no_parens_one_ambig expressions are
|
||||
%% ambiguous and are interpreted such that the outer function has
|
||||
%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather
|
||||
%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig.
|
||||
%%
|
||||
%% The distinction is required because we can't, for example, have
|
||||
%% a function call with a do block as argument inside another do
|
||||
%% block call, unless there are parentheses:
|
||||
%%
|
||||
%% if if true do true else false end do #=> invalid
|
||||
%% if(if true do true else false end) do #=> valid
|
||||
%%
|
||||
%% Similarly, it is not possible to nest calls without parentheses
|
||||
%% if their arity is more than 1:
|
||||
%%
|
||||
%% foo a, bar b, c #=> invalid
|
||||
%% foo(a, bar b, c) #=> invalid
|
||||
%% foo bar a, b #=> valid
|
||||
%% foo a, bar(b, c) #=> valid
|
||||
%%
|
||||
%% So the different grammar rules need to take into account
|
||||
%% if calls without parentheses are do blocks in particular
|
||||
%% segments and act accordingly.
|
||||
matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> no_parens_one_expr : '$1'.
|
||||
matched_expr -> no_parens_zero_expr : '$1'.
|
||||
matched_expr -> access_expr : '$1'.
|
||||
matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2').
|
||||
|
||||
unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> block_expr : '$1'.
|
||||
|
||||
no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> no_parens_one_ambig_expr : '$1'.
|
||||
no_parens_expr -> no_parens_many_expr : '$1'.
|
||||
|
||||
block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3').
|
||||
block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4').
|
||||
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
|
||||
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').
|
||||
|
||||
matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
|
||||
%% Warn for no parens subset
|
||||
matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
|
||||
unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
|
||||
no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
%% Warn for no parens subset
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
|
||||
%% Allow when (and only when) with keywords
|
||||
no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.
|
||||
|
||||
no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
|
||||
no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
|
||||
|
||||
no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
|
||||
no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
|
||||
|
||||
no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
|
||||
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
|
||||
no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil).
|
||||
no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil).
|
||||
|
||||
%% From this point on, we just have constructs that can be
|
||||
%% used with the access syntax. Notice that (dot_)identifier
|
||||
%% is not included in this list simply because the tokenizer
|
||||
%% marks identifiers followed by brackets as bracket_identifier.
|
||||
access_expr -> bracket_at_expr : '$1'.
|
||||
access_expr -> bracket_expr : '$1'.
|
||||
access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')).
|
||||
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
|
||||
access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')).
|
||||
access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')).
|
||||
access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')).
|
||||
access_expr -> open_paren ';' close_paren : build_stab([]).
|
||||
access_expr -> empty_paren : nil.
|
||||
access_expr -> number : ?exprs('$1').
|
||||
access_expr -> list : element(1, '$1').
|
||||
access_expr -> map : '$1'.
|
||||
access_expr -> tuple : '$1'.
|
||||
access_expr -> 'true' : ?id('$1').
|
||||
access_expr -> 'false' : ?id('$1').
|
||||
access_expr -> 'nil' : ?id('$1').
|
||||
access_expr -> bin_string : build_bin_string('$1').
|
||||
access_expr -> list_string : build_list_string('$1').
|
||||
access_expr -> bit_string : '$1'.
|
||||
access_expr -> sigil : build_sigil('$1').
|
||||
access_expr -> max_expr : '$1'.
|
||||
|
||||
%% Aliases and properly formed calls. Used by map_expr.
|
||||
max_expr -> atom : ?exprs('$1').
|
||||
max_expr -> atom_safe : build_quoted_atom('$1', true).
|
||||
max_expr -> atom_unsafe : build_quoted_atom('$1', false).
|
||||
max_expr -> parens_call call_args_parens : build_identifier('$1', '$2').
|
||||
max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3').
|
||||
max_expr -> dot_alias : '$1'.
|
||||
|
||||
bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2').
|
||||
bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2').
|
||||
bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2').
|
||||
|
||||
bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1', nil), '$2').
|
||||
bracket_expr -> access_expr bracket_arg : build_access('$1', '$2').
|
||||
|
||||
bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg :
|
||||
build_access(build_unary_op('$1', build_identifier('$2', nil)), '$3').
|
||||
bracket_at_expr -> at_op_eol access_expr bracket_arg :
|
||||
build_access(build_unary_op('$1', '$2'), '$3').
|
||||
|
||||
%% Blocks
|
||||
|
||||
do_block -> do_eoe 'end' : [[{do, nil}]].
|
||||
do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]].
|
||||
do_block -> do_eoe block_list 'end' : [[{do, nil}|'$2']].
|
||||
do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].
|
||||
|
||||
eoe -> eol : '$1'.
|
||||
eoe -> ';' : '$1'.
|
||||
eoe -> eol ';' : '$1'.
|
||||
|
||||
fn_eoe -> 'fn' : '$1'.
|
||||
fn_eoe -> 'fn' eoe : '$1'.
|
||||
|
||||
do_eoe -> 'do' : '$1'.
|
||||
do_eoe -> 'do' eoe : '$1'.
|
||||
|
||||
end_eoe -> 'end' : '$1'.
|
||||
end_eoe -> eoe 'end' : '$2'.
|
||||
|
||||
block_eoe -> block_identifier : '$1'.
|
||||
block_eoe -> block_identifier eoe : '$1'.
|
||||
|
||||
stab -> stab_expr : ['$1'].
|
||||
stab -> stab eoe stab_expr : ['$3'|'$1'].
|
||||
|
||||
stab_eoe -> stab : '$1'.
|
||||
stab_eoe -> stab eoe : '$1'.
|
||||
|
||||
%% Here, `element(1, Token)` is the stab operator,
|
||||
%% while `element(2, Token)` is the expression.
|
||||
stab_expr -> expr :
|
||||
'$1'.
|
||||
stab_expr -> stab_op_eol_and_expr :
|
||||
build_op(element(1, '$1'), [], element(2, '$1')).
|
||||
stab_expr -> empty_paren stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), [], element(2, '$2')).
|
||||
stab_expr -> call_args_no_parens_all stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')).
|
||||
stab_expr -> stab_parens_many stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')).
|
||||
stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr :
|
||||
build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')).
|
||||
|
||||
stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}.
|
||||
stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}.
|
||||
|
||||
block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}.
|
||||
block_item -> block_eoe : {?exprs('$1'), nil}.
|
||||
|
||||
block_list -> block_item : ['$1'].
|
||||
block_list -> block_item block_list : ['$1'|'$2'].
|
||||
|
||||
%% Helpers
|
||||
|
||||
open_paren -> '(' : '$1'.
|
||||
open_paren -> '(' eol : '$1'.
|
||||
close_paren -> ')' : '$1'.
|
||||
close_paren -> eol ')' : '$2'.
|
||||
|
||||
empty_paren -> open_paren ')' : '$1'.
|
||||
|
||||
open_bracket -> '[' : '$1'.
|
||||
open_bracket -> '[' eol : '$1'.
|
||||
close_bracket -> ']' : '$1'.
|
||||
close_bracket -> eol ']' : '$2'.
|
||||
|
||||
open_bit -> '<<' : '$1'.
|
||||
open_bit -> '<<' eol : '$1'.
|
||||
close_bit -> '>>' : '$1'.
|
||||
close_bit -> eol '>>' : '$2'.
|
||||
|
||||
open_curly -> '{' : '$1'.
|
||||
open_curly -> '{' eol : '$1'.
|
||||
close_curly -> '}' : '$1'.
|
||||
close_curly -> eol '}' : '$2'.
|
||||
|
||||
% Operators
|
||||
|
||||
add_op_eol -> add_op : '$1'.
|
||||
add_op_eol -> add_op eol : '$1'.
|
||||
add_op_eol -> dual_op : '$1'.
|
||||
add_op_eol -> dual_op eol : '$1'.
|
||||
|
||||
mult_op_eol -> mult_op : '$1'.
|
||||
mult_op_eol -> mult_op eol : '$1'.
|
||||
|
||||
two_op_eol -> two_op : '$1'.
|
||||
two_op_eol -> two_op eol : '$1'.
|
||||
|
||||
three_op_eol -> three_op : '$1'.
|
||||
three_op_eol -> three_op eol : '$1'.
|
||||
|
||||
pipe_op_eol -> pipe_op : '$1'.
|
||||
pipe_op_eol -> pipe_op eol : '$1'.
|
||||
|
||||
capture_op_eol -> capture_op : '$1'.
|
||||
capture_op_eol -> capture_op eol : '$1'.
|
||||
|
||||
unary_op_eol -> unary_op : '$1'.
|
||||
unary_op_eol -> unary_op eol : '$1'.
|
||||
unary_op_eol -> dual_op : '$1'.
|
||||
unary_op_eol -> dual_op eol : '$1'.
|
||||
|
||||
match_op_eol -> match_op : '$1'.
|
||||
match_op_eol -> match_op eol : '$1'.
|
||||
|
||||
and_op_eol -> and_op : '$1'.
|
||||
and_op_eol -> and_op eol : '$1'.
|
||||
|
||||
or_op_eol -> or_op : '$1'.
|
||||
or_op_eol -> or_op eol : '$1'.
|
||||
|
||||
in_op_eol -> in_op : '$1'.
|
||||
in_op_eol -> in_op eol : '$1'.
|
||||
|
||||
in_match_op_eol -> in_match_op : '$1'.
|
||||
in_match_op_eol -> in_match_op eol : '$1'.
|
||||
|
||||
type_op_eol -> type_op : '$1'.
|
||||
type_op_eol -> type_op eol : '$1'.
|
||||
|
||||
when_op_eol -> when_op : '$1'.
|
||||
when_op_eol -> when_op eol : '$1'.
|
||||
|
||||
stab_op_eol -> stab_op : '$1'.
|
||||
stab_op_eol -> stab_op eol : '$1'.
|
||||
|
||||
at_op_eol -> at_op : '$1'.
|
||||
at_op_eol -> at_op eol : '$1'.
|
||||
|
||||
comp_op_eol -> comp_op : '$1'.
|
||||
comp_op_eol -> comp_op eol : '$1'.
|
||||
|
||||
rel_op_eol -> rel_op : '$1'.
|
||||
rel_op_eol -> rel_op eol : '$1'.
|
||||
|
||||
arrow_op_eol -> arrow_op : '$1'.
|
||||
arrow_op_eol -> arrow_op eol : '$1'.
|
||||
|
||||
% Dot operator
|
||||
|
||||
dot_op -> '.' : '$1'.
|
||||
dot_op -> '.' eol : '$1'.
|
||||
|
||||
dot_identifier -> identifier : '$1'.
|
||||
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}.
|
||||
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
|
||||
dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3').
|
||||
|
||||
dot_alias_container -> open_curly '}' : [].
|
||||
dot_alias_container -> open_curly container_args close_curly : '$2'.
|
||||
|
||||
dot_op_identifier -> op_identifier : '$1'.
|
||||
dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_do_identifier -> do_identifier : '$1'.
|
||||
dot_do_identifier -> matched_expr dot_op do_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_bracket_identifier -> bracket_identifier : '$1'.
|
||||
dot_bracket_identifier -> matched_expr dot_op bracket_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_paren_identifier -> paren_identifier : '$1'.
|
||||
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
parens_call -> dot_paren_identifier : '$1'.
|
||||
parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls
|
||||
|
||||
% Function calls with no parentheses
|
||||
|
||||
call_args_no_parens_expr -> matched_expr : '$1'.
|
||||
call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
|
||||
|
||||
call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1'].
|
||||
call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1'].
|
||||
|
||||
call_args_no_parens_all -> call_args_no_parens_one : '$1'.
|
||||
call_args_no_parens_all -> call_args_no_parens_ambig : '$1'.
|
||||
call_args_no_parens_all -> call_args_no_parens_many : '$1'.
|
||||
|
||||
call_args_no_parens_one -> call_args_no_parens_kw : ['$1'].
|
||||
call_args_no_parens_one -> matched_expr : ['$1'].
|
||||
|
||||
call_args_no_parens_ambig -> no_parens_expr : ['$1'].
|
||||
|
||||
call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3'].
|
||||
call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1').
|
||||
call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']).
|
||||
|
||||
call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'.
|
||||
call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1').
|
||||
call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1').
|
||||
|
||||
stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2'].
|
||||
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'.
|
||||
|
||||
% Containers
|
||||
|
||||
container_expr -> matched_expr : '$1'.
|
||||
container_expr -> unmatched_expr : '$1'.
|
||||
container_expr -> no_parens_expr : throw_no_parens_container_strict('$1').
|
||||
|
||||
container_args_base -> container_expr : ['$1'].
|
||||
container_args_base -> container_args_base ',' container_expr : ['$3'|'$1'].
|
||||
|
||||
container_args -> container_args_base : lists:reverse('$1').
|
||||
container_args -> container_args_base ',' : lists:reverse('$1').
|
||||
container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']).
|
||||
|
||||
% Function calls with parentheses
|
||||
|
||||
call_args_parens_expr -> matched_expr : '$1'.
|
||||
call_args_parens_expr -> unmatched_expr : '$1'.
|
||||
call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
|
||||
|
||||
call_args_parens_base -> call_args_parens_expr : ['$1'].
|
||||
call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3'|'$1'].
|
||||
|
||||
call_args_parens -> empty_paren : [].
|
||||
call_args_parens -> open_paren no_parens_expr close_paren : ['$2'].
|
||||
call_args_parens -> open_paren kw close_paren : ['$2'].
|
||||
call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2').
|
||||
call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']).
|
||||
|
||||
% KV
|
||||
|
||||
kw_eol -> kw_identifier : ?exprs('$1').
|
||||
kw_eol -> kw_identifier eol : ?exprs('$1').
|
||||
kw_eol -> kw_identifier_safe : build_quoted_atom('$1', true).
|
||||
kw_eol -> kw_identifier_safe eol : build_quoted_atom('$1', true).
|
||||
kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false).
|
||||
kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false).
|
||||
|
||||
kw_base -> kw_eol container_expr : [{'$1', '$2'}].
|
||||
kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].
|
||||
|
||||
kw -> kw_base : reverse('$1').
|
||||
kw -> kw_base ',' : reverse('$1').
|
||||
|
||||
call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}.
|
||||
call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}.
|
||||
|
||||
call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
|
||||
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].
|
||||
|
||||
% Lists
|
||||
|
||||
list_args -> kw : '$1'.
|
||||
list_args -> container_args_base : reverse('$1').
|
||||
list_args -> container_args_base ',' : reverse('$1').
|
||||
list_args -> container_args_base ',' kw : reverse('$1', '$3').
|
||||
|
||||
list -> open_bracket ']' : build_list('$1', []).
|
||||
list -> open_bracket list_args close_bracket : build_list('$1', '$2').
|
||||
|
||||
% Tuple
|
||||
|
||||
tuple -> open_curly '}' : build_tuple('$1', []).
|
||||
tuple -> open_curly container_args close_curly : build_tuple('$1', '$2').
|
||||
|
||||
% Bitstrings
|
||||
|
||||
bit_string -> open_bit '>>' : build_bit('$1', []).
|
||||
bit_string -> open_bit container_args close_bit : build_bit('$1', '$2').
|
||||
|
||||
% Map and structs
|
||||
|
||||
%% Allow unquote/@something/aliases inside maps and structs.
|
||||
map_expr -> max_expr : '$1'.
|
||||
map_expr -> dot_identifier : build_identifier('$1', nil).
|
||||
map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').
|
||||
|
||||
assoc_op_eol -> assoc_op : '$1'.
|
||||
assoc_op_eol -> assoc_op eol : '$1'.
|
||||
|
||||
assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> map_expr : '$1'.
|
||||
|
||||
assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
|
||||
assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
|
||||
|
||||
assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
|
||||
assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
|
||||
|
||||
assoc_base -> assoc_expr : ['$1'].
|
||||
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].
|
||||
|
||||
assoc -> assoc_base : reverse('$1').
|
||||
assoc -> assoc_base ',' : reverse('$1').
|
||||
|
||||
map_op -> '%{}' : '$1'.
|
||||
map_op -> '%{}' eol : '$1'.
|
||||
|
||||
map_close -> kw close_curly : '$1'.
|
||||
map_close -> assoc close_curly : '$1'.
|
||||
map_close -> assoc_base ',' kw close_curly : reverse('$1', '$3').
|
||||
|
||||
map_args -> open_curly '}' : build_map('$1', []).
|
||||
map_args -> open_curly map_close : build_map('$1', '$2').
|
||||
map_args -> open_curly assoc_update close_curly : build_map_update('$1', '$2', []).
|
||||
map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2', []).
|
||||
map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', '$4').
|
||||
map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []).
|
||||
|
||||
struct_op -> '%' : '$1'.
|
||||
|
||||
map -> map_op map_args : '$2'.
|
||||
map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}.
|
||||
map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}.
|
||||
|
||||
Erlang code.
|
||||
|
||||
-define(file(), get(elixir_parser_file)).
|
||||
-define(id(Token), element(1, Token)).
|
||||
-define(location(Token), element(2, Token)).
|
||||
-define(exprs(Token), element(3, Token)).
|
||||
-define(meta(Node), element(2, Node)).
|
||||
-define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')).
|
||||
|
||||
%% The following directive is needed for (significantly) faster
|
||||
%% compilation of the generated .erl file by the HiPE compiler
|
||||
-compile([{hipe, [{regalloc, linear_scan}]}]).
|
||||
-import(lists, [reverse/1, reverse/2]).
|
||||
|
||||
meta_from_token(Token, Counter) -> [{counter, Counter}|meta_from_token(Token)].
|
||||
meta_from_token(Token) -> meta_from_location(?location(Token)).
|
||||
|
||||
meta_from_location({Line, Column, EndColumn})
|
||||
when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}].
|
||||
|
||||
%% Operators
|
||||
|
||||
build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
|
||||
{UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]};
|
||||
|
||||
build_op({_Kind, Location, Op}, Left, Right) ->
|
||||
{Op, meta_from_location(Location), [Left, Right]}.
|
||||
|
||||
build_unary_op({_Kind, Location, Op}, Expr) ->
|
||||
{Op, meta_from_location(Location), [Expr]}.
|
||||
|
||||
build_list(Marker, Args) ->
|
||||
{Args, ?location(Marker)}.
|
||||
|
||||
build_tuple(_Marker, [Left, Right]) ->
|
||||
{Left, Right};
|
||||
build_tuple(Marker, Args) ->
|
||||
{'{}', meta_from_token(Marker), Args}.
|
||||
|
||||
build_bit(Marker, Args) ->
|
||||
{'<<>>', meta_from_token(Marker), Args}.
|
||||
|
||||
build_map(Marker, Args) ->
|
||||
{'%{}', meta_from_token(Marker), Args}.
|
||||
|
||||
build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
|
||||
{'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.
|
||||
|
||||
%% Blocks
|
||||
|
||||
build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
|
||||
build_block([{unquote_splicing, _, Args}]=Exprs) when
|
||||
length(Args) =< 2 -> {'__block__', [], Exprs};
|
||||
build_block([Expr]) -> Expr;
|
||||
build_block(Exprs) -> {'__block__', [], Exprs}.
|
||||
|
||||
%% Dots
|
||||
|
||||
build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
|
||||
{'__aliases__', meta_from_token(Dot), Left ++ Right};
|
||||
|
||||
build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) ->
|
||||
throw_bad_atom(Token);
|
||||
|
||||
build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
|
||||
{'__aliases__', meta_from_token(Dot), [Other|Right]}.
|
||||
|
||||
build_dot_container(Dot, Left, Right) ->
|
||||
Meta = meta_from_token(Dot),
|
||||
{{'.', Meta, [Left, '{}']}, Meta, Right}.
|
||||
|
||||
build_dot(Dot, Left, Right) ->
|
||||
{'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}.
|
||||
|
||||
extract_identifier({Kind, _, Identifier}) when
|
||||
Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
|
||||
Kind == do_identifier; Kind == op_identifier ->
|
||||
Identifier.
|
||||
|
||||
%% Identifiers
|
||||
|
||||
build_nested_parens(Dot, Args1, Args2) ->
|
||||
Identifier = build_identifier(Dot, Args1),
|
||||
Meta = ?meta(Identifier),
|
||||
{Identifier, Meta, Args2}.
|
||||
|
||||
build_identifier({'.', Meta, _} = Dot, Args) ->
|
||||
FArgs = case Args of
|
||||
nil -> [];
|
||||
_ -> Args
|
||||
end,
|
||||
{Dot, Meta, FArgs};
|
||||
|
||||
build_identifier({op_identifier, Location, Identifier}, [Arg]) ->
|
||||
{Identifier, [{ambiguous_op, nil}|meta_from_location(Location)], [Arg]};
|
||||
|
||||
build_identifier({_, Location, Identifier}, Args) ->
|
||||
{Identifier, meta_from_location(Location), Args}.
|
||||
|
||||
%% Fn
|
||||
|
||||
build_fn(Op, [{'->', _, [_, _]}|_] = Stab) ->
|
||||
{fn, meta_from_token(Op), build_stab(Stab)};
|
||||
build_fn(Op, _Stab) ->
|
||||
throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'").
|
||||
|
||||
%% Access
|
||||
|
||||
build_access(Expr, {List, Location}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}.
|
||||
|
||||
%% Interpolation aware
|
||||
|
||||
build_sigil({sigil, Location, Sigil, Parts, Modifiers}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}.
|
||||
|
||||
build_bin_string({bin_string, _Location, [H]}) when is_binary(H) ->
|
||||
H;
|
||||
build_bin_string({bin_string, Location, Args}) ->
|
||||
{'<<>>', meta_from_location(Location), string_parts(Args)}.
|
||||
|
||||
build_list_string({list_string, _Location, [H]}) when is_binary(H) ->
|
||||
elixir_utils:characters_to_list(H);
|
||||
build_list_string({list_string, Location, Args}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.
|
||||
|
||||
build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) ->
|
||||
Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
|
||||
build_quoted_atom({_, Location, Args}, Safe) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.
|
||||
|
||||
binary_to_atom_op(true) -> binary_to_existing_atom;
|
||||
binary_to_atom_op(false) -> binary_to_atom.
|
||||
|
||||
string_parts(Parts) ->
|
||||
[string_part(Part) || Part <- Parts].
|
||||
string_part(Binary) when is_binary(Binary) ->
|
||||
Binary;
|
||||
string_part({Location, Tokens}) ->
|
||||
Form = string_tokens_parse(Tokens),
|
||||
Meta = meta_from_location(Location),
|
||||
{'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.
|
||||
|
||||
string_tokens_parse(Tokens) ->
|
||||
case parse(Tokens) of
|
||||
{ok, Forms} -> Forms;
|
||||
{error, _} = Error -> throw(Error)
|
||||
end.
|
||||
|
||||
%% Keywords
|
||||
|
||||
build_stab([{'->', Meta, [Left, Right]}|T]) ->
|
||||
build_stab(Meta, T, Left, [Right], []);
|
||||
|
||||
build_stab(Else) ->
|
||||
build_block(Else).
|
||||
|
||||
build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
|
||||
H = {'->', Old, [Marker, build_block(reverse(Temp))]},
|
||||
build_stab(New, T, Left, [Right], [H|Acc]);
|
||||
|
||||
build_stab(Meta, [H|T], Marker, Temp, Acc) ->
|
||||
build_stab(Meta, T, Marker, [H|Temp], Acc);
|
||||
|
||||
build_stab(Meta, [], Marker, Temp, Acc) ->
|
||||
H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
|
||||
reverse([H|Acc]).
|
||||
|
||||
%% Every time the parser sees a (unquote_splicing())
|
||||
%% it assumes that a block is being spliced, wrapping
|
||||
%% the splicing in a __block__. But in the stab clause,
|
||||
%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such
|
||||
%% case, we don't actually want the block, since it is
|
||||
%% an arg style call. unwrap_splice unwraps the splice
|
||||
%% from such blocks.
|
||||
unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
|
||||
Splice;
|
||||
|
||||
unwrap_splice(Other) -> Other.
|
||||
|
||||
unwrap_when(Args) ->
|
||||
case elixir_utils:split_last(Args) of
|
||||
{Start, {'when', Meta, [_, _] = End}} ->
|
||||
[{'when', Meta, Start ++ End}];
|
||||
{_, _} ->
|
||||
Args
|
||||
end.
|
||||
|
||||
to_block([One]) -> One;
|
||||
to_block(Other) -> {'__block__', [], reverse(Other)}.
|
||||
|
||||
%% Warnings and errors
|
||||
|
||||
throw(Meta, Error, Token) ->
|
||||
Line =
|
||||
case lists:keyfind(line, 1, Meta) of
|
||||
{line, L} -> L;
|
||||
false -> 0
|
||||
end,
|
||||
throw({error, {Line, ?MODULE, [Error, Token]}}).
|
||||
|
||||
throw_bad_atom(Token) ->
|
||||
throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be "
|
||||
"part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'").
|
||||
|
||||
throw_no_parens_strict(Token) ->
|
||||
throw(meta_from_token(Token), "unexpected parentheses. If you are making a "
|
||||
"function call, do not insert spaces between the function name and the "
|
||||
"opening parentheses. Syntax error before: ", "'('").
|
||||
|
||||
throw_no_parens_many_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n"
|
||||
"This error happens when you have nested function calls without parentheses. "
|
||||
"For example:\n\n"
|
||||
" one a, two b, c, d\n\n"
|
||||
"In the example above, we don't know if the parameters \"c\" and \"d\" apply "
|
||||
"to the function \"one\" or \"two\". You can solve this by explicitly adding "
|
||||
"parentheses:\n\n"
|
||||
" one a, two(b, c, d)\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_no_parens_container_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n"
|
||||
"This error may happen when you forget a comma in a list or other container:\n\n"
|
||||
" [a, b c, d]\n\n"
|
||||
"Or when you have ambiguous calls:\n\n"
|
||||
" [one, two three, four, five]\n\n"
|
||||
"In the example above, we don't know if the parameters \"four\" and \"five\" "
|
||||
"belongs to the list or the function \"two\". You can solve this by explicitly "
|
||||
"adding parentheses:\n\n"
|
||||
" [one, two(three, four), five]\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_invalid_kw_identifier({_, _, do} = Token) ->
|
||||
throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'");
|
||||
throw_invalid_kw_identifier({_, _, KW} = Token) ->
|
||||
throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':").
|
||||
|
||||
%% TODO: Make those warnings errors.
|
||||
warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
"an expression is always required on the right side of ->. "
|
||||
"Please provide a value after ->").
|
||||
|
||||
warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_|_], [_|_]}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
io_lib:format(
|
||||
"you are piping into a function call without parentheses, which may be ambiguous. "
|
||||
"Please wrap the function you are piping into in parentheses. For example:\n\n"
|
||||
" foo 1 ~ts bar 2 ~ts baz 3\n\n"
|
||||
"Should be written as:\n\n"
|
||||
" foo(1) ~ts bar(2) ~ts baz(3)\n",
|
||||
[Op, Op, Op, Op]
|
||||
)
|
||||
);
|
||||
warn_pipe(_Token, _) ->
|
||||
ok.
|
||||
256
samples/Erlang/lfe_scan.xrl
Normal file
@@ -0,0 +1,256 @@
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.xrl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
Definitions.
|
||||
B = [01]
|
||||
O = [0-7]
|
||||
D = [0-9]
|
||||
H = [0-9a-fA-F]
|
||||
B36 = [0-9a-zA-Z]
|
||||
U = [A-Z]
|
||||
L = [a-z]
|
||||
A = ({U}|{L})
|
||||
DEL = [][()}{";\000-\s]
|
||||
SYM = [^][()}{";\000-\s\177-\237]
|
||||
SSYM = [^][()}{"|;#`',\000-\s\177-\237]
|
||||
WS = ([\000-\s]|;[^\n]*)
|
||||
|
||||
Rules.
|
||||
%% Bracketed Comments using #| foo |#
|
||||
#{D}*\|[^\|]*\|+([^#\|][^\|]*\|+)*# :
|
||||
block_comment(string:substr(TokenChars, 3)).
|
||||
|
||||
%% Separators
|
||||
' : {token,{'\'',TokenLine}}.
|
||||
` : {token,{'`',TokenLine}}.
|
||||
, : {token,{',',TokenLine}}.
|
||||
,@ : {token,{',@',TokenLine}}.
|
||||
\. : {token,{'.',TokenLine}}.
|
||||
[][()}{] : {token,{list_to_atom(TokenChars),TokenLine}}.
|
||||
|
||||
#{D}*[bB]\( : {token,{'#B(',TokenLine}}.
|
||||
#{D}*[mM]\( : {token,{'#M(',TokenLine}}.
|
||||
#{D}*\( : {token,{'#(',TokenLine}}.
|
||||
#{D}*\. : {token,{'#.',TokenLine}}.
|
||||
|
||||
#{D}*` : {token,{'#`',TokenLine}}.
|
||||
#{D}*; : {token,{'#;',TokenLine}}.
|
||||
#{D}*, : {token,{'#,',TokenLine}}.
|
||||
#{D}*,@ : {token,{'#,@',TokenLine}}.
|
||||
|
||||
%% Characters
|
||||
#{D}*\\(x{H}+|.) : char_token(skip_past(TokenChars, $\\, $\\), TokenLine).
|
||||
|
||||
%% Based numbers
|
||||
#{D}*\*{SYM}+ : base_token(skip_past(TokenChars, $*, $*), 2, TokenLine).
|
||||
#{D}*[bB]{SYM}+ : base_token(skip_past(TokenChars, $b, $B), 2, TokenLine).
|
||||
#{D}*[oO]{SYM}+ : base_token(skip_past(TokenChars, $o, $O), 8, TokenLine).
|
||||
#{D}*[dD]{SYM}+ : base_token(skip_past(TokenChars, $d, $D), 10, TokenLine).
|
||||
#{D}*[xX]{SYM}+ : base_token(skip_past(TokenChars, $x, $X), 16, TokenLine).
|
||||
#{D}*[rR]{SYM}+ :
|
||||
%% Scan over digit chars to get base.
|
||||
{Base,[_|Ds]} = base1(tl(TokenChars), 10, 0),
|
||||
base_token(Ds, Base, TokenLine).
|
||||
|
||||
%% String
|
||||
"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
{token,{string,TokenLine,chars(S)}}.
|
||||
%% Binary string
|
||||
#"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 3, TokenLen - 3),
|
||||
Bin = unicode:characters_to_binary(chars(S), utf8, utf8),
|
||||
{token,{binary,TokenLine,Bin}}.
|
||||
%% Symbols
|
||||
\|(\\x{H}+;|\\.|[^|\\])*\| :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
symbol_token(chars(S), TokenLine).
|
||||
%% Funs
|
||||
#'{SSYM}{SYM}*/{D}+ :
|
||||
%% Strip sharpsign single-quote.
|
||||
FunStr = string:substr(TokenChars,3),
|
||||
{token,{'#\'',TokenLine,FunStr}}.
|
||||
%% Atoms
|
||||
[+-]?{D}+ :
|
||||
case catch {ok,list_to_integer(TokenChars)} of
|
||||
{ok,I} -> {token,{number,TokenLine,I}};
|
||||
_ -> {error,"illegal integer"}
|
||||
end.
|
||||
[+-]?{D}+\.{D}+([eE][+-]?{D}+)? :
|
||||
case catch {ok,list_to_float(TokenChars)} of
|
||||
{ok,F} -> {token,{number,TokenLine,F}};
|
||||
_ -> {error,"illegal float"}
|
||||
end.
|
||||
{SSYM}{SYM}* :
|
||||
symbol_token(TokenChars, TokenLine).
|
||||
{WS}+ : skip_token.
|
||||
|
||||
Erlang code.
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.erl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
-export([start_symbol_char/1,symbol_char/1]).
|
||||
|
||||
-import(string, [substr/2,substr/3]).
|
||||
|
||||
%% start_symbol_char(Char) -> true | false.
|
||||
%% symbol_char(Char) -> true | false.
|
||||
%% Define start symbol chars and symbol chars.
|
||||
|
||||
start_symbol_char($#) -> false;
|
||||
start_symbol_char($`) -> false;
|
||||
start_symbol_char($') -> false; %'
|
||||
start_symbol_char($,) -> false;
|
||||
start_symbol_char($|) -> false; %Symbol quote character
|
||||
start_symbol_char(C) -> symbol_char(C).
|
||||
|
||||
symbol_char($() -> false;
|
||||
symbol_char($)) -> false;
|
||||
symbol_char($[) -> false;
|
||||
symbol_char($]) -> false;
|
||||
symbol_char(${) -> false;
|
||||
symbol_char($}) -> false;
|
||||
symbol_char($") -> false;
|
||||
symbol_char($;) -> false;
|
||||
symbol_char(C) -> ((C > $\s) and (C =< $~)) orelse (C > $\240).
|
||||
|
||||
%% symbol_token(Chars, Line) -> {token,{symbol,Line,Symbol}} | {error,E}.
|
||||
%% Build a symbol from list of legal characters, else error.
|
||||
|
||||
symbol_token(Cs, L) ->
|
||||
case catch {ok,list_to_atom(Cs)} of
|
||||
{ok,S} -> {token,{symbol,L,S}};
|
||||
_ -> {error,"illegal symbol"}
|
||||
end.
|
||||
|
||||
%% base_token(Chars, Base, Line) -> Integer.
|
||||
%% Convert a string of Base characters into a number. We only allow
|
||||
%% base between 2 and 36, and an optional sign character first.
|
||||
|
||||
base_token(_, B, _) when B < 2; B > 36 ->
|
||||
{error,"illegal number base"};
|
||||
base_token([$+|Cs], B, L) -> base_token(Cs, B, +1, L);
|
||||
base_token([$-|Cs], B, L) -> base_token(Cs, B, -1, L);
|
||||
base_token(Cs, B, L) -> base_token(Cs, B, +1, L).
|
||||
|
||||
base_token(Cs, B, S, L) ->
|
||||
case base1(Cs, B, 0) of
|
||||
{N,[]} -> {token,{number,L,S*N}};
|
||||
{_,_} -> {error,"illegal based number"}
|
||||
end.
|
||||
|
||||
base1([C|Cs], Base, SoFar) when C >= $0, C =< $9, C < Base + $0 ->
|
||||
Next = SoFar * Base + (C - $0),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $a, C =< $z, C < Base + $a - 10 ->
|
||||
Next = SoFar * Base + (C - $a + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $A, C =< $Z, C < Base + $A - 10 ->
|
||||
Next = SoFar * Base + (C - $A + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], _Base, SoFar) -> {SoFar,[C|Cs]};
|
||||
base1([], _Base, N) -> {N,[]}.
|
||||
|
||||
-define(IS_UNICODE(C), ((C >= 0) and (C =< 16#10FFFF))).
|
||||
|
||||
%% char_token(InputChars, Line) -> {token,{number,L,N}} | {error,E}.
|
||||
%% Convert an input string into the corresponding character. For a
|
||||
%% sequence of hex characters we check that the resulting code is in the
|
||||
%% unicode range.
|
||||
|
||||
char_token([$x,C|Cs], L) ->
|
||||
case base1([C|Cs], 16, 0) of
|
||||
{N,[]} when ?IS_UNICODE(N) -> {token,{number,L,N}};
|
||||
_ -> {error,"illegal character"}
|
||||
end;
|
||||
char_token([C], L) -> {token,{number,L,C}}.
|
||||
|
||||
%% chars(InputChars) -> Chars.
|
||||
%% Convert an input string into the corresponding string characters.
|
||||
%% We know that the input string is correct.
|
||||
|
||||
chars([$\\,$x,C|Cs0]) ->
|
||||
case hex_char(C) of
|
||||
true ->
|
||||
case base1([C|Cs0], 16, 0) of
|
||||
{N,[$;|Cs1]} -> [N|chars(Cs1)];
|
||||
_Other -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
false -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
chars([$\\,C|Cs]) -> [escape_char(C)|chars(Cs)];
|
||||
chars([C|Cs]) -> [C|chars(Cs)];
|
||||
chars([]) -> [].
|
||||
|
||||
hex_char(C) when C >= $0, C =< $9 -> true;
|
||||
hex_char(C) when C >= $a, C =< $f -> true;
|
||||
hex_char(C) when C >= $A, C =< $F -> true;
|
||||
hex_char(_) -> false.
|
||||
|
||||
escape_char($b) -> $\b; %\b = BS
|
||||
escape_char($t) -> $\t; %\t = TAB
|
||||
escape_char($n) -> $\n; %\n = LF
|
||||
escape_char($v) -> $\v; %\v = VT
|
||||
escape_char($f) -> $\f; %\f = FF
|
||||
escape_char($r) -> $\r; %\r = CR
|
||||
escape_char($e) -> $\e; %\e = ESC
|
||||
escape_char($s) -> $\s; %\s = SPC
|
||||
escape_char($d) -> $\d; %\d = DEL
|
||||
escape_char(C) -> C.
|
||||
|
||||
%% Block Comment:
|
||||
%% Provide a sensible error when people attempt to include nested
|
||||
%% comments because currently the parser cannot process them without
|
||||
%% a rebuild. But simply exploding on a '#|' is not going to be that
|
||||
%% helpful.
|
||||
|
||||
block_comment(TokenChars) ->
|
||||
%% Check we're not opening another comment block.
|
||||
case string:str(TokenChars, "#|") of
|
||||
0 -> skip_token; %% No nesting found
|
||||
_ -> {error, "illegal nested block comment"}
|
||||
end.
|
||||
|
||||
%% skip_until(String, Char1, Char2) -> String.
|
||||
%% skip_past(String, Char1, Char2) -> String.
|
||||
|
||||
%% skip_until([C|_]=Cs, C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
%% skip_until([_|Cs], C1, C2) -> skip_until(Cs, C1, C2);
|
||||
%% skip_until([], _, _) -> [].
|
||||
|
||||
skip_past([C|Cs], C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
skip_past([_|Cs], C1, C2) -> skip_past(Cs, C1, C2);
|
||||
skip_past([], _, _) -> [].
|
||||
1
samples/Erlang/release
Normal file → Executable file
@@ -119,4 +119,3 @@ execute_overlay([{copy, In, Out} | Rest], Vars, BaseDir, TargetDir) ->
|
||||
|
||||
exit_code(ExitCode) ->
|
||||
erlang:halt(ExitCode, [{flush, true}]).
|
||||
|
||||
|
||||
8
samples/Erlang/sample.app.src
Normal file
@@ -0,0 +1,8 @@
|
||||
{application, sample,
|
||||
[{description, "sample app"},
|
||||
{vsn, "1.0.0"},
|
||||
{registered, []},
|
||||
{mod, {sample_app, []}},
|
||||
{applications, [kernel, stdlib]},
|
||||
{env, []},
|
||||
{modules, []}]}.
|
||||
104
samples/Erlang/single-context.es
Executable file
@@ -0,0 +1,104 @@
|
||||
#! /usr/bin/env escript
|
||||
% This file is part of Emonk released under the MIT license.
|
||||
% See the LICENSE file for more information.
|
||||
|
||||
main([]) ->
|
||||
start(64, 1000);
|
||||
main([N]) ->
|
||||
start(list_to_integer(N), 1000);
|
||||
main([N, M]) ->
|
||||
start(list_to_integer(N), list_to_integer(M)).
|
||||
|
||||
|
||||
start(N, M) ->
|
||||
code:add_pathz("test"),
|
||||
code:add_pathz("ebin"),
|
||||
{ok, Ctx} = emonk:create_ctx(),
|
||||
{ok, undefined} = emonk:eval(Ctx, js()),
|
||||
run(Ctx, N, M),
|
||||
wait(N).
|
||||
|
||||
run(_, 0, _) ->
|
||||
ok;
|
||||
run(Ctx, N, M) ->
|
||||
Self = self(),
|
||||
Pid = spawn(fun() -> do_js(Self, Ctx, M) end),
|
||||
io:format("Spawned: ~p~n", [Pid]),
|
||||
run(Ctx, N-1, M).
|
||||
|
||||
wait(0) ->
|
||||
ok;
|
||||
wait(N) ->
|
||||
receive
|
||||
{finished, Pid} -> ok
|
||||
end,
|
||||
io:format("Finished: ~p~n", [Pid]),
|
||||
wait(N-1).
|
||||
|
||||
do_js(Parent, _, 0) ->
|
||||
Parent ! {finished, self()};
|
||||
do_js(Parent, Ctx, M) ->
|
||||
io:format("Running: ~p~n", [M]),
|
||||
Test = random_test(),
|
||||
{ok, [Resp]} = emonk:call(Ctx, <<"f">>, [Test]),
|
||||
Sorted = sort(Resp),
|
||||
true = Test == Sorted,
|
||||
do_js(Parent, Ctx, M-1).
|
||||
|
||||
js() ->
|
||||
<<"var f = function(x) {return [x];};">>.
|
||||
|
||||
random_test() ->
|
||||
Tests = [
|
||||
null,
|
||||
true,
|
||||
false,
|
||||
1,
|
||||
-1,
|
||||
3.1416,
|
||||
-3.1416,
|
||||
12.0e10,
|
||||
1.234E+10,
|
||||
-1.234E-10,
|
||||
10.0,
|
||||
123.456,
|
||||
10.0,
|
||||
<<"foo">>,
|
||||
<<"foo", 5, "bar">>,
|
||||
<<"">>,
|
||||
<<"\n\n\n">>,
|
||||
<<"\" \b\f\r\n\t\"">>,
|
||||
{[]},
|
||||
{[{<<"foo">>, <<"bar">>}]},
|
||||
{[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
|
||||
[],
|
||||
[[]],
|
||||
[1, <<"foo">>],
|
||||
{[{<<"foo">>, [123]}]},
|
||||
{[{<<"foo">>, [1, 2, 3]}]},
|
||||
{[{<<"foo">>, {[{<<"bar">>, true}]}}]},
|
||||
{[
|
||||
{<<"foo">>, []},
|
||||
{<<"bar">>, {[{<<"baz">>, true}]}}, {<<"alice">>, <<"bob">>}
|
||||
]},
|
||||
[-123, <<"foo">>, {[{<<"bar">>, []}]}, null]
|
||||
],
|
||||
{_, [Test | _]} = lists:split(random:uniform(length(Tests)) - 1, Tests),
|
||||
sort(Test).
|
||||
|
||||
sort({Props}) ->
|
||||
objsort(Props, []);
|
||||
sort(List) when is_list(List) ->
|
||||
lstsort(List, []);
|
||||
sort(Other) ->
|
||||
Other.
|
||||
|
||||
objsort([], Acc) ->
|
||||
{lists:sort(Acc)};
|
||||
objsort([{K,V} | Rest], Acc) ->
|
||||
objsort(Rest, [{K, sort(V)} | Acc]).
|
||||
|
||||
lstsort([], Acc) ->
|
||||
lists:reverse(Acc);
|
||||
lstsort([Val | Rest], Acc) ->
|
||||
lstsort(Rest, [sort(Val) | Acc]).
|
||||
54
samples/FLUX/gameserver.fx
Normal file
@@ -0,0 +1,54 @@
|
||||
typedef engine isEngineMessage;
|
||||
typedef turn isTurnMessage;
|
||||
typedef connect isConnectMessage;
|
||||
typedef disconnect isDisconnectMessage;
|
||||
|
||||
ClientMessage(char* data) => ();
|
||||
ParseMessage(char* data) => (int type, int client, char* data);
|
||||
ReadMessage(int type, int client, char* data) => ();
|
||||
|
||||
ParseEngine(int type, int client, char* data) => (int client, int direction);
|
||||
DoEngine(int client, int direction) => ();
|
||||
|
||||
ParseTurn(int type, int client, char* data) => (int client, int direction);
|
||||
DoTurn(int client, int direction) => ();
|
||||
|
||||
ParseConnect(int type, int client, char* data)
|
||||
=> (int client, char* host, int port);
|
||||
DoConnect(int client, char* host, int port) => ();
|
||||
|
||||
ParseDisconnect(int type, int client, char* data) => (int client);
|
||||
DoDisconnect(int client) => ();
|
||||
|
||||
UpdateBoard(ClientList clients) => (ClientList clients);
|
||||
SendData(ClientList clients) => ();
|
||||
|
||||
DoUpdate(ClientList clients) => ();
|
||||
|
||||
DataTimer() => (ClientList clients);
|
||||
|
||||
GetClients() => (ClientList clients);
|
||||
|
||||
Wait(ClientList clients) => (ClientList clients);
|
||||
|
||||
Listen () => (char* data);
|
||||
|
||||
source Listen => ClientMessage;
|
||||
source DataTimer => DoUpdate;
|
||||
|
||||
DataTimer = GetClients -> Wait;
|
||||
|
||||
DoUpdate = UpdateBoard -> SendData;
|
||||
|
||||
ClientMessage=ParseMessage -> ReadMessage;
|
||||
|
||||
ReadMessage:[engine, _, _] = ParseEngine -> DoEngine;
|
||||
ReadMessage:[turn, _, _] = ParseTurn -> DoTurn;
|
||||
ReadMessage:[connect, _, _] = ParseConnect -> DoConnect;
|
||||
ReadMessage:[disconnect, _, _] = ParseDisconnect -> DoDisconnect;
|
||||
|
||||
atomic GetClients:{client_lock};
|
||||
atomic DoConnect:{client_lock};
|
||||
atomic DoDisconnect:{client_lock};
|
||||
|
||||
|
||||
44
samples/FLUX/imageserver.fx
Normal file
@@ -0,0 +1,44 @@
|
||||
typedef xml TestXML;
|
||||
typedef html TestHTML;
|
||||
|
||||
typedef inCache TestInCache;
|
||||
|
||||
Page (int socket) => ();
|
||||
|
||||
ReadRequest (int socket) => (int socket, bool close, image_tag *request);
|
||||
|
||||
CheckCache (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Handler (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Complete (int socket, bool close, image_tag *request) => ();
|
||||
|
||||
ReadInFromDisk (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request, __u8 *rgb_data);
|
||||
|
||||
Write (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Compress(int socket, bool close, image_tag *request, __u8 *rgb_data)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
StoreInCache(int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Listen ()
|
||||
=> (int socket);
|
||||
|
||||
source Listen => Page;
|
||||
|
||||
Handler:[_, _, inCache]=;
|
||||
Handler:[_, _, _]=ReadInFromDisk -> Compress -> StoreInCache;
|
||||
|
||||
Page = ReadRequest -> CheckCache-> Handler -> Write -> Complete;
|
||||
|
||||
atomic CheckCache:{cache};
|
||||
atomic StoreInCache:{cache};
|
||||
atomic Complete:{cache};
|
||||
|
||||
handle error ReadInFromDisk => FourOhFor;
|
||||
151
samples/FLUX/mbittorrent.fx
Normal file
@@ -0,0 +1,151 @@
|
||||
typedef choke TestChoke;
|
||||
typedef unchoke TestUnchoke;
|
||||
typedef interested TestInterested;
|
||||
typedef uninterested TestUninterested;
|
||||
typedef request TestRequest;
|
||||
typedef cancel TestCancel;
|
||||
typedef piece TestPiece;
|
||||
typedef bitfield TestBitfield;
|
||||
typedef have TestHave;
|
||||
typedef piececomplete TestPieceComplete;
|
||||
|
||||
CheckinWithTracker (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
SendRequestToTracker (torrent_data_t *tdata)
|
||||
=> (torrent_data_t *tdata, int socket);
|
||||
|
||||
GetTrackerResponse (torrent_data_t *tdata, int socket)
|
||||
=> ();
|
||||
|
||||
UpdateChokeList (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
PickChoked (torrent_data_t *tdata)
|
||||
=> (torrent_data_t *tdata, chokelist_t clist);
|
||||
|
||||
SendChokeUnchoke (torrent_data_t *tdata, chokelist_t clist)
|
||||
=> ();
|
||||
|
||||
SetupConnection (torrent_data_t *tdata, int socket)
|
||||
=> ();
|
||||
|
||||
Handshake (torrent_data_t *tdata, int socket)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendBitfield (torrent_data_t *tdata, client_data_t *client)
|
||||
=> ();
|
||||
|
||||
Message (torrent_data_t *tdata, client_data_t *client)
|
||||
=> ();
|
||||
|
||||
ReadMessage (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload);
|
||||
|
||||
HandleMessage (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
MessageDone (client_data_t *client)
|
||||
=> ();
|
||||
|
||||
CompletePiece (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
VerifyPiece (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int piece);
|
||||
|
||||
SendHave (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendUninterested (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
Choke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Cancel (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Interested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Uninterested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Bitfield (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Unchoke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendRequest (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Have (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
Piece (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int piece);
|
||||
|
||||
Request (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
SendKeepAlives (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
GetClients ()
|
||||
=> (int maxfd, fd_set *fds);
|
||||
|
||||
SelectSockets (int maxfd, fd_set *fds)
|
||||
=> (fd_set *fds);
|
||||
|
||||
CheckSockets (fd_set *fds)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
TrackerTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
ChokeTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
Connect ()
|
||||
=> (torrent_data_t *tdata, int socket);
|
||||
|
||||
KeepAliveTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
Listen ()
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
source TrackerTimer => CheckinWithTracker;
|
||||
source ChokeTimer => UpdateChokeList;
|
||||
source Connect => SetupConnection;
|
||||
source Listen => Message;
|
||||
source KeepAliveTimer => SendKeepAlives;
|
||||
|
||||
Listen = GetClients -> SelectSockets -> CheckSockets;
|
||||
CheckinWithTracker = SendRequestToTracker -> GetTrackerResponse;
|
||||
UpdateChokeList = PickChoked -> SendChokeUnchoke;
|
||||
SetupConnection = Handshake -> SendBitfield;
|
||||
Message = ReadMessage -> HandleMessage -> MessageDone;
|
||||
|
||||
CompletePiece:[_, _, piececomplete] = VerifyPiece -> SendHave -> SendUninterested;
|
||||
|
||||
HandleMessage:[_, _, choke, _, _] = Choke;
|
||||
HandleMessage:[_, _, unchoke, _, _] = Unchoke -> SendRequest;
|
||||
HandleMessage:[_, _, interested, _, _] = Interested;
|
||||
|
||||
HandleMessage:[_, _, uninterested, _, _] = Uninterested;
|
||||
HandleMessage:[_, _, request, _, _] = Request;
|
||||
HandleMessage:[_, _, cancel, _, _] = Cancel;
|
||||
HandleMessage:[_, _, piece, _, _] = Piece -> CompletePiece -> SendRequest;
|
||||
HandleMessage:[_, _, bitfield, _, _] = Bitfield;
|
||||
HandleMessage:[_, _, have, _, _] = Have -> SendRequest;
|
||||
|
||||
atomic GetClients:{BigLock};
|
||||
atomic CheckSockets:{BigLock};
|
||||
atomic Message:{BigLock};
|
||||
atomic CheckinWithTracker:{BigLock};
|
||||
atomic UpdateChokeList:{BigLock};
|
||||
atomic SetupConnection:{BigLock};
|
||||
atomic SendKeepAlives:{BigLock};
|
||||
38
samples/FLUX/test.fx
Normal file
@@ -0,0 +1,38 @@
|
||||
// concrete node signatures
|
||||
Listen ()
|
||||
=> (int socket);
|
||||
|
||||
ReadRequest (int socket)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
CheckCache (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
// omitted for space:
|
||||
// ReadInFromDisk, StoreInCache
|
||||
Compress (int socket, bool close, image_tag *request, __u8 *rgb_data)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
Write (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
Complete (int socket, bool close, image_tag *request) => ();
|
||||
|
||||
// source node
|
||||
source Listen => Image;
|
||||
|
||||
// abstract node
|
||||
Image = ReadRequest -> CheckCache -> Handler -> Write -> Complete;
|
||||
|
||||
// predicate type & dispatch
|
||||
typedef hit TestInCache;
|
||||
Handler:[_, _, hit] = ;
|
||||
Handler:[_, _, _] =
|
||||
ReadInFromDisk -> Compress -> StoreInCache;
|
||||
|
||||
// error handler
|
||||
handle error ReadInFromDisk => FourOhFor;
|
||||
|
||||
// atomicity constraints
|
||||
atomic CheckCache:{cache};
|
||||
atomic StoreInCache:{cache};
|
||||
atomic Complete:{cache};
|
||||
|
||||
3608
samples/Formatted/NiAlH_jea.eam.fs
Normal file
File diff suppressed because it is too large
31
samples/FreeMarker/example.ftl
Normal file
@@ -0,0 +1,31 @@
|
||||
<#import "layout.ftl" as layout>
|
||||
|
||||
<#assign results = [
|
||||
{
|
||||
"title": "Example Result",
|
||||
"description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
|
||||
}
|
||||
] />
|
||||
|
||||
<@layout.page title="FreeMarker Example">
|
||||
<#if results?size == 0>
|
||||
There were no results.
|
||||
<#else>
|
||||
<ul>
|
||||
<#list results as result>
|
||||
<li>
|
||||
<strong>${result.title}</strong>
|
||||
<p>${result.description}</p>
|
||||
</li>
|
||||
</#list>
|
||||
</ul>
|
||||
</#if>
|
||||
|
||||
<#-- This is a FreeMarker comment -->
|
||||
<@currentTime />
|
||||
</@layout.page>
|
||||
|
||||
|
||||
<#macro currentTime>
|
||||
${.now?string.full}
|
||||
</#macro>
|
||||
32
samples/FreeMarker/layout.ftl
Normal file
@@ -0,0 +1,32 @@
|
||||
<#ftl strip_text=true />
|
||||
|
||||
<#macro page title>
|
||||
<!doctype html>
|
||||
<html lang="${.lang}">
|
||||
<head>
|
||||
<title>${title}</title>
|
||||
<@metaTags />
|
||||
</head>
|
||||
<body>
|
||||
<#nested />
|
||||
<@footer />
|
||||
</body>
|
||||
</html>
|
||||
</#macro>
|
||||
|
||||
|
||||
<#---
|
||||
Default meta tags
|
||||
-->
|
||||
<#macro metaTags>
|
||||
<#compress>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
</#compress>
|
||||
</#macro>
|
||||
|
||||
<#macro footer>
|
||||
<p>This page is using FreeMarker v${.version}</p>
|
||||
</#macro>
|
||||
25912
samples/G-code/lm.g
File diff suppressed because it is too large
29735
samples/G-code/rm.g
File diff suppressed because it is too large
20
samples/GLSL/gbuffers_textured_lit.fsh
Normal file
@@ -0,0 +1,20 @@
|
||||
#version 120
|
||||
|
||||
uniform sampler2D texture;
|
||||
|
||||
varying vec3 color;
|
||||
varying vec2 texcoord;
|
||||
|
||||
vec4 GetDiffuse() {
|
||||
vec4 diffuse = vec4(color.rgb, 1.0);
|
||||
diffuse *= texture2D(texture, texcoord);
|
||||
|
||||
return diffuse;
|
||||
}
|
||||
|
||||
|
||||
void main() {
|
||||
vec4 diffuse = GetDiffuse();
|
||||
|
||||
gl_FragData[0] = diffuse;
|
||||
}
|
||||
11
samples/GLSL/gbuffers_textured_lit.vsh
Normal file
@@ -0,0 +1,11 @@
|
||||
#version 120
|
||||
|
||||
varying vec3 color;
|
||||
varying vec2 texcoord;
|
||||
|
||||
void main() {
|
||||
color = gl_Color.rgb;
|
||||
texcoord = gl_MultiTexCoord0.st;
|
||||
|
||||
gl_Position = ftransform();
|
||||
}
|
||||
57
samples/GraphQL/kitchen-sink.graphql
Normal file
@@ -0,0 +1,57 @@
|
||||
# Copyright (c) 2015, Facebook, Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the BSD-style license found in the
|
||||
# LICENSE file in the root directory of this source tree. An additional grant
|
||||
# of patent rights can be found in the PATENTS file in the same directory.
|
||||
|
||||
query queryName($foo: ComplexType, $site: Site = MOBILE) {
|
||||
whoever123is: node(id: [123, 456]) {
|
||||
id ,
|
||||
... on User @defer {
|
||||
field2 {
|
||||
id ,
|
||||
alias: field1(first:10, after:$foo,) @include(if: $foo) {
|
||||
id,
|
||||
...frag
|
||||
}
|
||||
}
|
||||
}
|
||||
... @skip(unless: $foo) {
|
||||
id
|
||||
}
|
||||
... {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mutation likeStory {
|
||||
like(story: 123) @defer {
|
||||
story {
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) {
|
||||
storyLikeSubscribe(input: $input) {
|
||||
story {
|
||||
likers {
|
||||
count
|
||||
}
|
||||
likeSentence {
|
||||
text
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment frag on Friend {
|
||||
foo(size: $size, bar: $b, obj: {key: "value"})
|
||||
}
|
||||
|
||||
{
|
||||
unnamed(truthy: true, falsey: false),
|
||||
query
|
||||
}
|
||||
50
samples/GraphQL/schema-kitchen-sink.graphql
Normal file
@@ -0,0 +1,50 @@
|
||||
# Copyright (c) 2015, Facebook, Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the BSD-style license found in the
|
||||
# LICENSE file in the root directory of this source tree. An additional grant
|
||||
# of patent rights can be found in the PATENTS file in the same directory.
|
||||
|
||||
schema {
|
||||
query: QueryType
|
||||
mutation: MutationType
|
||||
}
|
||||
|
||||
type Foo implements Bar {
|
||||
one: Type
|
||||
two(argument: InputType!): Type
|
||||
three(argument: InputType, other: String): Int
|
||||
four(argument: String = "string"): String
|
||||
five(argument: [String] = ["string", "string"]): String
|
||||
six(argument: InputType = {key: "value"}): Type
|
||||
}
|
||||
|
||||
interface Bar {
|
||||
one: Type
|
||||
four(argument: String = "string"): String
|
||||
}
|
||||
|
||||
union Feed = Story | Article | Advert
|
||||
|
||||
scalar CustomScalar
|
||||
|
||||
enum Site {
|
||||
DESKTOP
|
||||
MOBILE
|
||||
}
|
||||
|
||||
input InputType {
|
||||
key: String!
|
||||
answer: Int = 42
|
||||
}
|
||||
|
||||
extend type Foo {
|
||||
seven(argument: [String]): Type
|
||||
}
|
||||
|
||||
directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
|
||||
|
||||
directive @include(if: Boolean!)
|
||||
on FIELD
|
||||
| FRAGMENT_SPREAD
|
||||
| INLINE_FRAGMENT
|
||||
46
samples/Groovy/filenames/Jenkinsfile
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
jettyUrl = 'http://localhost:8081/'
|
||||
|
||||
def servers
|
||||
|
||||
stage 'Dev'
|
||||
node {
|
||||
checkout scm
|
||||
servers = load 'servers.groovy'
|
||||
mvn '-o clean package'
|
||||
dir('target') {stash name: 'war', includes: 'x.war'}
|
||||
}
|
||||
|
||||
stage 'QA'
|
||||
parallel(longerTests: {
|
||||
runTests(servers, 30)
|
||||
}, quickerTests: {
|
||||
runTests(servers, 20)
|
||||
})
|
||||
|
||||
stage name: 'Staging', concurrency: 1
|
||||
node {
|
||||
servers.deploy 'staging'
|
||||
}
|
||||
|
||||
input message: "Does ${jettyUrl}staging/ look good?"
|
||||
|
||||
stage name: 'Production', concurrency: 1
|
||||
node {
|
||||
sh "wget -O - -S ${jettyUrl}staging/"
|
||||
echo 'Production server looks to be alive'
|
||||
servers.deploy 'production'
|
||||
echo "Deployed to ${jettyUrl}production/"
|
||||
}
|
||||
|
||||
def mvn(args) {
|
||||
sh "${tool 'Maven 3.x'}/bin/mvn ${args}"
|
||||
}
|
||||
|
||||
def runTests(servers, duration) {
|
||||
node {
|
||||
checkout scm
|
||||
servers.runWithServer {id ->
|
||||
mvn "-o -f sometests test -Durl=${jettyUrl}${id}/ -Dduration=${duration}"
|
||||
}
|
||||
}
|
||||
}
|
||||
6
samples/HCL/example.hcl
Normal file
@@ -0,0 +1,6 @@
|
||||
consul = "1.2.3.4"
|
||||
|
||||
// This is a comment
|
||||
template "foo" {
|
||||
bar = "zip"
|
||||
}
|
||||
13
samples/HCL/example.tf
Normal file
@@ -0,0 +1,13 @@
|
||||
resource "aws_instance" "web" {
|
||||
// Copies the myapp.conf file to /etc/myapp.conf
|
||||
provisioner "file" {
|
||||
source = "conf/myapp.conf"
|
||||
destination = "/etc/myapp.conf"
|
||||
}
|
||||
|
||||
// Copies the configs.d folder to /etc/configs.d
|
||||
provisioner "file" {
|
||||
source = "conf/configs.d"
|
||||
destination = "/etc"
|
||||
}
|
||||
}
|
||||
27
samples/HLSL/accelerated_surface_win.hlsl
Normal file
@@ -0,0 +1,27 @@
|
||||
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
// To compile these two shaders:
|
||||
// fxc /E pixelMain /T ps_2_0 accelerated_surface_win.hlsl
|
||||
// fxc /E vertexMain /T vs_2_0 accelerated_surface_win.hlsl
|
||||
//
|
||||
// fxc is in the DirectX SDK.
|
||||
|
||||
struct Vertex {
|
||||
float4 position : POSITION;
|
||||
float2 texCoord : TEXCOORD0;
|
||||
};
|
||||
|
||||
texture t;
|
||||
sampler s;
|
||||
|
||||
// Passes a position and texture coordinate to the pixel shader.
|
||||
Vertex vertexMain(Vertex input) {
|
||||
return input;
|
||||
};
|
||||
|
||||
// Samples a texture at the given texture coordinate and returns the result.
|
||||
float4 pixelMain(float2 texCoord : TEXCOORD0) : COLOR0 {
|
||||
return tex2D(s, texCoord);
|
||||
};
|
||||
105
samples/HLSL/corridor.fx
Normal file
@@ -0,0 +1,105 @@
|
||||
float4x4 matWorldView : WORLDVIEW;
|
||||
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;
|
||||
|
||||
struct VS_INPUT {
|
||||
float4 Position : POSITION0;
|
||||
float3 Normal : NORMAL;
|
||||
float3 Tangent : TANGENT;
|
||||
float3 Binormal : BINORMAL;
|
||||
float2 TexCoord0 : TEXCOORD0;
|
||||
float2 TexCoord1 : TEXCOORD1;
|
||||
};
|
||||
|
||||
struct VS_OUTPUT {
|
||||
float4 Position : POSITION0;
|
||||
float2 TexCoord0 : TEXCOORD0;
|
||||
float2 TexCoord1 : TEXCOORD1;
|
||||
float3x3 TangentToView : TEXCOORD2;
|
||||
};
|
||||
|
||||
VS_OUTPUT vs_main(VS_INPUT input)
|
||||
{
|
||||
VS_OUTPUT output;
|
||||
output.Position = mul(input.Position, matWorldViewProjection);
|
||||
output.TexCoord0 = input.TexCoord0 * 5;
|
||||
output.TexCoord1 = input.TexCoord1;
|
||||
output.TangentToView[0] = mul(float4(input.Tangent, 0), matWorldView).xyz;
|
||||
output.TangentToView[1] = mul(float4(input.Binormal, 0), matWorldView).xyz;
|
||||
output.TangentToView[2] = mul(float4(input.Normal, 0), matWorldView).xyz;
|
||||
return output;
|
||||
}
|
||||
|
||||
struct PS_OUTPUT {
|
||||
float4 gbuffer0 : COLOR0;
|
||||
float4 gbuffer1 : COLOR1;
|
||||
};
|
||||
|
||||
texture albedo_tex;
|
||||
sampler albedo_samp = sampler_state {
|
||||
Texture = (albedo_tex);
|
||||
MipFilter = Linear;
|
||||
MinFilter = Linear;
|
||||
MagFilter = Linear;
|
||||
AddressU = Wrap;
|
||||
AddressV = Wrap;
|
||||
sRGBTexture = True;
|
||||
};
|
||||
|
||||
texture normal_tex;
|
||||
sampler normal_samp = sampler_state {
|
||||
Texture = (normal_tex);
|
||||
MipFilter = Linear;
|
||||
MinFilter = Linear;
|
||||
MagFilter = Linear;
|
||||
AddressU = Wrap;
|
||||
AddressV = Wrap;
|
||||
sRGBTexture = False;
|
||||
};
|
||||
|
||||
texture specular_tex;
|
||||
sampler specular_samp = sampler_state {
|
||||
Texture = (specular_tex);
|
||||
MipFilter = Linear;
|
||||
MinFilter = Linear;
|
||||
MagFilter = Linear;
|
||||
AddressU = Wrap;
|
||||
AddressV = Wrap;
|
||||
sRGBTexture = True;
|
||||
};
|
||||
|
||||
texture ao_tex;
|
||||
sampler ao_samp = sampler_state {
|
||||
Texture = (ao_tex);
|
||||
MipFilter = Linear;
|
||||
MinFilter = Linear;
|
||||
MagFilter = Linear;
|
||||
AddressU = Wrap;
|
||||
AddressV = Wrap;
|
||||
sRGBTexture = True;
|
||||
};
|
||||
|
||||
PS_OUTPUT ps_main(VS_OUTPUT Input)
|
||||
{
|
||||
PS_OUTPUT o;
|
||||
|
||||
float3 tangentNormal = normalize(tex2D(normal_samp, Input.TexCoord0).xyz * 2 - 1);
|
||||
float3 eyeNormal = normalize(mul(tangentNormal, Input.TangentToView));
|
||||
|
||||
float3 albedo = tex2D(albedo_samp, Input.TexCoord0).rgb;
|
||||
float ao = tex2D(ao_samp, Input.TexCoord1).r * 0.75;
|
||||
float spec = tex2D(specular_samp, Input.TexCoord0).r;
|
||||
|
||||
o.gbuffer0 = float4(eyeNormal, spec * ao);
|
||||
o.gbuffer1 = float4(albedo, 1 - ao);
|
||||
return o;
|
||||
}
|
||||
|
||||
technique mesh {
|
||||
pass Geometry {
|
||||
VertexShader = compile vs_3_0 vs_main();
|
||||
PixelShader = compile ps_3_0 ps_main();
|
||||
|
||||
AlphaBlendEnable = False;
|
||||
ZWriteEnable = True;
|
||||
}
|
||||
}
|
||||
119
samples/HLSL/jellyfish.fx
Normal file
@@ -0,0 +1,119 @@
|
||||
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;
|
||||
float4x4 matWorldView : WORLDVIEW;
|
||||
float4x4 matWorld : WORLD;
|
||||
float4x4 matView : VIEW;
|
||||
|
||||
uniform float4 vViewPosition;
|
||||
|
||||
struct VS_INPUT
|
||||
{
|
||||
float3 Pos: POSITION;
|
||||
float3 Normal: NORMAL;
|
||||
float3 Tangent: TANGENT;
|
||||
float3 Binormal: BINORMAL;
|
||||
};
|
||||
|
||||
struct VS_OUTPUT
|
||||
{
|
||||
float4 Pos : POSITION;
|
||||
float3 reflection : TEXCOORD1;
|
||||
float3 refraction : TEXCOORD2;
|
||||
float fresnel : TEXCOORD3;
|
||||
};
|
||||
|
||||
uniform float3 amt;
|
||||
uniform float3 scale;
|
||||
uniform float3 phase;
|
||||
|
||||
float3 deform(float3 p)
|
||||
{
|
||||
float s = 3;
|
||||
float3 p2 = p * scale + phase;
|
||||
s += sin(p2.x) * amt.x;
|
||||
s += sin(p2.y) * amt.y;
|
||||
s += sin(p2.z) * amt.z;
|
||||
return p * s / 3;
|
||||
}
|
||||
|
||||
VS_OUTPUT vs_main( VS_INPUT In )
|
||||
{
|
||||
VS_OUTPUT Out;
|
||||
|
||||
float3 pos = In.Pos;
|
||||
float3 norm = In.Normal;
|
||||
|
||||
float3 p1 = pos + In.Tangent * 0.05;
|
||||
float3 p2 = pos + In.Binormal * 0.05;
|
||||
pos = deform(pos);
|
||||
p1 = deform(p1);
|
||||
p2 = deform(p2);
|
||||
|
||||
p1 -= pos;
|
||||
p2 -= pos;
|
||||
norm = normalize(cross(p1, p2));
|
||||
|
||||
float3 view = normalize(pos - vViewPosition.xyz);
|
||||
|
||||
Out.Pos = mul(float4(pos, 1.0), matWorldViewProjection);
|
||||
Out.reflection = reflect(view, norm);
|
||||
Out.refraction = reflect(view, norm * 0.4f); /* fake, but who cares? */
|
||||
Out.fresnel = dot(view, norm);
|
||||
norm = mul(float4(norm, 0.0), matWorldViewProjection);
|
||||
|
||||
return Out;
|
||||
}
|
||||
|
||||
#define PS_INPUT VS_OUTPUT
|
||||
|
||||
#if 0
|
||||
textureCUBE reflectionMap;
|
||||
samplerCUBE reflectionMapSampler = sampler_state
|
||||
{
|
||||
Texture = (reflectionMap);
|
||||
MipFilter = LINEAR;
|
||||
MinFilter = LINEAR;
|
||||
MagFilter = LINEAR;
|
||||
};
|
||||
#else
|
||||
// textures
|
||||
texture reflectionMap
|
||||
<
|
||||
string type = "CUBE";
|
||||
string name = "test_cube.dds";
|
||||
>;
|
||||
|
||||
samplerCUBE reflectionMapSampler = sampler_state
|
||||
{
|
||||
Texture = (reflectionMap);
|
||||
MipFilter = LINEAR;
|
||||
MinFilter = LINEAR;
|
||||
MagFilter = LINEAR;
|
||||
};
|
||||
#endif
|
||||
|
||||
struct PS_OUTPUT
|
||||
{
|
||||
float4 color : COLOR0;
|
||||
};
|
||||
|
||||
PS_OUTPUT ps_main( PS_INPUT In )
|
||||
{
|
||||
PS_OUTPUT Out;
|
||||
|
||||
float4 reflection = texCUBE(reflectionMapSampler, normalize(In.reflection)) * 1.5;
|
||||
float4 refraction = texCUBE(reflectionMapSampler, normalize(In.refraction));
|
||||
float fresnel = In.fresnel;
|
||||
// float fresnel = abs(normalize(In.normal).z);
|
||||
Out.color = lerp(reflection, refraction, fresnel) * pow(1.0 - fresnel * 0.75, 1.0);
|
||||
|
||||
return Out;
|
||||
}
|
||||
|
||||
technique blur_ps_vs_2_0
|
||||
{
|
||||
pass P0
|
||||
{
|
||||
VertexShader = compile vs_2_0 vs_main();
|
||||
PixelShader = compile ps_2_0 ps_main();
|
||||
}
|
||||
}
|
||||
41
samples/HLSL/noise.fx
Normal file
@@ -0,0 +1,41 @@
|
||||
float alpha = 1.f;
|
||||
|
||||
texture tex;
|
||||
sampler tex_sampler = sampler_state
|
||||
{
|
||||
Texture = (tex);
|
||||
MipFilter = LINEAR;
|
||||
MinFilter = LINEAR;
|
||||
MagFilter = LINEAR;
|
||||
|
||||
AddressU = WRAP;
|
||||
AddressV = WRAP;
|
||||
};
|
||||
|
||||
struct VS_OUTPUT
|
||||
{
|
||||
float4 pos : POSITION;
|
||||
float2 tex : TEXCOORD1;
|
||||
};
|
||||
|
||||
VS_OUTPUT vertex(float4 ipos : POSITION, float2 tex : TEXCOORD0)
|
||||
{
|
||||
VS_OUTPUT Out;
|
||||
Out.pos = ipos;
|
||||
Out.tex = tex * 2;
|
||||
return Out;
|
||||
}
|
||||
|
||||
float4 pixel(VS_OUTPUT In) : COLOR
|
||||
{
|
||||
return tex2D(tex_sampler, In.tex) * alpha;
|
||||
}
|
||||
|
||||
technique blur_ps_vs_2_0
|
||||
{
|
||||
pass P0
|
||||
{
|
||||
VertexShader = compile vs_2_0 vertex();
|
||||
PixelShader = compile ps_2_0 pixel();
|
||||
}
|
||||
}
|
||||
5
samples/HTML+ECR/greeting.ecr
Normal file
@@ -0,0 +1,5 @@
|
||||
<% if @name %>
|
||||
Greeting, <%= @name %>!
|
||||
<% else %>
|
||||
Greeting!
|
||||
<% end %>
|
||||
26
samples/HTML+EEX/index.html.eex
Normal file
@@ -0,0 +1,26 @@
|
||||
<h1>Listing Books</h1>
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<th>Title</th>
|
||||
<th>Summary</th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
</tr>
|
||||
|
||||
<%= for book <- @books do %>
|
||||
<tr>
|
||||
<%# comment %>
|
||||
<td><%= book.title %></td>
|
||||
<td><%= book.content %></td>
|
||||
<td><%= link "Show", to: book_path(@conn, :show, book) %></td>
|
||||
<td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
|
||||
<td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
|
||||
</tr>
|
||||
<% end %>
|
||||
</table>
|
||||
|
||||
<br />
|
||||
|
||||
<%= link "New book", to: book_path(@conn, :new) %>
|
||||
31
samples/HTML/rpanel.inc
Normal file
@@ -0,0 +1,31 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" charset="UTF-8" type="text/css" href="main.css">
|
||||
<meta charset="UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
<div id="panel_header_main_form">
|
||||
<p id="panel_header_main_form_title">Поддержка:</p>
|
||||
</div>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
<p>Москва:</p>
|
||||
</td>
|
||||
<td>
|
||||
<p>+7-902-7-800-807</p>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<p>Регионы:</p>
|
||||
</td>
|
||||
<td>
|
||||
<p>+7-902-7-800-807</p>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</body>
|
||||
5
samples/HTML/tailDel.inc
Normal file
@@ -0,0 +1,5 @@
|
||||
</UL>
|
||||
<P><A HREF="devices.html">Supported Targets</A></P>
|
||||
</BODY>
|
||||
</HEAD>
|
||||
|
||||
@@ -1,6 +1,74 @@
Version 1 of Trivial Extension by Andrew Plotkin begins here.
Version 2 of Trivial Extension by Andrew Plotkin begins here.

"This is the rubric of the extension."

"provided for the Linguist package by Andrew Plotkin"

[Note the two special quoted lines above.]

A cow is a kind of animal. A cow can be purple.

Understand "cow" as a cow.
Understand "purple" as a purple cow.

Check pushing a cow:
	instead say "Cow-tipping, at your age?[paragraph break]Inconceivable."

[Here are the possible levels of heading:]

Volume One

Text-line is always "A line of text."

Book 2

	Part the third - indented headings still count

Chapter IV - not for release

[Heading labels are case-insensitive.]

section foobar

[A line beginning "Volume" that does not have blank lines before and after it is *not* a header line. So the following should all be part of section foobar. Sadly, the "Volume is..." line gets colored as a header, because Atom's regexp model can't recognize "thing with blank lines before and after"!]

Measure is a kind of value.
Volume is a measure. Length is a measure.
Area is a measure.

[And now some Inform 6 inclusions.]

To say em -- running on:
	(- style underline; -).
To say /em -- running on:
	(- style roman; -).

Include (-

! Inform 6 comments start with a ! mark and run to the end of the line.
Global cowcount;

[ inform6func arg;
	print "Here is some text; ", (address) 'dictword', ".^";
	cowcount++; ! increment this variable
];

Object i6cow
	with name 'cow' 'animal',
	with description "It looks like a cow.",
	has animate scenery;

-) after "Global Variables" in "Output.i6t".

Trivial Extension ends here.

---- DOCUMENTATION ----

Everything after the "---- DOCUMENTATION ----" line is documentation, so it should have the comment style.

However, tab-indented lines are sample Inform code within the documentation:

	Horns are a kind of thing. Every cow has horns.
	say "Moo[if the noun is purple] indigo[end if]."

So we need to allow for that.

@@ -2,11 +2,61 @@

Include Trivial Extension by Andrew Plotkin.

Volume 1 - overview

Chapter - setting the scene

The Kitchen is a room.

[This kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
[Comment: this kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]

Section - the kitchen table

The spicerack is a container in the Kitchen.

Table of Spices
Name	Flavor
"cinnamon"	5
"nutmeg"	4
"szechuan pepper"	8

The description of the spicerack is "It's mostly empty."

Chapter - a character

A purple cow called Gelett is in the Kitchen.

[This comment spans multiple lines..

...and this line contains [nested square[] brackets]...

...which is legal in Inform 7.]

Instead of examining Gelett:
	say "You'd rather see than be one."

Instead of examining Gelett:
	say "You'd rather see than be one."

Check smelling Gelett:
	say "This text contains several lines.

A blank line is displayed as a paragraph break,
but a simple line break is not.";
	stop the action.

Section - cow catching

Gelett has a number called the mooness.

Instead of taking Gelett:
	increment the mooness of Gelett;
	if the mooness of Gelett is one:
		say "Gelett moos once.";
	else:
		say "Gelett moos [mooness of Gelett in words] times.";

Volume 2 - the turn cycle

Every turn:
	say "A turn passes[one of][or] placidly[or] idly[or] tediously[at random]."
1104  samples/Isabelle ROOT/filenames/ROOT  Normal file
File diff suppressed because it is too large
3  samples/J/hashbang  Executable file
@@ -0,0 +1,3 @@
#!/bin/jconsole
echo 'Hello, GitHub!'
exit ''
123  samples/JSON/Git Commit.JSON-tmLanguage  Normal file
@@ -0,0 +1,123 @@
{
  "name": "Git Commit Message",
  "scopeName": "text.git-commit",
  "fileTypes": [
    "COMMIT_EDITMSG"
  ],
  "patterns": [
    {
      "name": "comment.line.number-sign.git-commit-message",
      "begin": "^#",
      "beginCaptures": {
        "0": {
          "name": "punctuation.definition.comment.git-commit-message"
        }
      },
      "end": "$",
      "patterns": [
        {
          "name": "comment.line.on-branch.git-commit-message",
          "match": "(?:On branch )([^ ]+)",
          "captures": {
            "1": {
              "name": "support.function.branch.git-commit-message"
            }
          }
        },
        {
          "name": "comment.line.on-branch.git-commit-message",
          "match": "Your branch .* '([^ ']+)'",
          "captures": {
            "1": {
              "name": "support.function.branch.git-commit-message"
            }
          }
        },
        {
          "name": "comment.line.untracked.git-commit-message",
          "begin": " Untracked files:",
          "beginCaptures": {
            "0": {
              "name": "entity.definition.untracked.git-commit-message"
            }
          },
          "end": "^#$",
          "patterns": [
            {
              "name": "comment.line.untracked-file.git-commit-message",
              "match": "\t(.*)$",
              "captures": {
                "1": {
                  "name": "support.function.file-status.git-commit-message"
                },
                "2": {
                  "name": "constant.character.branch.git-commit-message"
                }
              }
            }
          ]
        },
        {
          "name": "comment.line.discarded.git-commit-message",
          "begin": " Change(?:s not staged for commit|d but not updated):",
          "beginCaptures": {
            "0": {
              "name": "entity.definition.discarded.git-commit-message"
            }
          },
          "end": "^#$",
          "patterns": [
            {
              "name": "comment.line.discarded.git-commit-message",
              "match": "\t([^:]+):(.*)$",
              "captures": {
                "1": {
                  "name": "support.function.file-status.git-commit-message"
                },
                "2": {
                  "name": "constant.character.branch.git-commit-message"
                }
              }
            }
          ]
        },
        {
          "name": "comment.line.selected.git-commit-message",
          "begin": " Changes to be committed:",
          "beginCaptures": {
            "0": {
              "name": "entity.definition.selected.git-commit-message"
            }
          },
          "end": "^#$",
          "patterns": [
            {
              "name": "comment.line.selected.git-commit-message",
              "match": "\t([^:]+):(.*)$",
              "captures": {
                "1": {
                  "name": "support.function.file-status.git-commit-message"
                },
                "2": {
                  "name": "constant.character.branch.git-commit-message"
                }
              }
            }
          ]
        }
      ]
    },
    {
      "name": "meta.diff.git-commit",
      "comment": "diff at the end of the commit message when using commit -v, or viewing a log. End pattern is just something to be never matched so that the meta continues untill the end of the file.",
      "begin": "diff\\ \\-\\-git",
      "end": "(?=xxxxxx)123457",
      "patterns": [
        {
          "include": "source.diff"
        }
      ]
    }
  ],
  "uuid": "de3fb2fc-e564-4a31-9813-5ee26967c5c8"
}
8  samples/JSON/filenames/.arcconfig  Normal file
@@ -0,0 +1,8 @@
{
  "project_id": "example",
  "conduit_uri": "https://ex.am.pl/",
  "copyright_holder": "Apache Software Foundation",
  "arcanist_configuration": "ArcJIRAConfiguration",
  "phabricator.uri": "https://phabricator.example.com/",
  "load": ["libs/src"]
}
16  samples/JSON/filenames/mcmod.info  Normal file
@@ -0,0 +1,16 @@
[
  {
    "modid": "examplemod",
    "name": "Example Mod",
    "description": "Example placeholder mod.",
    "version": "${version}",
    "mcversion": "${mcversion}",
    "url": "",
    "updateUrl": "",
    "authorList": ["ExampleDude"],
    "credits": "The Forge and FML guys, for making this example",
    "logoFile": "",
    "screenshots": [],
    "dependencies": []
  }
]
82  samples/JSON/geo.geojson  Normal file
@@ -0,0 +1,82 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": {
        "name": "Australia Post - North Ryde BC",
        "geo": [-33.787792, 151.13288],
        "streetAddress": "11 Waterloo Road",
        "addressLocality": "Macquarie Park",
        "addressRegion": "New South Wales",
        "addressCountry": "Australia",
        "postalCode": "2113"
      },
      "geometry": {
        "type": "Point",
        "coordinates": [151.13288, -33.787792, 0]
      }
    },

    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.8263884, 144.9105381],
        "streetAddress": "Level 3, 187 Todd Road",
        "addressLocality": "Port Melbourne",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3207"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.9097088901841, -37.82622654171794, 0],
            [144.9099724266943, -37.82679388891783, 0],
            [144.9110127325916, -37.82651526396403, 0],
            [144.9112227645738, -37.82655667152123, 0],
            [144.9113739439796, -37.82618552508767, 0],
            [144.9112740633105, -37.82615750100924, 0],
            [144.9111355846674, -37.82584493693527, 0],
            [144.9097088901841, -37.82622654171794, 0]
          ]
        ]
      }
    },

    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.05202791502396, 144.2085614999388],
        "streetAddress": "67 Richards Road",
        "addressLocality": "Castlemaine",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3450"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.2052428913937, -37.04906391287216, 0],
            [144.205540392692, -37.05049727485623, 0],
            [144.2059800881858, -37.05066835966983, 0],
            [144.206490656024, -37.05279538900776, 0],
            [144.2064525845008, -37.05366195881602, 0],
            [144.2084322301922, -37.0538920493147, 0],
            [144.2084811895712, -37.05266519735124, 0],
            [144.2079784002005, -37.05041270555773, 0],
            [144.2074017905817, -37.04817406993293, 0],
            [144.2061363939852, -37.04834972871226, 0],
            [144.2052428913937, -37.04906391287216, 0]
          ]
        ]
      }
    }
  ]
}
Some files were not shown because too many files have changed in this diff.