Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00

Compare commits (776 commits)

.gitmodules (vendored): 219 changed lines
@@ -7,9 +7,6 @@
[submodule "vendor/grammars/sublime-cirru"]
path = vendor/grammars/sublime-cirru
url = https://github.com/Cirru/sublime-cirru
[submodule "vendor/grammars/Sublime-Logos"]
path = vendor/grammars/Sublime-Logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/SublimeBrainfuck"]
path = vendor/grammars/SublimeBrainfuck
url = https://github.com/Drako/SublimeBrainfuck
@@ -25,18 +22,9 @@
[submodule "vendor/grammars/Sublime-REBOL"]
path = vendor/grammars/Sublime-REBOL
url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-Inform"]
path = vendor/grammars/Sublime-Inform
url = https://github.com/PogiNate/Sublime-Inform
[submodule "vendor/grammars/autoitv3-tmbundle"]
path = vendor/grammars/autoitv3-tmbundle
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
[submodule "vendor/grammars/Sublime-VimL"]
path = vendor/grammars/Sublime-VimL
url = https://github.com/SalGnt/Sublime-VimL
[submodule "vendor/grammars/boo-sublime"]
path = vendor/grammars/boo-sublime
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/ColdFusion"]
path = vendor/grammars/ColdFusion
url = https://github.com/SublimeText/ColdFusion
@@ -85,12 +73,12 @@
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-supercollider"]
path = vendor/grammars/language-supercollider
url = https://github.com/supercollider/language-supercollider
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
[submodule "vendor/grammars/sublime-sourcepawn"]
path = vendor/grammars/sublime-sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/Sublime-Lasso"]
path = vendor/grammars/Sublime-Lasso
url = https://github.com/bfad/Sublime-Lasso
@@ -109,9 +97,6 @@
[submodule "vendor/grammars/sublime-MuPAD"]
path = vendor/grammars/sublime-MuPAD
url = https://github.com/ccreutzig/sublime-MuPAD
[submodule "vendor/grammars/nesC.tmbundle"]
path = vendor/grammars/nesC.tmbundle
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/haxe-sublime-bundle"]
path = vendor/grammars/haxe-sublime-bundle
url = https://github.com/clemos/haxe-sublime-bundle
@@ -133,12 +118,6 @@
[submodule "vendor/grammars/fancy-tmbundle"]
path = vendor/grammars/fancy-tmbundle
url = https://github.com/fancy-lang/fancy-tmbundle
[submodule "vendor/grammars/fsharpbinding"]
path = vendor/grammars/fsharpbinding
url = https://github.com/fsharp/fsharpbinding
[submodule "vendor/grammars/monkey.tmbundle"]
path = vendor/grammars/monkey.tmbundle
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/dart-sublime-bundle"]
path = vendor/grammars/dart-sublime-bundle
url = https://github.com/guillermooo/dart-sublime-bundle
@@ -169,21 +148,12 @@
[submodule "vendor/grammars/fish-tmbundle"]
path = vendor/grammars/fish-tmbundle
url = https://github.com/l15n/fish-tmbundle
[submodule "vendor/grammars/sublime-idris"]
path = vendor/grammars/sublime-idris
url = https://github.com/laughedelic/sublime-idris
[submodule "vendor/grammars/sublime-better-typescript"]
path = vendor/grammars/sublime-better-typescript
url = https://github.com/lavrton/sublime-better-typescript
[submodule "vendor/grammars/moonscript-tmbundle"]
path = vendor/grammars/moonscript-tmbundle
url = https://github.com/leafo/moonscript-tmbundle
[submodule "vendor/grammars/Isabelle.tmbundle"]
path = vendor/grammars/Isabelle.tmbundle
url = https://github.com/lsf37/Isabelle.tmbundle
[submodule "vendor/grammars/x86-assembly-textmate-bundle"]
path = vendor/grammars/x86-assembly-textmate-bundle
url = https://github.com/lunixbochs/x86-assembly-textmate-bundle
[submodule "vendor/grammars/Alloy.tmbundle"]
path = vendor/grammars/Alloy.tmbundle
url = https://github.com/macekond/Alloy.tmbundle
@@ -214,9 +184,6 @@
[submodule "vendor/grammars/Julia.tmbundle"]
path = vendor/grammars/Julia.tmbundle
url = https://github.com/nanoant/Julia.tmbundle
[submodule "vendor/grammars/assembly.tmbundle"]
path = vendor/grammars/assembly.tmbundle
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/ooc.tmbundle"]
path = vendor/grammars/ooc.tmbundle
url = https://github.com/nilium/ooc.tmbundle
@@ -226,9 +193,6 @@
[submodule "vendor/grammars/sublime-tea"]
path = vendor/grammars/sublime-tea
url = https://github.com/pferruggiaro/sublime-tea
[submodule "vendor/grammars/puppet-textmate-bundle"]
path = vendor/grammars/puppet-textmate-bundle
url = https://github.com/puppet-textmate-bundle/puppet-textmate-bundle
[submodule "vendor/grammars/abap.tmbundle"]
path = vendor/grammars/abap.tmbundle
url = https://github.com/pvl/abap.tmbundle
@@ -259,9 +223,6 @@
[submodule "vendor/grammars/SublimeXtend"]
path = vendor/grammars/SublimeXtend
url = https://github.com/staltz/SublimeXtend
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/statatmbundle/Stata.tmbundle
[submodule "vendor/grammars/Vala-TMBundle"]
path = vendor/grammars/Vala-TMBundle
url = https://github.com/technosophos/Vala-TMBundle
@@ -349,9 +310,6 @@
[submodule "vendor/grammars/latex.tmbundle"]
path = vendor/grammars/latex.tmbundle
url = https://github.com/textmate/latex.tmbundle
[submodule "vendor/grammars/less.tmbundle"]
path = vendor/grammars/less.tmbundle
url = https://github.com/textmate/less.tmbundle
[submodule "vendor/grammars/lilypond.tmbundle"]
path = vendor/grammars/lilypond.tmbundle
url = https://github.com/textmate/lilypond.tmbundle
@@ -400,9 +358,6 @@
[submodule "vendor/grammars/processing.tmbundle"]
path = vendor/grammars/processing.tmbundle
url = https://github.com/textmate/processing.tmbundle
[submodule "vendor/grammars/prolog.tmbundle"]
path = vendor/grammars/prolog.tmbundle
url = https://github.com/textmate/prolog.tmbundle
[submodule "vendor/grammars/python-django.tmbundle"]
path = vendor/grammars/python-django.tmbundle
url = https://github.com/textmate/python-django.tmbundle
@@ -463,9 +418,6 @@
[submodule "vendor/grammars/llvm.tmbundle"]
path = vendor/grammars/llvm.tmbundle
url = https://github.com/whitequark/llvm.tmbundle
[submodule "vendor/grammars/sublime-nix"]
path = vendor/grammars/sublime-nix
url = https://github.com/wmertens/sublime-nix
[submodule "vendor/grammars/oz-tmbundle"]
path = vendor/grammars/oz-tmbundle
url = https://github.com/eregon/oz-tmbundle
@@ -524,24 +476,12 @@
[submodule "vendor/grammars/liquid.tmbundle"]
path = vendor/grammars/liquid.tmbundle
url = https://github.com/bastilian/validcode-textmate-bundles
[submodule "vendor/grammars/ats.sublime"]
path = vendor/grammars/ats.sublime
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/Modelica"]
path = vendor/grammars/Modelica
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
[submodule "vendor/grammars/sublime-apl"]
path = vendor/grammars/sublime-apl
url = https://github.com/StoneCypher/sublime-apl
[submodule "vendor/grammars/CLIPS-sublime"]
path = vendor/grammars/CLIPS-sublime
url = https://github.com/psicomante/CLIPS-sublime
[submodule "vendor/grammars/Creole"]
path = vendor/grammars/Creole
url = https://github.com/Siddley/Creole
[submodule "vendor/grammars/GDScript-sublime"]
path = vendor/grammars/GDScript-sublime
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/sublime-golo"]
path = vendor/grammars/sublime-golo
url = https://github.com/TypeUnsafe/sublime-golo
@@ -554,9 +494,6 @@
[submodule "vendor/grammars/G-Code"]
path = vendor/grammars/G-Code
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
[submodule "vendor/grammars/grace-tmbundle"]
path = vendor/grammars/grace-tmbundle
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/sublime-text-ox"]
path = vendor/grammars/sublime-text-ox
url = https://github.com/andreashetland/sublime-text-ox
@@ -566,9 +503,6 @@
[submodule "vendor/grammars/ec.tmbundle"]
path = vendor/grammars/ec.tmbundle
url = https://github.com/ecere/ec.tmbundle
[submodule "vendor/grammars/InnoSetup"]
path = vendor/grammars/InnoSetup
url = https://github.com/idleberg/InnoSetup-Sublime-Text
[submodule "vendor/grammars/gap-tmbundle"]
path = vendor/grammars/gap-tmbundle
url = https://github.com/dhowden/gap-tmbundle
@@ -590,9 +524,6 @@
[submodule "vendor/grammars/SublimeClarion"]
path = vendor/grammars/SublimeClarion
url = https://github.com/fushnisoft/SublimeClarion
[submodule "vendor/grammars/oracle.tmbundle"]
path = vendor/grammars/oracle.tmbundle
url = https://github.com/mulander/oracle.tmbundle.git
[submodule "vendor/grammars/BrightScript.tmbundle"]
path = vendor/grammars/BrightScript.tmbundle
url = https://github.com/cmink/BrightScript.tmbundle
@@ -602,18 +533,12 @@
[submodule "vendor/grammars/asciidoc.tmbundle"]
path = vendor/grammars/asciidoc.tmbundle
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
[submodule "vendor/grammars/sublime-text-pig-latin"]
path = vendor/grammars/sublime-text-pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/Lean.tmbundle"]
path = vendor/grammars/Lean.tmbundle
url = https://github.com/leanprover/Lean.tmbundle
[submodule "vendor/grammars/ampl"]
path = vendor/grammars/ampl
url = https://github.com/ampl/sublime-ampl
[submodule "vendor/grammars/openscad.tmbundle"]
path = vendor/grammars/openscad.tmbundle
url = https://github.com/tbuser/openscad.tmbundle
[submodule "vendor/grammars/sublime-varnish"]
path = vendor/grammars/sublime-varnish
url = https://github.com/brandonwamboldt/sublime-varnish
@@ -632,9 +557,147 @@
[submodule "vendor/grammars/jflex.tmbundle"]
path = vendor/grammars/jflex.tmbundle
url = https://github.com/jflex-de/jflex.tmbundle.git
[submodule "vendor/grammars/Sublime-Modula-2"]
path = vendor/grammars/Sublime-Modula-2
url = https://github.com/harogaston/Sublime-Modula-2
[submodule "vendor/grammars/ada.tmbundle"]
path = vendor/grammars/ada.tmbundle
url = https://github.com/textmate/ada.tmbundle
[submodule "vendor/grammars/api-blueprint-sublime-plugin"]
path = vendor/grammars/api-blueprint-sublime-plugin
url = https://github.com/apiaryio/api-blueprint-sublime-plugin
[submodule "vendor/grammars/Handlebars"]
path = vendor/grammars/Handlebars
url = https://github.com/daaain/Handlebars
[submodule "vendor/grammars/smali-sublime"]
path = vendor/grammars/smali-sublime
url = https://github.com/ShaneWilton/sublime-smali
[submodule "vendor/grammars/language-jsoniq"]
path = vendor/grammars/language-jsoniq
url = http://github.com/wcandillon/language-jsoniq
[submodule "vendor/grammars/atom-fsharp"]
path = vendor/grammars/atom-fsharp
url = https://github.com/fsprojects/atom-fsharp
[submodule "vendor/grammars/SMT.tmbundle"]
path = vendor/grammars/SMT.tmbundle
url = https://github.com/SRI-CSL/SMT.tmbundle.git
[submodule "vendor/grammars/language-crystal"]
path = vendor/grammars/language-crystal
url = https://github.com/atom-crystal/language-crystal
[submodule "vendor/grammars/language-xbase"]
path = vendor/grammars/language-xbase
url = https://github.com/hernad/atom-language-harbour
[submodule "vendor/grammars/language-ncl"]
path = vendor/grammars/language-ncl
url = https://github.com/rpavlick/language-ncl.git
[submodule "vendor/grammars/pawn-sublime-language"]
path = vendor/grammars/pawn-sublime-language
url = https://github.com/Southclaw/pawn-sublime-language.git
[submodule "vendor/grammars/atom-language-purescript"]
path = vendor/grammars/atom-language-purescript
url = https://github.com/purescript-contrib/atom-language-purescript
[submodule "vendor/grammars/vue-syntax-highlight"]
path = vendor/grammars/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight
[submodule "vendor/grammars/st2-zonefile"]
path = vendor/grammars/st2-zonefile
url = https://github.com/sixty4k/st2-zonefile
[submodule "vendor/grammars/sublimeprolog"]
path = vendor/grammars/sublimeprolog
url = https://github.com/alnkpa/sublimeprolog
[submodule "vendor/grammars/sublime-aspectj"]
path = vendor/grammars/sublime-aspectj
url = https://github.com/pchaigno/sublime-aspectj
[submodule "vendor/grammars/sublime-typescript"]
path = vendor/grammars/sublime-typescript
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
path = vendor/grammars/sublime-pony
url = https://github.com/CausalityLtd/sublime-pony
[submodule "vendor/grammars/X10"]
path = vendor/grammars/X10
url = https://github.com/x10-lang/x10-highlighting
[submodule "vendor/grammars/language-babel"]
path = vendor/grammars/language-babel
url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
path = vendor/grammars/UrWeb-Language-Definition
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/pschumm/Stata.tmbundle
[submodule "vendor/grammars/FreeMarker.tmbundle"]
path = vendor/grammars/FreeMarker.tmbundle
url = https://github.com/freemarker/FreeMarker.tmbundle
[submodule "vendor/grammars/MagicPython"]
path = vendor/grammars/MagicPython
url = https://github.com/MagicStack/MagicPython
[submodule "vendor/grammars/language-click"]
path = vendor/grammars/language-click
url = https://github.com/stenverbois/language-click.git
[submodule "vendor/grammars/language-maxscript"]
path = vendor/grammars/language-maxscript
url = https://github.com/Alhadis/language-maxscript
[submodule "vendor/grammars/language-renpy"]
path = vendor/grammars/language-renpy
url = https://github.com/williamd1k0/language-renpy.git
[submodule "vendor/grammars/language-inform7"]
path = vendor/grammars/language-inform7
url = https://github.com/erkyrath/language-inform7
[submodule "vendor/grammars/atom-language-stan"]
path = vendor/grammars/atom-language-stan
url = https://github.com/jrnold/atom-language-stan
[submodule "vendor/grammars/language-yang"]
path = vendor/grammars/language-yang
url = https://github.com/DzonyKalafut/language-yang.git
[submodule "vendor/grammars/perl6fe"]
path = vendor/grammars/perl6fe
url = https://github.com/MadcapJake/language-perl6fe.git
[submodule "vendor/grammars/language-less"]
path = vendor/grammars/language-less
url = https://github.com/atom/language-less.git
[submodule "vendor/grammars/language-povray"]
path = vendor/grammars/language-povray
url = https://github.com/c-lipka/language-povray
[submodule "vendor/grammars/sublime-terra"]
path = vendor/grammars/sublime-terra
url = https://github.com/pyk/sublime-terra
[submodule "vendor/grammars/SublimePuppet"]
path = vendor/grammars/SublimePuppet
url = https://github.com/russCloak/SublimePuppet
[submodule "vendor/grammars/sublimeassembly"]
path = vendor/grammars/sublimeassembly
url = https://github.com/Nessphoro/sublimeassembly
[submodule "vendor/grammars/monkey"]
path = vendor/grammars/monkey
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/assembly"]
path = vendor/grammars/assembly
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/boo"]
path = vendor/grammars/boo
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/logos"]
path = vendor/grammars/logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/pig-latin"]
path = vendor/grammars/pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/sourcepawn"]
path = vendor/grammars/sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/gdscript"]
path = vendor/grammars/gdscript
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/nesC"]
path = vendor/grammars/nesC
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/ats"]
path = vendor/grammars/ats
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/grace"]
path = vendor/grammars/grace
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/nix"]
path = vendor/grammars/nix
url = https://github.com/wmertens/sublime-nix

.travis.yml
@@ -1,6 +1,12 @@
language: ruby
sudo: false
addons:
  apt:
    packages:
    - libicu-dev
    - libicu48
before_install: script/travis/before_install
rvm:
  - 1.9.3
  - 2.0.0
  - 2.1
  - 2.2

CONTRIBUTING.md
@@ -1,10 +1,31 @@
# Contributing

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. The majority of contributions won't need to touch any Ruby code at all.
[code-of-conduct]: http://todogroup.org/opencodeofconduct/#Linguist/opensource@github.com

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. This project adheres to the [Open Code of Conduct][code-of-conduct]. By participating, you are expected to uphold this code.

The majority of contributions won't need to touch any Ruby code at all.

## Adding an extension to a language

We try only to add new extensions once they have some usage on GitHub. In most cases we prefer that extensions be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new extension:

0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:

0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.


## Adding a language

We try only to add languages once they have some usage on GitHub. In most cases we prefer that languages be in use in hundreds of repositories before supporting them in Linguist.
We try only to add languages once they have some usage on GitHub. In most cases we prefer that each new file extension be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new language:

@@ -23,19 +44,21 @@ In addition, if your new language defines an extension that's already listed in

Remember, the goal here is to try and avoid false positives!


## Fixing a misclassified language

Most languages are detected by their file extension defined in [languages.yml][languages]. For disambiguating between files with common extensions, linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.


## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [languages.yml][languages] is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).
You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](https://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, we'll pick it up for GitHub in the next release of Linguist.

@@ -51,9 +74,10 @@ To run the tests:

bundle exec rake test

Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](https://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Here's our current build status: [](https://travis-ci.org/github/linguist)

Here's our current build status: [](http://travis-ci.org/github/linguist)

## Releasing
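
As an aside on the "Adding an extension" and "Adding a language" steps above: a `languages.yml` entry is a small YAML block keyed by the language name. The sketch below is hypothetical (the language name, extension, and scope are made up, and the exact field set should be checked against `languages.yml` itself); it only illustrates the shape of an entry:

# Hypothetical entry, for illustration only; not present in the real languages.yml.
FooLang:
  type: programming
  extensions:
  - .foo
  tm_scope: source.foo
  ace_mode: text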

Gemfile: 1 changed line
@@ -1,4 +1,3 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gemspec :name => "github-linguist-grammars"
gem 'byebug' if RUBY_VERSION >= '2.0'

LICENSE: 2 changed lines
@@ -1,4 +1,4 @@
Copyright (c) 2011-2015 GitHub, Inc.
Copyright (c) 2011-2016 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation

README.md: 22 changed lines
@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md

The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

## Overrides
@@ -26,16 +26,16 @@ Linguist supports a number of different custom overrides strategies for language

### Using gitattributes

Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-and-vim-modelines).
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-or-vim-modelines).

```
$ cat .gitattributes
*.rb linguist-language=Java
```

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.

```
$ cat .gitattributes
@@ -59,6 +59,9 @@ Alternatively, you can use Vim or Emacs style modelines to set the language for

##### Vim
```
# Some examples of various styles:
vim: syntax=java
vim: set syntax=ruby:
vim: set filetype=prolog:
vim: set ft=cpp:
```
@@ -111,4 +114,9 @@ lib/linguist.rb

Please check out our [contributing guidelines](CONTRIBUTING.md).

##
## License

The language grammars included in this gem are covered by their repositories'
respective licenses. `grammars.yml` specifies the repository for each grammar.

All other files are covered by the MIT license, see `LICENSE`.
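
To make the overrides section of the README shown above concrete, here is a sketch of how the three `.gitattributes` attributes it names (`linguist-language`, `linguist-vendored`, `linguist-documentation`) might be combined in one file; the paths are hypothetical and not taken from this diff:

$ cat .gitattributes
# hypothetical paths, for illustration only
*.rb linguist-language=Java
third-party/* linguist-vendored
docs/* linguist-documentation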

Rakefile: 10 changed lines
@@ -40,18 +40,14 @@ task :samples do
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", Yajl.dump(languages))
  `gem build github-linguist.gemspec`
  File.delete("lib/linguist/languages.json")
end

task :build_grammars_gem do
  rm_rf "grammars"
  sh "script/convert-grammars"
  sh "gem", "build", "github-linguist-grammars.gemspec"
end

namespace :benchmark do
  benchmark_path = "benchmark/results"

@@ -62,7 +58,7 @@ namespace :benchmark do

    corpus = File.expand_path(ENV["CORPUS"] || "samples")

    require 'linguist/language'
    require 'linguist'

    results = Hash.new
    Dir.glob("#{corpus}/**/*").each do |file|
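
As a usage sketch (assuming the repository's standard Rake setup, which is not itself part of this diff), the gem-building tasks above would be invoked from the repository root through Bundler, the same way CONTRIBUTING.md invokes the test task:

$ bundle exec rake build_gem
$ bundle exec rake build_grammars_gem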

bin/git-linguist: 138 lines (new executable file)
@@ -0,0 +1,138 @@
#!/usr/bin/env ruby

require 'linguist'
require 'rugged'
require 'optparse'
require 'json'
require 'tmpdir'
require 'zlib'

class GitLinguist
  def initialize(path, commit_oid, incremental = true)
    @repo_path = path
    @commit_oid = commit_oid
    @incremental = incremental
  end

  def linguist
    if @commit_oid.nil?
      raise "git-linguist must be called with a specific commit OID to perform language computation"
    end
    repo = Linguist::Repository.new(rugged, @commit_oid)

    if @incremental && stats = load_language_stats
      old_commit_oid, old_stats = stats

      # A cache with NULL oid means that we want to froze
      # these language stats in place and stop computing
      # them (for performance reasons)
      return old_stats if old_commit_oid == NULL_OID
      repo.load_existing_stats(old_commit_oid, old_stats)
    end

    result = yield repo

    save_language_stats(@commit_oid, repo.cache)
    result
  end

  def load_language_stats
    version, oid, stats = load_cache
    if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
      [oid, stats]
    end
  end

  def save_language_stats(oid, stats)
    cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
    write_cache(cache)
  end

  def clear_language_stats
    File.unlink(cache_file)
  rescue Errno::ENOENT
  end

  def disable_language_stats
    save_language_stats(NULL_OID, {})
  end

  protected
  NULL_OID = ("0" * 40).freeze

  LANGUAGE_STATS_CACHE = 'language-stats.cache'
  LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"

  def rugged
    @rugged ||= Rugged::Repository.bare(@repo_path)
  end

  def cache_file
    File.join(@repo_path, LANGUAGE_STATS_CACHE)
  end

  def write_cache(object)
    return unless File.directory? @repo_path

    begin
      tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
      File.open(tmp_path, "wb") do |f|
        marshal = Marshal.dump(object)
        f.write(Zlib::Deflate.deflate(marshal))
      end

      File.rename(tmp_path, cache_file)
    rescue => e
      (File.unlink(tmp_path) rescue nil)
      raise e
    end
  end

  def load_cache
    marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
    Marshal.load(marshal)
  rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
    nil
  end
end


def git_linguist(args)
  incremental = true
  commit = nil

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"

    opts.on("-f", "--force", "Force a full rescan") { incremental = false }
    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
  end

  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be ran in a Git repository (#{Dir.pwd})" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
  when "stats"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.languages)
    end
  when "breakdown"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.breakdown_by_file)
    end
  when "dump-cache"
    puts JSON.dump(wrapper.load_language_stats)
  when "clear"
    wrapper.clear_language_stats
  when "disable"
    wrapper.disable_language_stats
  else
    $stderr.print(parser.help)
    exit 1
  end
end

git_linguist(ARGV)
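
A usage sketch based on the option parser above (the commit OID is a placeholder, and `git-linguist` is assumed to be on the PATH as installed by the gem):

$ cd some-repository
$ git-linguist --commit=0123456789abcdef0123456789abcdef01234567 stats       # JSON language totals
$ git-linguist --commit=0123456789abcdef0123456789abcdef01234567 breakdown   # per-file breakdown
$ git-linguist --force --commit=0123456789abcdef0123456789abcdef01234567 stats   # ignore language-stats.cache and rescan
$ git-linguist disable   # freeze cached stats at the NULL OID so they stop being recomputed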

github-linguist-grammars.gemspec (deleted)
@@ -1,14 +0,0 @@
require File.expand_path('../lib/linguist/version', __FILE__)

Gem::Specification.new do |s|
  s.name = 'github-linguist-grammars'
  s.version = Linguist::VERSION
  s.summary = "Language grammars for use with github-linguist"

  s.authors = "GitHub"
  s.homepage = "https://github.com/github/linguist"

  s.files = ['lib/linguist/grammars.rb'] + Dir['grammars/*']

  s.add_development_dependency 'plist', '~>3.1'
end

github-linguist.gemspec
@@ -10,18 +10,21 @@ Gem::Specification.new do |s|
  s.homepage = "https://github.com/github/linguist"
  s.license = "MIT"

  s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
  s.executables << 'linguist'
  s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
  s.executables = ['linguist', 'git-linguist']

  s.add_dependency 'charlock_holmes', '~> 0.7.3'
  s.add_dependency 'escape_utils', '~> 1.1.0'
  s.add_dependency 'mime-types', '>= 1.19'
  s.add_dependency 'rugged', '~> 0.23.0b1'
  s.add_dependency 'rugged', '>= 0.23.0b'

  s.add_development_dependency 'minitest', '>= 5.0'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'plist', '~>3.1'
  s.add_development_dependency 'pry'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'yajl-ruby'
  s.add_development_dependency 'color-proximity', '~> 0.2.1'
  s.add_development_dependency 'licensed'

end

grammars.yml: 154 changed lines (file mode changed from normal to executable)
@@ -29,31 +29,27 @@ vendor/grammars/AutoHotkey/:
vendor/grammars/BrightScript.tmbundle/:
- source.brightauthorproject
- source.brightscript
vendor/grammars/CLIPS-sublime:
- source.clips
vendor/grammars/ColdFusion:
- source.cfscript
- source.cfscript.cfc
- text.cfml.basic
- text.html.cfm
vendor/grammars/Creole:
- text.html.creole
vendor/grammars/Docker.tmbundle:
- source.dockerfile
vendor/grammars/Elm.tmLanguage:
- source.elm
vendor/grammars/FreeMarker.tmbundle:
- text.html.ftl
vendor/grammars/G-Code/:
- source.LS
- source.MCPOST
- source.MOD
- source.apt
- source.gcode
vendor/grammars/GDScript-sublime/:
- source.gdscript
vendor/grammars/Handlebars:
- text.html.handlebars
vendor/grammars/IDL-Syntax:
- source.webidl
vendor/grammars/InnoSetup/:
- source.inno
vendor/grammars/Isabelle.tmbundle:
- source.isabelle.root
- source.isabelle.theory
@@ -65,6 +61,9 @@ vendor/grammars/Lean.tmbundle:
- source.lean
vendor/grammars/LiveScript.tmbundle:
- source.livescript
vendor/grammars/MagicPython:
- source.python
- source.regexp.python
vendor/grammars/Modelica/:
- source.modelica
vendor/grammars/NSIS:
@@ -83,12 +82,14 @@ vendor/grammars/Racket:
- source.racket
vendor/grammars/SCSS.tmbundle:
- source.scss
vendor/grammars/SMT.tmbundle:
- source.smt
vendor/grammars/Scalate.tmbundle:
- source.scaml
- text.html.ssp
vendor/grammars/Slash.tmbundle:
- text.html.slash
vendor/grammars/Stata.tmbundle:
vendor/grammars/Stata.tmbundle/:
- source.mata
- source.stata
vendor/grammars/Stylus/:
@@ -97,14 +98,12 @@ vendor/grammars/Sublime-Coq:
- source.coq
vendor/grammars/Sublime-HTTP:
- source.httpspec
vendor/grammars/Sublime-Inform:
- source.Inform7
vendor/grammars/Sublime-Lasso:
- file.lasso
vendor/grammars/Sublime-Logos:
- source.logos
vendor/grammars/Sublime-Loom:
- source.loomscript
vendor/grammars/Sublime-Modula-2/:
- source.modula2
vendor/grammars/Sublime-Nit:
- source.nit
vendor/grammars/Sublime-QML:
@@ -117,6 +116,7 @@ vendor/grammars/Sublime-SQF-Language:
- source.sqf
vendor/grammars/Sublime-Text-2-OpenEdge-ABL:
- source.abl
- text.html.abl
vendor/grammars/Sublime-VimL:
- source.viml
vendor/grammars/SublimeBrainfuck:
@@ -124,19 +124,23 @@ vendor/grammars/SublimeBrainfuck:
vendor/grammars/SublimeClarion/:
- source.clarion
vendor/grammars/SublimePapyrus/:
- source.compiled-papyrus
- source.papyrus
- source.papyrus-assembly
- source.papyrus.skyrim
vendor/grammars/SublimePuppet/:
- source.puppet
vendor/grammars/SublimeXtend:
- source.xtend
vendor/grammars/TXL/:
- source.txl
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
- source.vbnet
vendor/grammars/Vala-TMBundle:
- source.vala
vendor/grammars/X10:
- source.x10
vendor/grammars/abap.tmbundle:
- source.abap
vendor/grammars/actionscript3-tmbundle:
@@ -164,21 +168,28 @@ vendor/grammars/asciidoc.tmbundle/:
vendor/grammars/asp.tmbundle:
- source.asp
- text.html.asp
vendor/grammars/assembly.tmbundle:
vendor/grammars/assembly/:
- objdump.x86asm
- source.x86asm
vendor/grammars/atom-fsharp/:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
- source.fsharp.fsx
vendor/grammars/atom-language-purescript/:
- source.purescript
vendor/grammars/atom-language-stan/:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
vendor/grammars/ats.sublime:
vendor/grammars/ats:
- source.ats
vendor/grammars/autoitv3-tmbundle:
- source.autoit.3
vendor/grammars/awk-sublime:
- source.awk
vendor/grammars/bison.tmbundle:
- source.bison
vendor/grammars/boo-sublime:
vendor/grammars/boo/:
- source.boo
vendor/grammars/bro-sublime:
- source.bro
@@ -213,6 +224,7 @@ vendor/grammars/d.tmbundle:
vendor/grammars/dart-sublime-bundle:
- source.dart
- source.pubspec
- text.dart-analysis-output
- text.dart-doccomments
vendor/grammars/desktop.tmbundle:
- source.desktop
@@ -245,17 +257,17 @@ vendor/grammars/fish-tmbundle:
vendor/grammars/fortran.tmbundle:
- source.fortran
- source.fortran.modern
vendor/grammars/fsharpbinding:
- source.fsharp
vendor/grammars/gap-tmbundle/:
- source.gap
vendor/grammars/gdscript/:
- source.gdscript
vendor/grammars/gettext.tmbundle:
- source.po
vendor/grammars/gnuplot-tmbundle:
- source.gnuplot
vendor/grammars/go-tmbundle:
- source.go
vendor/grammars/grace-tmbundle/:
vendor/grammars/grace:
- source.grace
vendor/grammars/gradle.tmbundle:
- source.groovy.gradle
@@ -285,8 +297,8 @@ vendor/grammars/io.tmbundle:
vendor/grammars/ioke-outdated:
- source.ioke
vendor/grammars/jade-tmbundle:
- source.jade
- source.pyjade
- text.jade
vendor/grammars/jasmin-sublime:
- source.jasmin
vendor/grammars/java.tmbundle:
@@ -302,12 +314,20 @@ vendor/grammars/json.tmbundle:
- source.json
vendor/grammars/kotlin-sublime-package:
- source.Kotlin
vendor/grammars/language-babel/:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-click/:
- source.click
vendor/grammars/language-clojure:
- source.clojure
vendor/grammars/language-coffee-script:
- source.coffee
- source.litcoffee
vendor/grammars/language-crystal:
- source.crystal
vendor/grammars/language-csharp:
- source.cake
- source.cs
- source.csx
- source.nant-build
@@ -315,19 +335,39 @@ vendor/grammars/language-gfm:
- source.gfm
vendor/grammars/language-hy:
- source.hy
vendor/grammars/language-inform7:
- source.inform7
vendor/grammars/language-javascript:
- source.js
- source.js.regexp
- source.js.regexp.replacement
vendor/grammars/language-jsoniq/:
- source.jq
- source.xq
vendor/grammars/language-less/:
- source.css.less
vendor/grammars/language-maxscript:
- source.maxscript
vendor/grammars/language-ncl:
- source.ncl
vendor/grammars/language-povray:
- source.pov-ray sdl
vendor/grammars/language-python:
- source.python
- source.regexp.python
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-renpy:
|
||||
- source.renpy
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-supercollider:
|
||||
- source.supercollider
|
||||
vendor/grammars/language-xbase:
|
||||
- source.harbour
|
||||
vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/language-yang/:
|
||||
- source.yang
|
||||
vendor/grammars/latex.tmbundle:
|
||||
- text.bibtex
|
||||
- text.log.latex
|
||||
@@ -335,8 +375,6 @@ vendor/grammars/latex.tmbundle:
|
||||
- text.tex.latex
|
||||
- text.tex.latex.beamer
|
||||
- text.tex.latex.memoir
|
||||
vendor/grammars/less.tmbundle:
|
||||
- source.css.less
|
||||
vendor/grammars/lilypond.tmbundle:
|
||||
- source.lilypond
|
||||
vendor/grammars/liquid.tmbundle:
|
||||
@@ -345,6 +383,8 @@ vendor/grammars/lisp.tmbundle:
|
||||
- source.lisp
|
||||
vendor/grammars/llvm.tmbundle:
|
||||
- source.llvm
|
||||
vendor/grammars/logos:
|
||||
- source.logos
|
||||
vendor/grammars/logtalk.tmbundle:
|
||||
- source.logtalk
|
||||
vendor/grammars/lua.tmbundle:
|
||||
@@ -364,16 +404,18 @@ vendor/grammars/mediawiki.tmbundle/:
|
||||
- text.html.mediawiki
|
||||
vendor/grammars/mercury-tmlanguage:
|
||||
- source.mercury
|
||||
vendor/grammars/monkey.tmbundle:
|
||||
vendor/grammars/monkey/:
|
||||
- source.monkey
|
||||
vendor/grammars/moonscript-tmbundle:
|
||||
- source.moonscript
|
||||
vendor/grammars/nemerle.tmbundle:
|
||||
- source.nemerle
|
||||
vendor/grammars/nesC.tmbundle:
|
||||
vendor/grammars/nesC:
|
||||
- source.nesc
|
||||
vendor/grammars/ninja.tmbundle:
|
||||
- source.ninja
|
||||
vendor/grammars/nix:
|
||||
- source.nix
|
||||
vendor/grammars/objective-c.tmbundle:
|
||||
- source.objc
|
||||
- source.objc++
|
||||
@@ -388,21 +430,25 @@ vendor/grammars/ooc.tmbundle:
|
||||
- source.ooc
|
||||
vendor/grammars/opa.tmbundle:
|
||||
- source.opa
|
||||
vendor/grammars/openscad.tmbundle/:
|
||||
- source.scad
|
||||
vendor/grammars/oracle.tmbundle:
|
||||
- source.plsql.oracle
|
||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
|
||||
- source.oz
|
||||
vendor/grammars/pascal.tmbundle:
|
||||
- source.pascal
|
||||
vendor/grammars/pawn-sublime-language/:
|
||||
- source.pawn
|
||||
vendor/grammars/perl.tmbundle/:
|
||||
- source.perl
|
||||
- source.perl.6
|
||||
vendor/grammars/perl6fe:
|
||||
- source.meta-info
|
||||
- source.perl6fe
|
||||
- source.regexp.perl6fe
|
||||
vendor/grammars/php-smarty.tmbundle:
|
||||
- text.html.smarty
|
||||
vendor/grammars/php.tmbundle:
|
||||
- text.html.php
|
||||
vendor/grammars/pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/pike-textmate:
|
||||
- source.pike
|
||||
vendor/grammars/postscript.tmbundle:
|
||||
@@ -411,12 +457,8 @@ vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
vendor/grammars/prolog.tmbundle:
|
||||
- source.prolog
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
- source.puppet
|
||||
vendor/grammars/python-django.tmbundle:
|
||||
- source.python.django
|
||||
- text.html.django
|
||||
@@ -444,10 +486,16 @@ vendor/grammars/scheme.tmbundle:
|
||||
- source.scheme
|
||||
vendor/grammars/scilab.tmbundle:
|
||||
- source.scilab
|
||||
vendor/grammars/smali-sublime/smali.tmLanguage:
|
||||
- source.smali
|
||||
vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sourcepawn/:
|
||||
- source.sp
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/st2-zonefile:
|
||||
- text.zone_file
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
- source.cm
|
||||
- source.ml
|
||||
@@ -455,10 +503,10 @@ vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-aspectj/:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
- source.ts
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
@@ -468,8 +516,6 @@ vendor/grammars/sublime-glsl:
|
||||
- source.glsl
|
||||
vendor/grammars/sublime-golo/:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-idris:
|
||||
- source.idris
|
||||
vendor/grammars/sublime-mask:
|
||||
- source.mask
|
||||
vendor/grammars/sublime-netlinx:
|
||||
@@ -477,26 +523,27 @@ vendor/grammars/sublime-netlinx:
|
||||
- source.netlinx.erb
|
||||
vendor/grammars/sublime-nginx:
|
||||
- source.nginx
|
||||
vendor/grammars/sublime-nix:
|
||||
- source.nix
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-pony:
|
||||
- source.pony
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
- source.rust
|
||||
vendor/grammars/sublime-sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sublime-spintools/:
|
||||
- source.regexp.spin
|
||||
- source.spin
|
||||
vendor/grammars/sublime-tea:
|
||||
- source.tea
|
||||
vendor/grammars/sublime-terra:
|
||||
- source.terra
|
||||
vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime-typescript/:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -507,6 +554,11 @@ vendor/grammars/sublime_cobol:
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimeassembly/:
|
||||
- source.assembly
|
||||
vendor/grammars/sublimeprolog/:
|
||||
- source.prolog
|
||||
- source.prolog.eclipse
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
- source.cuda-c++
|
||||
vendor/grammars/swift.tmbundle:
|
||||
@@ -523,8 +575,8 @@ vendor/grammars/turtle.tmbundle:
|
||||
- source.turtle
|
||||
vendor/grammars/verilog.tmbundle:
|
||||
- source.verilog
|
||||
vendor/grammars/x86-assembly-textmate-bundle:
|
||||
- source.asm.x86
|
||||
vendor/grammars/vue-syntax-highlight:
|
||||
- text.html.vue
|
||||
vendor/grammars/xc.tmbundle/:
|
||||
- source.xc
|
||||
vendor/grammars/xml.tmbundle:
|
||||
|
||||
@@ -1,5 +1,6 @@
require 'linguist/blob_helper'
require 'linguist/generated'
require 'linguist/grammars'
require 'linguist/heuristics'
require 'linguist/language'
require 'linguist/repository'
@@ -8,13 +9,91 @@ require 'linguist/shebang'
require 'linguist/version'

class << Linguist
  # Public: Detects the Language of the blob.
  #
  # blob - an object that includes the Linguist `BlobHelper` interface;
  #        see Linguist::LazyBlob and Linguist::FileBlob for examples
  #
  # Returns Language or nil.
  def detect(blob)
    # Bail early if the blob is binary or empty.
    return nil if blob.likely_binary? || blob.binary? || blob.empty?

    Linguist.instrument("linguist.detection", :blob => blob) do
      # Call each strategy until one candidate is returned.
      languages = []
      returning_strategy = nil

      STRATEGIES.each do |strategy|
        returning_strategy = strategy
        candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
          strategy.call(blob, languages)
        end
        if candidates.size == 1
          languages = candidates
          break
        elsif candidates.size > 1
          # More than one candidate was found, pass them to the next strategy.
          languages = candidates
        else
          # No candidates, try the next strategy
        end
      end

      Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)

      languages.first
    end
  end

  # Internal: The strategies used to detect the language of a file.
  #
  # A strategy is an object that has a `.call` method that takes two arguments:
  #
  #   blob      - An object that quacks like a blob.
  #   languages - An Array of candidate Language objects that were returned by the
  #               previous strategy.
  #
  # A strategy should return an Array of Language candidates.
  #
  # Strategies are called in turn until a single Language is returned.
  STRATEGIES = [
    Linguist::Strategy::Modeline,
    Linguist::Shebang,
    Linguist::Strategy::Filename,
    Linguist::Heuristics,
    Linguist::Classifier
  ]

  # Public: Set an instrumenter.
  #
  #   class CustomInstrumenter
  #     def instrument(name, payload = {})
  #       warn "Instrumenting #{name}: #{payload[:blob]}"
  #     end
  #   end
  #
  #   Linguist.instrumenter = CustomInstrumenter
  #
  # The instrumenter must conform to the `ActiveSupport::Notifications`
  # interface, which defines `#instrument` and accepts:
  #
  #   name    - the String name of the event (e.g. "linguist.detected")
  #   payload - a Hash of the exception context.
  attr_accessor :instrumenter

  # Internal: Perform instrumentation on a block
  #
  #   Linguist.instrument("linguist.dosomething", :blob => blob) do
  #     # logic to instrument here.
  #   end
  #
  def instrument(*args, &bk)
    if instrumenter
      instrumenter.instrument(*args, &bk)
    else
      yield if block_given?
    elsif block_given?
      yield
    end
  end

end
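A minimal usage sketch of the top-level API defined above, assuming the gem is installed; the instrumenter class and the file path are illustrative, not part of the changeset.

require 'linguist'

# Hypothetical instrumenter following the ActiveSupport::Notifications-style
# interface documented above: it logs the event name, then yields so the
# instrumented block's return value is passed through unchanged.
class StderrInstrumenter
  def instrument(name, payload = {})
    warn "#{name} (#{payload[:blob].path if payload[:blob]})"
    yield if block_given?
  end
end

Linguist.instrumenter = StderrInstrumenter.new

# Detect the language of a file on disk; the path is illustrative.
blob = Linguist::FileBlob.new("lib/linguist.rb")
language = Linguist.detect(blob)
puts language && language.name   # => "Ruby"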
73 lib/linguist/blob.rb Normal file
@@ -0,0 +1,73 @@
require 'linguist/blob_helper'

module Linguist
  # A Blob is a wrapper around the content of a file to make it quack
  # like a Grit::Blob. It provides the basic interface: `name`,
  # `data`, `path` and `size`.
  class Blob
    include BlobHelper

    # Public: Initialize a new Blob.
    #
    # path    - A path String (does not necessarily exist on the file system).
    # content - Content of the file.
    #
    # Returns a Blob.
    def initialize(path, content)
      @path = path
      @content = content
    end

    # Public: Filename
    #
    # Examples
    #
    #   Blob.new("/path/to/linguist/lib/linguist.rb", "").path
    #   # => "/path/to/linguist/lib/linguist.rb"
    #
    # Returns a String
    attr_reader :path

    # Public: File name
    #
    # Returns a String
    def name
      File.basename(@path)
    end

    # Public: File contents.
    #
    # Returns a String.
    def data
      @content
    end

    # Public: Get byte size
    #
    # Returns an Integer.
    def size
      @content.bytesize
    end

    # Public: Get file extension.
    #
    # Returns a String.
    def extension
      extensions.last || ""
    end

    # Public: Return an array of the file extensions
    #
    #     >> Linguist::Blob.new("app/views/things/index.html.erb").extensions
    #     => [".html.erb", ".erb"]
    #
    # Returns an Array
    def extensions
      _, *segments = name.downcase.split(".")

      segments.map.with_index do |segment, index|
        "." + segments[index..-1].join(".")
      end
    end
  end
end
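A short sketch of how the in-memory Blob above can be used; the path and contents are invented, and no file system access is involved, which is the point of the class.

require 'linguist'

# Wrap a string as a blob and inspect it with the interface defined above.
blob = Linguist::Blob.new("app/models/user.rb", "class User\nend\n")

blob.name        # => "user.rb"
blob.extension   # => ".rb"
blob.extensions  # => [".rb"]
blob.size        # => 15
Linguist.detect(blob)  # expected to resolve to Ruby via the strategies above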
@@ -6,7 +6,7 @@ require 'yaml'

module Linguist
  # DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
  # like `Language.detect` over `Blob#language`. Functions are much easier to
  # like `Linguist.detect` over `Blob#language`. Functions are much easier to
  # cache and compose.
  #
  # Avoid adding additional bloat to this module.
@@ -325,7 +325,7 @@ module Linguist
    #
    # Returns a Language or nil if none is detected
    def language
      @language ||= Language.detect(self)
      @language ||= Linguist.detect(self)
    end

    # Internal: Get the TextMate compatible scope for the blob
@@ -13,11 +13,18 @@
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/
- ^[Ee]xamples/

## Documentation files ##

- (^|/)CHANGE(S|LOG)?(\.|$)
- (^|/)CONTRIBUTING(\.|$)
- (^|/)COPYING(\.|$)
- (^|/)INSTALL(\.|$)
- (^|/)LICEN[CS]E(\.|$)
- (^|/)[Ll]icen[cs]e(\.|$)
- (^|/)README(\.|$)
- (^|/)[Rr]eadme(\.|$)

# Samples folders
- ^[Ss]amples/
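Illustrative only: the entries above are plain regular expressions matched against repository-relative paths. The helper below is hypothetical (not a Linguist API) and uses a subset of the patterns listed.

# Hypothetical helper; the patterns are copied from the list above.
DOCUMENTATION_PATTERNS = [
  %r{(^|/)[Dd]ocumentation/},
  %r{(^|/)javadoc/},
  %r{^man/},
  %r{(^|/)README(\.|$)},
  %r{^[Ss]amples/}
].freeze

def documentation_path?(path)
  DOCUMENTATION_PATTERNS.any? { |pattern| pattern.match(path) }
end

documentation_path?("Documentation/intro.md")  # => true
documentation_path?("lib/linguist.rb")         # => false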
@@ -1,10 +1,11 @@
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/blob'
|
||||
|
||||
module Linguist
|
||||
# A FileBlob is a wrapper around a File object to make it quack
|
||||
# like a Grit::Blob. It provides the basic interface: `name`,
|
||||
# `data`, `path` and `size`.
|
||||
class FileBlob
|
||||
class FileBlob < Blob
|
||||
include BlobHelper
|
||||
|
||||
# Public: Initialize a new FileBlob from a path
|
||||
@@ -18,20 +19,6 @@ module Linguist
|
||||
@path = base_path ? path.sub("#{base_path}/", '') : path
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb").path
|
||||
# # => "/path/to/linguist/lib/linguist.rb"
|
||||
#
|
||||
# FileBlob.new("/path/to/linguist/lib/linguist.rb",
|
||||
# "/path/to/linguist").path
|
||||
# # => "lib/linguist.rb"
|
||||
#
|
||||
# Returns a String
|
||||
attr_reader :path
|
||||
|
||||
# Public: Read file permissions
|
||||
#
|
||||
# Returns a String like '100644'
|
||||
@@ -39,13 +26,6 @@ module Linguist
|
||||
File.stat(@fullpath).mode.to_s(8)
|
||||
end
|
||||
|
||||
# Public: File name
|
||||
#
|
||||
# Returns a String
|
||||
def name
|
||||
File.basename(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Read file contents.
|
||||
#
|
||||
# Returns a String.
|
||||
@@ -59,26 +39,5 @@ module Linguist
|
||||
def size
|
||||
File.size(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Get file extension.
|
||||
#
|
||||
# Returns a String.
|
||||
def extension
|
||||
extensions.last || ""
|
||||
end
|
||||
|
||||
# Public: Return an array of the file extensions
|
||||
#
|
||||
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
|
||||
# => [".html.erb", ".erb"]
|
||||
#
|
||||
# Returns an Array
|
||||
def extensions
|
||||
basename, *segments = name.downcase.split(".")
|
||||
|
||||
segments.map.with_index do |segment, index|
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -65,11 +65,16 @@ module Linguist
|
||||
generated_net_docfile? ||
|
||||
generated_postscript? ||
|
||||
compiled_cython_file? ||
|
||||
generated_protocol_buffer_go? ||
|
||||
generated_go? ||
|
||||
generated_protocol_buffer? ||
|
||||
generated_apache_thrift? ||
|
||||
generated_jni_header? ||
|
||||
vcr_cassette? ||
|
||||
generated_module? ||
|
||||
generated_unity3d_meta? ||
|
||||
vcr_cassette?
|
||||
generated_racc? ||
|
||||
generated_jflex? ||
|
||||
generated_grammarkit?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
@@ -180,11 +185,11 @@ module Linguist
|
||||
def generated_net_designer_file?
|
||||
name.downcase =~ /\.designer\.cs$/
|
||||
end
|
||||
|
||||
|
||||
# Internal: Is this a codegen file for Specflow feature file?
|
||||
#
|
||||
# Visual Studio's SpecFlow extension generates *.feature.cs files
|
||||
# from *.feature files, they are not meant to be consumed by humans.
|
||||
# from *.feature files, they are not meant to be consumed by humans.
|
||||
# Let's hide them.
|
||||
#
|
||||
# Returns true or false
|
||||
@@ -232,24 +237,38 @@ module Linguist
|
||||
creator.include?("ImageMagick")
|
||||
end
|
||||
|
||||
def generated_protocol_buffer_go?
|
||||
def generated_go?
|
||||
return false unless extname == '.go'
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Code generated by protoc-gen-go")
|
||||
return lines[0].include?("Code generated by")
|
||||
end
|
||||
|
||||
PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob a C++, Java or Python source file generated by the
|
||||
# Protocol Buffer compiler?
|
||||
#
|
||||
# Returns true of false.
|
||||
def generated_protocol_buffer?
|
||||
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
|
||||
return false unless PROTOBUF_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
|
||||
end
|
||||
|
||||
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
|
||||
|
||||
# Internal: Is the blob generated by Apache Thrift compiler?
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_apache_thrift?
|
||||
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
|
||||
return false unless lines.count > 1
|
||||
|
||||
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
|
||||
end
|
||||
|
||||
# Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
|
||||
#
|
||||
# Returns true of false.
|
||||
@@ -313,6 +332,24 @@ module Linguist
|
||||
return lines[0].include?("Generated by Cython")
|
||||
end
|
||||
|
||||
# Internal: Is it a KiCAD or GFortran module file?
|
||||
#
|
||||
# KiCAD module files contain:
|
||||
# PCBNEW-LibModule-V1 yyyy-mm-dd h:mm:ss XM
|
||||
# on the first line.
|
||||
#
|
||||
# GFortran module files contain:
|
||||
# GFORTRAN module version 'x' created from
|
||||
# on the first line.
|
||||
#
|
||||
# Return true of false
|
||||
def generated_module?
|
||||
return false unless extname == '.mod'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("PCBNEW-LibModule-V") ||
|
||||
lines[0].include?("GFORTRAN module version '")
|
||||
end
|
||||
|
||||
# Internal: Is this a metadata file from Unity3D?
|
||||
#
|
||||
# Unity3D Meta files start with:
|
||||
@@ -325,5 +362,45 @@ module Linguist
|
||||
return false unless lines.count > 1
|
||||
return lines[0].include?("fileFormatVersion: ")
|
||||
end
|
||||
|
||||
# Internal: Is this a Racc-generated file?
|
||||
#
|
||||
# A Racc-generated file contains:
|
||||
# # This file is automatically generated by Racc x.y.z
|
||||
# on the third line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_racc?
|
||||
return false unless extname == '.rb'
|
||||
return false unless lines.count > 2
|
||||
return lines[2].start_with?("# This file is automatically generated by Racc")
|
||||
end
|
||||
|
||||
# Internal: Is this a JFlex-generated file?
|
||||
#
|
||||
# A JFlex-generated file contains:
|
||||
# /* The following code was generated by JFlex x.y.z on d/at/e ti:me */
|
||||
# on the first line.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_jflex?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("/* The following code was generated by JFlex ")
|
||||
end
|
||||
|
||||
# Internal: Is this a GrammarKit-generated file?
|
||||
#
|
||||
# A GrammarKit-generated file typically contain:
|
||||
# // This is a generated file. Not intended for manual editing.
|
||||
# on the first line. This is not always the case, as it's possible to
|
||||
# customize the class header.
|
||||
#
|
||||
# Return true or false
|
||||
def generated_grammarkit?
|
||||
return false unless extname == '.java'
|
||||
return false unless lines.count > 1
|
||||
return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
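A rough sketch of the new generated-file checks above in action, assuming `generated?` is exposed through `BlobHelper` (as the `LazyBlob#generated?` override elsewhere in this diff implies). The file names and contents are invented.

require 'linguist'

# The detectors above key off the first (or, for Racc, third) line of a file.
racc_output = "#\n# DO NOT MODIFY!!!!\n# This file is automatically generated by Racc 1.4.14\n#\n"
Linguist::Blob.new("parser.rb", racc_output).generated?        # => true (generated_racc?)

thrift_output = "# Autogenerated by Thrift Compiler (0.9.3)\nmodule Calculator\nend\n"
Linguist::Blob.new("calculator.rb", thrift_output).generated?  # => true (generated_apache_thrift?)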
@@ -1,6 +1,3 @@
|
||||
# Note: This file is included in the github-linguist-grammars gem, not the
|
||||
# github-linguist gem.
|
||||
|
||||
module Linguist
|
||||
module Grammars
|
||||
# Get the path to the directory containing the language grammar JSON files.
|
||||
|
||||
@@ -13,11 +13,14 @@ module Linguist
|
||||
# ])
|
||||
#
|
||||
# Returns an Array of languages, or empty if none matched or were inconclusive.
|
||||
def self.call(blob, languages)
|
||||
def self.call(blob, candidates)
|
||||
data = blob.data
|
||||
|
||||
@heuristics.each do |heuristic|
|
||||
return Array(heuristic.call(data)) if heuristic.matches?(languages)
|
||||
if heuristic.matches?(blob.name)
|
||||
languages = Array(heuristic.call(data))
|
||||
return languages if languages.any? || languages.all? { |l| candidates.include?(l) }
|
||||
end
|
||||
end
|
||||
|
||||
[] # No heuristics matched
|
||||
@@ -30,7 +33,7 @@ module Linguist
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# disambiguate "Perl", "Prolog" do |data|
|
||||
# disambiguate ".pm" do |data|
|
||||
# if data.include?("use strict")
|
||||
# Language["Perl"]
|
||||
# elsif /^[^#]+:-/.match(data)
|
||||
@@ -38,22 +41,23 @@ module Linguist
|
||||
# end
|
||||
# end
|
||||
#
|
||||
def self.disambiguate(*languages, &heuristic)
|
||||
@heuristics << new(languages, &heuristic)
|
||||
def self.disambiguate(*extensions, &heuristic)
|
||||
@heuristics << new(extensions, &heuristic)
|
||||
end
|
||||
|
||||
# Internal: Array of defined heuristics
|
||||
@heuristics = []
|
||||
|
||||
# Internal
|
||||
def initialize(languages, &heuristic)
|
||||
@languages = languages
|
||||
def initialize(extensions, &heuristic)
|
||||
@extensions = extensions
|
||||
@heuristic = heuristic
|
||||
end
|
||||
|
||||
# Internal: Check if this heuristic matches the candidate languages.
|
||||
def matches?(candidates)
|
||||
candidates.any? && candidates.all? { |l| @languages.include?(l.name) }
|
||||
def matches?(filename)
|
||||
filename = filename.downcase
|
||||
@extensions.any? { |ext| filename.end_with?(ext) }
|
||||
end
|
||||
|
||||
# Internal: Perform the heuristic
|
||||
@@ -62,99 +66,9 @@ module Linguist
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
|
||||
|
||||
disambiguate "BitBake", "BlitzBasic" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
elsif /^\s*(# |include|require)\b/.match(data)
|
||||
Language["BitBake"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "C#", "Smalltalk" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Objective-C", "C++", "C" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Perl", "Perl6", "Prolog" do |data|
|
||||
if data.include?("use v6")
|
||||
Language["Perl6"]
|
||||
elsif data.match(/use strict|use\s+v?5\./)
|
||||
Language["Perl"]
|
||||
elsif /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "ECL", "Prolog" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "IDL", "Prolog", "INI", "QMake" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "GAP", "Scilab" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "OpenCL", "Cool" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^class/x.match(data)
|
||||
Language["Cool"]
|
||||
elsif /\/\* |\/\/ |^\}/.match(data)
|
||||
Language["OpenCL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Hack", "PHP" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Scala", "SuperCollider" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "AsciiDoc", "AGS Script", "Public Key" do |data|
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
Language["Public Key"]
|
||||
elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
@@ -164,15 +78,83 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "FORTRAN", "Forth", "Formatted" do |data|
|
||||
disambiguate ".bb" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
elsif /^\s*(# |include|require)\b/.match(data)
|
||||
Language["BitBake"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ch" do |data|
|
||||
if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
|
||||
Language["xBase"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cl" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^class/x.match(data)
|
||||
Language["Cool"]
|
||||
elsif /\/\* |\/\/ |^\}/.match(data)
|
||||
Language["OpenCL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".cs" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".d" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ecl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["ECLiPSe"]
|
||||
elsif data.include?(":=")
|
||||
Language["ECL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".es" do |data|
|
||||
if /^\s*(?:%%|main\s*\(.*?\)\s*->)/.match(data)
|
||||
Language["Erlang"]
|
||||
elsif /(?:\/\/|("|')use strict\1|export\s+default\s|\/\*.*?\*\/)/m.match(data)
|
||||
Language["JavaScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".for", ".f" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "F#", "Forth", "GLSL", "Filterscript" do |data|
|
||||
disambiguate ".fr" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".fs" do |data|
|
||||
if /^(: |new-device)/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(#light|import|let|module|namespace|open|type)/.match(data)
|
||||
@@ -184,7 +166,48 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Limbo", "M", "MUF", "Mathematica", "Matlab", "Mercury", "Objective-C" do |data|
|
||||
disambiguate ".gs" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".l" do |data|
|
||||
if /\(def(un|macro)\s/.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^(%[%{}]xs|<.*>)/.match(data)
|
||||
Language["Lex"]
|
||||
elsif /^\.[a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^\((de|class|rel|code|data|must)\s/.match(data)
|
||||
Language["PicoLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ls" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".lsp", ".lisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".m" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif data.include?(":- module")
|
||||
@@ -202,46 +225,144 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Gosu", "JavaScript" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
|
||||
disambiguate "LoomScript", "LiveScript" do |data|
|
||||
if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
|
||||
Language["LoomScript"]
|
||||
else
|
||||
Language["LiveScript"]
|
||||
disambiguate ".ml" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "NewLisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /i.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "TypeScript", "XML" do |data|
|
||||
if data.include?("<TS ")
|
||||
disambiguate ".mod" do |data|
|
||||
if data.include?('<!ENTITY ')
|
||||
Language["XML"]
|
||||
elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
|
||||
Language["Modula-2"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
[Language["Linux Kernel Module"], Language["AMPL"]]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Frege", "Forth", "Text" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
disambiguate ".ms" do |data|
|
||||
if /^[.'][a-z][a-z](\s|$)/i.match(data)
|
||||
Language["Groff"]
|
||||
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
|
||||
Language["GAS"]
|
||||
else
|
||||
Language["MAXScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".n" do |data|
|
||||
if /^[.']/.match(data)
|
||||
Language["Groff"]
|
||||
elsif /^(module|namespace|using)\s/.match(data)
|
||||
Language["Nemerle"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".ncl" do |data|
|
||||
if data.include?("THE_TITLE")
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "PLSQL", "SQLPL", "PLpgSQL", "SQL" do |data|
|
||||
if /^\\i\b|AS \$\$|LANGUAGE '+plpgsql'+/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
|
||||
disambiguate ".nl" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".php" do |data|
|
||||
if data.include?("<?hh")
|
||||
Language["Hack"]
|
||||
elsif /<?[^h]/.match(data)
|
||||
Language["PHP"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pl" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm", ".t" do |data|
|
||||
if /use strict|use\s+v?5\./.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
Language["Perl6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pod" do |data|
|
||||
if /^=\w+$/.match(data)
|
||||
Language["Pod"]
|
||||
else
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
elsif data.include?("HEADERS") && data.include?("SOURCES")
|
||||
Language["QMake"]
|
||||
elsif /^\s*function[ \w,]+$/.match(data)
|
||||
Language["IDL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".props" do |data|
|
||||
if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
|
||||
Language["XML"]
|
||||
elsif /\w+\s*=\s*/i.match(data)
|
||||
Language["INI"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".r" do |data|
|
||||
if /\bRebol\b/i.match(data)
|
||||
Language["Rebol"]
|
||||
elsif data.include?("<-")
|
||||
Language["R"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rpy" do |data|
|
||||
if /(^(import|from|class|def)\s)/m.match(data)
|
||||
Language["Python"]
|
||||
else
|
||||
Language["Ren'Py"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".rs" do |data|
|
||||
if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sc" do |data|
|
||||
if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
|
||||
Language["SuperCollider"]
|
||||
elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
|
||||
Language["Scala"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".sql" do |data|
|
||||
if /^\\i\b|AS \$\$|LANGUAGE '?plpgsql'?/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
|
||||
#Postgres
|
||||
Language["PLpgSQL"]
|
||||
elsif /(alter module)|(language sql)|(begin( NOT)+ atomic)/i.match(data) || /signal SQLSTATE '[0-9]+'/i.match(data)
|
||||
@@ -256,37 +377,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "D", "DTrace", "Makefile" do |data|
|
||||
if /^module /.match(data)
|
||||
Language["D"]
|
||||
elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
|
||||
Language["DTrace"]
|
||||
elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
|
||||
Language["Makefile"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "OCaml", "Standard ML" do |data|
|
||||
if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
|
||||
Language["OCaml"]
|
||||
elsif /=> |case\s+(\S+\s)+of/.match(data)
|
||||
Language["Standard ML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "NL", "NewLisp" do |data|
|
||||
if /^(b|g)[0-9]+ /.match(data)
|
||||
Language["NL"]
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
else
|
||||
Language["NewLisp"]
|
||||
Language["TypeScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Rust", "RenderScript" do |data|
|
||||
if data.include?("^(use |fn |mod |pub |macro_rules|impl|#!?\[)")
|
||||
Language["Rust"]
|
||||
elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
|
||||
Language["RenderScript"]
|
||||
disambiguate ".tst" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
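To make the extension-keyed heuristics above concrete, a small sketch (file names and contents invented) of how `Heuristics.call` disambiguates two `.pl` files:

require 'linguist'

prolog_blob = Linguist::Blob.new("solver.pl", "main :- write(hello), nl.\n")
perl_blob   = Linguist::Blob.new("script.pl", "use strict;\nprint \"hello\\n\";\n")

Linguist::Heuristics.call(prolog_blob, [])  # => [Language["Prolog"]]
Linguist::Heuristics.call(perl_blob, [])    # => [Language["Perl"]]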
@@ -87,14 +87,6 @@ module Linguist
|
||||
language
|
||||
end
|
||||
|
||||
STRATEGIES = [
|
||||
Linguist::Strategy::Modeline,
|
||||
Linguist::Shebang,
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Heuristics,
|
||||
Linguist::Classifier
|
||||
]
|
||||
|
||||
# Public: Detects the Language of the blob.
|
||||
#
|
||||
# blob - an object that includes the Linguist `BlobHelper` interface;
|
||||
@@ -102,34 +94,8 @@ module Linguist
|
||||
#
|
||||
# Returns Language or nil.
|
||||
def self.detect(blob)
|
||||
# Bail early if the blob is binary or empty.
|
||||
return nil if blob.likely_binary? || blob.binary? || blob.empty?
|
||||
|
||||
Linguist.instrument("linguist.detection", :blob => blob) do
|
||||
# Call each strategy until one candidate is returned.
|
||||
languages = []
|
||||
returning_strategy = nil
|
||||
|
||||
STRATEGIES.each do |strategy|
|
||||
returning_strategy = strategy
|
||||
candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
|
||||
strategy.call(blob, languages)
|
||||
end
|
||||
if candidates.size == 1
|
||||
languages = candidates
|
||||
break
|
||||
elsif candidates.size > 1
|
||||
# More than one candidate was found, pass them to the next strategy.
|
||||
languages = candidates
|
||||
else
|
||||
# No candidates, try the next strategy
|
||||
end
|
||||
end
|
||||
|
||||
Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)
|
||||
|
||||
languages.first
|
||||
end
|
||||
warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
|
||||
Linguist.detect(blob)
|
||||
end
|
||||
|
||||
# Public: Get all Languages
|
||||
@@ -150,7 +116,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_name(name)
|
||||
name && @name_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Language by one of its aliases.
|
||||
@@ -164,7 +131,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
name && @alias_index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Languages by filename.
|
||||
@@ -240,7 +208,8 @@ module Linguist
|
||||
#
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.[](name)
|
||||
name && @index[name.downcase]
|
||||
return nil if name.to_s.empty?
|
||||
name && (@index[name.downcase] || @index[name.split(',').first.downcase])
|
||||
end
|
||||
|
||||
# Public: A List of popular languages
|
||||
|
||||
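A quick sketch of the lookup helpers touched above (return values abbreviated):

require 'linguist'

Linguist::Language["Ruby"]                    # => the Ruby Language instance
Linguist::Language.find_by_alias("rb")        # => the Ruby Language instance
Linguist::Language.find_by_name("")           # => nil (new early return for empty names)
Linguist::Language.find_by_name("Perl, Raku") # falls back to the first comma-separated entry ("Perl")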
554 lib/linguist/languages.yml Normal file → Executable file
File diff suppressed because it is too large
@@ -4,7 +4,11 @@ require 'rugged'
|
||||
|
||||
module Linguist
|
||||
class LazyBlob
|
||||
GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
|
||||
GIT_ATTR = ['linguist-documentation',
|
||||
'linguist-language',
|
||||
'linguist-vendored',
|
||||
'linguist-generated']
|
||||
|
||||
GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
|
||||
GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
|
||||
|
||||
@@ -24,6 +28,7 @@ module Linguist
|
||||
@oid = oid
|
||||
@path = path
|
||||
@mode = mode
|
||||
@data = nil
|
||||
end
|
||||
|
||||
def git_attributes
|
||||
@@ -31,14 +36,6 @@ module Linguist
|
||||
name, GIT_ATTR, GIT_ATTR_FLAGS)
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
return super
|
||||
end
|
||||
end
|
||||
|
||||
def documentation?
|
||||
if attr = git_attributes['linguist-documentation']
|
||||
boolean_attribute(attr)
|
||||
@@ -47,6 +44,22 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def generated?
|
||||
if attr = git_attributes['linguist-generated']
|
||||
boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def vendored?
|
||||
if attr = git_attributes['linguist-vendored']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def language
|
||||
return @language if defined?(@language)
|
||||
|
||||
@@ -67,11 +80,15 @@ module Linguist
|
||||
@size
|
||||
end
|
||||
|
||||
def cleanup!
|
||||
@data.clear if @data
|
||||
end
|
||||
|
||||
protected
|
||||
|
||||
# Returns true if the attribute is present and not the string "false".
|
||||
def boolean_attribute(attr)
|
||||
attr != "false"
|
||||
def boolean_attribute(attribute)
|
||||
attribute != "false"
|
||||
end
|
||||
|
||||
def load_blob!
|
||||
|
||||
@@ -30,6 +30,9 @@ module Linguist
|
||||
@repository = repo
|
||||
@commit_oid = commit_oid
|
||||
|
||||
@old_commit_oid = nil
|
||||
@old_stats = nil
|
||||
|
||||
raise TypeError, 'commit_oid must be a commit SHA1' unless commit_oid.is_a?(String)
|
||||
end
|
||||
|
||||
@@ -126,12 +129,13 @@ module Linguist
|
||||
end
|
||||
|
||||
protected
|
||||
MAX_TREE_SIZE = 100_000
|
||||
|
||||
def compute_stats(old_commit_oid, cache = nil)
|
||||
return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE
|
||||
|
||||
old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree
|
||||
|
||||
read_index
|
||||
|
||||
diff = Rugged::Tree.diff(repository, old_tree, current_tree)
|
||||
|
||||
# Clear file map and fetch full diff if any .gitattributes files are changed
|
||||
@@ -150,14 +154,18 @@ module Linguist
|
||||
next if delta.binary
|
||||
|
||||
if [:added, :modified].include? delta.status
|
||||
# Skip submodules
|
||||
# Skip submodules and symlinks
|
||||
mode = delta.new_file[:mode]
|
||||
next if (mode & 040000) != 0
|
||||
mode_format = (mode & 0170000)
|
||||
next if mode_format == 0120000 || mode_format == 040000 || mode_format == 0160000
|
||||
|
||||
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
|
||||
|
||||
next unless blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
if blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
end
|
||||
|
||||
blob.cleanup!
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -42,10 +42,10 @@ module Linguist
      return unless script

      # "python2.6" -> "python2"
      script.sub! /(\.\d+)$/, ''
      script.sub!(/(\.\d+)$/, '')

      # #! perl -> perl
      script.sub! /^#!\s*/, ''
      script.sub!(/^#!\s*/, '')

      # Check for multiline shebang hacks that call `exec`
      if script == 'sh' &&
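The two substitutions above normalise the interpreter token; as a standalone sketch:

script = "python2.6"
script.sub!(/(\.\d+)$/, '')  # strip a trailing minor version
script                       # => "python2"

script = "#! perl"
script.sub!(/^#!\s*/, '')    # strip an inline "#!" prefix
script                       # => "perl"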
@@ -1,8 +1,19 @@
module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
      VimModeline = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
      EMACS_MODELINE = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i

      # First form vim modeline
      # [text]{white}{vi:|vim:|ex:}[white]{options}
      # ex: 'vim: syntax=ruby'
      VIM_MODELINE_1 = /(?:vim|vi|ex):\s*(?:ft|filetype|syntax)=(\w+)\s?/i

      # Second form vim modeline (compatible with some versions of Vi)
      # [text]{white}{vi:|vim:|Vim:|ex:}[white]se[t] {options}:[text]
      # ex: 'vim set syntax=ruby:'
      VIM_MODELINE_2 = /(?:vim|vi|Vim|ex):\s*se(?:t)?.*\s(?:ft|filetype|syntax)=(\w+)\s?.*:/i

      MODELINES = [EMACS_MODELINE, VIM_MODELINE_1, VIM_MODELINE_2]

      # Public: Detects language based on Vim and Emacs modelines
      #
@@ -22,7 +33,7 @@ module Linguist
      #
      # Returns a String or nil
      def self.modeline(data)
        match = data.match(EmacsModeline) || data.match(VimModeline)
        match = MODELINES.map { |regex| data.match(regex) }.reject(&:nil?).first
        match[1] if match
      end
    end
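A standalone sketch of the expanded modeline matching above:

require 'linguist'

Linguist::Strategy::Modeline.modeline("# vim: set filetype=ruby:")  # => "ruby"
Linguist::Strategy::Modeline.modeline("/* -*- mode: c; -*- */")     # => "c"
Linguist::Strategy::Modeline.modeline("no modeline here")           # => nil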
@@ -86,17 +86,17 @@ module Linguist
        if s.peek(1) == "\""
          s.getch
        else
          s.skip_until(/[^\\]"/)
          s.skip_until(/(?<!\\)"/)
        end
      elsif s.scan(/'/)
        if s.peek(1) == "'"
          s.getch
        else
          s.skip_until(/[^\\]'/)
          s.skip_until(/(?<!\\)'/)
        end

      # Skip number literals
      elsif s.scan(/(0x)?\d(\d|\.)*/)
      elsif s.scan(/(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/)

      # SGML style brackets
      elsif token = s.scan(/<[^\s<>][^<>]*>/)
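A standalone sketch (using Ruby's StringScanner directly) of what the revised number-literal pattern above buys: hex digits beyond 0-9 and integer or float suffixes are now consumed as a single token.

require 'strscan'

old_number = /(0x)?\d(\d|\.)*/
new_number = /(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/

StringScanner.new("0xDEADBEEF").scan(old_number)  # => "0"          (hex digits lost)
StringScanner.new("0xDEADBEEF").scan(new_number)  # => "0xDEADBEEF"
StringScanner.new("100UL;").scan(new_number)      # => "100UL"      (suffix consumed)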
@@ -20,10 +20,17 @@
|
||||
- ^deps/
|
||||
- ^tools/
|
||||
- (^|/)configure$
|
||||
- (^|/)configure.ac$
|
||||
- (^|/)config.guess$
|
||||
- (^|/)config.sub$
|
||||
|
||||
# stuff autogenerated by autoconf - still C deps
|
||||
- (^|/)aclocal.m4
|
||||
- (^|/)libtool.m4
|
||||
- (^|/)ltoptions.m4
|
||||
- (^|/)ltsugar.m4
|
||||
- (^|/)ltversion.m4
|
||||
- (^|/)lt~obsolete.m4
|
||||
|
||||
# Linters
|
||||
- cpplint.py
|
||||
|
||||
@@ -70,6 +77,7 @@
|
||||
- 3rd[-_]?party/
|
||||
- vendors?/
|
||||
- extern(al)?/
|
||||
- (^|/)[Vv]+endor/
|
||||
|
||||
# Debian packaging
|
||||
- ^debian/
|
||||
@@ -77,6 +85,9 @@
|
||||
# Haxelib projects often contain a neko bytecode file named run.n
|
||||
- run.n$
|
||||
|
||||
# Bootstrap Datepicker
|
||||
- bootstrap-datepicker/
|
||||
|
||||
## Commonly Bundled JavaScript frameworks ##
|
||||
|
||||
# jQuery
|
||||
@@ -87,6 +98,34 @@
|
||||
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?\.(js|css)$
|
||||
- (^|/)jquery\.(ui|effects)\.([^.]*)\.(js|css)$
|
||||
|
||||
# jQuery Gantt
|
||||
- jquery.fn.gantt.js
|
||||
|
||||
# jQuery fancyBox
|
||||
- jquery.fancybox.(js|css)
|
||||
|
||||
# Fuel UX
|
||||
- fuelux.js
|
||||
|
||||
# jQuery File Upload
|
||||
- (^|/)jquery\.fileupload(-\w+)?\.js$
|
||||
|
||||
# Slick
|
||||
- (^|/)slick\.\w+.js$
|
||||
|
||||
# Leaflet plugins
|
||||
- (^|/)Leaflet\.Coordinates-\d+\.\d+\.\d+\.src\.js$
|
||||
- leaflet.draw-src.js
|
||||
- leaflet.draw.css
|
||||
- Control.FullScreen.css
|
||||
- Control.FullScreen.js
|
||||
- leaflet.spin.js
|
||||
- wicket-leaflet.js
|
||||
|
||||
# Sublime Text workspace files
|
||||
- .sublime-project
|
||||
- .sublime-workspace
|
||||
|
||||
# Prototype
|
||||
- (^|/)prototype(.*)\.js$
|
||||
- (^|/)effects\.js$
|
||||
@@ -121,7 +160,7 @@
|
||||
- (^|/)Chart\.js$
|
||||
|
||||
# Codemirror
|
||||
- (^|/)[Cc]ode[Mm]irror/(lib|mode|theme|addon|keymap)
|
||||
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)
|
||||
|
||||
# SyntaxHighlighter - http://alexgorbatchev.com/
|
||||
- (^|/)shBrush([^.]*)\.js$
|
||||
@@ -163,12 +202,31 @@
|
||||
|
||||
## Obj-C ##
|
||||
|
||||
# Xcode
|
||||
|
||||
- \.xctemplate/
|
||||
- \.imageset/
|
||||
|
||||
# Carthage
|
||||
- ^Carthage/
|
||||
|
||||
# Cocoapods
|
||||
- ^Pods/
|
||||
|
||||
# Sparkle
|
||||
- (^|/)Sparkle/
|
||||
|
||||
# Crashlytics
|
||||
- Crashlytics.framework/
|
||||
|
||||
# Fabric
|
||||
- Fabric.framework/
|
||||
|
||||
# git config files
|
||||
- gitattributes$
|
||||
- gitignore$
|
||||
- gitmodules$
|
||||
|
||||
## Groovy ##
|
||||
|
||||
# Gradle
|
||||
@@ -213,21 +271,9 @@
|
||||
# Html5shiv
|
||||
- (^|/)html5shiv\.js$
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
|
||||
# LICENSE, README, git config files
|
||||
- ^COPYING$
|
||||
- LICENSE$
|
||||
- License$
|
||||
- gitattributes$
|
||||
- gitignore$
|
||||
- gitmodules$
|
||||
- ^README$
|
||||
- ^readme$
|
||||
|
||||
# Test fixtures
|
||||
- ^[Tt]ests?/fixtures/
|
||||
- ^[Ss]pecs?/fixtures/
|
||||
|
||||
# PhoneGap/Cordova
|
||||
- (^|/)cordova([^.]*)\.js$
|
||||
@@ -263,3 +309,6 @@
|
||||
|
||||
# Android Google APIs
|
||||
- (^|/)\.google_apis/
|
||||
|
||||
# Jenkins Pipeline
|
||||
- ^Jenkinsfile$
|
||||
|
||||
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "4.5.5"
  VERSION = "4.8.2"
end
@@ -2,5 +2,6 @@
  "repository": "https://github.com/github/linguist",
  "dependencies": {
    "season": "~>5.0"
  }
  },
  "license": "MIT"
}
58 samples/AMPL/CT2.mod Normal file
@@ -0,0 +1,58 @@
|
||||
param num_beams; # number of beams
|
||||
|
||||
param num_rows >= 1, integer; # number of rows
|
||||
param num_cols >= 1, integer; # number of columns
|
||||
|
||||
set BEAMS := 1 .. num_beams; # set of beams
|
||||
|
||||
set ROWS := 1 .. num_rows; # set of rows
|
||||
set COLUMNS := 1 .. num_cols; # set of columns
|
||||
|
||||
# values for entries of each beam
|
||||
param beam_values {BEAMS, ROWS, COLUMNS} >= 0;
|
||||
|
||||
# values of tumor
|
||||
param tumor_values {ROWS, COLUMNS} >= 0;
|
||||
|
||||
# values of critical area
|
||||
param critical_values {ROWS, COLUMNS} >= 0;
|
||||
|
||||
# critical maximum dosage requirement
|
||||
param critical_max;
|
||||
|
||||
# tumor minimum dosage requirement
|
||||
param tumor_min;
|
||||
|
||||
# dosage scalar of each beam
|
||||
var X {i in BEAMS} >= 0;
|
||||
|
||||
|
||||
# define the tumor area which includes the locations where tumor exists
|
||||
set tumor_area := {k in ROWS, h in COLUMNS: tumor_values[k,h] > 0};
|
||||
|
||||
# define critical area
|
||||
set critical_area := {k in ROWS, h in COLUMNS: critical_values[k,h] > 0};
|
||||
|
||||
var S {(k,h) in tumor_area} >= 0;
|
||||
var T {(k,h) in critical_area} >= 0;
|
||||
|
||||
# maximize total dosage in tumor area
|
||||
maximize total_tumor_dosage: sum {i in BEAMS} sum {(k,h) in tumor_area} X[i] * beam_values[i,k,h];
|
||||
|
||||
# minimize total dosage in critical area
|
||||
minimize total_critical_dosage: sum {i in BEAMS} sum {(k,h) in critical_area} X[i] * beam_values[i,k,h];
|
||||
|
||||
# minimize total tumor slack
|
||||
minimize total_tumor_slack: sum {(k,h) in tumor_area} S[k,h];
|
||||
|
||||
# minimize total critical area slack
|
||||
minimize total_critical_slack: sum {(k,h) in critical_area} T[k,h];
|
||||
|
||||
# total dosage at each tumor location [k,h] should be >= min tumor dosage with slack variable
|
||||
subject to tumor_limit {(k,h) in tumor_area} : sum {i in BEAMS} X[i] * beam_values[i,k,h] == tumor_min - S[k,h];
|
||||
|
||||
# total dosage at each critical location [k,h] should be = max critical dosage with slack variable
|
||||
subject to critical_limit {(k,h) in critical_area} : sum {i in BEAMS} X[i] * beam_values[i,k,h] == critical_max + T[k,h];
|
||||
|
||||
|
||||
|
||||
17 samples/ApacheConf/apache.vhost Normal file
@@ -0,0 +1,17 @@
#######################
# HOSTNAME
######################

<VirtualHost 127.0.0.1:PORT>
  ServerAdmin patrick@heysparkbox.com
  DocumentRoot "/var/www/HOSTNAME"
  ServerName HOSTNAME

  <Directory "/var/www/HOSTNAME">
    Options Indexes MultiViews FollowSymLinks
    AllowOverride All
    Order allow,deny
    Allow from all
    DirectoryIndex index.php
  </Directory>
</VirtualHost>
245 samples/Assembly/fp_sqr32_160_comba.inc Normal file
@@ -0,0 +1,245 @@
|
||||
|
||||
push r2
|
||||
dint
|
||||
nop
|
||||
bis #MPYDLYWRTEN,&MPY32CTL0
|
||||
bic #MPYDLY32,&MPY32CTL0
|
||||
mov #SUMEXT,r13
|
||||
clr r12
|
||||
|
||||
mov @r15+,r4
|
||||
mov @r15+,r5
|
||||
|
||||
mov @r15+,r6
|
||||
mov @r15+,r7
|
||||
|
||||
mov @r15+,r8
|
||||
mov @r15+,r9
|
||||
|
||||
mov @r15+,r10
|
||||
mov @r15+,r11
|
||||
|
||||
sub #2*8,r15
|
||||
|
||||
/* SELF_STEP_FIRST */
|
||||
mov r4,&MPY32L
|
||||
mov r5,&MPY32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*0(r14)
|
||||
mov &RES1,2*(0+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*2(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(2+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r6,&OP2L
|
||||
mov r7,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*4(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(4+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*6(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(6+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov 2*8(r15),&OP2L
|
||||
mov 2*9(r15),&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov 2*8(r15),&OP2L
|
||||
mov 2*9(r15),&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
add @r13,r12
|
||||
mov r8,&OP2L
|
||||
mov r9,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*8(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(8+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
mov 2*8(r15),r4
|
||||
mov 2*(8+1)(r15),r5
|
||||
|
||||
/* STEP_1 */
|
||||
mov r6,&MAC32L
|
||||
mov r7,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* STEP_2MORE */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*10(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(10+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r8,&MAC32L
|
||||
mov r9,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* SELF_STEP */
|
||||
mov r10,&MAC32L
|
||||
mov r11,&MAC32H
|
||||
add @r13,r12
|
||||
mov r10,&OP2L
|
||||
mov r11,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*12(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(12+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* STEP_1 */
|
||||
mov r10,&MAC32L
|
||||
mov r11,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
add &SUMEXT,r12
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*14(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(14+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* SELF_STEP_1 */
|
||||
mov r4,&MAC32L
|
||||
mov r5,&MAC32H
|
||||
mov r4,&OP2L
|
||||
mov r5,&OP2H
|
||||
|
||||
/* COLUMN_END */
|
||||
mov &RES0,2*16(r14)
|
||||
add @r13,r12
|
||||
mov &RES1,2*(16+1)(r14)
|
||||
mov &RES2,&RES0
|
||||
mov &RES3,&RES1
|
||||
mov r12,&RES2
|
||||
clr &RES3
|
||||
clr r12
|
||||
|
||||
/* END */
|
||||
mov &RES0,2*18(r14)
|
||||
mov &RES1,2*(18+1)(r14)
|
||||
pop r2
|
||||
eint
|
||||
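The routine above is a comba-style squaring of a 160-bit operand (five 32-bit limbs) on the MSP430's MPY32 hardware multiplier: each column of the result is accumulated from the limb products whose indices sum to that column, and the repeated OP2 writes in each step appear to accumulate every cross product twice before the column is written out. A small Python sketch of that column structure (a software illustration, not a translation of the register-level code) is:

# Column-wise ("comba") squaring over 32-bit limbs, with doubled cross terms.
MASK32 = (1 << 32) - 1

def comba_sqr(limbs):
    n = len(limbs)
    out = [0] * (2 * n)
    carry = 0
    for col in range(2 * n - 1):
        acc = carry
        for i in range(max(0, col - n + 1), min(col, n - 1) + 1):
            j = col - i
            if i < j:
                acc += 2 * limbs[i] * limbs[j]   # doubled cross term
            elif i == j:
                acc += limbs[i] * limbs[i]       # squared diagonal term
        out[col] = acc & MASK32
        carry = acc >> 32
    out[2 * n - 1] = carry & MASK32
    return out

# Quick self-check against plain integer squaring.
a = [0x12345678, 0x9ABCDEF0, 0x0FEDCBA9, 0x87654321, 0xDEADBEEF]
value = sum(limb << (32 * i) for i, limb in enumerate(a))
squared = sum(limb << (32 * i) for i, limb in enumerate(comba_sqr(a)))
assert squared == value * value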
170
samples/Assembly/lib.inc
Normal file
170
samples/Assembly/lib.inc
Normal file
@@ -0,0 +1,170 @@
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; Display the number in AL
|
||||
; ------------------------------------------------------------------------
|
||||
DispAL:
|
||||
push ecx
|
||||
push edx
|
||||
push edi
|
||||
|
||||
mov edi, [dwDispPos]
|
||||
|
||||
mov ah, 0Fh ; 0000b: black background 1111b: white text
|
||||
mov dl, al
|
||||
shr al, 4
|
||||
mov ecx, 2
|
||||
.begin:
|
||||
and al, 01111b
|
||||
cmp al, 9
|
||||
ja .1
|
||||
add al, '0'
|
||||
jmp .2
|
||||
.1:
|
||||
sub al, 0Ah
|
||||
add al, 'A'
|
||||
.2:
|
||||
mov [gs:edi], ax
|
||||
add edi, 2
|
||||
|
||||
mov al, dl
|
||||
loop .begin
|
||||
;add edi, 2
|
||||
|
||||
mov [dwDispPos], edi
|
||||
|
||||
pop edi
|
||||
pop edx
|
||||
pop ecx
|
||||
|
||||
ret
|
||||
; DispAL end -------------------------------------------------------------
|
||||
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; Display an integer
|
||||
; ------------------------------------------------------------------------
|
||||
DispInt:
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 24
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 16
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
shr eax, 8
|
||||
call DispAL
|
||||
|
||||
mov eax, [esp + 4]
|
||||
call DispAL
|
||||
|
||||
mov ah, 07h ; 0000b: black background 0111b: gray text
|
||||
mov al, 'h'
|
||||
push edi
|
||||
mov edi, [dwDispPos]
|
||||
mov [gs:edi], ax
|
||||
add edi, 4
|
||||
mov [dwDispPos], edi
|
||||
pop edi
|
||||
|
||||
ret
|
||||
; DispInt end ------------------------------------------------------------
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; Display a string
|
||||
; ------------------------------------------------------------------------
|
||||
DispStr:
|
||||
push ebp
|
||||
mov ebp, esp
|
||||
push ebx
|
||||
push esi
|
||||
push edi
|
||||
|
||||
mov esi, [ebp + 8] ; pszInfo
|
||||
mov edi, [dwDispPos]
|
||||
mov ah, 0Fh
|
||||
.1:
|
||||
lodsb
|
||||
test al, al
|
||||
jz .2
|
||||
cmp al, 0Ah ; is it a carriage return?
|
||||
jnz .3
|
||||
push eax
|
||||
mov eax, edi
|
||||
mov bl, 160
|
||||
div bl
|
||||
and eax, 0FFh
|
||||
inc eax
|
||||
mov bl, 160
|
||||
mul bl
|
||||
mov edi, eax
|
||||
pop eax
|
||||
jmp .1
|
||||
.3:
|
||||
mov [gs:edi], ax
|
||||
add edi, 2
|
||||
jmp .1
|
||||
|
||||
.2:
|
||||
mov [dwDispPos], edi
|
||||
|
||||
pop edi
|
||||
pop esi
|
||||
pop ebx
|
||||
pop ebp
|
||||
ret
|
||||
; DispStr end ------------------------------------------------------------
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; New line
|
||||
; ------------------------------------------------------------------------
|
||||
DispReturn:
|
||||
push szReturn
|
||||
call DispStr ;printf("\n");
|
||||
add esp, 4
|
||||
|
||||
ret
|
||||
; DispReturn end ---------------------------------------------------------
|
||||
|
||||
|
||||
; ------------------------------------------------------------------------
|
||||
; Memory copy, modeled on memcpy
|
||||
; ------------------------------------------------------------------------
|
||||
; void* MemCpy(void* es:pDest, void* ds:pSrc, int iSize);
|
||||
; ------------------------------------------------------------------------
|
||||
MemCpy:
|
||||
push ebp
|
||||
mov ebp, esp
|
||||
|
||||
push esi
|
||||
push edi
|
||||
push ecx
|
||||
|
||||
mov edi, [ebp + 8] ; Destination
|
||||
mov esi, [ebp + 12] ; Source
|
||||
mov ecx, [ebp + 16] ; Counter
|
||||
.1:
|
||||
cmp ecx, 0 ; test the counter
|
||||
jz .2 ; exit when the counter reaches zero
|
||||
|
||||
mov al, [ds:esi] ; ┓
|
||||
inc esi ; ┃
|
||||
; ┣ move byte by byte
|
||||
mov byte [es:edi], al ; ┃
|
||||
inc edi ; ┛
|
||||
|
||||
dec ecx ; decrement the counter
|
||||
jmp .1 ; loop
|
||||
.2:
|
||||
mov eax, [ebp + 8] ; return value
|
||||
|
||||
pop ecx
|
||||
pop edi
|
||||
pop esi
|
||||
mov esp, ebp
|
||||
pop ebp
|
||||
|
||||
ret ; end of function, return
|
||||
; MemCpy end -------------------------------------------------------------
|
||||
|
||||
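DispAL above turns the byte in AL into two hex digits by testing each nibble against 9, adding '0' for decimal digits and 'A' minus 10 otherwise, then stores each character together with an attribute byte (0Fh, white on black) as a 16-bit cell in text-mode video memory at gs:edi. A tiny Python sketch of that conversion, with the character/attribute pairing kept as tuples purely for illustration:

# Software rendering of the DispAL nibble-to-hex logic above.
def byte_to_hex_cells(value, attribute=0x0F):
    cells = []
    for digit in ((value >> 4) & 0xF, value & 0xF):   # high nibble first, then low
        if digit <= 9:
            char = ord('0') + digit        # decimal digit: add '0'
        else:
            char = ord('A') + digit - 10   # hex digit: subtract 10, add 'A'
        cells.append((chr(char), attribute))
    return cells

print(byte_to_hex_cells(0x3C))  # [('3', 15), ('C', 15)]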
321
samples/Assembly/macros.inc
Normal file
321
samples/Assembly/macros.inc
Normal file
@@ -0,0 +1,321 @@
|
||||
BLARGG_MACROS_INCLUDED = 1
|
||||
|
||||
; Allows extra error checking with modified version
|
||||
; of ca65. Otherwise acts like a constant of 0.
|
||||
ADDR = 0
|
||||
|
||||
; Switches to Segment and places Line there.
|
||||
; Line can be an .align directive, .res, .byte, etc.
|
||||
; Examples:
|
||||
; seg_data BSS, .align 256
|
||||
; seg_data RODATA, {message: .byte "Test",0}
|
||||
.macro seg_data Segment, Line
|
||||
.pushseg
|
||||
.segment .string(Segment)
|
||||
Line
|
||||
.popseg
|
||||
.endmacro
|
||||
|
||||
; Reserves Size bytes in Segment for Name.
|
||||
; If Size is omitted, reserves one byte.
|
||||
.macro seg_res Segment, Name, Size
|
||||
.ifblank Size
|
||||
seg_data Segment, Name: .res 1
|
||||
.else
|
||||
seg_data Segment, Name: .res Size
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Shortcuts for zeropage, bss, and stack
|
||||
.define zp_res seg_res ZEROPAGE,
|
||||
.define nv_res seg_res NVRAM,
|
||||
.define bss_res seg_res BSS,
|
||||
.define sp_res seg_res STACK,
|
||||
.define zp_byte zp_res
|
||||
|
||||
; Copies byte from Src to Addr. If Src begins with #,
|
||||
; it sets Addr to the immediate value.
|
||||
; Out: A = byte copied
|
||||
; Preserved: X, Y
|
||||
.macro mov Addr, Src
|
||||
lda Src
|
||||
sta Addr
|
||||
.endmacro
|
||||
|
||||
; Copies word from Src to Addr. If Src begins with #,
|
||||
; it sets Addr the immediate value.
|
||||
; Out: A = high byte of word
|
||||
; Preserved: X, Y
|
||||
.macro movw Addr, Src
|
||||
.if .match( .left( 1, {Src} ), # )
|
||||
lda #<(.right( .tcount( {Src} )-1, {Src} ))
|
||||
sta Addr
|
||||
lda #>(.right( .tcount( {Src} )-1, {Src} ))
|
||||
sta 1+(Addr)
|
||||
.else
|
||||
lda Src
|
||||
sta Addr
|
||||
lda 1+(Src)
|
||||
sta 1+(Addr)
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Increments 16-bit value at Addr.
|
||||
; Out: EQ/NE based on resulting 16-bit value
|
||||
; Preserved: A, X, Y
|
||||
.macro incw Addr
|
||||
.local @Skip
|
||||
inc Addr
|
||||
bne @Skip
|
||||
inc 1+(Addr)
|
||||
@Skip:
|
||||
.endmacro
|
||||
|
||||
; Adds Src to word at Addr.
|
||||
; Out: A = high byte of result, carry set appropriately
|
||||
; Preserved: X, Y
|
||||
.macro addw Addr, Src
|
||||
.if .match( .left( 1, {Src} ), # )
|
||||
addw_ Addr,(.right( .tcount( {Src} )-1, {Src} ))
|
||||
.else
|
||||
lda Addr
|
||||
clc
|
||||
adc Src
|
||||
sta Addr
|
||||
|
||||
lda 1+(Addr)
|
||||
adc 1+(Src)
|
||||
sta 1+(Addr)
|
||||
.endif
|
||||
.endmacro
|
||||
.macro addw_ Addr, Imm
|
||||
lda Addr
|
||||
clc
|
||||
adc #<Imm
|
||||
sta Addr
|
||||
|
||||
;.if (Imm >> 8) <> 0
|
||||
lda 1+(Addr)
|
||||
adc #>Imm
|
||||
sta 1+(Addr)
|
||||
;.else
|
||||
; .local @Skip
|
||||
; bcc @Skip
|
||||
; inc 1+(Addr)
|
||||
;@Skip:
|
||||
;.endif
|
||||
.endmacro
|
||||
|
||||
; Splits list of words into tables of low and high bytes
|
||||
; Example: split_words foo, {$1234, $5678}
|
||||
; expands to:
|
||||
; foo_l: $34, $78
|
||||
; foo_h: $12, $56
|
||||
; foo_count = 2
|
||||
.macro split_words Label, Words
|
||||
.ident (.concat (.string(Label), "_l")): .lobytes Words
|
||||
.ident (.concat (.string(Label), "_h")): .hibytes Words
|
||||
.ident (.concat (.string(Label), "_count")) = * - .ident (.concat (.string(Label), "_h"))
|
||||
.endmacro
|
||||
|
||||
.macro SELECT Bool, True, False, Extra
|
||||
.ifndef Bool
|
||||
False Extra
|
||||
.elseif Bool <> 0
|
||||
True Extra
|
||||
.else
|
||||
False Extra
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
.macro DEFAULT Name, Value
|
||||
.ifndef Name
|
||||
Name = Value
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
.ifp02
|
||||
; 6502 doesn't define these alternate names
|
||||
.define blt bcc
|
||||
.define bge bcs
|
||||
.endif
|
||||
.define jlt jcc
|
||||
.define jge jcs
|
||||
|
||||
; Jxx Target = Bxx Target, except it can go farther than
|
||||
; 128 bytes. Implemented via branch around a JMP.
|
||||
|
||||
; Don't use ca65's longbranch, because they fail for @labels
|
||||
;.macpack longbranch
|
||||
|
||||
.macro jeq Target
|
||||
bne *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jne Target
|
||||
beq *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jmi Target
|
||||
bpl *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jpl Target
|
||||
bmi *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jcs Target
|
||||
bcc *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jcc Target
|
||||
bcs *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jvs Target
|
||||
bvc *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
.macro jvc Target
|
||||
bvs *+5
|
||||
jmp Target
|
||||
.endmacro
|
||||
|
||||
|
||||
; Passes constant data to routine in addr
|
||||
; Preserved: A, X, Y
|
||||
.macro jsr_with_addr routine,data
|
||||
.local Addr
|
||||
pha
|
||||
lda #<Addr
|
||||
sta addr
|
||||
lda #>Addr
|
||||
sta addr+1
|
||||
pla
|
||||
jsr routine
|
||||
seg_data RODATA,{Addr: data}
|
||||
.endmacro
|
||||
|
||||
; Calls routine multiple times, with A having the
|
||||
; value 'start' the first time, 'start+step' the
|
||||
; second time, up to 'end' for the last time.
|
||||
.macro for_loop routine,start,end,step
|
||||
.local @for_loop
|
||||
lda #start
|
||||
@for_loop:
|
||||
pha
|
||||
jsr routine
|
||||
pla
|
||||
clc
|
||||
adc #step
|
||||
cmp #<((end)+(step))
|
||||
bne @for_loop
|
||||
.endmacro
|
||||
|
||||
; Calls routine n times. The value of A in the routine
|
||||
; counts from 0 to n-1.
|
||||
.macro loop_n_times routine,n
|
||||
for_loop routine,0,n-1,+1
|
||||
.endmacro
|
||||
|
||||
; Same as for_loop, except uses 16-bit value in YX.
|
||||
; -256 <= step <= 255
|
||||
.macro for_loop16 routine,start,end,step
|
||||
.if (step) < -256 || (step) > 255
|
||||
.error "Step must be within -256 to 255"
|
||||
.endif
|
||||
.local @for_loop_skip
|
||||
.local @for_loop
|
||||
ldy #>(start)
|
||||
lda #<(start)
|
||||
@for_loop:
|
||||
tax
|
||||
pha
|
||||
tya
|
||||
pha
|
||||
jsr routine
|
||||
pla
|
||||
tay
|
||||
pla
|
||||
clc
|
||||
adc #step
|
||||
.if (step) > 0
|
||||
bcc @for_loop_skip
|
||||
iny
|
||||
.else
|
||||
bcs @for_loop_skip
|
||||
dey
|
||||
.endif
|
||||
@for_loop_skip:
|
||||
cmp #<((end)+(step))
|
||||
bne @for_loop
|
||||
cpy #>((end)+(step))
|
||||
bne @for_loop
|
||||
.endmacro
|
||||
|
||||
; Stores byte at addr
|
||||
; Preserved: X, Y
|
||||
.macro setb addr, byte
|
||||
lda #byte
|
||||
sta addr
|
||||
.endmacro
|
||||
|
||||
; Stores word at addr
|
||||
; Preserved: X, Y
|
||||
.macro setw addr, word
|
||||
lda #<(word)
|
||||
sta addr
|
||||
lda #>(word)
|
||||
sta addr+1
|
||||
.endmacro
|
||||
|
||||
; Loads XY with 16-bit immediate or value at address
|
||||
.macro ldxy Arg
|
||||
.if .match( .left( 1, {Arg} ), # )
|
||||
ldy #<(.right( .tcount( {Arg} )-1, {Arg} ))
|
||||
ldx #>(.right( .tcount( {Arg} )-1, {Arg} ))
|
||||
.else
|
||||
ldy (Arg)
|
||||
ldx (Arg)+1
|
||||
.endif
|
||||
.endmacro
|
||||
|
||||
; Increments XY as 16-bit register, in CONSTANT time.
|
||||
; Z flag set based on entire result.
|
||||
; Preserved: A
|
||||
; Time: 7 clocks
|
||||
.macro inxy
|
||||
iny ; 2
|
||||
beq *+4 ; 3
|
||||
; -1
|
||||
bne *+3 ; 3
|
||||
; -1
|
||||
inx ; 2
|
||||
.endmacro
|
||||
|
||||
; Negates A and adds it to operand
|
||||
.macro subaf Operand
|
||||
eor #$FF
|
||||
sec
|
||||
adc Operand
|
||||
.endmacro
|
||||
|
||||
; Initializes CPU registers to reasonable values
|
||||
; Preserved: A, Y
|
||||
.macro init_cpu_regs
|
||||
sei
|
||||
cld ; unnecessary on NES, but might help on clone
|
||||
ldx #$FF
|
||||
txs
|
||||
.ifndef BUILD_NSF
|
||||
inx
|
||||
stx PPUCTRL
|
||||
.endif
|
||||
.endmacro
|
||||
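Several of the macros above (incw, addw, setw, inxy) treat a 16-bit value as a low/high byte pair and propagate the carry out of the low byte by hand, which is the natural idiom on the 6502. A short Python sketch of the byte-pair arithmetic that incw and addw expand to, assuming for simplicity that the added value is a plain 16-bit constant:

# Byte-pair arithmetic mirroring the incw/addw macros above.
def incw(lo, hi):
    lo = (lo + 1) & 0xFF
    if lo == 0:                 # low byte wrapped: the "bne @Skip / inc high" path
        hi = (hi + 1) & 0xFF
    return lo, hi

def addw(lo, hi, value):
    total = lo + (value & 0xFF)
    lo = total & 0xFF
    hi = (hi + (value >> 8) + (total >> 8)) & 0xFF   # high byte plus carry from the low add
    return lo, hi

assert incw(0xFF, 0x12) == (0x00, 0x13)
assert addw(0xF0, 0x01, 0x0120) == (0x10, 0x03)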
86
samples/C#/build.cake
Normal file
86
samples/C#/build.cake
Normal file
@@ -0,0 +1,86 @@
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// ARGUMENTS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var target = Argument<string>("target", "Default");
|
||||
var configuration = Argument<string>("configuration", "Release");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// GLOBAL VARIABLES
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var solutions = GetFiles("./**/*.sln");
|
||||
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// SETUP / TEARDOWN
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Setup(() =>
|
||||
{
|
||||
// Executed BEFORE the first task.
|
||||
Information("Running tasks...");
|
||||
});
|
||||
|
||||
Teardown(() =>
|
||||
{
|
||||
// Executed AFTER the last task.
|
||||
Information("Finished running tasks.");
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TASK DEFINITIONS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
// Clean solution directories.
|
||||
foreach(var path in solutionPaths)
|
||||
{
|
||||
Information("Cleaning {0}", path);
|
||||
CleanDirectories(path + "/**/bin/" + configuration);
|
||||
CleanDirectories(path + "/**/obj/" + configuration);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Restore all NuGet packages.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Restoring {0}...", solution);
|
||||
NuGetRestore(solution);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Build all solutions.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Building {0}", solution);
|
||||
MSBuild(solution, settings =>
|
||||
settings.SetPlatformTarget(PlatformTarget.MSIL)
|
||||
.WithProperty("TreatWarningsAsErrors","true")
|
||||
.WithTarget("Build")
|
||||
.SetConfiguration(configuration));
|
||||
}
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TARGETS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// EXECUTION
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
RunTarget(target);
|
||||
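The Cake script above declares Clean, Restore, Build and Default tasks, wires them together with IsDependentOn, and lets RunTarget(target) run the requested task after its dependency chain. A toy Python sketch of that task/dependency pattern (the task and run_target helpers here are invented for illustration and are not Cake's API):

# Minimal task runner: each task lists dependencies; running a target runs
# its dependency chain first, each task at most once.
tasks = {}

def task(name, deps=(), action=lambda: None):
    tasks[name] = (list(deps), action)

def run_target(name, done=None):
    done = set() if done is None else done
    if name in done:
        return
    deps, action = tasks[name]
    for dep in deps:
        run_target(dep, done)
    print(f"Running task {name}")
    action()
    done.add(name)

task("Clean",   action=lambda: print("  cleaning bin/ and obj/"))
task("Restore", action=lambda: print("  restoring NuGet packages"))
task("Build",   deps=("Clean", "Restore"), action=lambda: print("  building solutions"))
task("Default", deps=("Build",))

run_target("Default")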
57
samples/C++/ClasspathVMSystemProperties.inc
Normal file
@@ -0,0 +1,57 @@
//===- ClasspathVMSystem/Properties.cpp -----------------------------------===//
//===--------------------- GNU classpath gnu/classpath/VMSystemProperties -===//
//
// The VMKit project
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include <sys/utsname.h>

#include "types.h"

#include "Classpath.h"
#include "JavaArray.h"
#include "JavaClass.h"
#include "JavaObject.h"
#include "JavaThread.h"
#include "JavaUpcalls.h"
#include "Jnjvm.h"

#include "SetProperties.inc"

using namespace j3;

extern "C" {


JNIEXPORT void JNICALL Java_gnu_classpath_VMSystemProperties_preInit(
#ifdef NATIVE_JNI
JNIEnv *env,
jclass clazz,
#endif
JavaObject* prop) {

llvm_gcroot(prop, 0);

BEGIN_NATIVE_EXCEPTION(0)

setProperties(prop);

END_NATIVE_EXCEPTION
}

extern "C" void Java_gnu_classpath_VMSystemProperties_postInit__Ljava_util_Properties_2(JavaObject* prop) {

llvm_gcroot(prop, 0);

BEGIN_NATIVE_EXCEPTION(0)

setCommandLineProperties(prop);

END_NATIVE_EXCEPTION
}

}
2764
samples/C++/initClasses.inc
Normal file
File diff suppressed because it is too large
34
samples/C++/instances.inc
Normal file
@@ -0,0 +1,34 @@
#include "QPBO.h"

#ifdef _MSC_VER
#pragma warning(disable: 4661)
#endif

// Instantiations

template class QPBO<int>;
template class QPBO<float>;
template class QPBO<double>;

template <>
inline void QPBO<int>::get_type_information(char*& type_name, char*& type_format)
{
type_name = "int";
type_format = "d";
}

template <>
inline void QPBO<float>::get_type_information(char*& type_name, char*& type_format)
{
type_name = "float";
type_format = "f";
}

template <>
inline void QPBO<double>::get_type_information(char*& type_name, char*& type_format)
{
type_name = "double";
type_format = "Lf";
}


@@ -1,3 +1,3 @@
Year,Make,Model,Length
1997,Ford,E350,2.34
2000,Mercury,Cougar,2.38
2000,Mercury,Cougar,2.38
6
samples/Charity/example.ch
Normal file
@@ -0,0 +1,6 @@
%
% Some very badly written Charity
%

data LA(A) -> D = ss: A -> D
| ff: -> D.
133
samples/Click/sr2.click
Normal file
133
samples/Click/sr2.click
Normal file
@@ -0,0 +1,133 @@
|
||||
rates :: AvailableRates
|
||||
elementclass sr2 {
|
||||
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
|
||||
|
||||
|
||||
arp :: ARPTable();
|
||||
lt :: LinkTable(IP $sr2_ip);
|
||||
|
||||
|
||||
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
PERIOD 15,
|
||||
GW $gateway);
|
||||
|
||||
|
||||
gw -> SR2SetChecksum -> [0] output;
|
||||
|
||||
set_gw :: SR2SetGateway(SEL gw);
|
||||
|
||||
|
||||
es :: SR2ETTStat(ETHTYPE 0x0641,
|
||||
ETH $wireless_mac,
|
||||
IP $sr2_ip,
|
||||
PERIOD 30000,
|
||||
TAU 300000,
|
||||
ARP arp,
|
||||
PROBES $probes,
|
||||
ETT metric,
|
||||
RT rates);
|
||||
|
||||
|
||||
metric :: SR2ETTMetric(LT lt);
|
||||
|
||||
|
||||
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
ARP arp,
|
||||
LT lt);
|
||||
|
||||
|
||||
querier :: SR2Querier(ETH $wireless_mac,
|
||||
SR forwarder,
|
||||
LT lt,
|
||||
ROUTE_DAMPENING true,
|
||||
TIME_BEFORE_SWITCH 5,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG false);
|
||||
|
||||
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_responder -> SR2SetChecksum -> [0] output;
|
||||
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
|
||||
query_forwarder [1] -> query_responder;
|
||||
|
||||
data_ck :: SR2SetChecksum()
|
||||
|
||||
input [1]
|
||||
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> querier
|
||||
-> data_ck;
|
||||
|
||||
|
||||
host_cl [1] -> [0] set_gw [0] -> querier;
|
||||
|
||||
forwarder[0]
|
||||
-> dt ::DecIPTTL
|
||||
-> data_ck
|
||||
-> [2] output;
|
||||
|
||||
|
||||
dt[1]
|
||||
-> Print(ttl-error)
|
||||
-> ICMPError($sr2_ip, timeexceeded, 0)
|
||||
-> querier;
|
||||
|
||||
|
||||
// queries
|
||||
querier [1] -> [1] query_forwarder;
|
||||
es -> SetTimestamp() -> [1] output;
|
||||
|
||||
|
||||
forwarder[1] //ip packets to me
|
||||
-> SR2StripHeader()
|
||||
-> CheckIPHeader()
|
||||
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> [3] output;
|
||||
|
||||
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
|
||||
|
||||
input [0]
|
||||
-> ncl :: Classifier(
|
||||
12/0643 , //sr2_forwarder
|
||||
12/0644 , //sr2
|
||||
12/0645 , //replies
|
||||
12/0641 , //sr2_es
|
||||
12/062c , //sr2_gw
|
||||
);
|
||||
|
||||
|
||||
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
|
||||
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
|
||||
ncl[2] -> SR2CheckHeader() -> query_responder;
|
||||
ncl[3] -> es;
|
||||
ncl[4] -> SR2CheckHeader() -> gw;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
|
||||
Idle -> [1] s;
|
||||
s[1] -> Discard;
|
||||
s[2] -> Discard;
|
||||
s[3] -> Discard;
|
||||
142
samples/Click/thomer-nat.click
Normal file
142
samples/Click/thomer-nat.click
Normal file
@@ -0,0 +1,142 @@
|
||||
// This Click configuration implements a firewall and NAT, roughly based on the
|
||||
// mazu-nat.click example.
|
||||
//
|
||||
// This example assumes there is one interface that is IP-aliased. In this
|
||||
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
|
||||
// respectively. There is a local network, 192.168.1.0/24, and an upstream
|
||||
// gateway, 66.58.65.89. Traffic from the local network is NATed.
|
||||
//
|
||||
// Connections can be initiated from the NAT box itself, also.
|
||||
//
|
||||
// For bugs, suggestions, and, corrections, please email me.
|
||||
//
|
||||
// Author: Thomer M. Gil (click@thomer.com)
|
||||
|
||||
AddressInfo(
|
||||
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
|
||||
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
|
||||
gw-addr 66.58.65.89 00:20:6f:14:54:c2
|
||||
);
|
||||
|
||||
|
||||
elementclass SniffGatewayDevice {
|
||||
$device |
|
||||
from :: FromDevice($device)
|
||||
-> t1 :: Tee
|
||||
-> output;
|
||||
input -> q :: Queue(1024)
|
||||
-> t2 :: PullTee
|
||||
-> to :: ToDevice($device);
|
||||
t1[1] -> ToHostSniffers;
|
||||
t2[1] -> ToHostSniffers($device);
|
||||
ScheduleInfo(from .1, to 1);
|
||||
}
|
||||
|
||||
|
||||
device :: SniffGatewayDevice(eth0);
|
||||
arpq_in :: ARPQuerier(eth0-in) -> device;
|
||||
ip_to_extern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> EtherEncap(0x800, eth0-ex, gw-addr)
|
||||
-> device;
|
||||
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
|
||||
-> ToHost;
|
||||
ip_to_intern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> arpq_in;
|
||||
|
||||
|
||||
arp_class :: Classifier(
|
||||
12/0806 20/0001, // [0] ARP requests
|
||||
12/0806 20/0002, // [1] ARP replies to host
|
||||
12/0800); // [2] IP packets
|
||||
|
||||
device -> arp_class;
|
||||
|
||||
// ARP crap
|
||||
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
|
||||
arp_class[1] -> arp_t :: Tee;
|
||||
arp_t[0] -> ToHost;
|
||||
arp_t[1] -> [1]arpq_in;
|
||||
|
||||
|
||||
// IP packets
|
||||
arp_class[2] -> Strip(14)
|
||||
-> CheckIPHeader
|
||||
-> ipclass :: IPClassifier(dst host eth0-ex,
|
||||
dst host eth0-in,
|
||||
src net eth0-in);
|
||||
|
||||
// Define pattern NAT
|
||||
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
|
||||
|
||||
// Rewriting rules for UDP/TCP packets
|
||||
// output[0] rewritten to go into the wild
|
||||
// output[1] rewritten to come back from the wild or no match
|
||||
rw :: IPRewriter(pattern NAT 0 1,
|
||||
pass 1);
|
||||
|
||||
// Rewriting rules for ICMP packets
|
||||
irw :: ICMPPingRewriter(eth0-ex, -);
|
||||
irw[0] -> ip_to_extern;
|
||||
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
|
||||
icmp_me_or_intern[0] -> ip_to_host;
|
||||
icmp_me_or_intern[1] -> ip_to_intern;
|
||||
|
||||
// Rewriting rules for ICMP error packets
|
||||
ierw :: ICMPRewriter(rw irw);
|
||||
ierw[0] -> icmp_me_or_intern;
|
||||
ierw[1] -> icmp_me_or_intern;
|
||||
|
||||
|
||||
// Packets directed at eth0-ex.
|
||||
// Send it through IPRewriter(pass). If there was a mapping, it will be
|
||||
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
|
||||
// eth0-ex.
|
||||
ipclass[0] -> [1]rw;
|
||||
|
||||
// packets that were rewritten, heading into the wild world.
|
||||
rw[0] -> ip_to_extern;
|
||||
|
||||
// packets that come back from the wild or are not part of an established
|
||||
// connection.
|
||||
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
|
||||
dst net eth0-in);
|
||||
|
||||
// not established yet or returning packets for a connection that was
|
||||
// established from this host itself.
|
||||
established_class[0] ->
|
||||
firewall :: IPClassifier(dst tcp port ssh,
|
||||
dst tcp port smtp,
|
||||
dst tcp port domain,
|
||||
dst udp port domain,
|
||||
icmp type echo-reply,
|
||||
proto icmp,
|
||||
port > 4095,
|
||||
-);
|
||||
|
||||
firewall[0] -> ip_to_host; // ssh
|
||||
firewall[1] -> ip_to_host; // smtp
|
||||
firewall[2] -> ip_to_host; // domain (t)
|
||||
firewall[3] -> ip_to_host; // domain (u)
|
||||
firewall[4] -> [0]irw; // icmp reply
|
||||
firewall[5] -> [0]ierw; // other icmp
|
||||
firewall[6] -> ip_to_host; // port > 4095, probably for connection
|
||||
// originating from host itself
|
||||
firewall[7] -> Discard; // don't allow incoming for port <= 4095
|
||||
|
||||
// established connection
|
||||
established_class[1] -> ip_to_intern;
|
||||
|
||||
// To eth0-in. Only accept from inside network.
|
||||
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
|
||||
|
||||
// Packets from eth0-in:net either stay on local network or go to the wild.
|
||||
// Those that go into the wild need to go through the appropriate rewriting
|
||||
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
|
||||
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
|
||||
inter_class[0] -> ip_to_intern;
|
||||
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
|
||||
icmp type echo);
|
||||
ip_udp_class[0] -> [0]rw;
|
||||
ip_udp_class[1] -> [0]irw;
|
||||
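The configuration above NATs outbound traffic by rewriting source addresses to the external interface and drawing source ports from the 50000-65535 pool, while replies to those ports are mapped back to the originating internal host. A toy Python sketch of such a source-NAT table (it ignores timeouts, TCP state and everything else IPRewriter actually handles; the external address and port range are taken from the sample, the flows are invented):

# Toy source-NAT table: outbound flows get a fresh external port; replies to
# that port map back to the original internal (address, port).
import itertools

EXTERNAL_IP = "66.58.65.90"
port_pool = itertools.cycle(range(50000, 65536))
out_map, in_map = {}, {}

def rewrite_outbound(src_ip, src_port, dst_ip, dst_port):
    key = (src_ip, src_port, dst_ip, dst_port)
    if key not in out_map:
        ext_port = next(port_pool)
        out_map[key] = ext_port
        in_map[(dst_ip, dst_port, ext_port)] = (src_ip, src_port)
    return EXTERNAL_IP, out_map[key], dst_ip, dst_port

def rewrite_inbound(src_ip, src_port, ext_port):
    # Reply traffic: unknown ports would fall through to the firewall rules.
    return in_map.get((src_ip, src_port, ext_port))

print(rewrite_outbound("192.168.1.10", 40000, "93.184.216.34", 80))
print(rewrite_inbound("93.184.216.34", 80, 50000))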
17
samples/CoffeeScript/build.cake
Normal file
@@ -0,0 +1,17 @@
fs = require 'fs'

{print} = require 'sys'
{spawn} = require 'child_process'

build = (callback) ->
  coffee = spawn 'coffee', ['-c', '-o', '.', '.']
  coffee.stderr.on 'data', (data) ->
    process.stderr.write data.toString()
  coffee.stdout.on 'data', (data) ->
    print data.toString()
  coffee.on 'exit', (code) ->
    callback?() if code is 0

task 'build', 'Build from source', ->
  build()

164
samples/Common Lisp/array.l
Normal file
164
samples/Common Lisp/array.l
Normal file
@@ -0,0 +1,164 @@
|
||||
;;; -*- Mode: Lisp; Package: LISP -*-
|
||||
;;;
|
||||
;;; This file is part of xyzzy.
|
||||
;;;
|
||||
|
||||
(provide "array")
|
||||
|
||||
(in-package "lisp")
|
||||
|
||||
(export '(make-vector make-array vector array-dimensions array-in-bounds-p
|
||||
upgraded-array-element-type adjust-array))
|
||||
|
||||
(defun upgraded-array-element-type (type)
|
||||
(cond ((or (eq type 't)
|
||||
(null type))
|
||||
't)
|
||||
((member type '(character base-character standard-char
|
||||
extended-character) :test #'eq)
|
||||
'character)
|
||||
(t
|
||||
(setq type (car (si:canonicalize-type type)))
|
||||
(cond ((or (eq type 't)
|
||||
(null type))
|
||||
't)
|
||||
((member type '(character base-character standard-char
|
||||
extended-character) :test #'eq)
|
||||
'character)
|
||||
(t 't)))))
|
||||
|
||||
(defun check-array-initialize-option (ies-p ics-p displaced-to)
|
||||
(let ((x 0))
|
||||
(and ies-p (incf x))
|
||||
(and ics-p (incf x))
|
||||
(and displaced-to (incf x))
|
||||
(when (> x 1)
|
||||
(error ":initial-element, :initial-contents, :displaced-to"))))
|
||||
|
||||
(defun make-vector (length &key
|
||||
(element-type t)
|
||||
(initial-element nil ies-p)
|
||||
(initial-contents nil ics-p)
|
||||
fill-pointer
|
||||
adjustable
|
||||
displaced-to
|
||||
(displaced-index-offset 0))
|
||||
(setq element-type (upgraded-array-element-type element-type))
|
||||
(check-array-initialize-option ies-p ics-p displaced-to)
|
||||
(let ((vector (si:*make-vector length element-type initial-element adjustable
|
||||
fill-pointer displaced-to displaced-index-offset)))
|
||||
(when ics-p
|
||||
(si:*copy-into-seq vector initial-contents))
|
||||
vector))
|
||||
|
||||
(defun make-array (dimensions &rest rest
|
||||
&key
|
||||
(element-type t)
|
||||
(initial-element nil ies-p)
|
||||
(initial-contents nil ics-p)
|
||||
fill-pointer
|
||||
adjustable
|
||||
displaced-to
|
||||
(displaced-index-offset 0))
|
||||
(cond ((integerp dimensions)
|
||||
(apply #'make-vector dimensions rest))
|
||||
((= (length dimensions) 1)
|
||||
(apply #'make-vector (car dimensions) rest))
|
||||
(t
|
||||
(setq element-type (upgraded-array-element-type element-type))
|
||||
(check-array-initialize-option ies-p ics-p displaced-to)
|
||||
(when fill-pointer
|
||||
(error ":fill-pointer"))
|
||||
(let ((array (si:*make-array dimensions element-type
|
||||
initial-element adjustable
|
||||
displaced-to displaced-index-offset)))
|
||||
(when ics-p
|
||||
(let ((dims (make-list (array-rank array)
|
||||
:initial-element 0))
|
||||
(stack (list initial-contents))
|
||||
(rank (1- (array-rank array))))
|
||||
(dolist (x dims)
|
||||
(push (elt (car stack) 0) stack))
|
||||
(dotimes (i (array-total-size array))
|
||||
(setf (row-major-aref array i) (car stack))
|
||||
(do ((x dims (cdr x))
|
||||
(j rank (1- j)))
|
||||
((null x))
|
||||
(pop stack)
|
||||
(incf (car x))
|
||||
(when (< (car x) (array-dimension array j))
|
||||
(do ((r (- rank j) (1- r)))
|
||||
((< r 0))
|
||||
(push (elt (car stack) (nth r dims)) stack))
|
||||
(return))
|
||||
(setf (car x) 0)))))
|
||||
array))))
|
||||
|
||||
(defun vector (&rest list)
|
||||
(make-vector (length list) :element-type t :initial-contents list))
|
||||
|
||||
(defun array-dimensions (array)
|
||||
(do ((i (1- (array-rank array)) (1- i))
|
||||
(dims '()))
|
||||
((minusp i) dims)
|
||||
(push (array-dimension array i) dims)))
|
||||
|
||||
(defun array-in-bounds-p (array &rest subscripts)
|
||||
(let ((r (array-rank array)))
|
||||
(when (/= r (length subscripts))
|
||||
(error "subscripts: ~S" subscripts))
|
||||
(do ((i 0 (1+ i))
|
||||
(s subscripts (cdr s)))
|
||||
((= i r) t)
|
||||
(unless (<= 0 (car s) (1- (array-dimension array i)))
|
||||
(return nil)))))
|
||||
|
||||
(defun adjust-array (old-array
|
||||
dimensions
|
||||
&rest rest
|
||||
&key
|
||||
(element-type nil ets-p)
|
||||
initial-element
|
||||
(initial-contents nil ics-p)
|
||||
(fill-pointer nil fps-p)
|
||||
displaced-to
|
||||
displaced-index-offset)
|
||||
(when (/= (length dimensions) (array-rank old-array))
|
||||
(error "?"))
|
||||
(unless ets-p
|
||||
(push (array-element-type old-array) rest)
|
||||
(push :element-type rest))
|
||||
(when (adjustable-array-p old-array)
|
||||
(push t rest)
|
||||
(push :adjustable rest))
|
||||
(cond (fps-p
|
||||
(unless (array-has-fill-pointer-p old-array)
|
||||
(error "?")))
|
||||
(t
|
||||
(when (array-has-fill-pointer-p old-array)
|
||||
(push (fill-pointer old-array) rest)
|
||||
(push :fill-pointer rest))))
|
||||
(when (eq old-array displaced-to)
|
||||
(error "?"))
|
||||
(let ((new-array (apply #'make-array dimensions rest)))
|
||||
(or ics-p displaced-to
|
||||
(copy-array-partially old-array new-array))
|
||||
(cond ((adjustable-array-p old-array)
|
||||
(si:*replace-array old-array new-array)
|
||||
old-array)
|
||||
(t
|
||||
new-array))))
|
||||
|
||||
(defun copy-array-partially (src dst)
|
||||
(let* ((dims (mapcar #'min (array-dimensions src) (array-dimensions dst)))
|
||||
(r (array-rank src))
|
||||
(s (make-list r :initial-element 0)))
|
||||
(setq r (1- r))
|
||||
(dotimes (x (apply #'* dims))
|
||||
(setf (apply #'aref dst s) (apply #'aref src s))
|
||||
(do ((i r (1- i)))
|
||||
((minusp i))
|
||||
(incf (nth i s))
|
||||
(when (< (nth i s) (nth i dims))
|
||||
(return))
|
||||
(setf (nth i s) 0)))))
|
||||
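In make-array above, :initial-contents is consumed by walking the nested sequences in row-major order and writing each leaf through row-major-aref, with the dims list acting as an odometer over the dimensions. A small Python sketch of the same row-major fill, done with recursion instead of the explicit stack used in the Lisp:

# Flatten nested initial contents in row-major order, checking that each
# level of nesting matches the declared dimension.
def fill_row_major(dimensions, initial_contents):
    flat = []

    def walk(node, dims):
        if not dims:
            flat.append(node)          # leaf element: next row-major slot
            return
        if len(node) != dims[0]:
            raise ValueError("initial-contents does not match dimensions")
        for child in node:
            walk(child, dims[1:])

    walk(initial_contents, list(dimensions))
    return flat

print(fill_row_major((2, 3), [[1, 2, 3], [4, 5, 6]]))  # [1, 2, 3, 4, 5, 6]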
1201
samples/Common Lisp/common.l
Normal file
File diff suppressed because it is too large
2
samples/Common Lisp/config.sexp
Normal file
@@ -0,0 +1,2 @@
((exe_name hello)
(link_order (world hello)))
103
samples/Common Lisp/rss.sexp
Normal file
103
samples/Common Lisp/rss.sexp
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
(:TURTLE
|
||||
|
||||
(:@PREFIX "rdf:" "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>")
|
||||
(:@PREFIX "owl:" "<http://www.w3.org/2002/07/owl#>")
|
||||
(:@PREFIX "dc:" "<http://purl.org/dc/elements/1.1/>")
|
||||
(:@PREFIX "xsd:" "<http://www.w3.org/2001/XMLSchema#>")
|
||||
(:@PREFIX "rdfs:" "<http://www.w3.org/2000/01/rdf-schema#>")
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/channel>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1="<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")
|
||||
(:OBJECTS
|
||||
(:QNAME "rdfs:Class")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "An RSS information channel.")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS
|
||||
(:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:label")
|
||||
(:OBJECTS
|
||||
(:STRING "Channel"))))
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/description>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1#)
|
||||
(:OBJECTS
|
||||
(:QNAME "rdf:Property")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A short text description of the subject.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Description")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:description"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/image>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS image.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Image"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/item>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS item.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Item"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/items>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "Points to a list of rss:item elements that are members of the subject channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Items"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/link>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The URL to which an HTML rendering of the subject will link.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Link")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/name>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The text input field's (variable) name.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Name"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/textinput>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS text input.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Text Input"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/title>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A descriptive title for the channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Title")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:title"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/url>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING
|
||||
"The URL of the image to used in the 'src' attribute of the channel's image tag when rendered as HTML.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "URL")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier")))))
|
||||
13
samples/DNS Zone/sample.arpa
Normal file
@@ -0,0 +1,13 @@
$ORIGIN 0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3.ip6.arpa.
$TTL 60
@ IN SOA ns root (
2002042901 ; SERIAL
7200 ; REFRESH
600 ; RETRY
36000000 ; EXPIRE
120 ; MINIMUM
)

NS ns.example.com.

c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0 PTR sip01.example.com.
12
samples/DNS Zone/sneaky.net.zone
Normal file
@@ -0,0 +1,12 @@
$TTL 3d
@ IN SOA root.localhost. root.sneaky.net. (
2015042907 ; serial
3d ; refresh
1h ; retry
12d ; expire
2h ; negative response TTL
)
IN NS root.localhost.
IN NS localhost. ; secondary name server is preferably externally maintained

www IN A 3.141.59.26
26
samples/Erlang/170-os-daemons.es
Executable file
@@ -0,0 +1,26 @@
#! /usr/bin/env escript

% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.

loop() ->
loop(io:read("")).

loop({ok, _}) ->
loop(io:read(""));
loop(eof) ->
stop;
loop({error, Reason}) ->
throw({error, Reason}).

main([]) ->
loop().
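The escript above simply loops on io:read("") until it sees eof, which keeps the external daemon process alive for exactly as long as the parent keeps its end of the pipe open. A minimal Python sketch of the same stay-alive-until-EOF behaviour:

# Consume stdin line by line and exit cleanly on end-of-file, like the
# loop({ok,_}) / loop(eof) clauses in the escript above.
import sys

def main():
    for _line in sys.stdin:   # the sample daemon ignores what it reads
        pass
    return 0                  # reached on EOF, mirroring the 'eof -> stop' clause

if __name__ == "__main__":
    sys.exit(main())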
856
samples/Erlang/elixir_parser.yrl
Normal file
856
samples/Erlang/elixir_parser.yrl
Normal file
@@ -0,0 +1,856 @@
|
||||
Nonterminals
|
||||
grammar expr_list
|
||||
expr container_expr block_expr access_expr
|
||||
no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr
|
||||
bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr
|
||||
unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr
|
||||
comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol
|
||||
add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol
|
||||
arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol
|
||||
type_op_eol rel_op_eol
|
||||
open_paren close_paren empty_paren eoe
|
||||
list list_args open_bracket close_bracket
|
||||
tuple open_curly close_curly
|
||||
bit_string open_bit close_bit
|
||||
map map_op map_close map_args map_expr struct_op
|
||||
assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc
|
||||
container_args_base container_args
|
||||
call_args_parens_expr call_args_parens_base call_args_parens parens_call
|
||||
call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr
|
||||
call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many
|
||||
call_args_no_parens_many_strict
|
||||
stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many
|
||||
kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw
|
||||
dot_op dot_alias dot_alias_container
|
||||
dot_identifier dot_op_identifier dot_do_identifier
|
||||
dot_paren_identifier dot_bracket_identifier
|
||||
do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list
|
||||
.
|
||||
|
||||
Terminals
|
||||
identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier
|
||||
paren_identifier do_identifier block_identifier
|
||||
fn 'end' aliases
|
||||
number atom atom_safe atom_unsafe bin_string list_string sigil
|
||||
dot_call_op op_identifier
|
||||
comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op
|
||||
type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op
|
||||
capture_op rel_op
|
||||
'true' 'false' 'nil' 'do' eol ';' ',' '.'
|
||||
'(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%'
|
||||
.
|
||||
|
||||
Rootsymbol grammar.
|
||||
|
||||
%% Two shift/reduce conflicts coming from call_args_parens.
|
||||
Expect 2.
|
||||
|
||||
%% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex
|
||||
%% Note though the operator => in practice has lower precedence than all others,
|
||||
%% its entry in the table is only to support the %{user | foo => bar} syntax.
|
||||
Left 5 do.
|
||||
Right 10 stab_op_eol. %% ->
|
||||
Left 20 ','.
|
||||
Nonassoc 30 capture_op_eol. %% &
|
||||
Left 40 in_match_op_eol. %% <-, \\ (allowed in matches along =)
|
||||
Right 50 when_op_eol. %% when
|
||||
Right 60 type_op_eol. %% ::
|
||||
Right 70 pipe_op_eol. %% |
|
||||
Right 80 assoc_op_eol. %% =>
|
||||
Right 90 match_op_eol. %% =
|
||||
Left 130 or_op_eol. %% ||, |||, or
|
||||
Left 140 and_op_eol. %% &&, &&&, and
|
||||
Left 150 comp_op_eol. %% ==, !=, =~, ===, !==
|
||||
Left 160 rel_op_eol. %% <, >, <=, >=
|
||||
Left 170 arrow_op_eol. %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|>
|
||||
Left 180 in_op_eol. %% in
|
||||
Left 190 three_op_eol. %% ^^^
|
||||
Right 200 two_op_eol. %% ++, --, .., <>
|
||||
Left 210 add_op_eol. %% +, -
|
||||
Left 220 mult_op_eol. %% *, /
|
||||
Nonassoc 300 unary_op_eol. %% +, -, !, ^, not, ~~~
|
||||
Left 310 dot_call_op.
|
||||
Left 310 dot_op. %% .
|
||||
Nonassoc 320 at_op_eol. %% @
|
||||
Nonassoc 330 dot_identifier.
|
||||
|
||||
%%% MAIN FLOW OF EXPRESSIONS
|
||||
|
||||
grammar -> eoe : nil.
|
||||
grammar -> expr_list : to_block('$1').
|
||||
grammar -> eoe expr_list : to_block('$2').
|
||||
grammar -> expr_list eoe : to_block('$1').
|
||||
grammar -> eoe expr_list eoe : to_block('$2').
|
||||
grammar -> '$empty' : nil.
|
||||
|
||||
% Note expressions are on reverse order
|
||||
expr_list -> expr : ['$1'].
|
||||
expr_list -> expr_list eoe expr : ['$3'|'$1'].
|
||||
|
||||
expr -> matched_expr : '$1'.
|
||||
expr -> no_parens_expr : '$1'.
|
||||
expr -> unmatched_expr : '$1'.
|
||||
|
||||
%% In Elixir we have three main call syntaxes: with parentheses,
|
||||
%% without parentheses and with do blocks. They are represented
|
||||
%% in the AST as matched, no_parens and unmatched.
|
||||
%%
|
||||
%% Calls without parentheses are further divided according to how
|
||||
%% problematic they are:
|
||||
%%
|
||||
%% (a) no_parens_one: a call with one unproblematic argument
|
||||
%% (e.g. `f a` or `f g a` and similar) (includes unary operators)
|
||||
%%
|
||||
%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`)
|
||||
%%
|
||||
%% (c) no_parens_one_ambig: a call with one argument which is
|
||||
%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b`
|
||||
%% or `f g h a, b` and similar)
|
||||
%%
|
||||
%% Note, in particular, that no_parens_one_ambig expressions are
|
||||
%% ambiguous and are interpreted such that the outer function has
|
||||
%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather
|
||||
%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig.
|
||||
%%
|
||||
%% The distinction is required because we can't, for example, have
|
||||
%% a function call with a do block as argument inside another do
|
||||
%% block call, unless there are parentheses:
|
||||
%%
|
||||
%% if if true do true else false end do #=> invalid
|
||||
%% if(if true do true else false end) do #=> valid
|
||||
%%
|
||||
%% Similarly, it is not possible to nest calls without parentheses
|
||||
%% if their arity is more than 1:
|
||||
%%
|
||||
%% foo a, bar b, c #=> invalid
|
||||
%% foo(a, bar b, c) #=> invalid
|
||||
%% foo bar a, b #=> valid
|
||||
%% foo a, bar(b, c) #=> valid
|
||||
%%
|
||||
%% So the different grammar rules need to take into account
|
||||
%% if calls without parentheses are do blocks in particular
|
||||
%% segments and act accordingly.
|
||||
matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2').
|
||||
matched_expr -> no_parens_one_expr : '$1'.
|
||||
matched_expr -> no_parens_zero_expr : '$1'.
|
||||
matched_expr -> access_expr : '$1'.
|
||||
matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2').
|
||||
|
||||
unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2').
|
||||
unmatched_expr -> block_expr : '$1'.
|
||||
|
||||
no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
|
||||
no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2').
|
||||
no_parens_expr -> no_parens_one_ambig_expr : '$1'.
|
||||
no_parens_expr -> no_parens_many_expr : '$1'.
|
||||
|
||||
block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3').
|
||||
block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4').
|
||||
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
|
||||
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').
|
||||
|
||||
matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}.
|
||||
matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
|
||||
%% Warn for no parens subset
|
||||
matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
|
||||
unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}.
|
||||
|
||||
no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
|
||||
%% Warn for no parens subset
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
|
||||
|
||||
%% Allow when (and only when) with keywords
|
||||
no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.
|
||||
|
||||
no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
|
||||
no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
|
||||
|
||||
no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
|
||||
no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
|
||||
|
||||
no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
|
||||
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
|
||||
no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil).
|
||||
no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil).
|
||||
|
||||
%% From this point on, we just have constructs that can be
|
||||
%% used with the access syntax. Notice that (dot_)identifier
|
||||
%% is not included in this list simply because the tokenizer
|
||||
%% marks identifiers followed by brackets as bracket_identifier.
|
||||
access_expr -> bracket_at_expr : '$1'.
|
||||
access_expr -> bracket_expr : '$1'.
|
||||
access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')).
|
||||
access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')).
|
||||
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
|
||||
access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')).
|
||||
access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')).
|
||||
access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')).
|
||||
access_expr -> open_paren ';' close_paren : build_stab([]).
|
||||
access_expr -> empty_paren : nil.
|
||||
access_expr -> number : ?exprs('$1').
|
||||
access_expr -> list : element(1, '$1').
|
||||
access_expr -> map : '$1'.
|
||||
access_expr -> tuple : '$1'.
|
||||
access_expr -> 'true' : ?id('$1').
|
||||
access_expr -> 'false' : ?id('$1').
|
||||
access_expr -> 'nil' : ?id('$1').
|
||||
access_expr -> bin_string : build_bin_string('$1').
|
||||
access_expr -> list_string : build_list_string('$1').
|
||||
access_expr -> bit_string : '$1'.
|
||||
access_expr -> sigil : build_sigil('$1').
|
||||
access_expr -> max_expr : '$1'.
|
||||
|
||||
%% Aliases and properly formed calls. Used by map_expr.
|
||||
max_expr -> atom : ?exprs('$1').
|
||||
max_expr -> atom_safe : build_quoted_atom('$1', true).
|
||||
max_expr -> atom_unsafe : build_quoted_atom('$1', false).
|
||||
max_expr -> parens_call call_args_parens : build_identifier('$1', '$2').
|
||||
max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3').
|
||||
max_expr -> dot_alias : '$1'.
|
||||
|
||||
bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2').
|
||||
bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2').
|
||||
bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2').
|
||||
|
||||
bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1', nil), '$2').
|
||||
bracket_expr -> access_expr bracket_arg : build_access('$1', '$2').
|
||||
|
||||
bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg :
|
||||
build_access(build_unary_op('$1', build_identifier('$2', nil)), '$3').
|
||||
bracket_at_expr -> at_op_eol access_expr bracket_arg :
|
||||
build_access(build_unary_op('$1', '$2'), '$3').
|
||||
|
||||
%% Blocks
|
||||
|
||||
do_block -> do_eoe 'end' : [[{do, nil}]].
|
||||
do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]].
|
||||
do_block -> do_eoe block_list 'end' : [[{do, nil}|'$2']].
|
||||
do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].
|
||||
|
||||
eoe -> eol : '$1'.
|
||||
eoe -> ';' : '$1'.
|
||||
eoe -> eol ';' : '$1'.
|
||||
|
||||
fn_eoe -> 'fn' : '$1'.
|
||||
fn_eoe -> 'fn' eoe : '$1'.
|
||||
|
||||
do_eoe -> 'do' : '$1'.
|
||||
do_eoe -> 'do' eoe : '$1'.
|
||||
|
||||
end_eoe -> 'end' : '$1'.
|
||||
end_eoe -> eoe 'end' : '$2'.
|
||||
|
||||
block_eoe -> block_identifier : '$1'.
|
||||
block_eoe -> block_identifier eoe : '$1'.
|
||||
|
||||
stab -> stab_expr : ['$1'].
|
||||
stab -> stab eoe stab_expr : ['$3'|'$1'].
|
||||
|
||||
stab_eoe -> stab : '$1'.
|
||||
stab_eoe -> stab eoe : '$1'.
|
||||
|
||||
%% Here, `element(1, Token)` is the stab operator,
|
||||
%% while `element(2, Token)` is the expression.
|
||||
stab_expr -> expr :
|
||||
'$1'.
|
||||
stab_expr -> stab_op_eol_and_expr :
|
||||
build_op(element(1, '$1'), [], element(2, '$1')).
|
||||
stab_expr -> empty_paren stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), [], element(2, '$2')).
|
||||
stab_expr -> call_args_no_parens_all stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')).
|
||||
stab_expr -> stab_parens_many stab_op_eol_and_expr :
|
||||
build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')).
|
||||
stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr :
|
||||
build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')).
|
||||
|
||||
stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}.
|
||||
stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}.
|
||||
|
||||
block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}.
|
||||
block_item -> block_eoe : {?exprs('$1'), nil}.
|
||||
|
||||
block_list -> block_item : ['$1'].
|
||||
block_list -> block_item block_list : ['$1'|'$2'].
|
||||
|
||||
%% Helpers
|
||||
|
||||
open_paren -> '(' : '$1'.
|
||||
open_paren -> '(' eol : '$1'.
|
||||
close_paren -> ')' : '$1'.
|
||||
close_paren -> eol ')' : '$2'.
|
||||
|
||||
empty_paren -> open_paren ')' : '$1'.
|
||||
|
||||
open_bracket -> '[' : '$1'.
|
||||
open_bracket -> '[' eol : '$1'.
|
||||
close_bracket -> ']' : '$1'.
|
||||
close_bracket -> eol ']' : '$2'.
|
||||
|
||||
open_bit -> '<<' : '$1'.
|
||||
open_bit -> '<<' eol : '$1'.
|
||||
close_bit -> '>>' : '$1'.
|
||||
close_bit -> eol '>>' : '$2'.
|
||||
|
||||
open_curly -> '{' : '$1'.
|
||||
open_curly -> '{' eol : '$1'.
|
||||
close_curly -> '}' : '$1'.
|
||||
close_curly -> eol '}' : '$2'.
|
||||
|
||||
% Operators
|
||||
|
||||
add_op_eol -> add_op : '$1'.
|
||||
add_op_eol -> add_op eol : '$1'.
|
||||
add_op_eol -> dual_op : '$1'.
|
||||
add_op_eol -> dual_op eol : '$1'.
|
||||
|
||||
mult_op_eol -> mult_op : '$1'.
|
||||
mult_op_eol -> mult_op eol : '$1'.
|
||||
|
||||
two_op_eol -> two_op : '$1'.
|
||||
two_op_eol -> two_op eol : '$1'.
|
||||
|
||||
three_op_eol -> three_op : '$1'.
|
||||
three_op_eol -> three_op eol : '$1'.
|
||||
|
||||
pipe_op_eol -> pipe_op : '$1'.
|
||||
pipe_op_eol -> pipe_op eol : '$1'.
|
||||
|
||||
capture_op_eol -> capture_op : '$1'.
|
||||
capture_op_eol -> capture_op eol : '$1'.
|
||||
|
||||
unary_op_eol -> unary_op : '$1'.
|
||||
unary_op_eol -> unary_op eol : '$1'.
|
||||
unary_op_eol -> dual_op : '$1'.
|
||||
unary_op_eol -> dual_op eol : '$1'.
|
||||
|
||||
match_op_eol -> match_op : '$1'.
|
||||
match_op_eol -> match_op eol : '$1'.
|
||||
|
||||
and_op_eol -> and_op : '$1'.
|
||||
and_op_eol -> and_op eol : '$1'.
|
||||
|
||||
or_op_eol -> or_op : '$1'.
|
||||
or_op_eol -> or_op eol : '$1'.
|
||||
|
||||
in_op_eol -> in_op : '$1'.
|
||||
in_op_eol -> in_op eol : '$1'.
|
||||
|
||||
in_match_op_eol -> in_match_op : '$1'.
|
||||
in_match_op_eol -> in_match_op eol : '$1'.
|
||||
|
||||
type_op_eol -> type_op : '$1'.
|
||||
type_op_eol -> type_op eol : '$1'.
|
||||
|
||||
when_op_eol -> when_op : '$1'.
|
||||
when_op_eol -> when_op eol : '$1'.
|
||||
|
||||
stab_op_eol -> stab_op : '$1'.
|
||||
stab_op_eol -> stab_op eol : '$1'.
|
||||
|
||||
at_op_eol -> at_op : '$1'.
|
||||
at_op_eol -> at_op eol : '$1'.
|
||||
|
||||
comp_op_eol -> comp_op : '$1'.
|
||||
comp_op_eol -> comp_op eol : '$1'.
|
||||
|
||||
rel_op_eol -> rel_op : '$1'.
|
||||
rel_op_eol -> rel_op eol : '$1'.
|
||||
|
||||
arrow_op_eol -> arrow_op : '$1'.
|
||||
arrow_op_eol -> arrow_op eol : '$1'.
|
||||
|
||||
% Dot operator
|
||||
|
||||
dot_op -> '.' : '$1'.
|
||||
dot_op -> '.' eol : '$1'.
|
||||
|
||||
dot_identifier -> identifier : '$1'.
|
||||
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}.
|
||||
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
|
||||
dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3').
|
||||
|
||||
dot_alias_container -> open_curly '}' : [].
|
||||
dot_alias_container -> open_curly container_args close_curly : '$2'.
|
||||
|
||||
dot_op_identifier -> op_identifier : '$1'.
|
||||
dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_do_identifier -> do_identifier : '$1'.
|
||||
dot_do_identifier -> matched_expr dot_op do_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_bracket_identifier -> bracket_identifier : '$1'.
|
||||
dot_bracket_identifier -> matched_expr dot_op bracket_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
dot_paren_identifier -> paren_identifier : '$1'.
|
||||
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').
|
||||
|
||||
parens_call -> dot_paren_identifier : '$1'.
|
||||
parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls
|
||||
|
||||
% Function calls with no parentheses
|
||||
|
||||
call_args_no_parens_expr -> matched_expr : '$1'.
|
||||
call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
|
||||
|
||||
call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1'].
|
||||
call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1'].
|
||||
|
||||
call_args_no_parens_all -> call_args_no_parens_one : '$1'.
|
||||
call_args_no_parens_all -> call_args_no_parens_ambig : '$1'.
|
||||
call_args_no_parens_all -> call_args_no_parens_many : '$1'.
|
||||
|
||||
call_args_no_parens_one -> call_args_no_parens_kw : ['$1'].
|
||||
call_args_no_parens_one -> matched_expr : ['$1'].
|
||||
|
||||
call_args_no_parens_ambig -> no_parens_expr : ['$1'].
|
||||
|
||||
call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3'].
|
||||
call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1').
|
||||
call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']).
|
||||
|
||||
call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'.
|
||||
call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1').
|
||||
call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1').
|
||||
|
||||
stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2'].
|
||||
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'.
|
||||
|
||||
% Containers
|
||||
|
||||
container_expr -> matched_expr : '$1'.
|
||||
container_expr -> unmatched_expr : '$1'.
|
||||
container_expr -> no_parens_expr : throw_no_parens_container_strict('$1').
|
||||
|
||||
container_args_base -> container_expr : ['$1'].
|
||||
container_args_base -> container_args_base ',' container_expr : ['$3'|'$1'].
|
||||
|
||||
container_args -> container_args_base : lists:reverse('$1').
|
||||
container_args -> container_args_base ',' : lists:reverse('$1').
|
||||
container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']).
|
||||
|
||||
% Function calls with parentheses
|
||||
|
||||
call_args_parens_expr -> matched_expr : '$1'.
|
||||
call_args_parens_expr -> unmatched_expr : '$1'.
|
||||
call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
|
||||
|
||||
call_args_parens_base -> call_args_parens_expr : ['$1'].
|
||||
call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3'|'$1'].
|
||||
|
||||
call_args_parens -> empty_paren : [].
|
||||
call_args_parens -> open_paren no_parens_expr close_paren : ['$2'].
|
||||
call_args_parens -> open_paren kw close_paren : ['$2'].
|
||||
call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2').
|
||||
call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']).
|
||||
|
||||
% KV
|
||||
|
||||
kw_eol -> kw_identifier : ?exprs('$1').
|
||||
kw_eol -> kw_identifier eol : ?exprs('$1').
|
||||
kw_eol -> kw_identifier_safe : build_quoted_atom('$1', true).
|
||||
kw_eol -> kw_identifier_safe eol : build_quoted_atom('$1', true).
|
||||
kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false).
|
||||
kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false).
|
||||
|
||||
kw_base -> kw_eol container_expr : [{'$1', '$2'}].
|
||||
kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].
|
||||
|
||||
kw -> kw_base : reverse('$1').
|
||||
kw -> kw_base ',' : reverse('$1').
|
||||
|
||||
call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}.
|
||||
call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}.
|
||||
|
||||
call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
|
||||
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].
|
||||
|
||||
% Lists
|
||||
|
||||
list_args -> kw : '$1'.
|
||||
list_args -> container_args_base : reverse('$1').
|
||||
list_args -> container_args_base ',' : reverse('$1').
|
||||
list_args -> container_args_base ',' kw : reverse('$1', '$3').
|
||||
|
||||
list -> open_bracket ']' : build_list('$1', []).
|
||||
list -> open_bracket list_args close_bracket : build_list('$1', '$2').
|
||||
|
||||
% Tuple
|
||||
|
||||
tuple -> open_curly '}' : build_tuple('$1', []).
|
||||
tuple -> open_curly container_args close_curly : build_tuple('$1', '$2').
|
||||
|
||||
% Bitstrings
|
||||
|
||||
bit_string -> open_bit '>>' : build_bit('$1', []).
|
||||
bit_string -> open_bit container_args close_bit : build_bit('$1', '$2').
|
||||
|
||||
% Map and structs
|
||||
|
||||
%% Allow unquote/@something/aliases inside maps and structs.
|
||||
map_expr -> max_expr : '$1'.
|
||||
map_expr -> dot_identifier : build_identifier('$1', nil).
|
||||
map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').
|
||||
|
||||
assoc_op_eol -> assoc_op : '$1'.
|
||||
assoc_op_eol -> assoc_op eol : '$1'.
|
||||
|
||||
assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
|
||||
assoc_expr -> map_expr : '$1'.
|
||||
|
||||
assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
|
||||
assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
|
||||
|
||||
assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
|
||||
assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
|
||||
|
||||
assoc_base -> assoc_expr : ['$1'].
|
||||
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].
|
||||
|
||||
assoc -> assoc_base : reverse('$1').
|
||||
assoc -> assoc_base ',' : reverse('$1').
|
||||
|
||||
map_op -> '%{}' : '$1'.
|
||||
map_op -> '%{}' eol : '$1'.
|
||||
|
||||
map_close -> kw close_curly : '$1'.
|
||||
map_close -> assoc close_curly : '$1'.
|
||||
map_close -> assoc_base ',' kw close_curly : reverse('$1', '$3').
|
||||
|
||||
map_args -> open_curly '}' : build_map('$1', []).
|
||||
map_args -> open_curly map_close : build_map('$1', '$2').
|
||||
map_args -> open_curly assoc_update close_curly : build_map_update('$1', '$2', []).
|
||||
map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2', []).
|
||||
map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', '$4').
|
||||
map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []).
|
||||
|
||||
struct_op -> '%' : '$1'.
|
||||
|
||||
map -> map_op map_args : '$2'.
|
||||
map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}.
|
||||
map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}.
|
||||
|
||||
Erlang code.
|
||||
|
||||
-define(file(), get(elixir_parser_file)).
|
||||
-define(id(Token), element(1, Token)).
|
||||
-define(location(Token), element(2, Token)).
|
||||
-define(exprs(Token), element(3, Token)).
|
||||
-define(meta(Node), element(2, Node)).
|
||||
-define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')).
|
||||
|
||||
%% The following directive is needed for (significantly) faster
|
||||
%% compilation of the generated .erl file by the HiPE compiler
|
||||
-compile([{hipe, [{regalloc, linear_scan}]}]).
|
||||
-import(lists, [reverse/1, reverse/2]).
|
||||
|
||||
meta_from_token(Token, Counter) -> [{counter, Counter}|meta_from_token(Token)].
|
||||
meta_from_token(Token) -> meta_from_location(?location(Token)).
|
||||
|
||||
meta_from_location({Line, Column, EndColumn})
|
||||
when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}].
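%% Note: only the line number is kept in the metadata here; the column
%% information carried by the tokenizer location is dropped.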
|
||||
|
||||
%% Operators
|
||||
|
||||
build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
|
||||
{UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]};
|
||||
|
||||
build_op({_Kind, Location, Op}, Left, Right) ->
|
||||
{Op, meta_from_location(Location), [Left, Right]}.
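%% For example, the first clause above rewrites `not x in y` -- parsed as the
%% unary `not x` followed by `in y` -- into {'not', Meta, [{'in', Meta, [x, y]}]},
%% i.e. `not (x in y)`.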
|
||||
|
||||
build_unary_op({_Kind, Location, Op}, Expr) ->
|
||||
{Op, meta_from_location(Location), [Expr]}.
|
||||
|
||||
build_list(Marker, Args) ->
|
||||
{Args, ?location(Marker)}.
|
||||
|
||||
build_tuple(_Marker, [Left, Right]) ->
|
||||
{Left, Right};
|
||||
build_tuple(Marker, Args) ->
|
||||
{'{}', meta_from_token(Marker), Args}.
|
||||
|
||||
build_bit(Marker, Args) ->
|
||||
{'<<>>', meta_from_token(Marker), Args}.
|
||||
|
||||
build_map(Marker, Args) ->
|
||||
{'%{}', meta_from_token(Marker), Args}.
|
||||
|
||||
build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
|
||||
{'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.
|
||||
|
||||
%% Blocks
|
||||
|
||||
build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
|
||||
build_block([{unquote_splicing, _, Args}]=Exprs) when
|
||||
length(Args) =< 2 -> {'__block__', [], Exprs};
|
||||
build_block([Expr]) -> Expr;
|
||||
build_block(Exprs) -> {'__block__', [], Exprs}.
|
||||
|
||||
%% Dots
|
||||
|
||||
build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
|
||||
{'__aliases__', meta_from_token(Dot), Left ++ Right};
|
||||
|
||||
build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) ->
|
||||
throw_bad_atom(Token);
|
||||
|
||||
build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
|
||||
{'__aliases__', meta_from_token(Dot), [Other|Right]}.
|
||||
|
||||
build_dot_container(Dot, Left, Right) ->
|
||||
Meta = meta_from_token(Dot),
|
||||
{{'.', Meta, [Left, '{}']}, Meta, Right}.
|
||||
|
||||
build_dot(Dot, Left, Right) ->
|
||||
{'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}.
|
||||
|
||||
extract_identifier({Kind, _, Identifier}) when
|
||||
Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
|
||||
Kind == do_identifier; Kind == op_identifier ->
|
||||
Identifier.
|
||||
|
||||
%% Identifiers
|
||||
|
||||
build_nested_parens(Dot, Args1, Args2) ->
|
||||
Identifier = build_identifier(Dot, Args1),
|
||||
Meta = ?meta(Identifier),
|
||||
{Identifier, Meta, Args2}.
|
||||
|
||||
build_identifier({'.', Meta, _} = Dot, Args) ->
|
||||
FArgs = case Args of
|
||||
nil -> [];
|
||||
_ -> Args
|
||||
end,
|
||||
{Dot, Meta, FArgs};
|
||||
|
||||
build_identifier({op_identifier, Location, Identifier}, [Arg]) ->
|
||||
{Identifier, [{ambiguous_op, nil}|meta_from_location(Location)], [Arg]};
|
||||
|
||||
build_identifier({_, Location, Identifier}, Args) ->
|
||||
{Identifier, meta_from_location(Location), Args}.
|
||||
|
||||
%% Fn
|
||||
|
||||
build_fn(Op, [{'->', _, [_, _]}|_] = Stab) ->
|
||||
{fn, meta_from_token(Op), build_stab(Stab)};
|
||||
build_fn(Op, _Stab) ->
|
||||
throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'").
|
||||
|
||||
%% Access
|
||||
|
||||
build_access(Expr, {List, Location}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}.
|
||||
|
||||
%% Interpolation aware
|
||||
|
||||
build_sigil({sigil, Location, Sigil, Parts, Modifiers}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}.
|
||||
|
||||
build_bin_string({bin_string, _Location, [H]}) when is_binary(H) ->
|
||||
H;
|
||||
build_bin_string({bin_string, Location, Args}) ->
|
||||
{'<<>>', meta_from_location(Location), string_parts(Args)}.
|
||||
|
||||
build_list_string({list_string, _Location, [H]}) when is_binary(H) ->
|
||||
elixir_utils:characters_to_list(H);
|
||||
build_list_string({list_string, Location, Args}) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.
|
||||
|
||||
build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) ->
|
||||
Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
|
||||
build_quoted_atom({_, Location, Args}, Safe) ->
|
||||
Meta = meta_from_location(Location),
|
||||
{{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.
|
||||
|
||||
binary_to_atom_op(true) -> binary_to_existing_atom;
|
||||
binary_to_atom_op(false) -> binary_to_atom.
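%% Note: "safe" quoted atoms go through binary_to_existing_atom/2, so parsing
%% them cannot create new atoms at runtime; "unsafe" ones use binary_to_atom/2.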
|
||||
|
||||
string_parts(Parts) ->
|
||||
[string_part(Part) || Part <- Parts].
|
||||
string_part(Binary) when is_binary(Binary) ->
|
||||
Binary;
|
||||
string_part({Location, Tokens}) ->
|
||||
Form = string_tokens_parse(Tokens),
|
||||
Meta = meta_from_location(Location),
|
||||
{'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.
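%% For example, an interpolated segment #{expr} inside a binary string becomes
%% Kernel.to_string(expr)::binary within the <<>> built above.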
|
||||
|
||||
string_tokens_parse(Tokens) ->
|
||||
case parse(Tokens) of
|
||||
{ok, Forms} -> Forms;
|
||||
{error, _} = Error -> throw(Error)
|
||||
end.
|
||||
|
||||
%% Keywords
|
||||
|
||||
build_stab([{'->', Meta, [Left, Right]}|T]) ->
|
||||
build_stab(Meta, T, Left, [Right], []);
|
||||
|
||||
build_stab(Else) ->
|
||||
build_block(Else).
|
||||
|
||||
build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
|
||||
H = {'->', Old, [Marker, build_block(reverse(Temp))]},
|
||||
build_stab(New, T, Left, [Right], [H|Acc]);
|
||||
|
||||
build_stab(Meta, [H|T], Marker, Temp, Acc) ->
|
||||
build_stab(Meta, T, Marker, [H|Temp], Acc);
|
||||
|
||||
build_stab(Meta, [], Marker, Temp, Acc) ->
|
||||
H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
|
||||
reverse([H|Acc]).
|
||||
|
||||
%% Every time the parser sees a (unquote_splicing())
|
||||
%% it assumes that a block is being spliced, wrapping
|
||||
%% the splicing in a __block__. But in the stab clause,
|
||||
%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such
|
||||
%% case, we don't actually want the block, since it is
|
||||
%% an arg style call. unwrap_splice unwraps the splice
|
||||
%% from such blocks.
|
||||
unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
|
||||
Splice;
|
||||
|
||||
unwrap_splice(Other) -> Other.
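%% Illustrative example: a spliced head parsed as
%%   [{'__block__', [], [{unquote_splicing, Meta, [1, 2, 3]}]}]
%% is unwrapped to [{unquote_splicing, Meta, [1, 2, 3]}], so the splice is
%% treated as plain call arguments instead of a block.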
|
||||
|
||||
unwrap_when(Args) ->
|
||||
case elixir_utils:split_last(Args) of
|
||||
{Start, {'when', Meta, [_, _] = End}} ->
|
||||
[{'when', Meta, Start ++ End}];
|
||||
{_, _} ->
|
||||
Args
|
||||
end.
|
||||
|
||||
to_block([One]) -> One;
|
||||
to_block(Other) -> {'__block__', [], reverse(Other)}.
|
||||
|
||||
%% Warnings and errors
|
||||
|
||||
throw(Meta, Error, Token) ->
|
||||
Line =
|
||||
case lists:keyfind(line, 1, Meta) of
|
||||
{line, L} -> L;
|
||||
false -> 0
|
||||
end,
|
||||
throw({error, {Line, ?MODULE, [Error, Token]}}).
|
||||
|
||||
throw_bad_atom(Token) ->
|
||||
throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be "
|
||||
"part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'").
|
||||
|
||||
throw_no_parens_strict(Token) ->
|
||||
throw(meta_from_token(Token), "unexpected parentheses. If you are making a "
|
||||
"function call, do not insert spaces between the function name and the "
|
||||
"opening parentheses. Syntax error before: ", "'('").
|
||||
|
||||
throw_no_parens_many_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n"
|
||||
"This error happens when you have nested function calls without parentheses. "
|
||||
"For example:\n\n"
|
||||
" one a, two b, c, d\n\n"
|
||||
"In the example above, we don't know if the parameters \"c\" and \"d\" apply "
|
||||
"to the function \"one\" or \"two\". You can solve this by explicitly adding "
|
||||
"parentheses:\n\n"
|
||||
" one a, two(b, c, d)\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_no_parens_container_strict(Node) ->
|
||||
throw(?meta(Node),
|
||||
"unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n"
|
||||
"This error may happen when you forget a comma in a list or other container:\n\n"
|
||||
" [a, b c, d]\n\n"
|
||||
"Or when you have ambiguous calls:\n\n"
|
||||
" [one, two three, four, five]\n\n"
|
||||
"In the example above, we don't know if the parameters \"four\" and \"five\" "
|
||||
"belongs to the list or the function \"two\". You can solve this by explicitly "
|
||||
"adding parentheses:\n\n"
|
||||
" [one, two(three, four), five]\n\n"
|
||||
"Elixir cannot compile otherwise. Syntax error before: ", "','").
|
||||
|
||||
throw_invalid_kw_identifier({_, _, do} = Token) ->
|
||||
throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'");
|
||||
throw_invalid_kw_identifier({_, _, KW} = Token) ->
|
||||
throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':").
|
||||
|
||||
%% TODO: Make those warnings errors.
|
||||
warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
"an expression is always required on the right side of ->. "
|
||||
"Please provide a value after ->").
|
||||
|
||||
warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_|_], [_|_]}) ->
|
||||
elixir_errors:warn(Line, ?file(),
|
||||
io_lib:format(
|
||||
"you are piping into a function call without parentheses, which may be ambiguous. "
|
||||
"Please wrap the function you are piping into in parentheses. For example:\n\n"
|
||||
" foo 1 ~ts bar 2 ~ts baz 3\n\n"
|
||||
"Should be written as:\n\n"
|
||||
" foo(1) ~ts bar(2) ~ts baz(3)\n",
|
||||
[Op, Op, Op, Op]
|
||||
)
|
||||
);
|
||||
warn_pipe(_Token, _) ->
|
||||
ok.
|
||||
256
samples/Erlang/lfe_scan.xrl
Normal file
@@ -0,0 +1,256 @@
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.xrl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
Definitions.
|
||||
B = [01]
|
||||
O = [0-7]
|
||||
D = [0-9]
|
||||
H = [0-9a-fA-F]
|
||||
B36 = [0-9a-zA-Z]
|
||||
U = [A-Z]
|
||||
L = [a-z]
|
||||
A = ({U}|{L})
|
||||
DEL = [][()}{";\000-\s]
|
||||
SYM = [^][()}{";\000-\s\177-\237]
|
||||
SSYM = [^][()}{"|;#`',\000-\s\177-\237]
|
||||
WS = ([\000-\s]|;[^\n]*)
|
||||
|
||||
Rules.
|
||||
%% Bracketed Comments using #| foo |#
|
||||
#{D}*\|[^\|]*\|+([^#\|][^\|]*\|+)*# :
|
||||
block_comment(string:substr(TokenChars, 3)).
|
||||
|
||||
%% Separators
|
||||
' : {token,{'\'',TokenLine}}.
|
||||
` : {token,{'`',TokenLine}}.
|
||||
, : {token,{',',TokenLine}}.
|
||||
,@ : {token,{',@',TokenLine}}.
|
||||
\. : {token,{'.',TokenLine}}.
|
||||
[][()}{] : {token,{list_to_atom(TokenChars),TokenLine}}.
|
||||
|
||||
#{D}*[bB]\( : {token,{'#B(',TokenLine}}.
|
||||
#{D}*[mM]\( : {token,{'#M(',TokenLine}}.
|
||||
#{D}*\( : {token,{'#(',TokenLine}}.
|
||||
#{D}*\. : {token,{'#.',TokenLine}}.
|
||||
|
||||
#{D}*` : {token,{'#`',TokenLine}}.
|
||||
#{D}*; : {token,{'#;',TokenLine}}.
|
||||
#{D}*, : {token,{'#,',TokenLine}}.
|
||||
#{D}*,@ : {token,{'#,@',TokenLine}}.
|
||||
|
||||
%% Characters
|
||||
#{D}*\\(x{H}+|.) : char_token(skip_past(TokenChars, $\\, $\\), TokenLine).
|
||||
|
||||
%% Based numbers
|
||||
#{D}*\*{SYM}+ : base_token(skip_past(TokenChars, $*, $*), 2, TokenLine).
|
||||
#{D}*[bB]{SYM}+ : base_token(skip_past(TokenChars, $b, $B), 2, TokenLine).
|
||||
#{D}*[oO]{SYM}+ : base_token(skip_past(TokenChars, $o, $O), 8, TokenLine).
|
||||
#{D}*[dD]{SYM}+ : base_token(skip_past(TokenChars, $d, $D), 10, TokenLine).
|
||||
#{D}*[xX]{SYM}+ : base_token(skip_past(TokenChars, $x, $X), 16, TokenLine).
|
||||
#{D}*[rR]{SYM}+ :
|
||||
%% Scan over digit chars to get base.
|
||||
{Base,[_|Ds]} = base1(tl(TokenChars), 10, 0),
|
||||
base_token(Ds, Base, TokenLine).
|
||||
|
||||
%% String
|
||||
"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
{token,{string,TokenLine,chars(S)}}.
|
||||
%% Binary string
|
||||
#"(\\x{H}+;|\\.|[^"\\])*" :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 3, TokenLen - 3),
|
||||
Bin = unicode:characters_to_binary(chars(S), utf8, utf8),
|
||||
{token,{binary,TokenLine,Bin}}.
|
||||
%% Symbols
|
||||
\|(\\x{H}+;|\\.|[^|\\])*\| :
|
||||
%% Strip quotes.
|
||||
S = string:substr(TokenChars, 2, TokenLen - 2),
|
||||
symbol_token(chars(S), TokenLine).
|
||||
%% Funs
|
||||
#'{SSYM}{SYM}*/{D}+ :
|
||||
%% Strip sharpsign single-quote.
|
||||
FunStr = string:substr(TokenChars,3),
|
||||
{token,{'#\'',TokenLine,FunStr}}.
|
||||
%% Atoms
|
||||
[+-]?{D}+ :
|
||||
case catch {ok,list_to_integer(TokenChars)} of
|
||||
{ok,I} -> {token,{number,TokenLine,I}};
|
||||
_ -> {error,"illegal integer"}
|
||||
end.
|
||||
[+-]?{D}+\.{D}+([eE][+-]?{D}+)? :
|
||||
case catch {ok,list_to_float(TokenChars)} of
|
||||
{ok,F} -> {token,{number,TokenLine,F}};
|
||||
_ -> {error,"illegal float"}
|
||||
end.
|
||||
{SSYM}{SYM}* :
|
||||
symbol_token(TokenChars, TokenLine).
|
||||
{WS}+ : skip_token.
|
||||
|
||||
Erlang code.
|
||||
%% Copyright (c) 2008-2013 Robert Virding
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
|
||||
%% File : lfe_scan.erl
|
||||
%% Author : Robert Virding
|
||||
%% Purpose : Token definitions for Lisp Flavoured Erlang.
|
||||
|
||||
-export([start_symbol_char/1,symbol_char/1]).
|
||||
|
||||
-import(string, [substr/2,substr/3]).
|
||||
|
||||
%% start_symbol_char(Char) -> true | false.
|
||||
%% symbol_char(Char) -> true | false.
|
||||
%% Define start symbol chars and symbol chars.
|
||||
|
||||
start_symbol_char($#) -> false;
|
||||
start_symbol_char($`) -> false;
|
||||
start_symbol_char($') -> false; %'
|
||||
start_symbol_char($,) -> false;
|
||||
start_symbol_char($|) -> false; %Symbol quote character
|
||||
start_symbol_char(C) -> symbol_char(C).
|
||||
|
||||
symbol_char($() -> false;
|
||||
symbol_char($)) -> false;
|
||||
symbol_char($[) -> false;
|
||||
symbol_char($]) -> false;
|
||||
symbol_char(${) -> false;
|
||||
symbol_char($}) -> false;
|
||||
symbol_char($") -> false;
|
||||
symbol_char($;) -> false;
|
||||
symbol_char(C) -> ((C > $\s) and (C =< $~)) orelse (C > $\240).
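%% That is, printable ASCII characters above the space character, plus any
%% character above \240, are symbol characters.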
|
||||
|
||||
%% symbol_token(Chars, Line) -> {token,{symbol,Line,Symbol}} | {error,E}.
|
||||
%% Build a symbol from list of legal characters, else error.
|
||||
|
||||
symbol_token(Cs, L) ->
|
||||
case catch {ok,list_to_atom(Cs)} of
|
||||
{ok,S} -> {token,{symbol,L,S}};
|
||||
_ -> {error,"illegal symbol"}
|
||||
end.
|
||||
|
||||
%% base_token(Chars, Base, Line) -> Integer.
|
||||
%% Convert a string of Base characters into a number. We only allow
|
||||
%% base between 2 and 36, and an optional sign character first.
|
||||
|
||||
base_token(_, B, _) when B < 2; B > 36 ->
|
||||
{error,"illegal number base"};
|
||||
base_token([$+|Cs], B, L) -> base_token(Cs, B, +1, L);
|
||||
base_token([$-|Cs], B, L) -> base_token(Cs, B, -1, L);
|
||||
base_token(Cs, B, L) -> base_token(Cs, B, +1, L).
|
||||
|
||||
base_token(Cs, B, S, L) ->
|
||||
case base1(Cs, B, 0) of
|
||||
{N,[]} -> {token,{number,L,S*N}};
|
||||
{_,_} -> {error,"illegal based number"}
|
||||
end.
|
||||
|
||||
base1([C|Cs], Base, SoFar) when C >= $0, C =< $9, C < Base + $0 ->
|
||||
Next = SoFar * Base + (C - $0),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $a, C =< $z, C < Base + $a - 10 ->
|
||||
Next = SoFar * Base + (C - $a + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], Base, SoFar) when C >= $A, C =< $Z, C < Base + $A - 10 ->
|
||||
Next = SoFar * Base + (C - $A + 10),
|
||||
base1(Cs, Base, Next);
|
||||
base1([C|Cs], _Base, SoFar) -> {SoFar,[C|Cs]};
|
||||
base1([], _Base, N) -> {N,[]}.
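%% Illustrative examples: base_token("ff", 16, 1) returns {token,{number,1,255}},
%% while base_token("fg", 16, 1) stops at $g and returns
%% {error,"illegal based number"}.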
|
||||
|
||||
-define(IS_UNICODE(C), ((C >= 0) and (C =< 16#10FFFF))).
|
||||
|
||||
%% char_token(InputChars, Line) -> {token,{number,L,N}} | {error,E}.
|
||||
%% Convert an input string into the corresponding character. For a
|
||||
%% sequence of hex characters we check that the resulting code is in the
|
||||
%% unicode range.
|
||||
|
||||
char_token([$x,C|Cs], L) ->
|
||||
case base1([C|Cs], 16, 0) of
|
||||
{N,[]} when ?IS_UNICODE(N) -> {token,{number,L,N}};
|
||||
_ -> {error,"illegal character"}
|
||||
end;
|
||||
char_token([C], L) -> {token,{number,L,C}}.
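%% Illustrative examples: char_token("x41", 1) yields {token,{number,1,65}}
%% ($A), and char_token("a", 1) yields {token,{number,1,97}}.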
|
||||
|
||||
%% chars(InputChars) -> Chars.
|
||||
%% Convert an input string into the corresponding string characters.
|
||||
%% We know that the input string is correct.
|
||||
|
||||
chars([$\\,$x,C|Cs0]) ->
|
||||
case hex_char(C) of
|
||||
true ->
|
||||
case base1([C|Cs0], 16, 0) of
|
||||
{N,[$;|Cs1]} -> [N|chars(Cs1)];
|
||||
_Other -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
false -> [escape_char($x)|chars([C|Cs0])]
|
||||
end;
|
||||
chars([$\\,C|Cs]) -> [escape_char(C)|chars(Cs)];
|
||||
chars([C|Cs]) -> [C|chars(Cs)];
|
||||
chars([]) -> [].
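%% Illustrative examples: chars("a\\nb") returns [$a,$\n,$b], and
%% chars("\\x3bb;") decodes the \x...; escape to [16#3bb].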
|
||||
|
||||
hex_char(C) when C >= $0, C =< $9 -> true;
|
||||
hex_char(C) when C >= $a, C =< $f -> true;
|
||||
hex_char(C) when C >= $A, C =< $F -> true;
|
||||
hex_char(_) -> false.
|
||||
|
||||
escape_char($b) -> $\b; %\b = BS
|
||||
escape_char($t) -> $\t; %\t = TAB
|
||||
escape_char($n) -> $\n; %\n = LF
|
||||
escape_char($v) -> $\v; %\v = VT
|
||||
escape_char($f) -> $\f; %\f = FF
|
||||
escape_char($r) -> $\r; %\r = CR
|
||||
escape_char($e) -> $\e; %\e = ESC
|
||||
escape_char($s) -> $\s; %\s = SPC
|
||||
escape_char($d) -> $\d; %\d = DEL
|
||||
escape_char(C) -> C.
|
||||
|
||||
%% Block Comment:
|
||||
%% Provide a sensible error when people attempt to include nested
|
||||
%% comments because currently the parser cannot process them without
|
||||
%% a rebuild. But simply exploding on a '#|' is not going to be that
|
||||
%% helpful.
|
||||
|
||||
block_comment(TokenChars) ->
|
||||
%% Check we're not opening another comment block.
|
||||
case string:str(TokenChars, "#|") of
|
||||
0 -> skip_token; %% No nesting found
|
||||
_ -> {error, "illegal nested block comment"}
|
||||
end.
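%% Illustrative example: "#| just a comment |#" is skipped, while
%% "#| outer #| inner |# |#" is rejected with "illegal nested block comment".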
|
||||
|
||||
%% skip_until(String, Char1, Char2) -> String.
|
||||
%% skip_past(String, Char1, Char2) -> String.
|
||||
|
||||
%% skip_until([C|_]=Cs, C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
%% skip_until([_|Cs], C1, C2) -> skip_until(Cs, C1, C2);
|
||||
%% skip_until([], _, _) -> [].
|
||||
|
||||
skip_past([C|Cs], C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
|
||||
skip_past([_|Cs], C1, C2) -> skip_past(Cs, C1, C2);
|
||||
skip_past([], _, _) -> [].
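%% Illustrative example: skip_past("#b1010", $b, $B) returns "1010", matching
%% its use in the based-number rules above.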
|
||||
1
samples/Erlang/release
Normal file → Executable file
@@ -119,4 +119,3 @@ execute_overlay([{copy, In, Out} | Rest], Vars, BaseDir, TargetDir) ->
|
||||
|
||||
exit_code(ExitCode) ->
|
||||
erlang:halt(ExitCode, [{flush, true}]).
|
||||
|
||||
|
||||
104
samples/Erlang/single-context.es
Executable file
@@ -0,0 +1,104 @@
|
||||
#! /usr/bin/env escript
|
||||
% This file is part of Emonk released under the MIT license.
|
||||
% See the LICENSE file for more information.
|
||||
|
||||
main([]) ->
|
||||
start(64, 1000);
|
||||
main([N]) ->
|
||||
start(list_to_integer(N), 1000);
|
||||
main([N, M]) ->
|
||||
start(list_to_integer(N), list_to_integer(M)).
|
||||
|
||||
|
||||
start(N, M) ->
|
||||
code:add_pathz("test"),
|
||||
code:add_pathz("ebin"),
|
||||
{ok, Ctx} = emonk:create_ctx(),
|
||||
{ok, undefined} = emonk:eval(Ctx, js()),
|
||||
run(Ctx, N, M),
|
||||
wait(N).
|
||||
|
||||
run(_, 0, _) ->
|
||||
ok;
|
||||
run(Ctx, N, M) ->
|
||||
Self = self(),
|
||||
Pid = spawn(fun() -> do_js(Self, Ctx, M) end),
|
||||
io:format("Spawned: ~p~n", [Pid]),
|
||||
run(Ctx, N-1, M).
|
||||
|
||||
wait(0) ->
|
||||
ok;
|
||||
wait(N) ->
|
||||
receive
|
||||
{finished, Pid} -> ok
|
||||
end,
|
||||
io:format("Finished: ~p~n", [Pid]),
|
||||
wait(N-1).
|
||||
|
||||
do_js(Parent, _, 0) ->
|
||||
Parent ! {finished, self()};
|
||||
do_js(Parent, Ctx, M) ->
|
||||
io:format("Running: ~p~n", [M]),
|
||||
Test = random_test(),
|
||||
{ok, [Resp]} = emonk:call(Ctx, <<"f">>, [Test]),
|
||||
Sorted = sort(Resp),
|
||||
true = Test == Sorted,
|
||||
do_js(Parent, Ctx, M-1).
|
||||
|
||||
js() ->
|
||||
<<"var f = function(x) {return [x];};">>.
|
||||
|
||||
random_test() ->
|
||||
Tests = [
|
||||
null,
|
||||
true,
|
||||
false,
|
||||
1,
|
||||
-1,
|
||||
3.1416,
|
||||
-3.1416,
|
||||
12.0e10,
|
||||
1.234E+10,
|
||||
-1.234E-10,
|
||||
10.0,
|
||||
123.456,
|
||||
10.0,
|
||||
<<"foo">>,
|
||||
<<"foo", 5, "bar">>,
|
||||
<<"">>,
|
||||
<<"\n\n\n">>,
|
||||
<<"\" \b\f\r\n\t\"">>,
|
||||
{[]},
|
||||
{[{<<"foo">>, <<"bar">>}]},
|
||||
{[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
|
||||
[],
|
||||
[[]],
|
||||
[1, <<"foo">>],
|
||||
{[{<<"foo">>, [123]}]},
|
||||
{[{<<"foo">>, [1, 2, 3]}]},
|
||||
{[{<<"foo">>, {[{<<"bar">>, true}]}}]},
|
||||
{[
|
||||
{<<"foo">>, []},
|
||||
{<<"bar">>, {[{<<"baz">>, true}]}}, {<<"alice">>, <<"bob">>}
|
||||
]},
|
||||
[-123, <<"foo">>, {[{<<"bar">>, []}]}, null]
|
||||
],
|
||||
{_, [Test | _]} = lists:split(random:uniform(length(Tests)) - 1, Tests),
|
||||
sort(Test).
|
||||
|
||||
sort({Props}) ->
|
||||
objsort(Props, []);
|
||||
sort(List) when is_list(List) ->
|
||||
lstsort(List, []);
|
||||
sort(Other) ->
|
||||
Other.
|
||||
|
||||
objsort([], Acc) ->
|
||||
{lists:sort(Acc)};
|
||||
objsort([{K,V} | Rest], Acc) ->
|
||||
objsort(Rest, [{K, sort(V)} | Acc]).
|
||||
|
||||
lstsort([], Acc) ->
|
||||
lists:reverse(Acc);
|
||||
lstsort([Val | Rest], Acc) ->
|
||||
lstsort(Rest, [sort(Val) | Acc]).
|
||||
54
samples/FLUX/gameserver.fx
Normal file
@@ -0,0 +1,54 @@
|
||||
typedef engine isEngineMessage;
|
||||
typedef turn isTurnMessage;
|
||||
typedef connect isConnectMessage;
|
||||
typedef disconnect isDisconnectMessage;
|
||||
|
||||
ClientMessage(char* data) => ();
|
||||
ParseMessage(char* data) => (int type, int client, char* data);
|
||||
ReadMessage(int type, int client, char* data) => ();
|
||||
|
||||
ParseEngine(int type, int client, char* data) => (int client, int direction);
|
||||
DoEngine(int client, int direction) => ();
|
||||
|
||||
ParseTurn(int type, int client, char* data) => (int client, int direction);
|
||||
DoTurn(int client, int direction) => ();
|
||||
|
||||
ParseConnect(int type, int client, char* data)
|
||||
=> (int client, char* host, int port);
|
||||
DoConnect(int client, char* host, int port) => ();
|
||||
|
||||
ParseDisconnect(int type, int client, char* data) => (int client);
|
||||
DoDisconnect(int client) => ();
|
||||
|
||||
UpdateBoard(ClientList clients) => (ClientList clients);
|
||||
SendData(ClientList clients) => ();
|
||||
|
||||
DoUpdate(ClientList clients) => ();
|
||||
|
||||
DataTimer() => (ClientList clients);
|
||||
|
||||
GetClients() => (ClientList clients);
|
||||
|
||||
Wait(ClientList clients) => (ClientList clients);
|
||||
|
||||
Listen () => (char* data);
|
||||
|
||||
source Listen => ClientMessage;
|
||||
source DataTimer => DoUpdate;
|
||||
|
||||
DataTimer = GetClients -> Wait;
|
||||
|
||||
DoUpdate = UpdateBoard -> SendData;
|
||||
|
||||
ClientMessage=ParseMessage -> ReadMessage;
|
||||
|
||||
ReadMessage:[engine, _, _] = ParseEngine -> DoEngine;
|
||||
ReadMessage:[turn, _, _] = ParseTurn -> DoTurn;
|
||||
ReadMessage:[connect, _, _] = ParseConnect -> DoConnect;
|
||||
ReadMessage:[disconnect, _, _] = ParseDisconnect -> DoDisconnect;
|
||||
|
||||
atomic GetClients:{client_lock};
|
||||
atomic DoConnect:{client_lock};
|
||||
atomic DoDisconnect:{client_lock};
|
||||
|
||||
|
||||
44
samples/FLUX/imageserver.fx
Normal file
@@ -0,0 +1,44 @@
|
||||
typedef xml TestXML;
|
||||
typedef html TestHTML;
|
||||
|
||||
typedef inCache TestInCache;
|
||||
|
||||
Page (int socket) => ();
|
||||
|
||||
ReadRequest (int socket) => (int socket, bool close, image_tag *request);
|
||||
|
||||
CheckCache (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Handler (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Complete (int socket, bool close, image_tag *request) => ();
|
||||
|
||||
ReadInFromDisk (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request, __u8 *rgb_data);
|
||||
|
||||
Write (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Compress(int socket, bool close, image_tag *request, __u8 *rgb_data)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
StoreInCache(int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
Listen ()
|
||||
=> (int socket);
|
||||
|
||||
source Listen => Page;
|
||||
|
||||
Handler:[_, _, inCache]=;
|
||||
Handler:[_, _, _]=ReadInFromDisk -> Compress -> StoreInCache;
|
||||
|
||||
Page = ReadRequest -> CheckCache-> Handler -> Write -> Complete;
|
||||
|
||||
atomic CheckCache:{cache};
|
||||
atomic StoreInCache:{cache};
|
||||
atomic Complete:{cache};
|
||||
|
||||
handle error ReadInFromDisk => FourOhFor;
|
||||
151
samples/FLUX/mbittorrent.fx
Normal file
@@ -0,0 +1,151 @@
|
||||
typedef choke TestChoke;
|
||||
typedef unchoke TestUnchoke;
|
||||
typedef interested TestInterested;
|
||||
typedef uninterested TestUninterested;
|
||||
typedef request TestRequest;
|
||||
typedef cancel TestCancel;
|
||||
typedef piece TestPiece;
|
||||
typedef bitfield TestBitfield;
|
||||
typedef have TestHave;
|
||||
typedef piececomplete TestPieceComplete;
|
||||
|
||||
CheckinWithTracker (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
SendRequestToTracker (torrent_data_t *tdata)
|
||||
=> (torrent_data_t *tdata, int socket);
|
||||
|
||||
GetTrackerResponse (torrent_data_t *tdata, int socket)
|
||||
=> ();
|
||||
|
||||
UpdateChokeList (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
PickChoked (torrent_data_t *tdata)
|
||||
=> (torrent_data_t *tdata, chokelist_t clist);
|
||||
|
||||
SendChokeUnchoke (torrent_data_t *tdata, chokelist_t clist)
|
||||
=> ();
|
||||
|
||||
SetupConnection (torrent_data_t *tdata, int socket)
|
||||
=> ();
|
||||
|
||||
Handshake (torrent_data_t *tdata, int socket)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendBitfield (torrent_data_t *tdata, client_data_t *client)
|
||||
=> ();
|
||||
|
||||
Message (torrent_data_t *tdata, client_data_t *client)
|
||||
=> ();
|
||||
|
||||
ReadMessage (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload);
|
||||
|
||||
HandleMessage (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
MessageDone (client_data_t *client)
|
||||
=> ();
|
||||
|
||||
CompletePiece (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
VerifyPiece (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int piece);
|
||||
|
||||
SendHave (torrent_data_t *tdata, client_data_t *client, int piece)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendUninterested (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
Choke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Cancel (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Interested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Uninterested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Bitfield (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Unchoke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
SendRequest (torrent_data_t *tdata, client_data_t *client)
|
||||
=> (client_data_t *client);
|
||||
|
||||
Have (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
Piece (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (torrent_data_t *tdata, client_data_t *client, int piece);
|
||||
|
||||
Request (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
|
||||
=> (client_data_t *client);
|
||||
|
||||
SendKeepAlives (torrent_data_t *tdata)
|
||||
=> ();
|
||||
|
||||
GetClients ()
|
||||
=> (int maxfd, fd_set *fds);
|
||||
|
||||
SelectSockets (int maxfd, fd_set *fds)
|
||||
=> (fd_set *fds);
|
||||
|
||||
CheckSockets (fd_set *fds)
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
TrackerTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
ChokeTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
Connect ()
|
||||
=> (torrent_data_t *tdata, int socket);
|
||||
|
||||
KeepAliveTimer ()
|
||||
=> (torrent_data_t *tdata);
|
||||
|
||||
Listen ()
|
||||
=> (torrent_data_t *tdata, client_data_t *client);
|
||||
|
||||
source TrackerTimer => CheckinWithTracker;
|
||||
source ChokeTimer => UpdateChokeList;
|
||||
source Connect => SetupConnection;
|
||||
source Listen => Message;
|
||||
source KeepAliveTimer => SendKeepAlives;
|
||||
|
||||
Listen = GetClients -> SelectSockets -> CheckSockets;
|
||||
CheckinWithTracker = SendRequestToTracker -> GetTrackerResponse;
|
||||
UpdateChokeList = PickChoked -> SendChokeUnchoke;
|
||||
SetupConnection = Handshake -> SendBitfield;
|
||||
Message = ReadMessage -> HandleMessage -> MessageDone;
|
||||
|
||||
CompletePiece:[_, _, piececomplete] = VerifyPiece -> SendHave -> SendUninterested;
|
||||
|
||||
HandleMessage:[_, _, choke, _, _] = Choke;
|
||||
HandleMessage:[_, _, unchoke, _, _] = Unchoke -> SendRequest;
|
||||
HandleMessage:[_, _, interested, _, _] = Interested;
|
||||
|
||||
HandleMessage:[_, _, uninterested, _, _] = Uninterested;
|
||||
HandleMessage:[_, _, request, _, _] = Request;
|
||||
HandleMessage:[_, _, cancel, _, _] = Cancel;
|
||||
HandleMessage:[_, _, piece, _, _] = Piece -> CompletePiece -> SendRequest;
|
||||
HandleMessage:[_, _, bitfield, _, _] = Bitfield;
|
||||
HandleMessage:[_, _, have, _, _] = Have -> SendRequest;
|
||||
|
||||
atomic GetClients:{BigLock};
|
||||
atomic CheckSockets:{BigLock};
|
||||
atomic Message:{BigLock};
|
||||
atomic CheckinWithTracker:{BigLock};
|
||||
atomic UpdateChokeList:{BigLock};
|
||||
atomic SetupConnection:{BigLock};
|
||||
atomic SendKeepAlives:{BigLock};
|
||||
38
samples/FLUX/test.fx
Normal file
@@ -0,0 +1,38 @@
|
||||
// concrete node signatures
|
||||
Listen ()
|
||||
=> (int socket);
|
||||
|
||||
ReadRequest (int socket)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
CheckCache (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
|
||||
// omitted for space:
|
||||
// ReadInFromDisk, StoreInCache
|
||||
Compress (int socket, bool close, image_tag *request, __u8 *rgb_data)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
Write (int socket, bool close, image_tag *request)
|
||||
=> (int socket, bool close, image_tag *request);
|
||||
Complete (int socket, bool close, image_tag *request) => ();
|
||||
|
||||
// source node
|
||||
source Listen => Image;
|
||||
|
||||
// abstract node
|
||||
Image = ReadRequest -> CheckCache -> Handler -> Write -> Complete;
|
||||
|
||||
// predicate type & dispatch
|
||||
typedef hit TestInCache;
|
||||
Handler:[_, _, hit] = ;
|
||||
Handler:[_, _, _] =
|
||||
ReadInFromDisk -> Compress -> StoreInCache;
|
||||
|
||||
// error handler
|
||||
handle error ReadInFromDisk => FourOhFor;
|
||||
|
||||
// atomicity constraints
|
||||
atomic CheckCache:{cache};
|
||||
atomic StoreInCache:{cache};
|
||||
atomic Complete:{cache};
|
||||
|
||||
3608
samples/Formatted/NiAlH_jea.eam.fs
Normal file
File diff suppressed because it is too large
31
samples/FreeMarker/example.ftl
Normal file
@@ -0,0 +1,31 @@
|
||||
<#import "layout.ftl" as layout>
|
||||
|
||||
<#assign results = [
|
||||
{
|
||||
"title": "Example Result",
|
||||
"description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
|
||||
}
|
||||
] />
|
||||
|
||||
<@layout.page title="FreeMarker Example">
|
||||
<#if results?size == 0>
|
||||
There were no results.
|
||||
<#else>
|
||||
<ul>
|
||||
<#list results as result>
|
||||
<li>
|
||||
<strong>${result.title}</strong>
|
||||
<p>${result.description}</p>
|
||||
</li>
|
||||
</#list>
|
||||
</ul>
|
||||
</#if>
|
||||
|
||||
<#-- This is a FreeMarker comment -->
|
||||
<@currentTime />
|
||||
</@layout.page>
|
||||
|
||||
|
||||
<#macro currentTime>
|
||||
${.now?string.full}
|
||||
</#macro>
|
||||
32
samples/FreeMarker/layout.ftl
Normal file
@@ -0,0 +1,32 @@
|
||||
<#ftl strip_text=true />
|
||||
|
||||
<#macro page title>
|
||||
<!doctype html>
|
||||
<html lang="${.lang}">
|
||||
<head>
|
||||
<title>${title}</title>
|
||||
<@metaTags />
|
||||
</head>
|
||||
<body>
|
||||
<#nested />
|
||||
<@footer />
|
||||
</body>
|
||||
</html>
|
||||
</#macro>
|
||||
|
||||
|
||||
<#---
|
||||
Default meta tags
|
||||
-->
|
||||
<#macro metaTags>
|
||||
<#compress>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
</#compress>
|
||||
</#macro>
|
||||
|
||||
<#macro footer>
|
||||
<p>This page is using FreeMarker v${.version}</p>
|
||||
</#macro>
|
||||
25912
samples/G-code/lm.g
File diff suppressed because it is too large
29735
samples/G-code/rm.g
File diff suppressed because it is too large
91
samples/GAS/hello.ms
Normal file
@@ -0,0 +1,91 @@
|
||||
# output(): Hello, world.\n
|
||||
# mach(): all
|
||||
|
||||
# Emit hello world while switching back and forth between arm/thumb.
|
||||
# ??? Unfinished
|
||||
|
||||
.macro invalid
|
||||
# This is "undefined" but it's not properly decoded yet.
|
||||
.word 0x07ffffff
|
||||
# This is stc which isn't recognized yet.
|
||||
stc 0,cr0,[r0]
|
||||
.endm
|
||||
|
||||
.global _start
|
||||
_start:
|
||||
# Run some simple insns to confirm the engine is at least working.
|
||||
nop
|
||||
|
||||
# Skip over output text.
|
||||
|
||||
bl skip_output
|
||||
|
||||
hello_text:
|
||||
.asciz "Hello, world.\n"
|
||||
|
||||
.p2align 2
|
||||
skip_output:
|
||||
|
||||
# Prime loop.
|
||||
|
||||
mov r4, r14
|
||||
|
||||
output_next:
|
||||
|
||||
# Switch arm->thumb to output the next character.
|
||||
# At this point r4 must point to the next character to output.
|
||||
|
||||
adr r0, into_thumb + 1
|
||||
bx r0
|
||||
|
||||
into_thumb:
|
||||
.thumb
|
||||
|
||||
# Output a character.
|
||||
|
||||
mov r0,#3 @ writec angel call
|
||||
mov r1,r4
|
||||
swi 0xab @ ??? Confirm number.
|
||||
|
||||
# Switch thumb->arm.
|
||||
|
||||
adr r5, back_to_arm
|
||||
bx r5
|
||||
|
||||
.p2align 2
|
||||
back_to_arm:
|
||||
.arm
|
||||
|
||||
# Load next character, see if done.
|
||||
|
||||
add r4,r4,#1
|
||||
sub r3,r3,r3
|
||||
ldrb r5,[r4,r3]
|
||||
teq r5,#0
|
||||
beq done
|
||||
|
||||
# Output a character (in arm mode).
|
||||
|
||||
mov r0,#3
|
||||
mov r1,r4
|
||||
swi #0x123456
|
||||
|
||||
# Load next character, see if done.
|
||||
|
||||
add r4,r4,#1
|
||||
sub r3,r3,r3
|
||||
ldrb r5,[r4,r3]
|
||||
teq r5,#0
|
||||
bne output_next
|
||||
|
||||
done:
|
||||
mov r0,#0x18
|
||||
ldr r1,exit_code
|
||||
swi #0x123456
|
||||
|
||||
# If that fails, try to die with an invalid insn.
|
||||
|
||||
invalid
|
||||
|
||||
exit_code:
|
||||
.word 0x20026
|
||||
852
samples/Go/embedded.go
Normal file
File diff suppressed because one or more lines are too long
18
samples/Go/gen-go-linguist-thrift.go
Normal file
@@ -0,0 +1,18 @@
|
||||
// Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
|
||||
package linguist
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"git.apache.org/thrift.git/lib/go/thrift"
|
||||
)
|
||||
|
||||
// (needed to ensure safety because of naive import list construction.)
|
||||
var _ = thrift.ZERO
|
||||
var _ = fmt.Printf
|
||||
var _ = bytes.Equal
|
||||
|
||||
func init() {
|
||||
}
|
||||
275
samples/Groff/Tcl.n
Normal file
@@ -0,0 +1,275 @@
|
||||
'\"
|
||||
'\" Copyright (c) 1993 The Regents of the University of California.
|
||||
'\" Copyright (c) 1994-1996 Sun Microsystems, Inc.
|
||||
'\"
|
||||
'\" See the file "license.terms" for information on usage and redistribution
|
||||
'\" of this file, and for a DISCLAIMER OF ALL WARRANTIES.
|
||||
'\"
|
||||
.TH Tcl n "8.6" Tcl "Tcl Built-In Commands"
|
||||
.so man.macros
|
||||
.BS
|
||||
.SH NAME
|
||||
Tcl \- Tool Command Language
|
||||
.SH SYNOPSIS
|
||||
Summary of Tcl language syntax.
|
||||
.BE
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The following rules define the syntax and semantics of the Tcl language:
|
||||
.IP "[1] \fBCommands.\fR"
|
||||
A Tcl script is a string containing one or more commands.
|
||||
Semi-colons and newlines are command separators unless quoted as
|
||||
described below.
|
||||
Close brackets are command terminators during command substitution
|
||||
(see below) unless quoted.
|
||||
.IP "[2] \fBEvaluation.\fR"
|
||||
A command is evaluated in two steps.
|
||||
First, the Tcl interpreter breaks the command into \fIwords\fR
|
||||
and performs substitutions as described below.
|
||||
These substitutions are performed in the same way for all
|
||||
commands.
|
||||
Secondly, the first word is used to locate a command procedure to
|
||||
carry out the command, then all of the words of the command are
|
||||
passed to the command procedure.
|
||||
The command procedure is free to interpret each of its words
|
||||
in any way it likes, such as an integer, variable name, list,
|
||||
or Tcl script.
|
||||
Different commands interpret their words differently.
|
||||
.IP "[3] \fBWords.\fR"
|
||||
Words of a command are separated by white space (except for
|
||||
newlines, which are command separators).
|
||||
.IP "[4] \fBDouble quotes.\fR"
|
||||
If the first character of a word is double-quote
|
||||
.PQ \N'34'
|
||||
then the word is terminated by the next double-quote character.
|
||||
If semi-colons, close brackets, or white space characters
|
||||
(including newlines) appear between the quotes then they are treated
|
||||
as ordinary characters and included in the word.
|
||||
Command substitution, variable substitution, and backslash substitution
|
||||
are performed on the characters between the quotes as described below.
|
||||
The double-quotes are not retained as part of the word.
|
||||
.IP "[5] \fBArgument expansion.\fR"
|
||||
If a word starts with the string
|
||||
.QW {*}
|
||||
followed by a non-whitespace character, then the leading
|
||||
.QW {*}
|
||||
is removed and the rest of the word is parsed and substituted as any other
|
||||
word. After substitution, the word is parsed as a list (without command or
|
||||
variable substitutions; backslash substitutions are performed as is normal for
|
||||
a list and individual internal words may be surrounded by either braces or
|
||||
double-quote characters), and its words are added to the command being
|
||||
substituted. For instance,
|
||||
.QW "cmd a {*}{b [c]} d {*}{$e f {g h}}"
|
||||
is equivalent to
|
||||
.QW "cmd a b {[c]} d {$e} f {g h}" .
|
||||
.IP "[6] \fBBraces.\fR"
|
||||
If the first character of a word is an open brace
|
||||
.PQ {
|
||||
and rule [5] does not apply, then
|
||||
the word is terminated by the matching close brace
|
||||
.PQ } "" .
|
||||
Braces nest within the word: for each additional open
|
||||
brace there must be an additional close brace (however,
|
||||
if an open brace or close brace within the word is
|
||||
quoted with a backslash then it is not counted in locating the
|
||||
matching close brace).
|
||||
No substitutions are performed on the characters between the
|
||||
braces except for backslash-newline substitutions described
|
||||
below, nor do semi-colons, newlines, close brackets,
|
||||
or white space receive any special interpretation.
|
||||
The word will consist of exactly the characters between the
|
||||
outer braces, not including the braces themselves.
|
||||
.IP "[7] \fBCommand substitution.\fR"
|
||||
If a word contains an open bracket
|
||||
.PQ [
|
||||
then Tcl performs \fIcommand substitution\fR.
|
||||
To do this it invokes the Tcl interpreter recursively to process
|
||||
the characters following the open bracket as a Tcl script.
|
||||
The script may contain any number of commands and must be terminated
|
||||
by a close bracket
|
||||
.PQ ] "" .
|
||||
The result of the script (i.e. the result of its last command) is
|
||||
substituted into the word in place of the brackets and all of the
|
||||
characters between them.
|
||||
There may be any number of command substitutions in a single word.
|
||||
Command substitution is not performed on words enclosed in braces.
|
||||
.IP "[8] \fBVariable substitution.\fR"
|
||||
If a word contains a dollar-sign
|
||||
.PQ $
|
||||
followed by one of the forms
|
||||
described below, then Tcl performs \fIvariable
|
||||
substitution\fR: the dollar-sign and the following characters are
|
||||
replaced in the word by the value of a variable.
|
||||
Variable substitution may take any of the following forms:
|
||||
.RS
|
||||
.TP 15
|
||||
\fB$\fIname\fR
|
||||
.
|
||||
\fIName\fR is the name of a scalar variable; the name is a sequence
|
||||
of one or more characters that are a letter, digit, underscore,
|
||||
or namespace separators (two or more colons).
|
||||
Letters and digits are \fIonly\fR the standard ASCII ones (\fB0\fR\(en\fB9\fR,
|
||||
\fBA\fR\(en\fBZ\fR and \fBa\fR\(en\fBz\fR).
|
||||
.TP 15
|
||||
\fB$\fIname\fB(\fIindex\fB)\fR
|
||||
.
|
||||
\fIName\fR gives the name of an array variable and \fIindex\fR gives
|
||||
the name of an element within that array.
|
||||
\fIName\fR must contain only letters, digits, underscores, and
|
||||
namespace separators, and may be an empty string.
|
||||
Letters and digits are \fIonly\fR the standard ASCII ones (\fB0\fR\(en\fB9\fR,
|
||||
\fBA\fR\(en\fBZ\fR and \fBa\fR\(en\fBz\fR).
|
||||
Command substitutions, variable substitutions, and backslash
|
||||
substitutions are performed on the characters of \fIindex\fR.
|
||||
.TP 15
|
||||
\fB${\fIname\fB}\fR
|
||||
.
|
||||
\fIName\fR is the name of a scalar variable or array element. It may contain
|
||||
any characters whatsoever except for close braces. It indicates an array
|
||||
element if \fIname\fR is in the form
|
||||
.QW \fIarrayName\fB(\fIindex\fB)\fR
|
||||
where \fIarrayName\fR does not contain any open parenthesis characters,
|
||||
.QW \fB(\fR ,
|
||||
or close brace characters,
|
||||
.QW \fB}\fR ,
|
||||
and \fIindex\fR can be any sequence of characters except for close brace
|
||||
characters. No further
|
||||
substitutions are performed during the parsing of \fIname\fR.
|
||||
.PP
|
||||
There may be any number of variable substitutions in a single word.
|
||||
Variable substitution is not performed on words enclosed in braces.
|
||||
.PP
|
||||
Note that variables may contain character sequences other than those listed
|
||||
above, but in that case other mechanisms must be used to access them (e.g.,
|
||||
via the \fBset\fR command's single-argument form).
|
||||
.RE
|
||||
.IP "[9] \fBBackslash substitution.\fR"
|
||||
If a backslash
|
||||
.PQ \e
|
||||
appears within a word then \fIbackslash substitution\fR occurs.
|
||||
In all cases but those described below the backslash is dropped and
|
||||
the following character is treated as an ordinary
|
||||
character and included in the word.
|
||||
This allows characters such as double quotes, close brackets,
|
||||
and dollar signs to be included in words without triggering
|
||||
special processing.
|
||||
The following table lists the backslash sequences that are
|
||||
handled specially, along with the value that replaces each sequence.
|
||||
.RS
|
||||
.TP 7
|
||||
\e\fBa\fR
|
||||
Audible alert (bell) (Unicode U+000007).
|
||||
.TP 7
|
||||
\e\fBb\fR
|
||||
Backspace (Unicode U+000008).
|
||||
.TP 7
|
||||
\e\fBf\fR
|
||||
Form feed (Unicode U+00000C).
|
||||
.TP 7
|
||||
\e\fBn\fR
|
||||
Newline (Unicode U+00000A).
|
||||
.TP 7
|
||||
\e\fBr\fR
|
||||
Carriage-return (Unicode U+00000D).
|
||||
.TP 7
|
||||
\e\fBt\fR
|
||||
Tab (Unicode U+000009).
|
||||
.TP 7
|
||||
\e\fBv\fR
|
||||
Vertical tab (Unicode U+00000B).
|
||||
.TP 7
|
||||
\e\fB<newline>\fIwhiteSpace\fR
|
||||
.
|
||||
A single space character replaces the backslash, newline, and all spaces
|
||||
and tabs after the newline. This backslash sequence is unique in that it
|
||||
is replaced in a separate pre-pass before the command is actually parsed.
|
||||
This means that it will be replaced even when it occurs between braces,
|
||||
and the resulting space will be treated as a word separator if it is not
|
||||
in braces or quotes.
|
||||
.TP 7
|
||||
\e\e
|
||||
Backslash
|
||||
.PQ \e "" .
|
||||
.TP 7
|
||||
\e\fIooo\fR
|
||||
.
|
||||
The digits \fIooo\fR (one, two, or three of them) give a eight-bit octal
|
||||
value for the Unicode character that will be inserted, in the range
|
||||
\fI000\fR\(en\fI377\fR (i.e., the range U+000000\(enU+0000FF).
|
||||
The parser will stop just before this range overflows, or when
|
||||
the maximum of three digits is reached. The upper bits of the Unicode
|
||||
character will be 0.
|
||||
.TP 7
|
||||
\e\fBx\fIhh\fR
|
||||
.
|
||||
The hexadecimal digits \fIhh\fR (one or two of them) give an eight-bit
|
||||
hexadecimal value for the Unicode character that will be inserted. The upper
|
||||
bits of the Unicode character will be 0 (i.e., the character will be in the
|
||||
range U+000000\(enU+0000FF).
|
||||
.TP 7
|
||||
\e\fBu\fIhhhh\fR
|
||||
.
|
||||
The hexadecimal digits \fIhhhh\fR (one, two, three, or four of them) give a
|
||||
sixteen-bit hexadecimal value for the Unicode character that will be
|
||||
inserted. The upper bits of the Unicode character will be 0 (i.e., the
|
||||
character will be in the range U+000000\(enU+00FFFF).
|
||||
.TP 7
|
||||
\e\fBU\fIhhhhhhhh\fR
|
||||
.
|
||||
The hexadecimal digits \fIhhhhhhhh\fR (one up to eight of them) give a
|
||||
twenty-one-bit hexadecimal value for the Unicode character that will be
|
||||
inserted, in the range U+000000\(enU+10FFFF. The parser will stop just
|
||||
before this range overflows, or when the maximum of eight digits
|
||||
is reached. The upper bits of the Unicode character will be 0.
|
||||
.RS
|
||||
.PP
|
||||
The range U+010000\(enU+10FFFD is reserved for the future.
|
||||
.RE
|
||||
.PP
|
||||
Backslash substitution is not performed on words enclosed in braces,
|
||||
except for backslash-newline as described above.
|
||||
.RE
|
||||
.IP "[10] \fBComments.\fR"
|
||||
If a hash character
|
||||
.PQ #
|
||||
appears at a point where Tcl is
|
||||
expecting the first character of the first word of a command,
|
||||
then the hash character and the characters that follow it, up
|
||||
through the next newline, are treated as a comment and ignored.
|
||||
The comment character only has significance when it appears
|
||||
at the beginning of a command.
|
||||
.IP "[11] \fBOrder of substitution.\fR"
|
||||
Each character is processed exactly once by the Tcl interpreter
|
||||
as part of creating the words of a command.
|
||||
For example, if variable substitution occurs then no further
|
||||
substitutions are performed on the value of the variable; the
|
||||
value is inserted into the word verbatim.
|
||||
If command substitution occurs then the nested command is
|
||||
processed entirely by the recursive call to the Tcl interpreter;
|
||||
no substitutions are performed before making the recursive
|
||||
call and no additional substitutions are performed on the result
|
||||
of the nested script.
|
||||
.RS
|
||||
.PP
|
||||
Substitutions take place from left to right, and each substitution is
|
||||
evaluated completely before attempting to evaluate the next. Thus, a
|
||||
sequence like
|
||||
.PP
|
||||
.CS
|
||||
set y [set x 0][incr x][incr x]
|
||||
.CE
|
||||
.PP
|
||||
will always set the variable \fIy\fR to the value, \fI012\fR.
|
||||
.RE
|
||||
.IP "[12] \fBSubstitution and word boundaries.\fR"
|
||||
Substitutions do not affect the word boundaries of a command,
|
||||
except for argument expansion as specified in rule [5].
|
||||
For example, during variable substitution the entire value of
|
||||
the variable becomes part of a single word, even if the variable's
|
||||
value contains spaces.
|
||||
.SH KEYWORDS
|
||||
backslash, command, comment, script, substitution, variable
|
||||
'\" Local Variables:
|
||||
'\" mode: nroff
|
||||
'\" fill-column: 78
|
||||
'\" End:
|
||||
samples/Groff/create_view.l (new file, 135 lines)
@@ -0,0 +1,135 @@
.\\" auto-generated by docbook2man-spec $Revision: 1.1.1.1 $
.TH "CREATE VIEW" "" "2005-11-05" "SQL - Language Statements" "SQL Commands"
.SH NAME
CREATE VIEW \- define a new view

.SH SYNOPSIS
.sp
.nf
CREATE [ OR REPLACE ] [ TEMP | TEMPORARY ] VIEW \fIname\fR [ ( \fIcolumn_name\fR [, ...] ) ]
AS \fIquery\fR
.sp
.fi
.SH "DESCRIPTION"
.PP
\fBCREATE VIEW\fR defines a view of a query. The view
is not physically materialized. Instead, the query is run every time
the view is referenced in a query.
.PP
\fBCREATE OR REPLACE VIEW\fR is similar, but if a view
of the same name already exists, it is replaced. You can only replace
a view with a new query that generates the identical set of columns
(i.e., same column names and data types).
.PP
If a schema name is given (for example, CREATE VIEW
myschema.myview ...) then the view is created in the specified
schema. Otherwise it is created in the current schema. Temporary
views exist in a special schema, so a schema name may not be given
when creating a temporary view. The name of the view must be
distinct from the name of any other view, table, sequence, or index
in the same schema.
.SH "PARAMETERS"
.TP
\fBTEMPORARY or TEMP\fR
If specified, the view is created as a temporary view.
Temporary views are automatically dropped at the end of the
current session. Existing
permanent relations with the same name are not visible to the
current session while the temporary view exists, unless they are
referenced with schema-qualified names.

If any of the tables referenced by the view are temporary,
the view is created as a temporary view (whether
TEMPORARY is specified or not).
.TP
\fB\fIname\fB\fR
The name (optionally schema-qualified) of a view to be created.
.TP
\fB\fIcolumn_name\fB\fR
An optional list of names to be used for columns of the view.
If not given, the column names are deduced from the query.
.TP
\fB\fIquery\fB\fR
A query (that is, a \fBSELECT\fR statement) which will
provide the columns and rows of the view.

Refer to SELECT [\fBselect\fR(l)]
for more information about valid queries.
.SH "NOTES"
.PP
Currently, views are read only: the system will not allow an insert,
update, or delete on a view. You can get the effect of an updatable
view by creating rules that rewrite inserts, etc. on the view into
appropriate actions on other tables. For more information see
CREATE RULE [\fBcreate_rule\fR(l)].
.PP
Use the DROP VIEW [\fBdrop_view\fR(l)]
statement to drop views.
.PP
Be careful that the names and types of the view's columns will be
assigned the way you want. For example,
.sp
.nf
CREATE VIEW vista AS SELECT 'Hello World';
.sp
.fi
is bad form in two ways: the column name defaults to ?column?,
and the column data type defaults to \fBunknown\fR. If you want a
string literal in a view's result, use something like
.sp
.nf
CREATE VIEW vista AS SELECT text 'Hello World' AS hello;
.sp
.fi
.PP
Access to tables referenced in the view is determined by permissions of
the view owner. However, functions called in the view are treated the
same as if they had been called directly from the query using the view.
Therefore the user of a view must have permissions to call all functions
used by the view.
.SH "EXAMPLES"
.PP
Create a view consisting of all comedy films:
.sp
.nf
CREATE VIEW comedies AS
SELECT *
FROM films
WHERE kind = 'Comedy';
.sp
.fi
.SH "COMPATIBILITY"
.PP
The SQL standard specifies some additional capabilities for the
\fBCREATE VIEW\fR statement:
.sp
.nf
CREATE VIEW \fIname\fR [ ( \fIcolumn_name\fR [, ...] ) ]
AS \fIquery\fR
[ WITH [ CASCADED | LOCAL ] CHECK OPTION ]
.sp
.fi
.PP
The optional clauses for the full SQL command are:
.TP
\fBCHECK OPTION\fR
This option has to do with updatable views. All
\fBINSERT\fR and \fBUPDATE\fR commands on the view
will be checked to ensure data satisfy the view-defining
condition (that is, the new data would be visible through the
view). If they do not, the update will be rejected.
.TP
\fBLOCAL\fR
Check for integrity on this view.
.TP
\fBCASCADED\fR
Check for integrity on this view and on any dependent
view. CASCADED is assumed if neither
CASCADED nor LOCAL is specified.
.PP
.PP
\fBCREATE OR REPLACE VIEW\fR is a
PostgreSQL language extension.
So is the concept of a temporary view.
.SH "SEE ALSO"
DROP VIEW [\fBdrop_view\fR(l)]
samples/Groff/fsinterface.ms (new file, 1174 lines): file diff suppressed because it is too large
samples/Groovy/filenames/Jenkinsfile (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
jettyUrl = 'http://localhost:8081/'

def servers

stage 'Dev'
node {
    checkout scm
    servers = load 'servers.groovy'
    mvn '-o clean package'
    dir('target') {stash name: 'war', includes: 'x.war'}
}

stage 'QA'
parallel(longerTests: {
    runTests(servers, 30)
}, quickerTests: {
    runTests(servers, 20)
})

stage name: 'Staging', concurrency: 1
node {
    servers.deploy 'staging'
}

input message: "Does ${jettyUrl}staging/ look good?"

stage name: 'Production', concurrency: 1
node {
    sh "wget -O - -S ${jettyUrl}staging/"
    echo 'Production server looks to be alive'
    servers.deploy 'production'
    echo "Deployed to ${jettyUrl}production/"
}

def mvn(args) {
    sh "${tool 'Maven 3.x'}/bin/mvn ${args}"
}

def runTests(servers, duration) {
    node {
        checkout scm
        servers.runWithServer {id ->
            mvn "-o -f sometests test -Durl=${jettyUrl}${id}/ -Dduration=${duration}"
        }
    }
}
samples/HCL/example.hcl (new file, 6 lines)
@@ -0,0 +1,6 @@
consul = "1.2.3.4"

// This is a comment
template "foo" {
    bar = "zip"
}
samples/HCL/example.tf (new file, 13 lines)
@@ -0,0 +1,13 @@
resource "aws_instance" "web" {
    // Copies the myapp.conf file to /etc/myapp.conf
    provisioner "file" {
        source = "conf/myapp.conf"
        destination = "/etc/myapp.conf"
    }

    // Copies the configs.d folder to /etc/configs.d
    provisioner "file" {
        source = "conf/configs.d"
        destination = "/etc"
    }
}
samples/HLSL/accelerated_surface_win.hlsl (new file, 27 lines)
@@ -0,0 +1,27 @@
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// To compile these two shaders:
// fxc /E pixelMain /T ps_2_0 accelerated_surface_win.hlsl
// fxc /E vertexMain /T vs_2_0 accelerated_surface_win.hlsl
//
// fxc is in the DirectX SDK.

struct Vertex {
    float4 position : POSITION;
    float2 texCoord : TEXCOORD0;
};

texture t;
sampler s;

// Passes a position and texture coordinate to the pixel shader.
Vertex vertexMain(Vertex input) {
    return input;
};

// Samples a texture at the given texture coordinate and returns the result.
float4 pixelMain(float2 texCoord : TEXCOORD0) : COLOR0 {
    return tex2D(s, texCoord);
};
samples/HLSL/corridor.fx (new file, 105 lines)
@@ -0,0 +1,105 @@
float4x4 matWorldView : WORLDVIEW;
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;

struct VS_INPUT {
    float4 Position : POSITION0;
    float3 Normal : NORMAL;
    float3 Tangent : TANGENT;
    float3 Binormal : BINORMAL;
    float2 TexCoord0 : TEXCOORD0;
    float2 TexCoord1 : TEXCOORD1;
};

struct VS_OUTPUT {
    float4 Position : POSITION0;
    float2 TexCoord0 : TEXCOORD0;
    float2 TexCoord1 : TEXCOORD1;
    float3x3 TangentToView : TEXCOORD2;
};

VS_OUTPUT vs_main(VS_INPUT input)
{
    VS_OUTPUT output;
    output.Position = mul(input.Position, matWorldViewProjection);
    output.TexCoord0 = input.TexCoord0 * 5;
    output.TexCoord1 = input.TexCoord1;
    output.TangentToView[0] = mul(float4(input.Tangent, 0), matWorldView).xyz;
    output.TangentToView[1] = mul(float4(input.Binormal, 0), matWorldView).xyz;
    output.TangentToView[2] = mul(float4(input.Normal, 0), matWorldView).xyz;
    return output;
}

struct PS_OUTPUT {
    float4 gbuffer0 : COLOR0;
    float4 gbuffer1 : COLOR1;
};

texture albedo_tex;
sampler albedo_samp = sampler_state {
    Texture = (albedo_tex);
    MipFilter = Linear;
    MinFilter = Linear;
    MagFilter = Linear;
    AddressU = Wrap;
    AddressV = Wrap;
    sRGBTexture = True;
};

texture normal_tex;
sampler normal_samp = sampler_state {
    Texture = (normal_tex);
    MipFilter = Linear;
    MinFilter = Linear;
    MagFilter = Linear;
    AddressU = Wrap;
    AddressV = Wrap;
    sRGBTexture = False;
};

texture specular_tex;
sampler specular_samp = sampler_state {
    Texture = (specular_tex);
    MipFilter = Linear;
    MinFilter = Linear;
    MagFilter = Linear;
    AddressU = Wrap;
    AddressV = Wrap;
    sRGBTexture = True;
};

texture ao_tex;
sampler ao_samp = sampler_state {
    Texture = (ao_tex);
    MipFilter = Linear;
    MinFilter = Linear;
    MagFilter = Linear;
    AddressU = Wrap;
    AddressV = Wrap;
    sRGBTexture = True;
};

PS_OUTPUT ps_main(VS_OUTPUT Input)
{
    PS_OUTPUT o;

    float3 tangentNormal = normalize(tex2D(normal_samp, Input.TexCoord0).xyz * 2 - 1);
    float3 eyeNormal = normalize(mul(tangentNormal, Input.TangentToView));

    float3 albedo = tex2D(albedo_samp, Input.TexCoord0).rgb;
    float ao = tex2D(ao_samp, Input.TexCoord1).r * 0.75;
    float spec = tex2D(specular_samp, Input.TexCoord0).r;

    o.gbuffer0 = float4(eyeNormal, spec * ao);
    o.gbuffer1 = float4(albedo, 1 - ao);
    return o;
}

technique mesh {
    pass Geometry {
        VertexShader = compile vs_3_0 vs_main();
        PixelShader = compile ps_3_0 ps_main();

        AlphaBlendEnable = False;
        ZWriteEnable = True;
    }
}
samples/HLSL/jellyfish.fx (new file, 119 lines)
@@ -0,0 +1,119 @@
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;
float4x4 matWorldView : WORLDVIEW;
float4x4 matWorld : WORLD;
float4x4 matView : VIEW;

uniform float4 vViewPosition;

struct VS_INPUT
{
    float3 Pos: POSITION;
    float3 Normal: NORMAL;
    float3 Tangent: TANGENT;
    float3 Binormal: BINORMAL;
};

struct VS_OUTPUT
{
    float4 Pos : POSITION;
    float3 reflection : TEXCOORD1;
    float3 refraction : TEXCOORD2;
    float fresnel : TEXCOORD3;
};

uniform float3 amt;
uniform float3 scale;
uniform float3 phase;

float3 deform(float3 p)
{
    float s = 3;
    float3 p2 = p * scale + phase;
    s += sin(p2.x) * amt.x;
    s += sin(p2.y) * amt.y;
    s += sin(p2.z) * amt.z;
    return p * s / 3;
}

VS_OUTPUT vs_main( VS_INPUT In )
{
    VS_OUTPUT Out;

    float3 pos = In.Pos;
    float3 norm = In.Normal;

    float3 p1 = pos + In.Tangent * 0.05;
    float3 p2 = pos + In.Binormal * 0.05;
    pos = deform(pos);
    p1 = deform(p1);
    p2 = deform(p2);

    p1 -= pos;
    p2 -= pos;
    norm = normalize(cross(p1, p2));

    float3 view = normalize(pos - vViewPosition.xyz);

    Out.Pos = mul(float4(pos, 1.0), matWorldViewProjection);
    Out.reflection = reflect(view, norm);
    Out.refraction = reflect(view, norm * 0.4f); /* fake, but who cares? */
    Out.fresnel = dot(view, norm);
    norm = mul(float4(norm, 0.0), matWorldViewProjection);

    return Out;
}

#define PS_INPUT VS_OUTPUT

#if 0
textureCUBE reflectionMap;
samplerCUBE reflectionMapSampler = sampler_state
{
    Texture = (reflectionMap);
    MipFilter = LINEAR;
    MinFilter = LINEAR;
    MagFilter = LINEAR;
};
#else
// textures
texture reflectionMap
<
    string type = "CUBE";
    string name = "test_cube.dds";
>;

samplerCUBE reflectionMapSampler = sampler_state
{
    Texture = (reflectionMap);
    MipFilter = LINEAR;
    MinFilter = LINEAR;
    MagFilter = LINEAR;
};
#endif

struct PS_OUTPUT
{
    float4 color : COLOR0;
};

PS_OUTPUT ps_main( PS_INPUT In )
{
    PS_OUTPUT Out;

    float4 reflection = texCUBE(reflectionMapSampler, normalize(In.reflection)) * 1.5;
    float4 refraction = texCUBE(reflectionMapSampler, normalize(In.refraction));
    float fresnel = In.fresnel;
    // float fresnel = abs(normalize(In.normal).z);
    Out.color = lerp(reflection, refraction, fresnel) * pow(1.0 - fresnel * 0.75, 1.0);

    return Out;
}

technique blur_ps_vs_2_0
{
    pass P0
    {
        VertexShader = compile vs_2_0 vs_main();
        PixelShader = compile ps_2_0 ps_main();
    }
}
samples/HLSL/noise.fx (new file, 41 lines)
@@ -0,0 +1,41 @@
float alpha = 1.f;

texture tex;
sampler tex_sampler = sampler_state
{
    Texture = (tex);
    MipFilter = LINEAR;
    MinFilter = LINEAR;
    MagFilter = LINEAR;

    AddressU = WRAP;
    AddressV = WRAP;
};

struct VS_OUTPUT
{
    float4 pos : POSITION;
    float2 tex : TEXCOORD1;
};

VS_OUTPUT vertex(float4 ipos : POSITION, float2 tex : TEXCOORD0)
{
    VS_OUTPUT Out;
    Out.pos = ipos;
    Out.tex = tex * 2;
    return Out;
}

float4 pixel(VS_OUTPUT In) : COLOR
{
    return tex2D(tex_sampler, In.tex) * alpha;
}

technique blur_ps_vs_2_0
{
    pass P0
    {
        VertexShader = compile vs_2_0 vertex();
        PixelShader = compile ps_2_0 pixel();
    }
}
samples/HTML+EEX/index.html.eex (new file, 26 lines)
@@ -0,0 +1,26 @@
<h1>Listing Books</h1>

<table>
  <tr>
    <th>Title</th>
    <th>Summary</th>
    <th></th>
    <th></th>
    <th></th>
  </tr>

<%= for book <- @books do %>
  <tr>
    <%# comment %>
    <td><%= book.title %></td>
    <td><%= book.content %></td>
    <td><%= link "Show", to: book_path(@conn, :show, book) %></td>
    <td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
    <td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
  </tr>
<% end %>
</table>

<br />

<%= link "New book", to: book_path(@conn, :new) %>
samples/HTML/rpanel.inc (new file, 31 lines)
@@ -0,0 +1,31 @@
<!DOCTYPE html>
<html>
<head>
  <link rel="stylesheet" charset="UTF-8" type="text/css" href="main.css">
  <meta charset="UTF-8">
</head>
<body>
  <div id="panel_header_main_form">
    <p id="panel_header_main_form_title">Поддержка:</p>
  </div>
  <table>
    <tbody>
      <tr>
        <td>
          <p>Москва:</p>
        </td>
        <td>
          <p>+7-902-7-800-807</p>
        </td>
      </tr>
      <tr>
        <td>
          <p>Регионы:</p>
        </td>
        <td>
          <p>+7-902-7-800-807</p>
        </td>
      </tr>
    </tbody>
  </table>
</body>
samples/HTML/tailDel.inc (new file, 5 lines)
@@ -0,0 +1,5 @@
</UL>
<P><A HREF="devices.html">Supported Targets</A></P>
</BODY>
</HEAD>

@@ -1,6 +1,74 @@
Version 1 of Trivial Extension by Andrew Plotkin begins here.
Version 2 of Trivial Extension by Andrew Plotkin begins here.

"This is the rubric of the extension."

"provided for the Linguist package by Andrew Plotkin"

[Note the two special quoted lines above.]

A cow is a kind of animal. A cow can be purple.

Understand "cow" as a cow.
Understand "purple" as a purple cow.

Check pushing a cow:
    instead say "Cow-tipping, at your age?[paragraph break]Inconceivable."

[Here are the possible levels of heading:]

Volume One

Text-line is always "A line of text."

Book 2

Part the third - indented headings still count

Chapter IV - not for release

[Heading labels are case-insensitive.]

section foobar

[A line beginning "Volume" that does not have blank lines before and after it is *not* a header line. So the following should all be part of section foobar. Sadly, the "Volume is..." line gets colored as a header, because Atom's regexp model can't recognize "thing with blank lines before and after"!]

Measure is a kind of value.
Volume is a measure. Length is a measure.
Area is a measure.

[And now some Inform 6 inclusions.]

To say em -- running on:
    (- style underline; -).
To say /em -- running on:
    (- style roman; -).

Include (-

! Inform 6 comments start with a ! mark and run to the end of the line.
Global cowcount;

[ inform6func arg;
    print "Here is some text; ", (address) 'dictword', ".^";
    cowcount++; ! increment this variable
];

Object i6cow
    with name 'cow' 'animal',
    with description "It looks like a cow.",
    has animate scenery;

-) after "Global Variables" in "Output.i6t".

Trivial Extension ends here.

---- DOCUMENTATION ----

Everything after the "---- DOCUMENTATION ----" line is documentation, so it should have the comment style.

However, tab-indented lines are sample Inform code within the documentation:

    Horns are a kind of thing. Every cow has horns.
    say "Moo[if the noun is purple] indigo[end if]."

So we need to allow for that.
@@ -2,11 +2,61 @@

Include Trivial Extension by Andrew Plotkin.

Volume 1 - overview

Chapter - setting the scene

The Kitchen is a room.

[This kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
[Comment: this kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]

Section - the kitchen table

The spicerack is a container in the Kitchen.

Table of Spices
Name	Flavor
"cinnamon"	5
"nutmeg"	4
"szechuan pepper"	8

The description of the spicerack is "It's mostly empty."

Chapter - a character

A purple cow called Gelett is in the Kitchen.

[This comment spans multiple lines..

...and this line contains [nested square[] brackets]...

...which is legal in Inform 7.]

Instead of examining Gelett:
    say "You'd rather see than be one."

Instead of examining Gelett:
    say "You'd rather see than be one."

Check smelling Gelett:
    say "This text contains several lines.

A blank line is displayed as a paragraph break,
but a simple line break is not.";
    stop the action.

Section - cow catching

Gelett has a number called the mooness.

Instead of taking Gelett:
    increment the mooness of Gelett;
    if the mooness of Gelett is one:
        say "Gelett moos once.";
    else:
        say "Gelett moos [mooness of Gelett in words] times.";

Volume 2 - the turn cycle

Every turn:
    say "A turn passes[one of][or] placidly[or] idly[or] tediously[at random]."
samples/Isabelle ROOT/filenames/ROOT (new file, 1104 lines): file diff suppressed because it is too large
samples/JSON/geo.geojson (new file, 82 lines)
@@ -0,0 +1,82 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": {
        "name": "Australia Post - North Ryde BC",
        "geo": [-33.787792, 151.13288],
        "streetAddress": "11 Waterloo Road",
        "addressLocality": "Macquarie Park",
        "addressRegion": "New South Wales",
        "addressCountry": "Australia",
        "postalCode": "2113"
      },
      "geometry": {
        "type": "Point",
        "coordinates": [151.13288, -33.787792, 0]
      }
    },


    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.8263884, 144.9105381],
        "streetAddress": "Level 3, 187 Todd Road",
        "addressLocality": "Port Melbourne",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3207"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.9097088901841, -37.82622654171794, 0],
            [144.9099724266943, -37.82679388891783, 0],
            [144.9110127325916, -37.82651526396403, 0],
            [144.9112227645738, -37.82655667152123, 0],
            [144.9113739439796, -37.82618552508767, 0],
            [144.9112740633105, -37.82615750100924, 0],
            [144.9111355846674, -37.82584493693527, 0],
            [144.9097088901841, -37.82622654171794, 0]
          ]
        ]
      }
    },


    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.05202791502396, 144.2085614999388],
        "streetAddress": "67 Richards Road",
        "addressLocality": "Castlemaine",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3450"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.2052428913937, -37.04906391287216, 0],
            [144.205540392692, -37.05049727485623, 0],
            [144.2059800881858, -37.05066835966983, 0],
            [144.206490656024, -37.05279538900776, 0],
            [144.2064525845008, -37.05366195881602, 0],
            [144.2084322301922, -37.0538920493147, 0],
            [144.2084811895712, -37.05266519735124, 0],
            [144.2079784002005, -37.05041270555773, 0],
            [144.2074017905817, -37.04817406993293, 0],
            [144.2061363939852, -37.04834972871226, 0],
            [144.2052428913937, -37.04906391287216, 0]
          ]
        ]
      }
    }
  ]
}
samples/JSON/switzerland.topojson (new file, 1 line): file diff suppressed because one or more lines are too long
samples/JSX/sample.jsx (new file, 23 lines)
@@ -0,0 +1,23 @@
'use strict';

const React = require('react')

module.exports = React.createClass({
  render: function() {
    let {feeds, log} = this.props;

    log.info(feeds);
    return <div className="feed-list">
      <h3>News Feed's</h3>
      <ul>
        {feeds.map(function(feed) {
          return <li key={feed.name} className={feed.fetched ? 'loaded' : 'loading'}>
            {feed.data && feed.data.length > 0 ?
              <span>{feed.name} <span className='light'>({feed.data.length})</span></span>
              : 'feed.name' }
          </li>
        })}
      </ul>
    </div>;
  }
});
samples/Java/GrammarKit.java (new file, 625 lines)
@@ -0,0 +1,625 @@
// This is a generated file. Not intended for manual editing.
package org.intellij.grammar.parser;

import com.intellij.lang.PsiBuilder;
import com.intellij.lang.PsiBuilder.Marker;
import static org.intellij.grammar.psi.BnfTypes.*;
import static org.intellij.grammar.parser.GeneratedParserUtilBase.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.lang.ASTNode;
import com.intellij.psi.tree.TokenSet;
import com.intellij.lang.PsiParser;
import com.intellij.lang.LightPsiParser;

@SuppressWarnings({"SimplifiableIfStatement", "UnusedAssignment"})
public class GrammarParser implements PsiParser, LightPsiParser {

  public ASTNode parse(IElementType t, PsiBuilder b) {
    parseLight(t, b);
    return b.getTreeBuilt();
  }

  public void parseLight(IElementType t, PsiBuilder b) {
    boolean r;
    b = adapt_builder_(t, b, this, EXTENDS_SETS_);
    Marker m = enter_section_(b, 0, _COLLAPSE_, null);
    if (t == BNF_ATTR) {
      r = attr(b, 0);
    }
    else if (t == BNF_ATTR_PATTERN) {
      r = attr_pattern(b, 0);
    }
    else if (t == BNF_ATTR_VALUE) {
      r = attr_value(b, 0);
    }
    else if (t == BNF_ATTRS) {
      r = attrs(b, 0);
    }
    else if (t == BNF_CHOICE) {
      r = choice(b, 0);
    }
    else if (t == BNF_EXPRESSION) {
      r = expression(b, 0);
    }
    else if (t == BNF_LITERAL_EXPRESSION) {
      r = literal_expression(b, 0);
    }
    else if (t == BNF_MODIFIER) {
      r = modifier(b, 0);
    }
    else if (t == BNF_PAREN_EXPRESSION) {
      r = paren_expression(b, 0);
    }
    else if (t == BNF_PREDICATE) {
      r = predicate(b, 0);
    }
    else if (t == BNF_PREDICATE_SIGN) {
      r = predicate_sign(b, 0);
    }
    else if (t == BNF_QUANTIFIED) {
      r = quantified(b, 0);
    }
    else if (t == BNF_QUANTIFIER) {
      r = quantifier(b, 0);
    }
    else if (t == BNF_REFERENCE_OR_TOKEN) {
      r = reference_or_token(b, 0);
    }
    else if (t == BNF_RULE) {
      r = rule(b, 0);
    }
    else if (t == BNF_SEQUENCE) {
      r = sequence(b, 0);
    }
    else if (t == BNF_STRING_LITERAL_EXPRESSION) {
      r = string_literal_expression(b, 0);
    }
    else {
      r = parse_root_(t, b, 0);
    }
    exit_section_(b, 0, m, t, r, true, TRUE_CONDITION);
  }

  protected boolean parse_root_(IElementType t, PsiBuilder b, int l) {
    return grammar(b, l + 1);
  }

  public static final TokenSet[] EXTENDS_SETS_ = new TokenSet[] {
    create_token_set_(BNF_LITERAL_EXPRESSION, BNF_STRING_LITERAL_EXPRESSION),
    create_token_set_(BNF_CHOICE, BNF_EXPRESSION, BNF_LITERAL_EXPRESSION, BNF_PAREN_EXPRESSION,
      BNF_PREDICATE, BNF_QUANTIFIED, BNF_REFERENCE_OR_TOKEN, BNF_SEQUENCE,
      BNF_STRING_LITERAL_EXPRESSION),
  };

  /* ********************************************************** */
  // id attr_pattern? '=' attr_value ';'?
  public static boolean attr(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr")) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, "<attr>");
    r = consumeToken(b, BNF_ID);
    p = r; // pin = 1
    r = r && report_error_(b, attr_1(b, l + 1));
    r = p && report_error_(b, consumeToken(b, BNF_OP_EQ)) && r;
    r = p && report_error_(b, attr_value(b, l + 1)) && r;
    r = p && attr_4(b, l + 1) && r;
    exit_section_(b, l, m, BNF_ATTR, r, p, attr_recover_until_parser_);
    return r || p;
  }

  // attr_pattern?
  private static boolean attr_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_1")) return false;
    attr_pattern(b, l + 1);
    return true;
  }

  // ';'?
  private static boolean attr_4(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_4")) return false;
    consumeToken(b, BNF_SEMICOLON);
    return true;
  }

  /* ********************************************************** */
  // '(' string ')'
  public static boolean attr_pattern(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_pattern")) return false;
    if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_PAREN);
    r = r && consumeToken(b, BNF_STRING);
    r = r && consumeToken(b, BNF_RIGHT_PAREN);
    exit_section_(b, m, BNF_ATTR_PATTERN, r);
    return r;
  }

  /* ********************************************************** */
  // !'}'
  static boolean attr_recover_until(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_recover_until")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_RIGHT_BRACE);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // (reference_or_token | literal_expression) !'='
  public static boolean attr_value(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<attr value>");
    r = attr_value_0(b, l + 1);
    r = r && attr_value_1(b, l + 1);
    exit_section_(b, l, m, BNF_ATTR_VALUE, r, false, null);
    return r;
  }

  // reference_or_token | literal_expression
  private static boolean attr_value_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = reference_or_token(b, l + 1);
    if (!r) r = literal_expression(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !'='
  private static boolean attr_value_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value_1")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_OP_EQ);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '{' attr* '}'
  public static boolean attrs(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attrs")) return false;
    if (!nextTokenIs(b, BNF_LEFT_BRACE)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_LEFT_BRACE);
    p = r; // pin = 1
    r = r && report_error_(b, attrs_1(b, l + 1));
    r = p && consumeToken(b, BNF_RIGHT_BRACE) && r;
    exit_section_(b, l, m, BNF_ATTRS, r, p, null);
    return r || p;
  }

  // attr*
  private static boolean attrs_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attrs_1")) return false;
    int c = current_position_(b);
    while (true) {
      if (!attr(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "attrs_1", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // '{' sequence ('|' sequence)* '}' | sequence choice_tail*
  public static boolean choice(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<choice>");
    r = choice_0(b, l + 1);
    if (!r) r = choice_1(b, l + 1);
    exit_section_(b, l, m, BNF_CHOICE, r, false, null);
    return r;
  }

  // '{' sequence ('|' sequence)* '}'
  private static boolean choice_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_BRACE);
    r = r && sequence(b, l + 1);
    r = r && choice_0_2(b, l + 1);
    r = r && consumeToken(b, BNF_RIGHT_BRACE);
    exit_section_(b, m, null, r);
    return r;
  }

  // ('|' sequence)*
  private static boolean choice_0_2(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0_2")) return false;
    int c = current_position_(b);
    while (true) {
      if (!choice_0_2_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "choice_0_2", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // '|' sequence
  private static boolean choice_0_2_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0_2_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_OP_OR);
    r = r && sequence(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // sequence choice_tail*
  private static boolean choice_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_1")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = sequence(b, l + 1);
    r = r && choice_1_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // choice_tail*
  private static boolean choice_1_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_1_1")) return false;
    int c = current_position_(b);
    while (true) {
      if (!choice_tail(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "choice_1_1", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // '|' sequence
  static boolean choice_tail(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_tail")) return false;
    if (!nextTokenIs(b, BNF_OP_OR)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_OP_OR);
    p = r; // pin = 1
    r = r && sequence(b, l + 1);
    exit_section_(b, l, m, null, r, p, null);
    return r || p;
  }

  /* ********************************************************** */
  // choice?
  public static boolean expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression")) return false;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<expression>");
    choice(b, l + 1);
    exit_section_(b, l, m, BNF_EXPRESSION, true, false, null);
    return true;
  }

  /* ********************************************************** */
  // (attrs | rule) *
  static boolean grammar(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "grammar")) return false;
    int c = current_position_(b);
    while (true) {
      if (!grammar_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "grammar", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // attrs | rule
  private static boolean grammar_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "grammar_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = attrs(b, l + 1);
    if (!r) r = rule(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // string_literal_expression | number
  public static boolean literal_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "literal_expression")) return false;
    if (!nextTokenIs(b, "<literal expression>", BNF_NUMBER, BNF_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<literal expression>");
    r = string_literal_expression(b, l + 1);
    if (!r) r = consumeToken(b, BNF_NUMBER);
    exit_section_(b, l, m, BNF_LITERAL_EXPRESSION, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // 'private' | 'external' | 'wrapped'
  public static boolean modifier(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "modifier")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<modifier>");
    r = consumeToken(b, "private");
    if (!r) r = consumeToken(b, "external");
    if (!r) r = consumeToken(b, "wrapped");
    exit_section_(b, l, m, BNF_MODIFIER, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // quantified | predicate
  static boolean option(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "option")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = quantified(b, l + 1);
    if (!r) r = predicate(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // '(' expression ')'
  public static boolean paren_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "paren_expression")) return false;
    if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_LEFT_PAREN);
    p = r; // pin = 1
    r = r && report_error_(b, expression(b, l + 1));
    r = p && consumeToken(b, BNF_RIGHT_PAREN) && r;
    exit_section_(b, l, m, BNF_PAREN_EXPRESSION, r, p, null);
    return r || p;
  }

  /* ********************************************************** */
  // predicate_sign simple
  public static boolean predicate(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "predicate")) return false;
    if (!nextTokenIs(b, "<predicate>", BNF_OP_NOT, BNF_OP_AND)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<predicate>");
    r = predicate_sign(b, l + 1);
    r = r && simple(b, l + 1);
    exit_section_(b, l, m, BNF_PREDICATE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '&' | '!'
  public static boolean predicate_sign(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "predicate_sign")) return false;
    if (!nextTokenIs(b, "<predicate sign>", BNF_OP_NOT, BNF_OP_AND)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<predicate sign>");
    r = consumeToken(b, BNF_OP_AND);
    if (!r) r = consumeToken(b, BNF_OP_NOT);
    exit_section_(b, l, m, BNF_PREDICATE_SIGN, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '[' expression ']' | simple quantifier?
  public static boolean quantified(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<quantified>");
    r = quantified_0(b, l + 1);
    if (!r) r = quantified_1(b, l + 1);
    exit_section_(b, l, m, BNF_QUANTIFIED, r, false, null);
    return r;
  }

  // '[' expression ']'
  private static boolean quantified_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_BRACKET);
    r = r && expression(b, l + 1);
    r = r && consumeToken(b, BNF_RIGHT_BRACKET);
    exit_section_(b, m, null, r);
    return r;
  }

  // simple quantifier?
  private static boolean quantified_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_1")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple(b, l + 1);
    r = r && quantified_1_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // quantifier?
  private static boolean quantified_1_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_1_1")) return false;
    quantifier(b, l + 1);
    return true;
  }

  /* ********************************************************** */
  // '?' | '+' | '*'
  public static boolean quantifier(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantifier")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<quantifier>");
    r = consumeToken(b, BNF_OP_OPT);
    if (!r) r = consumeToken(b, BNF_OP_ONEMORE);
    if (!r) r = consumeToken(b, BNF_OP_ZEROMORE);
    exit_section_(b, l, m, BNF_QUANTIFIER, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // id
  public static boolean reference_or_token(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "reference_or_token")) return false;
    if (!nextTokenIs(b, BNF_ID)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_ID);
    exit_section_(b, m, BNF_REFERENCE_OR_TOKEN, r);
    return r;
  }

  /* ********************************************************** */
  // modifier* id '::=' expression attrs? ';'?
  public static boolean rule(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule")) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, "<rule>");
    r = rule_0(b, l + 1);
    r = r && consumeToken(b, BNF_ID);
    r = r && consumeToken(b, BNF_OP_IS);
    p = r; // pin = 3
    r = r && report_error_(b, expression(b, l + 1));
    r = p && report_error_(b, rule_4(b, l + 1)) && r;
    r = p && rule_5(b, l + 1) && r;
    exit_section_(b, l, m, BNF_RULE, r, p, rule_recover_until_parser_);
    return r || p;
  }

  // modifier*
  private static boolean rule_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!modifier(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "rule_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // attrs?
  private static boolean rule_4(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_4")) return false;
    attrs(b, l + 1);
    return true;
  }

  // ';'?
  private static boolean rule_5(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_5")) return false;
    consumeToken(b, BNF_SEMICOLON);
    return true;
  }

  /* ********************************************************** */
  // !'{'
  static boolean rule_recover_until(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_recover_until")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_LEFT_BRACE);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // option +
  public static boolean sequence(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "sequence")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<sequence>");
    r = option(b, l + 1);
    int c = current_position_(b);
    while (r) {
      if (!option(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "sequence", c)) break;
      c = current_position_(b);
    }
    exit_section_(b, l, m, BNF_SEQUENCE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // !(modifier* id '::=' ) reference_or_token | literal_expression | paren_expression
  static boolean simple(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0(b, l + 1);
    if (!r) r = literal_expression(b, l + 1);
    if (!r) r = paren_expression(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !(modifier* id '::=' ) reference_or_token
  private static boolean simple_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0_0(b, l + 1);
    r = r && reference_or_token(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !(modifier* id '::=' )
  private static boolean simple_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !simple_0_0_0(b, l + 1);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  // modifier* id '::='
  private static boolean simple_0_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0_0_0_0(b, l + 1);
    r = r && consumeToken(b, BNF_ID);
    r = r && consumeToken(b, BNF_OP_IS);
    exit_section_(b, m, null, r);
    return r;
  }

  // modifier*
  private static boolean simple_0_0_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0_0_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!modifier(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "simple_0_0_0_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // string
  public static boolean string_literal_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "string_literal_expression")) return false;
    if (!nextTokenIs(b, BNF_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_STRING);
    exit_section_(b, m, BNF_STRING_LITERAL_EXPRESSION, r);
    return r;
  }

  final static Parser attr_recover_until_parser_ = new Parser() {
    public boolean parse(PsiBuilder b, int l) {
|
||||
return attr_recover_until(b, l + 1);
|
||||
}
|
||||
};
|
||||
final static Parser rule_recover_until_parser_ = new Parser() {
|
||||
public boolean parse(PsiBuilder b, int l) {
|
||||
return rule_recover_until(b, l + 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
482
samples/Java/JFlexLexer.java
Normal file
@@ -0,0 +1,482 @@
|
||||
/* The following code was generated by JFlex 1.4.3 on 28/01/16 11:27 */
|
||||
|
||||
package test;
|
||||
import com.intellij.lexer.*;
|
||||
import com.intellij.psi.tree.IElementType;
|
||||
import static org.intellij.grammar.psi.BnfTypes.*;
|
||||
|
||||
|
||||
/**
|
||||
* This class is a scanner generated by
|
||||
* <a href="http://www.jflex.de/">JFlex</a> 1.4.3
|
||||
* on 28/01/16 11:27 from the specification file
|
||||
* <tt>/home/abigail/code/intellij-grammar-kit-test/src/test/_GrammarLexer.flex</tt>
|
||||
*/
|
||||
public class _GrammarLexer implements FlexLexer {
|
||||
/** initial size of the lookahead buffer */
|
||||
private static final int ZZ_BUFFERSIZE = 16384;
|
||||
|
||||
/** lexical states */
|
||||
public static final int YYINITIAL = 0;
|
||||
|
||||
/**
|
||||
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
|
||||
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
|
||||
* at the beginning of a line
|
||||
* l is of the form l = 2*k, k a non negative integer
|
||||
*/
|
||||
private static final int ZZ_LEXSTATE[] = {
|
||||
0, 0
|
||||
};
|
||||
|
||||
/**
|
||||
* Translates characters to character classes
|
||||
*/
|
||||
private static final String ZZ_CMAP_PACKED =
|
||||
"\11\0\1\1\1\1\1\0\1\1\1\1\22\0\1\1\101\0\1\13"+
|
||||
"\1\0\1\3\1\14\1\0\1\10\1\0\1\2\3\0\1\12\1\7"+
|
||||
"\3\0\1\6\1\4\1\5\1\11\uff8a\0";
|
||||
|
||||
/**
|
||||
* Translates characters to character classes
|
||||
*/
|
||||
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
|
||||
|
||||
/**
|
||||
* Translates DFA states to action switch labels.
|
||||
*/
|
||||
private static final int [] ZZ_ACTION = zzUnpackAction();
|
||||
|
||||
private static final String ZZ_ACTION_PACKED_0 =
|
||||
"\1\0\1\1\1\2\3\1\1\3\10\0\1\4\1\5";
|
||||
|
||||
private static int [] zzUnpackAction() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackAction(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Translates a state to a row index in the transition table
|
||||
*/
|
||||
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
|
||||
|
||||
private static final String ZZ_ROWMAP_PACKED_0 =
|
||||
"\0\0\0\15\0\32\0\47\0\64\0\101\0\15\0\116"+
|
||||
"\0\133\0\150\0\165\0\202\0\217\0\234\0\251\0\15"+
|
||||
"\0\15";
|
||||
|
||||
private static int [] zzUnpackRowMap() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int high = packed.charAt(i++) << 16;
|
||||
result[j++] = high | packed.charAt(i++);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
/**
|
||||
* The transition table of the DFA
|
||||
*/
|
||||
private static final int [] ZZ_TRANS = zzUnpackTrans();
|
||||
|
||||
private static final String ZZ_TRANS_PACKED_0 =
|
||||
"\1\2\1\3\1\4\1\2\1\5\2\2\1\6\5\2"+
|
||||
"\16\0\1\3\16\0\1\7\16\0\1\10\20\0\1\11"+
|
||||
"\11\0\1\12\20\0\1\13\4\0\1\14\25\0\1\15"+
|
||||
"\10\0\1\16\21\0\1\17\10\0\1\20\12\0\1\21"+
|
||||
"\6\0";
|
||||
|
||||
private static int [] zzUnpackTrans() {
|
||||
int [] result = new int[182];
|
||||
int offset = 0;
|
||||
offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackTrans(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
value--;
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
|
||||
/* error codes */
|
||||
private static final int ZZ_UNKNOWN_ERROR = 0;
|
||||
private static final int ZZ_NO_MATCH = 1;
|
||||
private static final int ZZ_PUSHBACK_2BIG = 2;
|
||||
private static final char[] EMPTY_BUFFER = new char[0];
|
||||
private static final int YYEOF = -1;
|
||||
private static java.io.Reader zzReader = null; // Fake
|
||||
|
||||
/* error messages for the codes above */
|
||||
private static final String ZZ_ERROR_MSG[] = {
|
||||
"Unkown internal scanner error",
|
||||
"Error: could not match input",
|
||||
"Error: pushback value was too large"
|
||||
};
|
||||
|
||||
/**
|
||||
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
|
||||
*/
|
||||
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
|
||||
|
||||
private static final String ZZ_ATTRIBUTE_PACKED_0 =
|
||||
"\1\0\1\11\4\1\1\11\10\0\2\11";
|
||||
|
||||
private static int [] zzUnpackAttribute() {
|
||||
int [] result = new int[17];
|
||||
int offset = 0;
|
||||
offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
|
||||
int i = 0; /* index in packed string */
|
||||
int j = offset; /* index in unpacked array */
|
||||
int l = packed.length();
|
||||
while (i < l) {
|
||||
int count = packed.charAt(i++);
|
||||
int value = packed.charAt(i++);
|
||||
do result[j++] = value; while (--count > 0);
|
||||
}
|
||||
return j;
|
||||
}
|
||||
|
||||
/** the current state of the DFA */
|
||||
private int zzState;
|
||||
|
||||
/** the current lexical state */
|
||||
private int zzLexicalState = YYINITIAL;
|
||||
|
||||
/** this buffer contains the current text to be matched and is
|
||||
the source of the yytext() string */
|
||||
private CharSequence zzBuffer = "";
|
||||
|
||||
/** this buffer may contains the current text array to be matched when it is cheap to acquire it */
|
||||
private char[] zzBufferArray;
|
||||
|
||||
/** the textposition at the last accepting state */
|
||||
private int zzMarkedPos;
|
||||
|
||||
/** the textposition at the last state to be included in yytext */
|
||||
private int zzPushbackPos;
|
||||
|
||||
/** the current text position in the buffer */
|
||||
private int zzCurrentPos;
|
||||
|
||||
/** startRead marks the beginning of the yytext() string in the buffer */
|
||||
private int zzStartRead;
|
||||
|
||||
/** endRead marks the last character in the buffer, that has been read
|
||||
from input */
|
||||
private int zzEndRead;
|
||||
|
||||
/**
|
||||
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
|
||||
*/
|
||||
private boolean zzAtBOL = true;
|
||||
|
||||
/** zzAtEOF == true <=> the scanner is at the EOF */
|
||||
private boolean zzAtEOF;
|
||||
|
||||
/* user code: */
|
||||
public _GrammarLexer() {
|
||||
this((java.io.Reader)null);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new scanner
|
||||
*
|
||||
* @param in the java.io.Reader to read input from.
|
||||
*/
|
||||
public _GrammarLexer(java.io.Reader in) {
|
||||
this.zzReader = in;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Unpacks the compressed character translation table.
|
||||
*
|
||||
* @param packed the packed character translation table
|
||||
* @return the unpacked character translation table
|
||||
*/
|
||||
private static char [] zzUnpackCMap(String packed) {
|
||||
char [] map = new char[0x10000];
|
||||
int i = 0; /* index in packed string */
|
||||
int j = 0; /* index in unpacked array */
|
||||
while (i < 52) {
|
||||
int count = packed.charAt(i++);
|
||||
char value = packed.charAt(i++);
|
||||
do map[j++] = value; while (--count > 0);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
public final int getTokenStart(){
|
||||
return zzStartRead;
|
||||
}
|
||||
|
||||
public final int getTokenEnd(){
|
||||
return getTokenStart() + yylength();
|
||||
}
|
||||
|
||||
public void reset(CharSequence buffer, int start, int end,int initialState){
|
||||
zzBuffer = buffer;
|
||||
zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
|
||||
zzCurrentPos = zzMarkedPos = zzStartRead = start;
|
||||
zzPushbackPos = 0;
|
||||
zzAtEOF = false;
|
||||
zzAtBOL = true;
|
||||
zzEndRead = end;
|
||||
yybegin(initialState);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refills the input buffer.
|
||||
*
|
||||
* @return <code>false</code>, iff there was new input.
|
||||
*
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
private boolean zzRefill() throws java.io.IOException {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the current lexical state.
|
||||
*/
|
||||
public final int yystate() {
|
||||
return zzLexicalState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Enters a new lexical state
|
||||
*
|
||||
* @param newState the new lexical state
|
||||
*/
|
||||
public final void yybegin(int newState) {
|
||||
zzLexicalState = newState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the text matched by the current regular expression.
|
||||
*/
|
||||
public final CharSequence yytext() {
|
||||
return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the character at position <tt>pos</tt> from the
|
||||
* matched text.
|
||||
*
|
||||
* It is equivalent to yytext().charAt(pos), but faster
|
||||
*
|
||||
* @param pos the position of the character to fetch.
|
||||
* A value from 0 to yylength()-1.
|
||||
*
|
||||
* @return the character at position pos
|
||||
*/
|
||||
public final char yycharat(int pos) {
|
||||
return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer.charAt(zzStartRead+pos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the length of the matched text region.
|
||||
*/
|
||||
public final int yylength() {
|
||||
return zzMarkedPos-zzStartRead;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Reports an error that occured while scanning.
|
||||
*
|
||||
* In a wellformed scanner (no or only correct usage of
|
||||
* yypushback(int) and a match-all fallback rule) this method
|
||||
* will only be called with things that "Can't Possibly Happen".
|
||||
* If this method is called, something is seriously wrong
|
||||
* (e.g. a JFlex bug producing a faulty scanner etc.).
|
||||
*
|
||||
* Usual syntax/scanner level error handling should be done
|
||||
* in error fallback rules.
|
||||
*
|
||||
* @param errorCode the code of the errormessage to display
|
||||
*/
|
||||
private void zzScanError(int errorCode) {
|
||||
String message;
|
||||
try {
|
||||
message = ZZ_ERROR_MSG[errorCode];
|
||||
}
|
||||
catch (ArrayIndexOutOfBoundsException e) {
|
||||
message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
|
||||
}
|
||||
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Pushes the specified amount of characters back into the input stream.
|
||||
*
|
||||
* They will be read again by then next call of the scanning method
|
||||
*
|
||||
* @param number the number of characters to be read again.
|
||||
* This number must not be greater than yylength()!
|
||||
*/
|
||||
public void yypushback(int number) {
|
||||
if ( number > yylength() )
|
||||
zzScanError(ZZ_PUSHBACK_2BIG);
|
||||
|
||||
zzMarkedPos -= number;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Resumes scanning until the next regular expression is matched,
|
||||
* the end of input is encountered or an I/O-Error occurs.
|
||||
*
|
||||
* @return the next token
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
public IElementType advance() throws java.io.IOException {
|
||||
int zzInput;
|
||||
int zzAction;
|
||||
|
||||
// cached fields:
|
||||
int zzCurrentPosL;
|
||||
int zzMarkedPosL;
|
||||
int zzEndReadL = zzEndRead;
|
||||
CharSequence zzBufferL = zzBuffer;
|
||||
char[] zzBufferArrayL = zzBufferArray;
|
||||
char [] zzCMapL = ZZ_CMAP;
|
||||
|
||||
int [] zzTransL = ZZ_TRANS;
|
||||
int [] zzRowMapL = ZZ_ROWMAP;
|
||||
int [] zzAttrL = ZZ_ATTRIBUTE;
|
||||
|
||||
while (true) {
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
|
||||
zzAction = -1;
|
||||
|
||||
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
|
||||
|
||||
zzState = ZZ_LEXSTATE[zzLexicalState];
|
||||
|
||||
|
||||
zzForAction: {
|
||||
while (true) {
|
||||
|
||||
if (zzCurrentPosL < zzEndReadL)
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
else if (zzAtEOF) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
// store back cached positions
|
||||
zzCurrentPos = zzCurrentPosL;
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
boolean eof = zzRefill();
|
||||
// get translated positions and possibly new buffer
|
||||
zzCurrentPosL = zzCurrentPos;
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
zzBufferL = zzBuffer;
|
||||
zzEndReadL = zzEndRead;
|
||||
if (eof) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
}
|
||||
}
|
||||
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
|
||||
if (zzNext == -1) break zzForAction;
|
||||
zzState = zzNext;
|
||||
|
||||
int zzAttributes = zzAttrL[zzState];
|
||||
if ( (zzAttributes & 1) == 1 ) {
|
||||
zzAction = zzState;
|
||||
zzMarkedPosL = zzCurrentPosL;
|
||||
if ( (zzAttributes & 8) == 8 ) break zzForAction;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// store back cached position
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
|
||||
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
|
||||
case 1:
|
||||
{ return com.intellij.psi.TokenType.BAD_CHARACTER;
|
||||
}
|
||||
case 6: break;
|
||||
case 4:
|
||||
{ return BNF_STRING;
|
||||
}
|
||||
case 7: break;
|
||||
case 5:
|
||||
{ return BNF_NUMBER;
|
||||
}
|
||||
case 8: break;
|
||||
case 3:
|
||||
{ return BNF_ID;
|
||||
}
|
||||
case 9: break;
|
||||
case 2:
|
||||
{ return com.intellij.psi.TokenType.WHITE_SPACE;
|
||||
}
|
||||
case 10: break;
|
||||
default:
|
||||
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
|
||||
zzAtEOF = true;
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
zzScanError(ZZ_NO_MATCH);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
396
samples/Java/gen-java-linguist-thrift.java
Normal file
@@ -0,0 +1,396 @@
|
||||
/**
|
||||
* Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
*
|
||||
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
* @generated
|
||||
*/
|
||||
import org.apache.thrift.scheme.IScheme;
|
||||
import org.apache.thrift.scheme.SchemeFactory;
|
||||
import org.apache.thrift.scheme.StandardScheme;
|
||||
|
||||
import org.apache.thrift.scheme.TupleScheme;
|
||||
import org.apache.thrift.protocol.TTupleProtocol;
|
||||
import org.apache.thrift.protocol.TProtocolException;
|
||||
import org.apache.thrift.EncodingUtils;
|
||||
import org.apache.thrift.TException;
|
||||
import org.apache.thrift.async.AsyncMethodCallback;
|
||||
import org.apache.thrift.server.AbstractNonblockingServer.*;
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Map;
|
||||
import java.util.HashMap;
|
||||
import java.util.EnumMap;
|
||||
import java.util.Set;
|
||||
import java.util.HashSet;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Collections;
|
||||
import java.util.BitSet;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
import javax.annotation.Generated;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
|
||||
@Generated(value = "Autogenerated by Thrift Compiler (1.0.0-dev)", date = "2015-5-12")
|
||||
public class PullRequest implements org.apache.thrift.TBase<PullRequest, PullRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PullRequest> {
|
||||
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PullRequest");
|
||||
|
||||
private static final org.apache.thrift.protocol.TField TITLE_FIELD_DESC = new org.apache.thrift.protocol.TField("title", org.apache.thrift.protocol.TType.STRING, (short)1);
|
||||
|
||||
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
|
||||
static {
|
||||
schemes.put(StandardScheme.class, new PullRequestStandardSchemeFactory());
|
||||
schemes.put(TupleScheme.class, new PullRequestTupleSchemeFactory());
|
||||
}
|
||||
|
||||
public String title; // required
|
||||
|
||||
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
|
||||
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
|
||||
TITLE((short)1, "title");
|
||||
|
||||
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
|
||||
|
||||
static {
|
||||
for (_Fields field : EnumSet.allOf(_Fields.class)) {
|
||||
byName.put(field.getFieldName(), field);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the _Fields constant that matches fieldId, or null if its not found.
|
||||
*/
|
||||
public static _Fields findByThriftId(int fieldId) {
|
||||
switch(fieldId) {
|
||||
case 1: // TITLE
|
||||
return TITLE;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the _Fields constant that matches fieldId, throwing an exception
|
||||
* if it is not found.
|
||||
*/
|
||||
public static _Fields findByThriftIdOrThrow(int fieldId) {
|
||||
_Fields fields = findByThriftId(fieldId);
|
||||
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
|
||||
return fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the _Fields constant that matches name, or null if its not found.
|
||||
*/
|
||||
public static _Fields findByName(String name) {
|
||||
return byName.get(name);
|
||||
}
|
||||
|
||||
private final short _thriftId;
|
||||
private final String _fieldName;
|
||||
|
||||
_Fields(short thriftId, String fieldName) {
|
||||
_thriftId = thriftId;
|
||||
_fieldName = fieldName;
|
||||
}
|
||||
|
||||
public short getThriftFieldId() {
|
||||
return _thriftId;
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
return _fieldName;
|
||||
}
|
||||
}
|
||||
|
||||
// isset id assignments
|
||||
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
|
||||
static {
|
||||
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
|
||||
tmpMap.put(_Fields.TITLE, new org.apache.thrift.meta_data.FieldMetaData("title", org.apache.thrift.TFieldRequirementType.DEFAULT,
|
||||
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
|
||||
metaDataMap = Collections.unmodifiableMap(tmpMap);
|
||||
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(PullRequest.class, metaDataMap);
|
||||
}
|
||||
|
||||
public PullRequest() {
|
||||
}
|
||||
|
||||
public PullRequest(
|
||||
String title)
|
||||
{
|
||||
this();
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a deep copy on <i>other</i>.
|
||||
*/
|
||||
public PullRequest(PullRequest other) {
|
||||
if (other.isSetTitle()) {
|
||||
this.title = other.title;
|
||||
}
|
||||
}
|
||||
|
||||
public PullRequest deepCopy() {
|
||||
return new PullRequest(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
this.title = null;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return this.title;
|
||||
}
|
||||
|
||||
public PullRequest setTitle(String title) {
|
||||
this.title = title;
|
||||
return this;
|
||||
}
|
||||
|
||||
public void unsetTitle() {
|
||||
this.title = null;
|
||||
}
|
||||
|
||||
/** Returns true if field title is set (has been assigned a value) and false otherwise */
|
||||
public boolean isSetTitle() {
|
||||
return this.title != null;
|
||||
}
|
||||
|
||||
public void setTitleIsSet(boolean value) {
|
||||
if (!value) {
|
||||
this.title = null;
|
||||
}
|
||||
}
|
||||
|
||||
public void setFieldValue(_Fields field, Object value) {
|
||||
switch (field) {
|
||||
case TITLE:
|
||||
if (value == null) {
|
||||
unsetTitle();
|
||||
} else {
|
||||
setTitle((String)value);
|
||||
}
|
||||
break;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public Object getFieldValue(_Fields field) {
|
||||
switch (field) {
|
||||
case TITLE:
|
||||
return getTitle();
|
||||
|
||||
}
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
|
||||
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
|
||||
public boolean isSet(_Fields field) {
|
||||
if (field == null) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
|
||||
switch (field) {
|
||||
case TITLE:
|
||||
return isSetTitle();
|
||||
}
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object that) {
|
||||
if (that == null)
|
||||
return false;
|
||||
if (that instanceof PullRequest)
|
||||
return this.equals((PullRequest)that);
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean equals(PullRequest that) {
|
||||
if (that == null)
|
||||
return false;
|
||||
|
||||
boolean this_present_title = true && this.isSetTitle();
|
||||
boolean that_present_title = true && that.isSetTitle();
|
||||
if (this_present_title || that_present_title) {
|
||||
if (!(this_present_title && that_present_title))
|
||||
return false;
|
||||
if (!this.title.equals(that.title))
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
List<Object> list = new ArrayList<Object>();
|
||||
|
||||
boolean present_title = true && (isSetTitle());
|
||||
list.add(present_title);
|
||||
if (present_title)
|
||||
list.add(title);
|
||||
|
||||
return list.hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(PullRequest other) {
|
||||
if (!getClass().equals(other.getClass())) {
|
||||
return getClass().getName().compareTo(other.getClass().getName());
|
||||
}
|
||||
|
||||
int lastComparison = 0;
|
||||
|
||||
lastComparison = Boolean.valueOf(isSetTitle()).compareTo(other.isSetTitle());
|
||||
if (lastComparison != 0) {
|
||||
return lastComparison;
|
||||
}
|
||||
if (isSetTitle()) {
|
||||
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.title, other.title);
|
||||
if (lastComparison != 0) {
|
||||
return lastComparison;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
public _Fields fieldForId(int fieldId) {
|
||||
return _Fields.findByThriftId(fieldId);
|
||||
}
|
||||
|
||||
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
|
||||
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
|
||||
}
|
||||
|
||||
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
|
||||
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder("PullRequest(");
|
||||
boolean first = true;
|
||||
|
||||
sb.append("title:");
|
||||
if (this.title == null) {
|
||||
sb.append("null");
|
||||
} else {
|
||||
sb.append(this.title);
|
||||
}
|
||||
first = false;
|
||||
sb.append(")");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public void validate() throws org.apache.thrift.TException {
|
||||
// check for required fields
|
||||
// check for sub-struct validity
|
||||
}
|
||||
|
||||
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
|
||||
try {
|
||||
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
|
||||
} catch (org.apache.thrift.TException te) {
|
||||
throw new java.io.IOException(te);
|
||||
}
|
||||
}
|
||||
|
||||
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
|
||||
try {
|
||||
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
|
||||
} catch (org.apache.thrift.TException te) {
|
||||
throw new java.io.IOException(te);
|
||||
}
|
||||
}
|
||||
|
||||
private static class PullRequestStandardSchemeFactory implements SchemeFactory {
|
||||
public PullRequestStandardScheme getScheme() {
|
||||
return new PullRequestStandardScheme();
|
||||
}
|
||||
}
|
||||
|
||||
private static class PullRequestStandardScheme extends StandardScheme<PullRequest> {
|
||||
|
||||
public void read(org.apache.thrift.protocol.TProtocol iprot, PullRequest struct) throws org.apache.thrift.TException {
|
||||
org.apache.thrift.protocol.TField schemeField;
|
||||
iprot.readStructBegin();
|
||||
while (true)
|
||||
{
|
||||
schemeField = iprot.readFieldBegin();
|
||||
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
|
||||
break;
|
||||
}
|
||||
switch (schemeField.id) {
|
||||
case 1: // TITLE
|
||||
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
|
||||
struct.title = iprot.readString();
|
||||
struct.setTitleIsSet(true);
|
||||
} else {
|
||||
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
|
||||
}
|
||||
iprot.readFieldEnd();
|
||||
}
|
||||
iprot.readStructEnd();
|
||||
|
||||
// check for required fields of primitive type, which can't be checked in the validate method
|
||||
struct.validate();
|
||||
}
|
||||
|
||||
public void write(org.apache.thrift.protocol.TProtocol oprot, PullRequest struct) throws org.apache.thrift.TException {
|
||||
struct.validate();
|
||||
|
||||
oprot.writeStructBegin(STRUCT_DESC);
|
||||
if (struct.title != null) {
|
||||
oprot.writeFieldBegin(TITLE_FIELD_DESC);
|
||||
oprot.writeString(struct.title);
|
||||
oprot.writeFieldEnd();
|
||||
}
|
||||
oprot.writeFieldStop();
|
||||
oprot.writeStructEnd();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class PullRequestTupleSchemeFactory implements SchemeFactory {
|
||||
public PullRequestTupleScheme getScheme() {
|
||||
return new PullRequestTupleScheme();
|
||||
}
|
||||
}
|
||||
|
||||
private static class PullRequestTupleScheme extends TupleScheme<PullRequest> {
|
||||
|
||||
@Override
|
||||
public void write(org.apache.thrift.protocol.TProtocol prot, PullRequest struct) throws org.apache.thrift.TException {
|
||||
TTupleProtocol oprot = (TTupleProtocol) prot;
|
||||
BitSet optionals = new BitSet();
|
||||
if (struct.isSetTitle()) {
|
||||
optionals.set(0);
|
||||
}
|
||||
oprot.writeBitSet(optionals, 1);
|
||||
if (struct.isSetTitle()) {
|
||||
oprot.writeString(struct.title);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void read(org.apache.thrift.protocol.TProtocol prot, PullRequest struct) throws org.apache.thrift.TException {
|
||||
TTupleProtocol iprot = (TTupleProtocol) prot;
|
||||
BitSet incoming = iprot.readBitSet(1);
|
||||
if (incoming.get(0)) {
|
||||
struct.title = iprot.readString();
|
||||
struct.setTitleIsSet(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
13
samples/JavaScript/axios.es
Normal file
@@ -0,0 +1,13 @@
import axios from "axios";

export default {
  async getIndex(prefix) {
    const {data} = await axios.get((prefix || "") + "/index.json");
    return data;
  },

  async getContent(path, prefix) {
    const {data} = await axios.get((prefix || "") + "/" + path + ".json");
    return data;
  }
}
60
samples/JavaScript/gen-js-linguist-thrift.js
Normal file
@@ -0,0 +1,60 @@
//
// Autogenerated by Thrift Compiler (1.0.0-dev)
//
// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
//


PullRequest = function(args) {
  this.title = null;
  if (args) {
    if (args.title !== undefined) {
      this.title = args.title;
    }
  }
};
PullRequest.prototype = {};
PullRequest.prototype.read = function(input) {
  input.readStructBegin();
  while (true)
  {
    var ret = input.readFieldBegin();
    var fname = ret.fname;
    var ftype = ret.ftype;
    var fid = ret.fid;
    if (ftype == Thrift.Type.STOP) {
      break;
    }
    switch (fid)
    {
      case 1:
        if (ftype == Thrift.Type.STRING) {
          this.title = input.readString().value;
        } else {
          input.skip(ftype);
        }
        break;
      case 0:
        input.skip(ftype);
        break;
      default:
        input.skip(ftype);
    }
    input.readFieldEnd();
  }
  input.readStructEnd();
  return;
};

PullRequest.prototype.write = function(output) {
  output.writeStructBegin('PullRequest');
  if (this.title !== null && this.title !== undefined) {
    output.writeFieldBegin('title', Thrift.Type.STRING, 1);
    output.writeString(this.title);
    output.writeFieldEnd();
  }
  output.writeFieldStop();
  output.writeStructEnd();
  return;
};
35
samples/JavaScript/index.es
Normal file
@@ -0,0 +1,35 @@
import config from "../webpack.config";
import webpackDevMiddleware from "webpack-dev-middleware";
import webpackHot from "webpack-hot-middleware";
import webpack from "webpack";
import express from "express";

app.use(webpackDevMiddleware(compiler, {
  noInfo: false,
  quiet: false,
  publicPath: config.output.publicPath,
  hot: true,
  historyApiFallback: true
}));

app.get("/(:root).json", (req, resp) => {
  resp.send(indexer.index(req.params.root));
});

export default function(){
  const server = http.createServer(app);

  server.listen(3000);

  const wss = new WebSocketServer({server});

  let id = 1;
  wss.on("connection", (ws) => {
    console.log("Hello", " world");
    let wsId = id++;
    sessions[wsId] = ws;
    ws.on("close", () => {
      delete sessions[wsId]
    });
  });
};
19
samples/JavaScript/logo.jscad
Normal file
@@ -0,0 +1,19 @@
// title : OpenJSCAD.org Logo
// author : Rene K. Mueller
// license : MIT License
// revision : 0.003
// tags : Logo,Intersection,Sphere,Cube
// file : logo.jscad

function main() {
  return union(
    difference(
      cube({size: 3, center: true}),
      sphere({r:2, center: true})
    ),
    intersection(
      sphere({r: 1.3, center: true}),
      cube({size: 2.1, center: true})
    )
  ).translate([0,0,1.5]).scale(10);
}
210
samples/Jupyter Notebook/JupyterNotebook.ipynb
Normal file
File diff suppressed because one or more lines are too long
928
samples/KiCad/nrf-bga.kicad_pcb
Normal file
@@ -0,0 +1,928 @@
|
||||
(kicad_pcb (version 4) (host pcbnew "(2014-08-05 BZR 5054)-product")
|
||||
|
||||
(general
|
||||
(links 36)
|
||||
(no_connects 0)
|
||||
(area 146.9984 92.8984 164.465656 112.3572)
|
||||
(thickness 1.6)
|
||||
(drawings 0)
|
||||
(tracks 108)
|
||||
(zones 0)
|
||||
(modules 12)
|
||||
(nets 54)
|
||||
)
|
||||
|
||||
(page A4)
|
||||
(layers
|
||||
(0 F.Cu signal)
|
||||
(31 B.Cu signal)
|
||||
(32 B.Adhes user)
|
||||
(33 F.Adhes user)
|
||||
(34 B.Paste user)
|
||||
(35 F.Paste user)
|
||||
(36 B.SilkS user)
|
||||
(37 F.SilkS user)
|
||||
(38 B.Mask user)
|
||||
(39 F.Mask user)
|
||||
(40 Dwgs.User user)
|
||||
(41 Cmts.User user)
|
||||
(42 Eco1.User user)
|
||||
(43 Eco2.User user)
|
||||
(44 Edge.Cuts user)
|
||||
(45 Margin user)
|
||||
(46 B.CrtYd user)
|
||||
(47 F.CrtYd user)
|
||||
(48 B.Fab user)
|
||||
(49 F.Fab user)
|
||||
)
|
||||
|
||||
(setup
|
||||
(last_trace_width 0.1016)
|
||||
(trace_clearance 0.1016)
|
||||
(zone_clearance 0.2032)
|
||||
(zone_45_only no)
|
||||
(trace_min 0.1016)
|
||||
(segment_width 0.2)
|
||||
(edge_width 0.1)
|
||||
(via_size 0.889)
|
||||
(via_drill 0.635)
|
||||
(via_min_size 0.889)
|
||||
(via_min_drill 0.508)
|
||||
(uvia_size 0.508)
|
||||
(uvia_drill 0.127)
|
||||
(uvias_allowed no)
|
||||
(uvia_min_size 0.508)
|
||||
(uvia_min_drill 0.127)
|
||||
(pcb_text_width 0.3)
|
||||
(pcb_text_size 1.5 1.5)
|
||||
(mod_edge_width 0.15)
|
||||
(mod_text_size 1 1)
|
||||
(mod_text_width 0.15)
|
||||
(pad_size 0.2 0.2)
|
||||
(pad_drill 0)
|
||||
(pad_to_mask_clearance 0)
|
||||
(aux_axis_origin 0 0)
|
||||
(visible_elements FFFCFF7F)
|
||||
(pcbplotparams
|
||||
(layerselection 0x00000_00000001)
|
||||
(usegerberextensions false)
|
||||
(excludeedgelayer true)
|
||||
(linewidth 0.100000)
|
||||
(plotframeref false)
|
||||
(viasonmask false)
|
||||
(mode 1)
|
||||
(useauxorigin false)
|
||||
(hpglpennumber 1)
|
||||
(hpglpenspeed 20)
|
||||
(hpglpendiameter 15)
|
||||
(hpglpenoverlay 2)
|
||||
(psnegative false)
|
||||
(psa4output false)
|
||||
(plotreference true)
|
||||
(plotvalue true)
|
||||
(plotinvisibletext false)
|
||||
(padsonsilk false)
|
||||
(subtractmaskfromsilk false)
|
||||
(outputformat 5)
|
||||
(mirror true)
|
||||
(drillshape 0)
|
||||
(scaleselection 1)
|
||||
(outputdirectory pdfs/))
|
||||
)
|
||||
|
||||
(net 0 "")
|
||||
(net 1 "Net-(C1-Pad1)")
|
||||
(net 2 GND)
|
||||
(net 3 "Net-(C2-Pad1)")
|
||||
(net 4 "Net-(C3-Pad1)")
|
||||
(net 5 "Net-(C4-Pad2)")
|
||||
(net 6 "Net-(D1-Pad1)")
|
||||
(net 7 +BATT)
|
||||
(net 8 SWDIO)
|
||||
(net 9 SWCLK)
|
||||
(net 10 LED)
|
||||
(net 11 "Net-(U1-PadC1)")
|
||||
(net 12 "Net-(U1-PadD1)")
|
||||
(net 13 "Net-(U1-PadG1)")
|
||||
(net 14 "Net-(U1-PadE2)")
|
||||
(net 15 "Net-(U1-PadF2)")
|
||||
(net 16 "Net-(U1-PadG2)")
|
||||
(net 17 "Net-(U1-PadE3)")
|
||||
(net 18 "Net-(U1-PadF3)")
|
||||
(net 19 "Net-(U1-PadG3)")
|
||||
(net 20 "Net-(U1-PadH4)")
|
||||
(net 21 "Net-(U1-PadA5)")
|
||||
(net 22 "Net-(U1-PadB5)")
|
||||
(net 23 "Net-(U1-PadC5)")
|
||||
(net 24 "Net-(U1-PadH5)")
|
||||
(net 25 "Net-(U1-PadA6)")
|
||||
(net 26 "Net-(U1-PadB6)")
|
||||
(net 27 "Net-(U1-PadC6)")
|
||||
(net 28 "Net-(U1-PadH6)")
|
||||
(net 29 "Net-(U1-PadA7)")
|
||||
(net 30 "Net-(U1-PadB7)")
|
||||
(net 31 "Net-(U1-PadC7)")
|
||||
(net 32 "Net-(U1-PadE7)")
|
||||
(net 33 "Net-(U1-PadF7)")
|
||||
(net 34 "Net-(U1-PadG7)")
|
||||
(net 35 "Net-(U1-PadH7)")
|
||||
(net 36 "Net-(U1-PadA8)")
|
||||
(net 37 "Net-(U1-PadD8)")
|
||||
(net 38 "Net-(U1-PadE8)")
|
||||
(net 39 "Net-(U1-PadF8)")
|
||||
(net 40 "Net-(U1-PadG8)")
|
||||
(net 41 "Net-(U1-PadH8)")
|
||||
(net 42 "Net-(U1-PadE9)")
|
||||
(net 43 "Net-(U1-PadF9)")
|
||||
(net 44 "Net-(U1-PadH1)")
|
||||
(net 45 "Net-(U1-PadJ3)")
|
||||
(net 46 "Net-(U1-PadJ4)")
|
||||
(net 47 "Net-(U1-PadJ5)")
|
||||
(net 48 "Net-(U1-PadJ6)")
|
||||
(net 49 "Net-(U1-PadJ7)")
|
||||
(net 50 "Net-(U1-PadJ8)")
|
||||
(net 51 "Net-(U1-PadH9)")
|
||||
(net 52 "Net-(C5-Pad1)")
|
||||
(net 53 "Net-(ANT1-Pad2)")
|
||||
|
||||
(net_class Default "This is the default net class."
|
||||
(clearance 0.1016)
|
||||
(trace_width 0.1016)
|
||||
(via_dia 0.889)
|
||||
(via_drill 0.635)
|
||||
(uvia_dia 0.508)
|
||||
(uvia_drill 0.127)
|
||||
(add_net +BATT)
|
||||
(add_net LED)
|
||||
(add_net "Net-(C1-Pad1)")
|
||||
(add_net "Net-(C2-Pad1)")
|
||||
(add_net "Net-(C3-Pad1)")
|
||||
(add_net "Net-(C4-Pad2)")
|
||||
(add_net "Net-(C5-Pad1)")
|
||||
(add_net "Net-(D1-Pad1)")
|
||||
(add_net "Net-(U1-PadA5)")
|
||||
(add_net "Net-(U1-PadA6)")
|
||||
(add_net "Net-(U1-PadA7)")
|
||||
(add_net "Net-(U1-PadA8)")
|
||||
(add_net "Net-(U1-PadB5)")
|
||||
(add_net "Net-(U1-PadB6)")
|
||||
(add_net "Net-(U1-PadB7)")
|
||||
(add_net "Net-(U1-PadC1)")
|
||||
(add_net "Net-(U1-PadC5)")
|
||||
(add_net "Net-(U1-PadC6)")
|
||||
(add_net "Net-(U1-PadC7)")
|
||||
(add_net "Net-(U1-PadD1)")
|
||||
(add_net "Net-(U1-PadD8)")
|
||||
(add_net "Net-(U1-PadE2)")
|
||||
(add_net "Net-(U1-PadE3)")
|
||||
(add_net "Net-(U1-PadE7)")
|
||||
(add_net "Net-(U1-PadE8)")
|
||||
(add_net "Net-(U1-PadE9)")
|
||||
(add_net "Net-(U1-PadF2)")
|
||||
(add_net "Net-(U1-PadF3)")
|
||||
(add_net "Net-(U1-PadF7)")
|
||||
(add_net "Net-(U1-PadF8)")
|
||||
(add_net "Net-(U1-PadF9)")
|
||||
(add_net "Net-(U1-PadG1)")
|
||||
(add_net "Net-(U1-PadG2)")
|
||||
(add_net "Net-(U1-PadG3)")
|
||||
(add_net "Net-(U1-PadG7)")
|
||||
(add_net "Net-(U1-PadG8)")
|
||||
(add_net "Net-(U1-PadH1)")
|
||||
(add_net "Net-(U1-PadH4)")
|
||||
(add_net "Net-(U1-PadH5)")
|
||||
(add_net "Net-(U1-PadH6)")
|
||||
(add_net "Net-(U1-PadH7)")
|
||||
(add_net "Net-(U1-PadH8)")
|
||||
(add_net "Net-(U1-PadH9)")
|
||||
(add_net "Net-(U1-PadJ3)")
|
||||
(add_net "Net-(U1-PadJ4)")
|
||||
(add_net "Net-(U1-PadJ5)")
|
||||
(add_net "Net-(U1-PadJ6)")
|
||||
(add_net "Net-(U1-PadJ7)")
|
||||
(add_net "Net-(U1-PadJ8)")
|
||||
(add_net SWCLK)
|
||||
(add_net SWDIO)
|
||||
)
|
||||
|
||||
(net_class ANT ""
|
||||
(clearance 0.1016)
|
||||
(trace_width 0.254)
|
||||
(via_dia 0.889)
|
||||
(via_drill 0.635)
|
||||
(uvia_dia 0.508)
|
||||
(uvia_drill 0.127)
|
||||
(add_net GND)
|
||||
(add_net "Net-(ANT1-Pad2)")
|
||||
)
|
||||
|
||||
(module Capacitors_SMD:C_0402 (layer F.Cu) (tedit 5415D599) (tstamp 54B59067)
|
||||
(at 153.75 102.5 270)
|
||||
(descr "Capacitor SMD 0402, reflow soldering, AVX (see smccp.pdf)")
|
||||
(tags "capacitor 0402")
|
||||
(path /54B59137)
|
||||
(attr smd)
|
||||
(fp_text reference C1 (at 0 -1.7 270) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value 12p (at 0 1.7 270) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.15 -0.6) (end 1.15 -0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 -0.6) (end -1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.15 -0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.25 -0.475) (end -0.25 -0.475) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.25 0.475) (end 0.25 0.475) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.55 0 270) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 1 "Net-(C1-Pad1)"))
|
||||
(pad 2 smd rect (at 0.55 0 270) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(model Capacitors_SMD/C_0402.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Capacitors_SMD:C_0402 (layer F.Cu) (tedit 5415D599) (tstamp 54B58E0D)
|
||||
(at 159.004 100.711)
|
||||
(descr "Capacitor SMD 0402, reflow soldering, AVX (see smccp.pdf)")
|
||||
(tags "capacitor 0402")
|
||||
(path /54B591B6)
|
||||
(attr smd)
|
||||
(fp_text reference C2 (at 0 -1.7) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value 12p (at 0 1.7) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.15 -0.6) (end 1.15 -0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 -0.6) (end -1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.15 -0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.25 -0.475) (end -0.25 -0.475) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.25 0.475) (end 0.25 0.475) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.55 0) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 3 "Net-(C2-Pad1)"))
|
||||
(pad 2 smd rect (at 0.55 0) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(model Capacitors_SMD/C_0402.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Capacitors_SMD:C_0402 (layer F.Cu) (tedit 5415D599) (tstamp 54B59E9D)
|
||||
(at 159.004 102.1588)
|
||||
(descr "Capacitor SMD 0402, reflow soldering, AVX (see smccp.pdf)")
|
||||
(tags "capacitor 0402")
|
||||
(path /54B58D67)
|
||||
(attr smd)
|
||||
(fp_text reference C3 (at 0 -1.7) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value 100n (at 0 1.7) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.15 -0.6) (end 1.15 -0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 -0.6) (end -1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.15 -0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.25 -0.475) (end -0.25 -0.475) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.25 0.475) (end 0.25 0.475) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.55 0) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 4 "Net-(C3-Pad1)"))
|
||||
(pad 2 smd rect (at 0.55 0) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(model Capacitors_SMD/C_0402.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Capacitors_SMD:C_0402 (layer F.Cu) (tedit 5415D599) (tstamp 54B58E25)
|
||||
(at 154.15 106.55 90)
|
||||
(descr "Capacitor SMD 0402, reflow soldering, AVX (see smccp.pdf)")
|
||||
(tags "capacitor 0402")
|
||||
(path /54B58DD5)
|
||||
(attr smd)
|
||||
(fp_text reference C4 (at 0 -1.7 90) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value 100n (at 0 1.7 90) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.15 -0.6) (end 1.15 -0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 -0.6) (end -1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.15 -0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.25 -0.475) (end -0.25 -0.475) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.25 0.475) (end 0.25 0.475) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.55 0 90) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad 2 smd rect (at 0.55 0 90) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 5 "Net-(C4-Pad2)"))
|
||||
(model Capacitors_SMD/C_0402.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module SMD_Packages:SMD-0805 (layer F.Cu) (tedit 54B58D88) (tstamp 54B58E32)
|
||||
(at 162.0266 104.9782 90)
|
||||
(path /54B599BA)
|
||||
(attr smd)
|
||||
(fp_text reference D1 (at 0 -0.3175 90) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value LED (at 0 0.381 90) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_circle (center -1.651 0.762) (end -1.651 0.635) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.508 0.762) (end -1.524 0.762) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -1.524 0.762) (end -1.524 -0.762) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -1.524 -0.762) (end -0.508 -0.762) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 0.508 -0.762) (end 1.524 -0.762) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 1.524 -0.762) (end 1.524 0.762) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 1.524 0.762) (end 0.508 0.762) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.9525 0 90) (size 0.889 1.397) (layers F.Cu F.Paste F.Mask)
|
||||
(net 6 "Net-(D1-Pad1)"))
|
||||
(pad 2 smd rect (at 0.9525 0 90) (size 0.889 1.397) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(model SMD_Packages/SMD-0805.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 0.1000000014901161 0.1000000014901161 0.1000000014901161))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Pin_Headers:Pin_Header_Straight_1x04 (layer F.Cu) (tedit 54B58D8A) (tstamp 54B58E41)
|
||||
(at 156.7942 109.1946 180)
|
||||
(descr "Through hole pin header")
|
||||
(tags "pin header")
|
||||
(path /54B593BB)
|
||||
(fp_text reference P1 (at 0 -2.286 180) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value CONN_01X04 (at 0 0 180) (layer F.SilkS) hide
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -2.54 1.27) (end 5.08 1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -2.54 -1.27) (end 5.08 -1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -5.08 -1.27) (end -2.54 -1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 5.08 1.27) (end 5.08 -1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -2.54 -1.27) (end -2.54 1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -5.08 -1.27) (end -5.08 1.27) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -5.08 1.27) (end -2.54 1.27) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 thru_hole rect (at -3.81 0 180) (size 1.7272 2.032) (drill 1.016) (layers *.Cu *.Mask F.SilkS)
|
||||
(net 2 GND))
|
||||
(pad 2 thru_hole oval (at -1.27 0 180) (size 1.7272 2.032) (drill 1.016) (layers *.Cu *.Mask F.SilkS)
|
||||
(net 7 +BATT))
|
||||
(pad 3 thru_hole oval (at 1.27 0 180) (size 1.7272 2.032) (drill 1.016) (layers *.Cu *.Mask F.SilkS)
|
||||
(net 8 SWDIO))
|
||||
(pad 4 thru_hole oval (at 3.81 0 180) (size 1.7272 2.032) (drill 1.016) (layers *.Cu *.Mask F.SilkS)
|
||||
(net 9 SWCLK))
|
||||
(model Pin_Headers/Pin_Header_Straight_1x04.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Resistors_SMD:R_0603 (layer F.Cu) (tedit 5415CC62) (tstamp 54B59E20)
|
||||
(at 160.0454 105.0544 270)
|
||||
(descr "Resistor SMD 0603, reflow soldering, Vishay (see dcrcw.pdf)")
|
||||
(tags "resistor 0603")
|
||||
(path /54B59A45)
|
||||
(attr smd)
|
||||
(fp_text reference R1 (at 0 -1.9 270) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value R (at 0 1.9 270) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.3 -0.8) (end 1.3 -0.8) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.3 0.8) (end 1.3 0.8) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.3 -0.8) (end -1.3 0.8) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.3 -0.8) (end 1.3 0.8) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.5 0.675) (end -0.5 0.675) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.5 -0.675) (end 0.5 -0.675) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.75 0 270) (size 0.5 0.9) (layers F.Cu F.Paste F.Mask)
|
||||
(net 10 LED))
|
||||
(pad 2 smd rect (at 0.75 0 270) (size 0.5 0.9) (layers F.Cu F.Paste F.Mask)
|
||||
(net 6 "Net-(D1-Pad1)"))
|
||||
(model Resistors_SMD/R_0603.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module Crystals_Oscillators_SMD:CX2520DB (layer F.Cu) (tedit 544FEDED) (tstamp 54B58E9A)
|
||||
(at 156.1084 101.3714)
|
||||
(path /54B5903E)
|
||||
(fp_text reference X1 (at 0 -2.25) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value CRYSTAL (at 0 2.5) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.25 1) (end 1.25 1) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 1.25 1) (end 1.25 -1) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start 1.25 -1) (end -1.25 -1) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -1.25 -1) (end -1.25 1) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.9 0.65) (size 1.2 1) (layers F.Cu F.Paste F.Mask)
|
||||
(net 1 "Net-(C1-Pad1)"))
|
||||
(pad 2 smd rect (at 0.9 -0.65) (size 1.2 1) (layers F.Cu F.Paste F.Mask)
|
||||
(net 3 "Net-(C2-Pad1)"))
|
||||
(pad 3 smd rect (at 0.9 0.65) (size 1.2 1) (layers F.Cu F.Paste F.Mask))
|
||||
(pad 4 smd rect (at -0.9 -0.65) (size 1.2 1) (layers F.Cu F.Paste F.Mask))
|
||||
)
|
||||
|
||||
(module NRF51:WLCSP62 (layer F.Cu) (tedit 54B59368) (tstamp 54B59DAD)
|
||||
(at 156.9212 105.1306)
|
||||
(path /54B58C30)
|
||||
(fp_text reference U1 (at 0 3.2) (layer F.SilkS)
|
||||
(effects (font (size 1.2 1.2) (thickness 0.2)))
|
||||
)
|
||||
(fp_text value nRF51x22-CEAA (at 0 -3.2) (layer F.SilkS)
|
||||
(effects (font (size 1.2 1.2) (thickness 0.2)))
|
||||
)
|
||||
(fp_line (start -1.6 -2) (end -2 -1.6) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start -2 -1.6) (end -2 2) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start -2 2) (end 2 2) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start 2 2) (end 2 -2) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start 2 -2) (end -1.6 -2) (layer F.SilkS) (width 0.2))
|
||||
(pad A1 smd circle (at -1.6 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 7 +BATT))
|
||||
(pad B1 smd circle (at -1.6 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad C1 smd circle (at -1.6 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 11 "Net-(U1-PadC1)"))
|
||||
(pad D1 smd circle (at -1.6 -0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 12 "Net-(U1-PadD1)"))
|
||||
(pad E1 smd circle (at -1.6 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 52 "Net-(C5-Pad1)"))
|
||||
(pad F1 smd circle (at -1.6 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 5 "Net-(C4-Pad2)"))
|
||||
(pad G1 smd circle (at -1.6 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 13 "Net-(U1-PadG1)"))
|
||||
(pad H1 smd circle (at -1.6 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 44 "Net-(U1-PadH1)"))
|
||||
(pad A2 smd circle (at -1.2 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 1 "Net-(C1-Pad1)"))
|
||||
(pad E2 smd circle (at -1.2 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 14 "Net-(U1-PadE2)"))
|
||||
(pad F2 smd circle (at -1.2 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 15 "Net-(U1-PadF2)"))
|
||||
(pad G2 smd circle (at -1.2 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 16 "Net-(U1-PadG2)"))
|
||||
(pad H2 smd circle (at -1.2 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 9 SWCLK))
|
||||
(pad J2 smd circle (at -1.2 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 8 SWDIO))
|
||||
(pad A3 smd circle (at -0.8 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 3 "Net-(C2-Pad1)"))
|
||||
(pad E3 smd circle (at -0.8 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 17 "Net-(U1-PadE3)"))
|
||||
(pad F3 smd circle (at -0.8 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 18 "Net-(U1-PadF3)"))
|
||||
(pad G3 smd circle (at -0.8 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 19 "Net-(U1-PadG3)"))
|
||||
(pad H3 smd circle (at -0.8 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad J3 smd circle (at -0.8 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 45 "Net-(U1-PadJ3)"))
|
||||
(pad A4 smd circle (at -0.4 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 4 "Net-(C3-Pad1)"))
|
||||
(pad B4 smd circle (at -0.4 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad G4 smd circle (at -0.4 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad H4 smd circle (at -0.4 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 20 "Net-(U1-PadH4)"))
|
||||
(pad J4 smd circle (at -0.4 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 46 "Net-(U1-PadJ4)"))
|
||||
(pad A5 smd circle (at 0 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 21 "Net-(U1-PadA5)"))
|
||||
(pad B5 smd circle (at 0 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 22 "Net-(U1-PadB5)"))
|
||||
(pad C5 smd circle (at 0 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 23 "Net-(U1-PadC5)"))
|
||||
(pad G5 smd circle (at 0 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad H5 smd circle (at 0 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 24 "Net-(U1-PadH5)"))
|
||||
(pad J5 smd circle (at 0 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 47 "Net-(U1-PadJ5)"))
|
||||
(pad A6 smd circle (at 0.4 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 25 "Net-(U1-PadA6)"))
|
||||
(pad B6 smd circle (at 0.4 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 26 "Net-(U1-PadB6)"))
|
||||
(pad C6 smd circle (at 0.4 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 27 "Net-(U1-PadC6)"))
|
||||
(pad G6 smd circle (at 0.4 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad H6 smd circle (at 0.4 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 28 "Net-(U1-PadH6)"))
|
||||
(pad J6 smd circle (at 0.4 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 48 "Net-(U1-PadJ6)"))
|
||||
(pad A7 smd circle (at 0.8 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 29 "Net-(U1-PadA7)"))
|
||||
(pad B7 smd circle (at 0.8 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 30 "Net-(U1-PadB7)"))
|
||||
(pad C7 smd circle (at 0.8 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 31 "Net-(U1-PadC7)"))
|
||||
(pad D7 smd circle (at 0.8 -0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad E7 smd circle (at 0.8 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 32 "Net-(U1-PadE7)"))
|
||||
(pad F7 smd circle (at 0.8 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 33 "Net-(U1-PadF7)"))
|
||||
(pad G7 smd circle (at 0.8 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 34 "Net-(U1-PadG7)"))
|
||||
(pad H7 smd circle (at 0.8 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 35 "Net-(U1-PadH7)"))
|
||||
(pad J7 smd circle (at 0.8 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 49 "Net-(U1-PadJ7)"))
|
||||
(pad A8 smd circle (at 1.2 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 36 "Net-(U1-PadA8)"))
|
||||
(pad B8 smd circle (at 1.2 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 7 +BATT))
|
||||
(pad C8 smd circle (at 1.2 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad D8 smd circle (at 1.2 -0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 37 "Net-(U1-PadD8)"))
|
||||
(pad E8 smd circle (at 1.2 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 38 "Net-(U1-PadE8)"))
|
||||
(pad F8 smd circle (at 1.2 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 39 "Net-(U1-PadF8)"))
|
||||
(pad G8 smd circle (at 1.2 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 40 "Net-(U1-PadG8)"))
|
||||
(pad H8 smd circle (at 1.2 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 41 "Net-(U1-PadH8)"))
|
||||
(pad J8 smd circle (at 1.2 1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 50 "Net-(U1-PadJ8)"))
|
||||
(pad A9 smd circle (at 1.6 -1.6) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask))
|
||||
(pad B9 smd circle (at 1.6 -1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 7 +BATT))
|
||||
(pad C9 smd circle (at 1.6 -0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad D9 smd circle (at 1.6 -0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 10 LED))
|
||||
(pad E9 smd circle (at 1.6 0) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 42 "Net-(U1-PadE9)"))
|
||||
(pad F9 smd circle (at 1.6 0.4) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 43 "Net-(U1-PadF9)"))
|
||||
(pad G9 smd circle (at 1.6 0.8) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(pad H9 smd circle (at 1.6 1.2) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 51 "Net-(U1-PadH9)"))
|
||||
)
|
||||
|
||||
(module Capacitors_SMD:C_0402 (layer F.Cu) (tedit 5415D599) (tstamp 55048D3E)
|
||||
(at 153 106.55 270)
|
||||
(descr "Capacitor SMD 0402, reflow soldering, AVX (see smccp.pdf)")
|
||||
(tags "capacitor 0402")
|
||||
(path /550483E1)
|
||||
(attr smd)
|
||||
(fp_text reference C5 (at 0 -1.7 270) (layer F.SilkS)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_text value 2n2 (at 0 1.7 270) (layer F.Fab)
|
||||
(effects (font (size 1 1) (thickness 0.15)))
|
||||
)
|
||||
(fp_line (start -1.15 -0.6) (end 1.15 -0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start -1.15 -0.6) (end -1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 1.15 -0.6) (end 1.15 0.6) (layer F.CrtYd) (width 0.05))
|
||||
(fp_line (start 0.25 -0.475) (end -0.25 -0.475) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -0.25 0.475) (end 0.25 0.475) (layer F.SilkS) (width 0.15))
|
||||
(pad 1 smd rect (at -0.55 0 270) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 52 "Net-(C5-Pad1)"))
|
||||
(pad 2 smd rect (at 0.55 0 270) (size 0.6 0.5) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
(model Capacitors_SMD.3dshapes/C_0402.wrl
|
||||
(at (xyz 0 0 0))
|
||||
(scale (xyz 1 1 1))
|
||||
(rotate (xyz 0 0 0))
|
||||
)
|
||||
)
|
||||
|
||||
(module ANT:ANT-2.4GHz (layer F.Cu) (tedit 54517711) (tstamp 55049531)
|
||||
(at 149.35 106.95 90)
|
||||
(path /55047C3D)
|
||||
(fp_text reference ANT1 (at -2.5 1.5 90) (layer F.SilkS)
|
||||
(effects (font (size 0.4 0.4) (thickness 0.04)))
|
||||
)
|
||||
(fp_text value ANT-2.4GHz (at -2 2 90) (layer F.SilkS)
|
||||
(effects (font (size 0.4 0.4) (thickness 0.04)))
|
||||
)
|
||||
(pad 1 smd rect (at 0 0 90) (size 0.9 4.9) (layers F.Cu)
|
||||
(net 2 GND))
|
||||
(pad 3 smd rect (at 2.05 -2.7 90) (size 5 0.5) (layers F.Cu))
|
||||
(pad 2 smd rect (at 2.1 0 90) (size 0.5 4.9) (layers F.Cu)
|
||||
(net 53 "Net-(ANT1-Pad2)"))
|
||||
(pad 3 smd rect (at 4.3 -1.13 90) (size 0.5 2.64) (layers F.Cu))
|
||||
(pad 3 smd rect (at 6.8 -1.13 90) (size 0.5 2.64) (layers F.Cu))
|
||||
(pad 3 smd rect (at 5.55 -0.06 90) (size 2 0.5) (layers F.Cu))
|
||||
(pad 3 smd rect (at 7.9 -2.7 90) (size 2.7 0.5) (layers F.Cu))
|
||||
(pad 3 smd rect (at 12.6 -2.7 90) (size 2.7 0.5) (layers F.Cu))
|
||||
(pad 3 smd rect (at 9 -1.13 90) (size 0.5 2.64) (layers F.Cu))
|
||||
(pad 3 smd rect (at 11.5 -1.13 90) (size 0.5 2.64) (layers F.Cu))
|
||||
(pad 3 smd rect (at 10.25 -0.06 90) (size 2 0.5) (layers F.Cu))
|
||||
(pad 3 smd rect (at 13.7 -0.48 90) (size 0.5 3.94) (layers F.Cu))
|
||||
)
|
||||
|
||||
(module NRF51:BAL-NRF02D3 (layer F.Cu) (tedit 550C7CF1) (tstamp 55048D47)
|
||||
(at 153.65 104.55 180)
|
||||
(path /550480A2)
|
||||
(fp_text reference U2 (at 0 1.95 180) (layer F.SilkS)
|
||||
(effects (font (size 1.2 1.2) (thickness 0.2)))
|
||||
)
|
||||
(fp_text value BAL-NRF02D3 (at 0 -1.95 180) (layer F.SilkS)
|
||||
(effects (font (size 1.2 1.2) (thickness 0.2)))
|
||||
)
|
||||
(fp_circle (center 0.75 -1.25) (end 1 -1.25) (layer F.SilkS) (width 0.15))
|
||||
(fp_line (start -1 -0.75) (end -1 0.75) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start -1 0.75) (end 1 0.75) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start 1 0.75) (end 1 -0.75) (layer F.SilkS) (width 0.2))
|
||||
(fp_line (start 1 -0.75) (end -1 -0.75) (layer F.SilkS) (width 0.2))
|
||||
(pad A3 smd circle (at -0.5 -0.25 180) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 12 "Net-(U1-PadD1)"))
|
||||
(pad B3 smd circle (at -0.5 0.25 180) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 11 "Net-(U1-PadC1)"))
|
||||
(pad A2 smd circle (at 0 -0.25 180) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 52 "Net-(C5-Pad1)"))
|
||||
(pad A1 smd circle (at 0.5 -0.25 180) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 53 "Net-(ANT1-Pad2)"))
|
||||
(pad B1 smd circle (at 0.5 0.25 180) (size 0.2 0.2) (layers F.Cu F.Paste F.Mask)
|
||||
(net 2 GND))
|
||||
)
|
||||
|
||||
(segment (start 155.7212 102.5342) (end 155.2084 102.0214) (width 0.1016) (layer F.Cu) (net 1))
|
||||
(segment (start 155.7212 103.5306) (end 155.7212 102.5342) (width 0.1016) (layer F.Cu) (net 1))
|
||||
(segment (start 155.137 101.95) (end 155.2084 102.0214) (width 0.1016) (layer F.Cu) (net 1) (tstamp 55048FFD))
|
||||
(segment (start 153.75 101.95) (end 155.137 101.95) (width 0.1016) (layer F.Cu) (net 1))
|
||||
(segment (start 156.947646 104.7306) (end 156.7306 104.7306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.7306 104.7306) (end 156.5212 104.5212) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.5212 104.5212) (end 156.5212 104.072021) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.5212 104.072021) (end 156.5212 103.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.1212 106.3306) (end 156.5212 105.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.5212 105.9306) (end 156.9212 105.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 157.3212 105.9306) (end 156.9212 105.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 157.7212 104.7306) (end 156.947646 104.7306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.5212 103.9306) (end 155.3212 103.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.9212 104.757046) (end 156.9212 105.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 156.947646 104.7306) (end 156.9212 104.757046) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 157.7212 104.7306) (end 158.1212 104.3306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 158.1212 104.3306) (end 158.5212 104.3306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 161.2265 103.4796) (end 161.7726 104.0257) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.3722 103.4796) (end 161.2265 103.4796) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 161.7726 104.0257) (end 162.0266 104.0257) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.554 101.0626) (end 159.554 102.1588) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.554 100.711) (end 159.554 101.0626) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.3722 102.6922) (end 159.3722 103.4796) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.554 102.5104) (end 159.3722 102.6922) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.554 102.1588) (end 159.554 102.5104) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 159.3722 103.621021) (end 159.3722 103.4796) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 158.5212 104.3306) (end 158.662621 104.3306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 158.662621 104.3306) (end 159.3722 103.621021) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 155.0306 103.9306) (end 154.15 103.05) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55048FDD))
|
||||
(segment (start 154.15 103.05) (end 153.75 103.05) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55048FEF))
|
||||
(segment (start 155.3212 103.9306) (end 155.0306 103.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 154.15 107.1) (end 153 107.1) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 160.6042 108.3542) (end 160.6042 109.1946) (width 0.1016) (layer F.Cu) (net 2) (tstamp 5504935E))
|
||||
(segment (start 158.5212 105.9306) (end 159.05 105.9306) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 160.85 105.05) (end 159.4 105.05) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55049373))
|
||||
(segment (start 159.4 105.05) (end 159.05 105.4) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55049377))
|
||||
(segment (start 159.05 105.4) (end 159.05 105.9306) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55049378))
|
||||
(segment (start 161.8743 104.0257) (end 160.85 105.05) (width 0.1016) (layer F.Cu) (net 2) (tstamp 55049366))
|
||||
(segment (start 162.0266 104.0257) (end 161.8743 104.0257) (width 0.1016) (layer F.Cu) (net 2))
|
||||
(segment (start 149.5 107.1) (end 149.35 106.95) (width 0.1016) (layer F.Cu) (net 2) (tstamp 5504964A) (status 30))
|
||||
(segment (start 149.5 107.1) (end 149.35 106.95) (width 0.254) (layer F.Cu) (net 2) (tstamp 5504967F) (status 30))
|
||||
(segment (start 153 107.1) (end 149.5 107.1) (width 0.254) (layer F.Cu) (net 2) (status 20))
|
||||
(segment (start 158.4436 100.7214) (end 158.454 100.711) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 157.0084 100.7214) (end 158.4436 100.7214) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 156.6786 100.7214) (end 157.0084 100.7214) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 156.1084 101.2916) (end 156.6786 100.7214) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 156.1084 103.5178) (end 156.1084 101.2916) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 156.1212 103.5306) (end 156.1084 103.5178) (width 0.1016) (layer F.Cu) (net 3))
|
||||
(segment (start 156.5212 103.389179) (end 157.014979 102.8954) (width 0.1016) (layer F.Cu) (net 4))
|
||||
(segment (start 156.5212 103.5306) (end 156.5212 103.389179) (width 0.1016) (layer F.Cu) (net 4))
|
||||
(segment (start 157.6674 102.8954) (end 158.404 102.1588) (width 0.1016) (layer F.Cu) (net 4))
|
||||
(segment (start 158.404 102.1588) (end 158.454 102.1588) (width 0.1016) (layer F.Cu) (net 4))
|
||||
(segment (start 157.014979 102.8954) (end 157.6674 102.8954) (width 0.1016) (layer F.Cu) (net 4))
|
||||
(segment (start 154.45 106) (end 154.9194 105.5306) (width 0.1016) (layer F.Cu) (net 5) (tstamp 5504901F))
|
||||
(segment (start 154.9194 105.5306) (end 155.3212 105.5306) (width 0.1016) (layer F.Cu) (net 5) (tstamp 55049021))
|
||||
(segment (start 154.15 106) (end 154.45 106) (width 0.1016) (layer F.Cu) (net 5))
|
||||
(segment (start 160.1717 105.9307) (end 160.0454 105.8044) (width 0.1016) (layer F.Cu) (net 6))
|
||||
(segment (start 162.0266 105.9307) (end 160.1717 105.9307) (width 0.1016) (layer F.Cu) (net 6))
|
||||
(segment (start 159 99.9) (end 159.05 99.9) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 159.35 99.6) (end 161.2 99.6) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.25 111.15) (end 158.0642 110.9642) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 159.05 99.9) (end 159.35 99.6) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 161.2 99.6) (end 163.35 101.75) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 163.35 101.75) (end 163.35 110.35) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 163.35 110.35) (end 162.55 111.15) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.0642 110.9642) (end 158.0642 109.1946) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 162.55 111.15) (end 158.25 111.15) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.5212 103.9306) (end 158.1212 103.9306) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.0642 109.347) (end 158.0642 109.1946) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 159 99.9) (end 159 103.593221) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.662621 103.9306) (end 158.5212 103.9306) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 154.1 99.6) (end 158.7 99.6) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 153 102.3) (end 153 100.7) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 153.2 102.5) (end 153 102.3) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 158.7 99.6) (end 159 99.9) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 154.2906 102.5) (end 153.2 102.5) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 159 103.593221) (end 158.662621 103.9306) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 153 100.7) (end 154.1 99.6) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 155.3212 103.5306) (end 154.2906 102.5) (width 0.1016) (layer F.Cu) (net 7))
|
||||
(segment (start 155.7212 108.9976) (end 155.5242 109.1946) (width 0.1016) (layer F.Cu) (net 8))
|
||||
(segment (start 155.7212 106.7306) (end 155.7212 108.9976) (width 0.1016) (layer F.Cu) (net 8))
|
||||
(segment (start 152.9842 109.0676) (end 152.9842 109.1946) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 152.9842 109.1946) (end 152.9842 109.0422) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 155.2018 106.85) (end 155.7212 106.3306) (width 0.1016) (layer F.Cu) (net 9) (tstamp 55049280))
|
||||
(segment (start 155.1 106.95) (end 155.2 106.85) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 155.1 107.35) (end 155.1 106.95) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 154.55 107.9) (end 155.1 107.35) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 154.1264 107.9) (end 154.55 107.9) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 152.9842 109.0422) (end 154.1264 107.9) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 155.2 106.85) (end 155.2018 106.85) (width 0.1016) (layer F.Cu) (net 9))
|
||||
(segment (start 160.0454 104.3044) (end 159.4938 104.3044) (width 0.1016) (layer F.Cu) (net 10))
|
||||
(segment (start 158.662621 104.7306) (end 158.5212 104.7306) (width 0.1016) (layer F.Cu) (net 10))
|
||||
(segment (start 159.0676 104.7306) (end 158.662621 104.7306) (width 0.1016) (layer F.Cu) (net 10))
|
||||
(segment (start 159.4938 104.3044) (end 159.0676 104.7306) (width 0.1016) (layer F.Cu) (net 10))
|
||||
(segment (start 154.1806 104.3306) (end 154.15 104.3) (width 0.1016) (layer F.Cu) (net 11) (tstamp 5504938B))
|
||||
(segment (start 155.3212 104.3306) (end 154.1806 104.3306) (width 0.1016) (layer F.Cu) (net 11))
|
||||
(segment (start 154.2194 104.7306) (end 154.15 104.8) (width 0.1016) (layer F.Cu) (net 12) (tstamp 55049388))
|
||||
(segment (start 155.3212 104.7306) (end 154.2194 104.7306) (width 0.1016) (layer F.Cu) (net 12))
|
||||
(segment (start 155.3212 105.1306) (end 154.7194 105.1306) (width 0.1016) (layer F.Cu) (net 52))
|
||||
(segment (start 154.7194 105.1306) (end 154.6 105.25) (width 0.1016) (layer F.Cu) (net 52))
|
||||
(segment (start 154.6 105.25) (end 153.65 105.25) (width 0.1016) (layer F.Cu) (net 52))
|
||||
(segment (start 153.65 105.25) (end 153.65 104.8) (width 0.1016) (layer F.Cu) (net 52) (tstamp 5504901A))
|
||||
(segment (start 153.6 105.3) (end 153.65 105.25) (width 0.1016) (layer F.Cu) (net 52) (tstamp 55049386))
|
||||
(segment (start 153 105.9) (end 153.6 105.3) (width 0.1016) (layer F.Cu) (net 52) (tstamp 55049018))
|
||||
(segment (start 153 106) (end 153 105.9) (width 0.1016) (layer F.Cu) (net 52))
|
||||
(segment (start 149.4 104.8) (end 149.35 104.85) (width 0.1016) (layer F.Cu) (net 53) (tstamp 55049543) (status 30))
|
||||
(segment (start 153.1 104.85) (end 153.15 104.8) (width 0.254) (layer F.Cu) (net 53) (tstamp 550495A4))
|
||||
(segment (start 149.35 104.85) (end 153.1 104.85) (width 0.254) (layer F.Cu) (net 53) (status 10))
|
||||
|
||||
(zone (net 2) (net_name GND) (layer F.Cu) (tstamp 550C7F83) (hatch edge 0.508)
|
||||
(connect_pads (clearance 0.2032))
|
||||
(min_thickness 0.1524)
|
||||
(fill yes (arc_segments 16) (thermal_gap 0.2032) (thermal_bridge_width 0.15494) (smoothing chamfer) (radius 0.5))
|
||||
(polygon
|
||||
(pts
|
||||
(xy 164 111.85) (xy 151.5 111.85) (xy 151.5 97.15) (xy 164 97.15)
|
||||
)
|
||||
)
|
||||
(filled_polygon
|
||||
(pts
|
||||
(xy 163.9238 111.318437) (xy 163.468437 111.7738) (xy 152.031563 111.7738) (xy 151.5762 111.318437) (xy 151.5762 107.6794)
|
||||
(xy 151.744424 107.6794) (xy 151.855576 107.6794) (xy 151.958267 107.636864) (xy 152.036864 107.558267) (xy 152.0794 107.455576)
|
||||
(xy 152.0794 107.02112) (xy 152.00955 106.95127) (xy 151.5762 106.95127) (xy 151.5762 106.94873) (xy 152.00955 106.94873)
|
||||
(xy 152.0794 106.87888) (xy 152.0794 106.444424) (xy 152.036864 106.341733) (xy 151.958267 106.263136) (xy 151.855576 106.2206)
|
||||
(xy 151.744424 106.2206) (xy 151.5762 106.2206) (xy 151.5762 105.3794) (xy 151.855576 105.3794) (xy 151.958267 105.336864)
|
||||
(xy 152.036864 105.258267) (xy 152.037637 105.2564) (xy 153.1 105.2564) (xy 153.195652 105.237373) (xy 153.012426 105.4206)
|
||||
(xy 152.694424 105.4206) (xy 152.591733 105.463136) (xy 152.513136 105.541733) (xy 152.4706 105.644424) (xy 152.4706 105.755576)
|
||||
(xy 152.4706 106.355576) (xy 152.513136 106.458267) (xy 152.591733 106.536864) (xy 152.623446 106.55) (xy 152.591733 106.563136)
|
||||
(xy 152.513136 106.641733) (xy 152.4706 106.744424) (xy 152.4706 107.02888) (xy 152.54045 107.09873) (xy 152.99873 107.09873)
|
||||
(xy 152.99873 107.07873) (xy 153.00127 107.07873) (xy 153.00127 107.09873) (xy 153.45955 107.09873) (xy 153.5294 107.02888)
|
||||
(xy 153.5294 106.744424) (xy 153.486864 106.641733) (xy 153.408267 106.563136) (xy 153.376553 106.55) (xy 153.408267 106.536864)
|
||||
(xy 153.486864 106.458267) (xy 153.5294 106.355576) (xy 153.5294 106.244424) (xy 153.5294 105.837574) (xy 153.6206 105.746374)
|
||||
(xy 153.6206 105.755576) (xy 153.6206 106.355576) (xy 153.663136 106.458267) (xy 153.741733 106.536864) (xy 153.773446 106.55)
|
||||
(xy 153.741733 106.563136) (xy 153.663136 106.641733) (xy 153.6206 106.744424) (xy 153.6206 107.02888) (xy 153.69045 107.09873)
|
||||
(xy 154.14873 107.09873) (xy 154.14873 107.07873) (xy 154.15127 107.07873) (xy 154.15127 107.09873) (xy 154.60955 107.09873)
|
||||
(xy 154.6794 107.02888) (xy 154.6794 106.744424) (xy 154.636864 106.641733) (xy 154.558267 106.563136) (xy 154.526553 106.55)
|
||||
(xy 154.558267 106.536864) (xy 154.636864 106.458267) (xy 154.6794 106.355576) (xy 154.6794 106.244424) (xy 154.6794 106.236217)
|
||||
(xy 154.683487 106.233487) (xy 154.7968 106.120174) (xy 154.7968 106.634168) (xy 154.872829 106.710197) (xy 154.866513 106.716513)
|
||||
(xy 154.794935 106.823638) (xy 154.7698 106.95) (xy 154.7698 107.213226) (xy 154.6794 107.303626) (xy 154.6794 107.17112)
|
||||
(xy 154.60955 107.10127) (xy 154.15127 107.10127) (xy 154.15127 107.12127) (xy 154.14873 107.12127) (xy 154.14873 107.10127)
|
||||
(xy 153.69045 107.10127) (xy 153.6206 107.17112) (xy 153.6206 107.455576) (xy 153.663136 107.558267) (xy 153.741733 107.636864)
|
||||
(xy 153.844424 107.6794) (xy 153.880026 107.6794) (xy 153.5294 108.030025) (xy 153.5294 107.455576) (xy 153.5294 107.17112)
|
||||
(xy 153.45955 107.10127) (xy 153.00127 107.10127) (xy 153.00127 107.60955) (xy 153.07112 107.6794) (xy 153.194424 107.6794)
|
||||
(xy 153.305576 107.6794) (xy 153.408267 107.636864) (xy 153.486864 107.558267) (xy 153.5294 107.455576) (xy 153.5294 108.030025)
|
||||
(xy 153.527705 108.03172) (xy 153.421607 107.960828) (xy 152.99873 107.876712) (xy 152.99873 107.60955) (xy 152.99873 107.10127)
|
||||
(xy 152.54045 107.10127) (xy 152.4706 107.17112) (xy 152.4706 107.455576) (xy 152.513136 107.558267) (xy 152.591733 107.636864)
|
||||
(xy 152.694424 107.6794) (xy 152.805576 107.6794) (xy 152.92888 107.6794) (xy 152.99873 107.60955) (xy 152.99873 107.876712)
|
||||
(xy 152.9842 107.873822) (xy 152.546793 107.960828) (xy 152.175977 108.208599) (xy 151.928206 108.579415) (xy 151.8412 109.016822)
|
||||
(xy 151.8412 109.372378) (xy 151.928206 109.809785) (xy 152.175977 110.180601) (xy 152.546793 110.428372) (xy 152.9842 110.515378)
|
||||
(xy 153.421607 110.428372) (xy 153.792423 110.180601) (xy 154.040194 109.809785) (xy 154.1272 109.372378) (xy 154.1272 109.016822)
|
||||
(xy 154.040194 108.579415) (xy 153.989631 108.503742) (xy 154.263173 108.2302) (xy 154.55 108.2302) (xy 154.676362 108.205065)
|
||||
(xy 154.783487 108.133487) (xy 155.333483 107.583489) (xy 155.333486 107.583487) (xy 155.333487 107.583487) (xy 155.391 107.497411)
|
||||
(xy 155.391 107.900317) (xy 155.086793 107.960828) (xy 154.715977 108.208599) (xy 154.468206 108.579415) (xy 154.3812 109.016822)
|
||||
(xy 154.3812 109.372378) (xy 154.468206 109.809785) (xy 154.715977 110.180601) (xy 155.086793 110.428372) (xy 155.5242 110.515378)
|
||||
(xy 155.961607 110.428372) (xy 156.332423 110.180601) (xy 156.580194 109.809785) (xy 156.6672 109.372378) (xy 156.6672 109.016822)
|
||||
(xy 156.580194 108.579415) (xy 156.332423 108.208599) (xy 156.0514 108.020825) (xy 156.0514 107.2532) (xy 158.434168 107.2532)
|
||||
(xy 159.0532 106.634168) (xy 159.0532 105.0608) (xy 159.0676 105.0608) (xy 159.193962 105.035665) (xy 159.301087 104.964087)
|
||||
(xy 159.463138 104.802035) (xy 159.539824 104.8338) (xy 159.650976 104.8338) (xy 160.550976 104.8338) (xy 160.653667 104.791264)
|
||||
(xy 160.732264 104.712667) (xy 160.7748 104.609976) (xy 160.7748 104.498824) (xy 160.7748 103.998824) (xy 160.732264 103.896133)
|
||||
(xy 160.653667 103.817536) (xy 160.550976 103.775) (xy 160.439824 103.775) (xy 160.1334 103.775) (xy 160.1334 102.464376)
|
||||
(xy 160.1334 102.353224) (xy 160.1334 102.22992) (xy 160.1334 102.08768) (xy 160.1334 101.964376) (xy 160.1334 101.853224)
|
||||
(xy 160.1334 101.016576) (xy 160.1334 100.905424) (xy 160.1334 100.78212) (xy 160.1334 100.63988) (xy 160.1334 100.516576)
|
||||
(xy 160.1334 100.405424) (xy 160.090864 100.302733) (xy 160.012267 100.224136) (xy 159.909576 100.1816) (xy 159.62512 100.1816)
|
||||
(xy 159.55527 100.25145) (xy 159.55527 100.70973) (xy 160.06355 100.70973) (xy 160.1334 100.63988) (xy 160.1334 100.78212)
|
||||
(xy 160.06355 100.71227) (xy 159.55527 100.71227) (xy 159.55527 101.17055) (xy 159.62512 101.2404) (xy 159.909576 101.2404)
|
||||
(xy 160.012267 101.197864) (xy 160.090864 101.119267) (xy 160.1334 101.016576) (xy 160.1334 101.853224) (xy 160.090864 101.750533)
|
||||
(xy 160.012267 101.671936) (xy 159.909576 101.6294) (xy 159.62512 101.6294) (xy 159.55527 101.69925) (xy 159.55527 102.15753)
|
||||
(xy 160.06355 102.15753) (xy 160.1334 102.08768) (xy 160.1334 102.22992) (xy 160.06355 102.16007) (xy 159.55527 102.16007)
|
||||
(xy 159.55527 102.61835) (xy 159.62512 102.6882) (xy 159.909576 102.6882) (xy 160.012267 102.645664) (xy 160.090864 102.567067)
|
||||
(xy 160.1334 102.464376) (xy 160.1334 103.775) (xy 159.539824 103.775) (xy 159.437133 103.817536) (xy 159.358536 103.896133)
|
||||
(xy 159.316 103.998824) (xy 159.316 104.033704) (xy 159.260313 104.070913) (xy 159.0532 104.278026) (xy 159.0532 104.006995)
|
||||
(xy 159.233487 103.826708) (xy 159.305065 103.719583) (xy 159.3302 103.593221) (xy 159.330201 103.593221) (xy 159.3302 103.593215)
|
||||
(xy 159.3302 102.6882) (xy 159.48288 102.6882) (xy 159.55273 102.61835) (xy 159.55273 102.16007) (xy 159.53273 102.16007)
|
||||
(xy 159.53273 102.15753) (xy 159.55273 102.15753) (xy 159.55273 101.69925) (xy 159.48288 101.6294) (xy 159.3302 101.6294)
|
||||
(xy 159.3302 101.2404) (xy 159.48288 101.2404) (xy 159.55273 101.17055) (xy 159.55273 100.71227) (xy 159.53273 100.71227)
|
||||
(xy 159.53273 100.70973) (xy 159.55273 100.70973) (xy 159.55273 100.25145) (xy 159.48288 100.1816) (xy 159.3302 100.1816)
|
||||
(xy 159.3302 100.086774) (xy 159.486774 99.9302) (xy 161.063226 99.9302) (xy 163.0198 101.886774) (xy 163.0198 110.213226)
|
||||
(xy 163.0045 110.228526) (xy 163.0045 106.430776) (xy 163.0045 106.319624) (xy 163.0045 105.430624) (xy 163.0045 104.525776)
|
||||
(xy 163.0045 104.09682) (xy 163.0045 103.95458) (xy 163.0045 103.525624) (xy 162.961964 103.422933) (xy 162.883367 103.344336)
|
||||
(xy 162.780676 103.3018) (xy 162.669524 103.3018) (xy 162.09772 103.3018) (xy 162.02787 103.37165) (xy 162.02787 104.02443)
|
||||
(xy 162.93465 104.02443) (xy 163.0045 103.95458) (xy 163.0045 104.09682) (xy 162.93465 104.02697) (xy 162.02787 104.02697)
|
||||
(xy 162.02787 104.67975) (xy 162.09772 104.7496) (xy 162.669524 104.7496) (xy 162.780676 104.7496) (xy 162.883367 104.707064)
|
||||
(xy 162.961964 104.628467) (xy 163.0045 104.525776) (xy 163.0045 105.430624) (xy 162.961964 105.327933) (xy 162.883367 105.249336)
|
||||
(xy 162.780676 105.2068) (xy 162.669524 105.2068) (xy 162.02533 105.2068) (xy 162.02533 104.67975) (xy 162.02533 104.02697)
|
||||
(xy 162.02533 104.02443) (xy 162.02533 103.37165) (xy 161.95548 103.3018) (xy 161.383676 103.3018) (xy 161.272524 103.3018)
|
||||
(xy 161.169833 103.344336) (xy 161.091236 103.422933) (xy 161.0487 103.525624) (xy 161.0487 103.95458) (xy 161.11855 104.02443)
|
||||
(xy 162.02533 104.02443) (xy 162.02533 104.02697) (xy 161.11855 104.02697) (xy 161.0487 104.09682) (xy 161.0487 104.525776)
|
||||
(xy 161.091236 104.628467) (xy 161.169833 104.707064) (xy 161.272524 104.7496) (xy 161.383676 104.7496) (xy 161.95548 104.7496)
|
||||
(xy 162.02533 104.67975) (xy 162.02533 105.2068) (xy 161.272524 105.2068) (xy 161.169833 105.249336) (xy 161.091236 105.327933)
|
||||
(xy 161.0487 105.430624) (xy 161.0487 105.541776) (xy 161.0487 105.6005) (xy 160.7748 105.6005) (xy 160.7748 105.498824)
|
||||
(xy 160.732264 105.396133) (xy 160.653667 105.317536) (xy 160.550976 105.275) (xy 160.439824 105.275) (xy 159.539824 105.275)
|
||||
(xy 159.437133 105.317536) (xy 159.358536 105.396133) (xy 159.316 105.498824) (xy 159.316 105.609976) (xy 159.316 106.109976)
|
||||
(xy 159.358536 106.212667) (xy 159.437133 106.291264) (xy 159.539824 106.3338) (xy 159.650976 106.3338) (xy 160.550976 106.3338)
|
||||
(xy 160.653667 106.291264) (xy 160.684031 106.2609) (xy 161.0487 106.2609) (xy 161.0487 106.430776) (xy 161.091236 106.533467)
|
||||
(xy 161.169833 106.612064) (xy 161.272524 106.6546) (xy 161.383676 106.6546) (xy 162.780676 106.6546) (xy 162.883367 106.612064)
|
||||
(xy 162.961964 106.533467) (xy 163.0045 106.430776) (xy 163.0045 110.228526) (xy 162.413226 110.8198) (xy 161.7472 110.8198)
|
||||
(xy 161.7472 110.266176) (xy 161.7472 110.155024) (xy 161.7472 109.26572) (xy 161.7472 109.12348) (xy 161.7472 108.234176)
|
||||
(xy 161.7472 108.123024) (xy 161.704664 108.020333) (xy 161.626067 107.941736) (xy 161.523376 107.8992) (xy 160.67532 107.8992)
|
||||
(xy 160.60547 107.96905) (xy 160.60547 109.19333) (xy 161.67735 109.19333) (xy 161.7472 109.12348) (xy 161.7472 109.26572)
|
||||
(xy 161.67735 109.19587) (xy 160.60547 109.19587) (xy 160.60547 110.42015) (xy 160.67532 110.49) (xy 161.523376 110.49)
|
||||
(xy 161.626067 110.447464) (xy 161.704664 110.368867) (xy 161.7472 110.266176) (xy 161.7472 110.8198) (xy 160.60293 110.8198)
|
||||
(xy 160.60293 110.42015) (xy 160.60293 109.19587) (xy 160.60293 109.19333) (xy 160.60293 107.96905) (xy 160.53308 107.8992)
|
||||
(xy 159.685024 107.8992) (xy 159.582333 107.941736) (xy 159.503736 108.020333) (xy 159.4612 108.123024) (xy 159.4612 108.234176)
|
||||
(xy 159.4612 109.12348) (xy 159.53105 109.19333) (xy 160.60293 109.19333) (xy 160.60293 109.19587) (xy 159.53105 109.19587)
|
||||
(xy 159.4612 109.26572) (xy 159.4612 110.155024) (xy 159.4612 110.266176) (xy 159.503736 110.368867) (xy 159.582333 110.447464)
|
||||
(xy 159.685024 110.49) (xy 160.53308 110.49) (xy 160.60293 110.42015) (xy 160.60293 110.8198) (xy 158.3944 110.8198)
|
||||
(xy 158.3944 110.449696) (xy 158.501607 110.428372) (xy 158.872423 110.180601) (xy 159.120194 109.809785) (xy 159.2072 109.372378)
|
||||
(xy 159.2072 109.016822) (xy 159.120194 108.579415) (xy 158.872423 108.208599) (xy 158.501607 107.960828) (xy 158.0642 107.873822)
|
||||
(xy 157.626793 107.960828) (xy 157.255977 108.208599) (xy 157.008206 108.579415) (xy 156.9212 109.016822) (xy 156.9212 109.372378)
|
||||
(xy 157.008206 109.809785) (xy 157.255977 110.180601) (xy 157.626793 110.428372) (xy 157.734 110.449696) (xy 157.734 110.9642)
|
||||
(xy 157.759135 111.090562) (xy 157.830713 111.197687) (xy 158.016513 111.383487) (xy 158.123638 111.455065) (xy 158.25 111.480201)
|
||||
(xy 158.25 111.4802) (xy 158.250005 111.4802) (xy 162.55 111.4802) (xy 162.676362 111.455065) (xy 162.783487 111.383487)
|
||||
(xy 163.583487 110.583487) (xy 163.655065 110.476362) (xy 163.6802 110.35) (xy 163.680201 110.35) (xy 163.6802 110.349994)
|
||||
(xy 163.6802 101.75) (xy 163.655065 101.623638) (xy 163.583487 101.516513) (xy 161.433487 99.366513) (xy 161.326362 99.294935)
|
||||
(xy 161.2 99.2698) (xy 159.35 99.2698) (xy 159.223638 99.294935) (xy 159.116513 99.366513) (xy 159.025 99.458026)
|
||||
(xy 158.933487 99.366513) (xy 158.826362 99.294935) (xy 158.7 99.2698) (xy 154.100005 99.2698) (xy 154.1 99.269799)
|
||||
(xy 153.973638 99.294935) (xy 153.866513 99.366513) (xy 152.766513 100.466513) (xy 152.694935 100.573638) (xy 152.6698 100.7)
|
||||
(xy 152.6698 102.3) (xy 152.694935 102.426362) (xy 152.766513 102.533487) (xy 152.96651 102.733483) (xy 152.966513 102.733487)
|
||||
(xy 152.966514 102.733487) (xy 153.073638 102.805065) (xy 153.2 102.830201) (xy 153.2 102.8302) (xy 153.200005 102.8302)
|
||||
(xy 153.2206 102.8302) (xy 153.2206 102.97888) (xy 153.29045 103.04873) (xy 153.74873 103.04873) (xy 153.74873 103.02873)
|
||||
(xy 153.75127 103.02873) (xy 153.75127 103.04873) (xy 154.20955 103.04873) (xy 154.2794 102.97888) (xy 154.2794 102.955773)
|
||||
(xy 154.868129 103.544502) (xy 154.7968 103.615832) (xy 154.7968 104.0004) (xy 154.387007 104.0004) (xy 154.365194 103.978548)
|
||||
(xy 154.2794 103.942923) (xy 154.2794 103.405576) (xy 154.2794 103.12112) (xy 154.20955 103.05127) (xy 153.75127 103.05127)
|
||||
(xy 153.75127 103.55955) (xy 153.82112 103.6294) (xy 153.944424 103.6294) (xy 154.055576 103.6294) (xy 154.158267 103.586864)
|
||||
(xy 154.236864 103.508267) (xy 154.2794 103.405576) (xy 154.2794 103.942923) (xy 154.225798 103.920666) (xy 154.074864 103.920534)
|
||||
(xy 153.935368 103.978173) (xy 153.887912 104.025544) (xy 153.74873 103.886362) (xy 153.74873 103.55955) (xy 153.74873 103.05127)
|
||||
(xy 153.29045 103.05127) (xy 153.2206 103.12112) (xy 153.2206 103.405576) (xy 153.263136 103.508267) (xy 153.341733 103.586864)
|
||||
(xy 153.444424 103.6294) (xy 153.555576 103.6294) (xy 153.67888 103.6294) (xy 153.74873 103.55955) (xy 153.74873 103.886362)
|
||||
(xy 153.675 103.812632) (xy 153.412173 104.075458) (xy 153.376346 104.076713) (xy 153.376346 103.986301) (xy 153.239069 103.92356)
|
||||
(xy 153.088231 103.91813) (xy 152.946798 103.970835) (xy 152.923654 103.986301) (xy 152.926759 104.074963) (xy 153.15 104.298204)
|
||||
(xy 153.373241 104.074963) (xy 153.376346 103.986301) (xy 153.376346 104.076713) (xy 153.375037 104.076759) (xy 153.151796 104.3)
|
||||
(xy 153.165938 104.314142) (xy 153.164142 104.315938) (xy 153.15 104.301796) (xy 153.135857 104.315938) (xy 153.134061 104.314142)
|
||||
(xy 153.148204 104.3) (xy 152.924963 104.076759) (xy 152.836301 104.073654) (xy 152.77356 104.210931) (xy 152.76813 104.361769)
|
||||
(xy 152.798624 104.4436) (xy 152.037637 104.4436) (xy 152.036864 104.441733) (xy 151.958267 104.363136) (xy 151.855576 104.3206)
|
||||
(xy 151.744424 104.3206) (xy 151.5762 104.3206) (xy 151.5762 97.681563) (xy 152.031563 97.2262) (xy 163.468437 97.2262)
|
||||
(xy 163.9238 97.681563) (xy 163.9238 111.318437)
|
||||
)
|
||||
)
|
||||
)
|
||||
(zone (net 0) (net_name "") (layer F.Cu) (tstamp 550C7E3B) (hatch edge 0.508)
|
||||
(connect_pads (clearance 0.2032))
|
||||
(min_thickness 0.1524)
|
||||
(keepout (tracks allowed) (vias allowed) (copperpour not_allowed))
|
||||
(fill yes (arc_segments 16) (thermal_gap 0.2032) (thermal_bridge_width 0.15494) (smoothing chamfer) (radius 0.5))
|
||||
(polygon
|
||||
(pts
|
||||
(xy 153.9 104.55) (xy 153.45 104.55) (xy 153.45 104.1) (xy 153.9 104.1)
|
||||
)
|
||||
)
|
||||
)
|
||||
(zone (net 0) (net_name "") (layer F.Cu) (tstamp 550C7E43) (hatch edge 0.508)
|
||||
(connect_pads (clearance 0.2032))
|
||||
(min_thickness 0.1524)
|
||||
(keepout (tracks allowed) (vias allowed) (copperpour not_allowed))
|
||||
(fill yes (arc_segments 16) (thermal_gap 0.2032) (thermal_bridge_width 0.15494) (smoothing chamfer) (radius 0.5))
|
||||
(polygon
|
||||
(pts
|
||||
(xy 158.85 107.05) (xy 155 107.05) (xy 155 103.2) (xy 158.85 103.2)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
14069 samples/KiCad/tc14badge.brd (Normal file)
File diff suppressed because it is too large.

74 samples/LSL/LSL.lslp (Normal file)
@@ -0,0 +1,74 @@
/*
Testing syntax highlighting
for the Linden Scripting Language
*/

integer someIntNormal = 3672;
integer someIntHex = 0x00000000;
integer someIntMath = PI_BY_TWO;

integer event = 5673;// 'event' is invalid.illegal

key someKeyTexture = TEXTURE_DEFAULT;
string someStringSpecial = EOF;

some_user_defined_function_without_return_type(string inputAsString)
{
    llSay(PUBLIC_CHANNEL, inputAsString);
}

string user_defined_function_returning_a_string(key inputAsKey)
{
    return (string)inputAsKey;
}

default
{
    state_entry()
    {
        key someKey = NULL_KEY;
        someKey = llGetOwner();

        string someString = user_defined_function_returning_a_string(someKey);

        some_user_defined_function_without_return_type(someString);
    }

    touch_start(integer num_detected)
    {
        list agentsInRegion = llGetAgentList(AGENT_LIST_REGION, []);
        integer numOfAgents = llGetListLength(agentsInRegion);

        integer index; // defaults to 0
        for (; index <= numOfAgents - 1; index++) // for each agent in region
        {
            llRegionSayTo(llList2Key(agentsInRegion, index), PUBLIC_CHANNEL, "Hello, Avatar!");
        }
    }

    touch_end(integer num_detected)
    {
        someIntNormal = 3672;
        someIntHex = 0x00000000;
        someIntMath = PI_BY_TWO;

        event = 5673;// 'event' is invalid.illegal

        someKeyTexture = TEXTURE_DEFAULT;
        someStringSpecial = EOF;

        llSetInventoryPermMask("some item", MASK_NEXT, PERM_ALL);// 'llSetInventoryPermMask' is reserved.godmode

        llWhisper(PUBLIC_CHANNEL, "Leaving \"default\" now...");
        state other;
    }
}

state other
{
    state_entry()
    {
        llWhisper(PUBLIC_CHANNEL, "Entered \"state other\", returning to \"default\" again...");
        state default;
    }
}

601 samples/Lex/zend_ini_scanner.l (Normal file)
@@ -0,0 +1,601 @@
|
||||
/*
|
||||
+----------------------------------------------------------------------+
|
||||
| Zend Engine |
|
||||
+----------------------------------------------------------------------+
|
||||
| Copyright (c) 1998-2012 Zend Technologies Ltd. (http://www.zend.com) |
|
||||
+----------------------------------------------------------------------+
|
||||
| This source file is subject to version 2.00 of the Zend license, |
|
||||
| that is bundled with this package in the file LICENSE, and is |
|
||||
| available through the world-wide-web at the following url: |
|
||||
| http://www.zend.com/license/2_00.txt. |
|
||||
| If you did not receive a copy of the Zend license and are unable to |
|
||||
| obtain it through the world-wide-web, please send a note to |
|
||||
| license@zend.com so we can mail you a copy immediately. |
|
||||
+----------------------------------------------------------------------+
|
||||
| Authors: Zeev Suraski <zeev@zend.com> |
|
||||
| Jani Taskinen <jani@php.net> |
|
||||
| Marcus Boerger <helly@php.net> |
|
||||
| Nuno Lopes <nlopess@php.net> |
|
||||
| Scott MacVicar <scottmac@php.net> |
|
||||
+----------------------------------------------------------------------+
|
||||
*/
|
||||
|
||||
/* $Id$ */
|
||||
|
||||
#include <errno.h>
|
||||
#include "zend.h"
|
||||
#include "zend_globals.h"
|
||||
#include <zend_ini_parser.h>
|
||||
#include "zend_ini_scanner.h"
|
||||
|
||||
#if 0
|
||||
# define YYDEBUG(s, c) printf("state: %d char: %c\n", s, c)
|
||||
#else
|
||||
# define YYDEBUG(s, c)
|
||||
#endif
|
||||
|
||||
#include "zend_ini_scanner_defs.h"
|
||||
|
||||
#define YYCTYPE unsigned char
|
||||
/* allow the scanner to read one null byte after the end of the string (from ZEND_MMAP_AHEAD)
|
||||
 * so that it will be able to terminate to match the current token (e.g. non-enclosed string) */
|
||||
#define YYFILL(n) { if (YYCURSOR > YYLIMIT) return 0; }
|
||||
#define YYCURSOR SCNG(yy_cursor)
|
||||
#define YYLIMIT SCNG(yy_limit)
|
||||
#define YYMARKER SCNG(yy_marker)
|
||||
|
||||
#define YYGETCONDITION() SCNG(yy_state)
|
||||
#define YYSETCONDITION(s) SCNG(yy_state) = s
|
||||
|
||||
#define STATE(name) yyc##name
|
||||
|
||||
/* emulate flex constructs */
|
||||
#define BEGIN(state) YYSETCONDITION(STATE(state))
|
||||
#define YYSTATE YYGETCONDITION()
|
||||
#define yytext ((char*)SCNG(yy_text))
|
||||
#define yyleng SCNG(yy_leng)
|
||||
#define yyless(x) do { YYCURSOR = (unsigned char*)yytext + x; \
|
||||
yyleng = (unsigned int)x; } while(0)
|
||||
|
||||
/* #define yymore() goto yymore_restart */
|
||||
|
||||
/* perform sanity check. If this message is triggered you should
|
||||
increase the ZEND_MMAP_AHEAD value in the zend_streams.h file */
|
||||
/*!max:re2c */
|
||||
#if ZEND_MMAP_AHEAD < (YYMAXFILL + 1)
|
||||
# error ZEND_MMAP_AHEAD should be greater than YYMAXFILL
|
||||
#endif
|
||||
|
||||
|
||||
/* How it works (for the core ini directives):
|
||||
* ===========================================
|
||||
*
|
||||
* 1. Scanner scans file for tokens and passes them to parser.
|
||||
* 2. Parser parses the tokens and passes the name/value pairs to the callback
|
||||
* function which stores them in the configuration hash table.
|
||||
* 3. Later REGISTER_INI_ENTRIES() is called which triggers the actual
|
||||
* registering of ini entries and uses zend_get_configuration_directive()
|
||||
* to fetch the previously stored name/value pair from configuration hash table
|
||||
* and registers the static ini entries which match the name to the value
|
||||
* into EG(ini_directives) hash table.
|
||||
* 4. PATH section entries are used per-request from down to top, each overriding
|
||||
* previous if one exists. zend_alter_ini_entry() is called for each entry.
|
||||
* Settings in PATH section are ZEND_INI_SYSTEM accessible and thus mimics the
|
||||
* php_admin_* directives used within Apache httpd.conf when PHP is compiled as
|
||||
* module for Apache.
|
||||
* 5. User defined ini files (like .htaccess for apache) are parsed for each request and
|
||||
* stored in separate hash defined by SAPI.
|
||||
*/
|
||||
|
||||
/* TODO: (ordered by importance :-)
|
||||
* ===============================================================================
|
||||
*
|
||||
* - Separate constant lookup totally from plain strings (using CONSTANT pattern)
|
||||
* - Add #if .. #else .. #endif and ==, !=, <, > , <=, >= operators
|
||||
* - Add #include "some.ini"
|
||||
* - Allow variables to refer to options also when using parse_ini_file()
|
||||
*
|
||||
*/
|
||||
|
||||
/* Globals Macros */
|
||||
#define SCNG INI_SCNG
|
||||
#ifdef ZTS
|
||||
ZEND_API ts_rsrc_id ini_scanner_globals_id;
|
||||
#else
|
||||
ZEND_API zend_ini_scanner_globals ini_scanner_globals;
|
||||
#endif
|
||||
|
||||
/* Eat leading whitespace */
|
||||
#define EAT_LEADING_WHITESPACE() \
|
||||
while (yytext[0]) { \
|
||||
if (yytext[0] == ' ' || yytext[0] == '\t') { \
|
||||
SCNG(yy_text)++; \
|
||||
yyleng--; \
|
||||
} else { \
|
||||
break; \
|
||||
} \
|
||||
}
|
||||
|
||||
/* Eat trailing whitespace + extra char */
|
||||
#define EAT_TRAILING_WHITESPACE_EX(ch) \
|
||||
while (yyleng > 0 && ( \
|
||||
(ch != 'X' && yytext[yyleng - 1] == ch) || \
|
||||
yytext[yyleng - 1] == '\n' || \
|
||||
yytext[yyleng - 1] == '\r' || \
|
||||
yytext[yyleng - 1] == '\t' || \
|
||||
yytext[yyleng - 1] == ' ') \
|
||||
) { \
|
||||
yyleng--; \
|
||||
}
|
||||
|
||||
/* Eat trailing whitespace */
|
||||
#define EAT_TRAILING_WHITESPACE() EAT_TRAILING_WHITESPACE_EX('X')
|
||||
|
||||
#define zend_ini_copy_value(retval, str, len) { \
|
||||
Z_STRVAL_P(retval) = zend_strndup(str, len); \
|
||||
Z_STRLEN_P(retval) = len; \
|
||||
Z_TYPE_P(retval) = IS_STRING; \
|
||||
}
|
||||
|
||||
#define RETURN_TOKEN(type, str, len) { \
|
||||
zend_ini_copy_value(ini_lval, str, len); \
|
||||
return type; \
|
||||
}
|
||||
|
||||
static void _yy_push_state(int new_state TSRMLS_DC)
|
||||
{
|
||||
zend_stack_push(&SCNG(state_stack), (void *) &YYGETCONDITION(), sizeof(int));
|
||||
YYSETCONDITION(new_state);
|
||||
}
|
||||
|
||||
#define yy_push_state(state_and_tsrm) _yy_push_state(yyc##state_and_tsrm)
|
||||
|
||||
static void yy_pop_state(TSRMLS_D)
|
||||
{
|
||||
int *stack_state;
|
||||
zend_stack_top(&SCNG(state_stack), (void **) &stack_state);
|
||||
YYSETCONDITION(*stack_state);
|
||||
zend_stack_del_top(&SCNG(state_stack));
|
||||
}
|
||||
|
||||
static void yy_scan_buffer(char *str, unsigned int len TSRMLS_DC)
|
||||
{
|
||||
YYCURSOR = (YYCTYPE*)str;
|
||||
SCNG(yy_start) = YYCURSOR;
|
||||
YYLIMIT = YYCURSOR + len;
|
||||
}
|
||||
|
||||
#define ini_filename SCNG(filename)
|
||||
|
||||
/* {{{ init_ini_scanner()
|
||||
*/
|
||||
static int init_ini_scanner(int scanner_mode, zend_file_handle *fh TSRMLS_DC)
|
||||
{
|
||||
/* Sanity check */
|
||||
if (scanner_mode != ZEND_INI_SCANNER_NORMAL && scanner_mode != ZEND_INI_SCANNER_RAW) {
|
||||
zend_error(E_WARNING, "Invalid scanner mode");
|
||||
return FAILURE;
|
||||
}
|
||||
|
||||
SCNG(lineno) = 1;
|
||||
SCNG(scanner_mode) = scanner_mode;
|
||||
SCNG(yy_in) = fh;
|
||||
|
||||
if (fh != NULL) {
|
||||
ini_filename = zend_strndup(fh->filename, strlen(fh->filename));
|
||||
} else {
|
||||
ini_filename = NULL;
|
||||
}
|
||||
|
||||
zend_stack_init(&SCNG(state_stack));
|
||||
BEGIN(INITIAL);
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ shutdown_ini_scanner()
|
||||
*/
|
||||
void shutdown_ini_scanner(TSRMLS_D)
|
||||
{
|
||||
zend_stack_destroy(&SCNG(state_stack));
|
||||
if (ini_filename) {
|
||||
free(ini_filename);
|
||||
}
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ zend_ini_scanner_get_lineno()
|
||||
*/
|
||||
int zend_ini_scanner_get_lineno(TSRMLS_D)
|
||||
{
|
||||
return SCNG(lineno);
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ zend_ini_scanner_get_filename()
|
||||
*/
|
||||
char *zend_ini_scanner_get_filename(TSRMLS_D)
|
||||
{
|
||||
return ini_filename ? ini_filename : "Unknown";
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ zend_ini_open_file_for_scanning()
|
||||
*/
|
||||
int zend_ini_open_file_for_scanning(zend_file_handle *fh, int scanner_mode TSRMLS_DC)
|
||||
{
|
||||
char *buf;
|
||||
size_t size;
|
||||
|
||||
if (zend_stream_fixup(fh, &buf, &size TSRMLS_CC) == FAILURE) {
|
||||
return FAILURE;
|
||||
}
|
||||
|
||||
if (init_ini_scanner(scanner_mode, fh TSRMLS_CC) == FAILURE) {
|
||||
zend_file_handle_dtor(fh TSRMLS_CC);
|
||||
return FAILURE;
|
||||
}
|
||||
|
||||
yy_scan_buffer(buf, size TSRMLS_CC);
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ zend_ini_prepare_string_for_scanning()
|
||||
*/
|
||||
int zend_ini_prepare_string_for_scanning(char *str, int scanner_mode TSRMLS_DC)
|
||||
{
|
||||
int len = strlen(str);
|
||||
|
||||
if (init_ini_scanner(scanner_mode, NULL TSRMLS_CC) == FAILURE) {
|
||||
return FAILURE;
|
||||
}
|
||||
|
||||
yy_scan_buffer(str, len TSRMLS_CC);
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
/* {{{ zend_ini_escape_string()
|
||||
*/
|
||||
static void zend_ini_escape_string(zval *lval, char *str, int len, char quote_type TSRMLS_DC)
|
||||
{
|
||||
register char *s, *t;
|
||||
char *end;
|
||||
|
||||
zend_ini_copy_value(lval, str, len);
|
||||
|
||||
/* convert escape sequences */
|
||||
s = t = Z_STRVAL_P(lval);
|
||||
end = s + Z_STRLEN_P(lval);
|
||||
|
||||
while (s < end) {
|
||||
if (*s == '\\') {
|
||||
s++;
|
||||
if (s >= end) {
|
||||
*t++ = '\\';
|
||||
continue;
|
||||
}
|
||||
switch (*s) {
|
||||
case '"':
|
||||
if (*s != quote_type) {
|
||||
*t++ = '\\';
|
||||
*t++ = *s;
|
||||
break;
|
||||
}
|
||||
case '\\':
|
||||
case '$':
|
||||
*t++ = *s;
|
||||
Z_STRLEN_P(lval)--;
|
||||
break;
|
||||
default:
|
||||
*t++ = '\\';
|
||||
*t++ = *s;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
*t++ = *s;
|
||||
}
|
||||
if (*s == '\n' || (*s == '\r' && (*(s+1) != '\n'))) {
|
||||
SCNG(lineno)++;
|
||||
}
|
||||
s++;
|
||||
}
|
||||
*t = 0;
|
||||
}
|
||||
/* }}} */
|
||||
|
||||
int ini_lex(zval *ini_lval TSRMLS_DC)
|
||||
{
|
||||
restart:
|
||||
SCNG(yy_text) = YYCURSOR;
|
||||
|
||||
/* yymore_restart: */
|
||||
/* detect EOF */
|
||||
if (YYCURSOR >= YYLIMIT) {
|
||||
if (YYSTATE == STATE(ST_VALUE) || YYSTATE == STATE(ST_RAW)) {
|
||||
BEGIN(INITIAL);
|
||||
return END_OF_LINE;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Eat any UTF-8 BOM we find in the first 3 bytes */
|
||||
if (YYCURSOR == SCNG(yy_start) && YYCURSOR + 3 < YYLIMIT) {
|
||||
if (memcmp(YYCURSOR, "\xef\xbb\xbf", 3) == 0) {
|
||||
YYCURSOR += 3;
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
/*!re2c
|
||||
re2c:yyfill:check = 0;
|
||||
LNUM [0-9]+
|
||||
DNUM ([0-9]*[\.][0-9]+)|([0-9]+[\.][0-9]*)
|
||||
NUMBER [-]?{LNUM}|{DNUM}
|
||||
ANY_CHAR (.|[\n\t])
|
||||
NEWLINE ("\r"|"\n"|"\r\n")
|
||||
TABS_AND_SPACES [ \t]
|
||||
WHITESPACE [ \t]+
|
||||
CONSTANT [a-zA-Z_][a-zA-Z0-9_]*
|
||||
LABEL [^=\n\r\t;|&$~(){}!"\[]+
|
||||
TOKENS [:,.\[\]"'()|^&+-/*=%$!~<>?@{}]
|
||||
OPERATORS [&|~()!]
|
||||
DOLLAR_CURLY "${"
|
||||
|
||||
SECTION_RAW_CHARS [^\]\n\r]
|
||||
SINGLE_QUOTED_CHARS [^']
|
||||
RAW_VALUE_CHARS [^"\n\r;\000]
|
||||
|
||||
LITERAL_DOLLAR ("$"([^{\000]|("\\"{ANY_CHAR})))
|
||||
VALUE_CHARS ([^$= \t\n\r;&|~()!"'\000]|{LITERAL_DOLLAR})
|
||||
SECTION_VALUE_CHARS ([^$\n\r;"'\]\\]|("\\"{ANY_CHAR})|{LITERAL_DOLLAR})
|
||||
|
||||
<!*> := yyleng = YYCURSOR - SCNG(yy_text);
|
||||
|
||||
<INITIAL>"[" { /* Section start */
|
||||
/* Enter section data lookup state */
|
||||
if (SCNG(scanner_mode) == ZEND_INI_SCANNER_RAW) {
|
||||
yy_push_state(ST_SECTION_RAW TSRMLS_CC);
|
||||
} else {
|
||||
yy_push_state(ST_SECTION_VALUE TSRMLS_CC);
|
||||
}
|
||||
return TC_SECTION;
|
||||
}
|
||||
|
||||
<ST_VALUE,ST_SECTION_VALUE,ST_OFFSET>"'"{SINGLE_QUOTED_CHARS}+"'" { /* Raw string */
|
||||
/* Eat leading and trailing single quotes */
|
||||
if (yytext[0] == '\'' && yytext[yyleng - 1] == '\'') {
|
||||
SCNG(yy_text)++;
|
||||
yyleng = yyleng - 2;
|
||||
}
|
||||
RETURN_TOKEN(TC_RAW, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_SECTION_RAW,ST_SECTION_VALUE>"]"{TABS_AND_SPACES}*{NEWLINE}? { /* End of section */
|
||||
BEGIN(INITIAL);
|
||||
SCNG(lineno)++;
|
||||
return ']';
|
||||
}
|
||||
|
||||
<INITIAL>{LABEL}"["{TABS_AND_SPACES}* { /* Start of option with offset */
|
||||
/* Eat leading whitespace */
|
||||
EAT_LEADING_WHITESPACE();
|
||||
|
||||
/* Eat trailing whitespace and [ */
|
||||
EAT_TRAILING_WHITESPACE_EX('[');
|
||||
|
||||
/* Enter offset lookup state */
|
||||
yy_push_state(ST_OFFSET TSRMLS_CC);
|
||||
|
||||
RETURN_TOKEN(TC_OFFSET, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_OFFSET>{TABS_AND_SPACES}*"]" { /* End of section or an option offset */
|
||||
BEGIN(INITIAL);
|
||||
return ']';
|
||||
}
|
||||
|
||||
<ST_DOUBLE_QUOTES,ST_SECTION_VALUE,ST_VALUE,ST_OFFSET>{DOLLAR_CURLY} { /* Variable start */
|
||||
yy_push_state(ST_VARNAME TSRMLS_CC);
|
||||
return TC_DOLLAR_CURLY;
|
||||
}
|
||||
|
||||
<ST_VARNAME>{LABEL} { /* Variable name */
|
||||
/* Eat leading whitespace */
|
||||
EAT_LEADING_WHITESPACE();
|
||||
|
||||
/* Eat trailing whitespace */
|
||||
EAT_TRAILING_WHITESPACE();
|
||||
|
||||
RETURN_TOKEN(TC_VARNAME, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_VARNAME>"}" { /* Variable end */
|
||||
yy_pop_state(TSRMLS_C);
|
||||
return '}';
|
||||
}
|
||||
|
||||
<INITIAL,ST_VALUE>("true"|"on"|"yes"){TABS_AND_SPACES}* { /* TRUE value (when used outside option value/offset this causes parse error!) */
|
||||
RETURN_TOKEN(BOOL_TRUE, "1", 1);
|
||||
}
|
||||
|
||||
<INITIAL,ST_VALUE>("false"|"off"|"no"|"none"|"null"){TABS_AND_SPACES}* { /* FALSE value (when used outside option value/offset this causes parse error!)*/
|
||||
RETURN_TOKEN(BOOL_FALSE, "", 0);
|
||||
}
|
||||
|
||||
<INITIAL>{LABEL} { /* Get option name */
|
||||
/* Eat leading whitespace */
|
||||
EAT_LEADING_WHITESPACE();
|
||||
|
||||
/* Eat trailing whitespace */
|
||||
EAT_TRAILING_WHITESPACE();
|
||||
|
||||
RETURN_TOKEN(TC_LABEL, yytext, yyleng);
|
||||
}
|
||||
|
||||
<INITIAL>{TABS_AND_SPACES}*[=]{TABS_AND_SPACES}* { /* Start option value */
|
||||
if (SCNG(scanner_mode) == ZEND_INI_SCANNER_RAW) {
|
||||
yy_push_state(ST_RAW TSRMLS_CC);
|
||||
} else {
|
||||
yy_push_state(ST_VALUE TSRMLS_CC);
|
||||
}
|
||||
return '=';
|
||||
}
|
||||
|
||||
<ST_RAW>["] {
|
||||
while (YYCURSOR < YYLIMIT) {
|
||||
switch (*YYCURSOR++) {
|
||||
case '\n':
|
||||
SCNG(lineno)++;
|
||||
break;
|
||||
case '\r':
|
||||
if (*YYCURSOR != '\n') {
|
||||
SCNG(lineno)++;
|
||||
}
|
||||
break;
|
||||
case '"':
|
||||
yyleng = YYCURSOR - SCNG(yy_text) - 2;
|
||||
SCNG(yy_text)++;
|
||||
RETURN_TOKEN(TC_RAW, yytext, yyleng);
|
||||
case '\\':
|
||||
if (YYCURSOR < YYLIMIT) {
|
||||
YYCURSOR++;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
yyleng = YYCURSOR - SCNG(yy_text);
|
||||
RETURN_TOKEN(TC_RAW, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_RAW>{RAW_VALUE_CHARS}+ { /* Raw value, only used when SCNG(scanner_mode) == ZEND_INI_SCANNER_RAW. */
|
||||
RETURN_TOKEN(TC_RAW, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_SECTION_RAW>{SECTION_RAW_CHARS}+ { /* Raw value, only used when SCNG(scanner_mode) == ZEND_INI_SCANNER_RAW. */
|
||||
RETURN_TOKEN(TC_RAW, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_VALUE,ST_RAW>{TABS_AND_SPACES}*{NEWLINE} { /* End of option value */
|
||||
BEGIN(INITIAL);
|
||||
SCNG(lineno)++;
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<ST_SECTION_VALUE,ST_VALUE,ST_OFFSET>{CONSTANT} { /* Get constant option value */
|
||||
RETURN_TOKEN(TC_CONSTANT, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_SECTION_VALUE,ST_VALUE,ST_OFFSET>{NUMBER} { /* Get number option value as string */
|
||||
RETURN_TOKEN(TC_NUMBER, yytext, yyleng);
|
||||
}
|
||||
|
||||
<INITIAL>{TOKENS} { /* Disallow these chars outside option values */
|
||||
return yytext[0];
|
||||
}
|
||||
|
||||
<ST_VALUE>{OPERATORS}{TABS_AND_SPACES}* { /* Boolean operators */
|
||||
return yytext[0];
|
||||
}
|
||||
|
||||
<ST_VALUE>[=] { /* Make = used in option value to trigger error */
|
||||
yyless(0);
|
||||
BEGIN(INITIAL);
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<ST_VALUE>{VALUE_CHARS}+ { /* Get everything else as option/offset value */
|
||||
RETURN_TOKEN(TC_STRING, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_SECTION_VALUE,ST_OFFSET>{SECTION_VALUE_CHARS}+ { /* Get rest as section/offset value */
|
||||
RETURN_TOKEN(TC_STRING, yytext, yyleng);
|
||||
}
|
||||
|
||||
<ST_SECTION_VALUE,ST_VALUE,ST_OFFSET>{TABS_AND_SPACES}*["] { /* Double quoted '"' string start */
|
||||
yy_push_state(ST_DOUBLE_QUOTES TSRMLS_CC);
|
||||
return '"';
|
||||
}
|
||||
|
||||
<ST_DOUBLE_QUOTES>["]{TABS_AND_SPACES}* { /* Double quoted '"' string ends */
|
||||
yy_pop_state(TSRMLS_C);
|
||||
return '"';
|
||||
}
|
||||
|
||||
<ST_DOUBLE_QUOTES>[^] { /* Escape double quoted string contents */
|
||||
if (YYCURSOR > YYLIMIT) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
while (YYCURSOR < YYLIMIT) {
|
||||
switch (*YYCURSOR++) {
|
||||
case '"':
|
||||
if (YYCURSOR < YYLIMIT && YYCURSOR[-2] == '\\' && *YYCURSOR != '\r' && *YYCURSOR != '\n') {
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case '$':
|
||||
if (*YYCURSOR == '{') {
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
case '\\':
|
||||
if (YYCURSOR < YYLIMIT && *YYCURSOR != '"') {
|
||||
YYCURSOR++;
|
||||
}
|
||||
/* fall through */
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
|
||||
YYCURSOR--;
|
||||
break;
|
||||
}
|
||||
|
||||
yyleng = YYCURSOR - SCNG(yy_text);
|
||||
|
||||
zend_ini_escape_string(ini_lval, yytext, yyleng, '"' TSRMLS_CC);
|
||||
return TC_QUOTED_STRING;
|
||||
}
|
||||
|
||||
<ST_SECTION_VALUE,ST_VALUE,ST_OFFSET>{WHITESPACE} {
|
||||
RETURN_TOKEN(TC_WHITESPACE, yytext, yyleng);
|
||||
}
|
||||
|
||||
<INITIAL,ST_RAW>{TABS_AND_SPACES}+ {
|
||||
/* eat whitespace */
|
||||
goto restart;
|
||||
}
|
||||
|
||||
<INITIAL>{TABS_AND_SPACES}*{NEWLINE} {
|
||||
SCNG(lineno)++;
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<INITIAL,ST_VALUE,ST_RAW>{TABS_AND_SPACES}*[;][^\r\n]*{NEWLINE} { /* Comment */
|
||||
BEGIN(INITIAL);
|
||||
SCNG(lineno)++;
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<INITIAL>{TABS_AND_SPACES}*[#][^\r\n]*{NEWLINE} { /* #Comment */
|
||||
zend_error(E_DEPRECATED, "Comments starting with '#' are deprecated in %s on line %d", zend_ini_scanner_get_filename(TSRMLS_C), SCNG(lineno));
|
||||
BEGIN(INITIAL);
|
||||
SCNG(lineno)++;
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<ST_VALUE,ST_RAW>[^] { /* End of option value (if EOF is reached before EOL */
|
||||
BEGIN(INITIAL);
|
||||
return END_OF_LINE;
|
||||
}
|
||||
|
||||
<*>[^] {
|
||||
return 0;
|
||||
}
|
||||
|
||||
*/
|
||||
}
|
||||
2 samples/Linux Kernel Module/bcm4334x.mod (Normal file)
@@ -0,0 +1,2 @@
/data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcm4334x.ko
/data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_pno.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_common.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_ip.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_custom_gpio.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_linux.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_linux_sched.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_cfg80211.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_linux_wq.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/aiutils.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmevent.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmutils.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmwifi_channels.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/hndpmu.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/linux_osl.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/sbutils.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/siutils.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wl_android.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wl_cfg80211.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wl_cfgp2p.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wl_cfg_btcoex.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wldev_common.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/wl_linux_mon.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_linux_platdev.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmsdh.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmsdh_linux.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmsdh_sdmmc.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/bcmsdh_sdmmc_linux.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_cdc.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_wlfc.o /data/israel/edison/poky/meta-edison/recipes-kernel/bcm43340/driver_bcm43x/dhd_sdio.o

2 samples/Linux Kernel Module/mbcache.mod (Normal file)
@@ -0,0 +1,2 @@
fs/mbcache.ko
fs/mbcache.o

Some files were not shown because too many files have changed in this diff.