Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00
Compare commits
631 Commits
(Commit list: 631 commits, from e6ab516fb7 through a97fd74399. Only the abbreviated SHA-1 of each commit survived extraction; the Author and Date columns were empty in the source.)
.gitmodules (vendored): 191 lines changed
@@ -7,9 +7,6 @@
[submodule "vendor/grammars/sublime-cirru"]
path = vendor/grammars/sublime-cirru
url = https://github.com/Cirru/sublime-cirru
[submodule "vendor/grammars/Sublime-Logos"]
path = vendor/grammars/Sublime-Logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/SublimeBrainfuck"]
path = vendor/grammars/SublimeBrainfuck
url = https://github.com/Drako/SublimeBrainfuck
@@ -25,18 +22,9 @@
[submodule "vendor/grammars/Sublime-REBOL"]
path = vendor/grammars/Sublime-REBOL
url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-Inform"]
path = vendor/grammars/Sublime-Inform
url = https://github.com/PogiNate/Sublime-Inform
[submodule "vendor/grammars/autoitv3-tmbundle"]
path = vendor/grammars/autoitv3-tmbundle
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
[submodule "vendor/grammars/Sublime-VimL"]
path = vendor/grammars/Sublime-VimL
url = https://github.com/SalGnt/Sublime-VimL
[submodule "vendor/grammars/boo-sublime"]
path = vendor/grammars/boo-sublime
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/ColdFusion"]
path = vendor/grammars/ColdFusion
url = https://github.com/SublimeText/ColdFusion
@@ -85,12 +73,12 @@
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-supercollider"]
path = vendor/grammars/language-supercollider
url = https://github.com/supercollider/language-supercollider
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
[submodule "vendor/grammars/sublime-sourcepawn"]
path = vendor/grammars/sublime-sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/Sublime-Lasso"]
path = vendor/grammars/Sublime-Lasso
url = https://github.com/bfad/Sublime-Lasso
@@ -109,9 +97,6 @@
[submodule "vendor/grammars/sublime-MuPAD"]
path = vendor/grammars/sublime-MuPAD
url = https://github.com/ccreutzig/sublime-MuPAD
[submodule "vendor/grammars/nesC.tmbundle"]
path = vendor/grammars/nesC.tmbundle
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/haxe-sublime-bundle"]
path = vendor/grammars/haxe-sublime-bundle
url = https://github.com/clemos/haxe-sublime-bundle
@@ -133,9 +118,6 @@
[submodule "vendor/grammars/fancy-tmbundle"]
path = vendor/grammars/fancy-tmbundle
url = https://github.com/fancy-lang/fancy-tmbundle
[submodule "vendor/grammars/monkey.tmbundle"]
path = vendor/grammars/monkey.tmbundle
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/dart-sublime-bundle"]
path = vendor/grammars/dart-sublime-bundle
url = https://github.com/guillermooo/dart-sublime-bundle
@@ -166,21 +148,12 @@
[submodule "vendor/grammars/fish-tmbundle"]
path = vendor/grammars/fish-tmbundle
url = https://github.com/l15n/fish-tmbundle
[submodule "vendor/grammars/sublime-idris"]
path = vendor/grammars/sublime-idris
url = https://github.com/laughedelic/sublime-idris
[submodule "vendor/grammars/sublime-better-typescript"]
path = vendor/grammars/sublime-better-typescript
url = https://github.com/lavrton/sublime-better-typescript
[submodule "vendor/grammars/moonscript-tmbundle"]
path = vendor/grammars/moonscript-tmbundle
url = https://github.com/leafo/moonscript-tmbundle
[submodule "vendor/grammars/Isabelle.tmbundle"]
path = vendor/grammars/Isabelle.tmbundle
url = https://github.com/lsf37/Isabelle.tmbundle
[submodule "vendor/grammars/x86-assembly-textmate-bundle"]
path = vendor/grammars/x86-assembly-textmate-bundle
url = https://github.com/lunixbochs/x86-assembly-textmate-bundle
[submodule "vendor/grammars/Alloy.tmbundle"]
path = vendor/grammars/Alloy.tmbundle
url = https://github.com/macekond/Alloy.tmbundle
@@ -211,9 +184,6 @@
[submodule "vendor/grammars/Julia.tmbundle"]
path = vendor/grammars/Julia.tmbundle
url = https://github.com/nanoant/Julia.tmbundle
[submodule "vendor/grammars/assembly.tmbundle"]
path = vendor/grammars/assembly.tmbundle
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/ooc.tmbundle"]
path = vendor/grammars/ooc.tmbundle
url = https://github.com/nilium/ooc.tmbundle
@@ -223,9 +193,6 @@
[submodule "vendor/grammars/sublime-tea"]
path = vendor/grammars/sublime-tea
url = https://github.com/pferruggiaro/sublime-tea
[submodule "vendor/grammars/puppet-textmate-bundle"]
path = vendor/grammars/puppet-textmate-bundle
url = https://github.com/puppet-textmate-bundle/puppet-textmate-bundle
[submodule "vendor/grammars/abap.tmbundle"]
path = vendor/grammars/abap.tmbundle
url = https://github.com/pvl/abap.tmbundle
@@ -256,9 +223,6 @@
[submodule "vendor/grammars/SublimeXtend"]
path = vendor/grammars/SublimeXtend
url = https://github.com/staltz/SublimeXtend
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/statatmbundle/Stata.tmbundle
[submodule "vendor/grammars/Vala-TMBundle"]
path = vendor/grammars/Vala-TMBundle
url = https://github.com/technosophos/Vala-TMBundle
@@ -346,9 +310,6 @@
[submodule "vendor/grammars/latex.tmbundle"]
path = vendor/grammars/latex.tmbundle
url = https://github.com/textmate/latex.tmbundle
[submodule "vendor/grammars/less.tmbundle"]
path = vendor/grammars/less.tmbundle
url = https://github.com/textmate/less.tmbundle
[submodule "vendor/grammars/lilypond.tmbundle"]
path = vendor/grammars/lilypond.tmbundle
url = https://github.com/textmate/lilypond.tmbundle
@@ -397,9 +358,6 @@
[submodule "vendor/grammars/processing.tmbundle"]
path = vendor/grammars/processing.tmbundle
url = https://github.com/textmate/processing.tmbundle
[submodule "vendor/grammars/prolog.tmbundle"]
path = vendor/grammars/prolog.tmbundle
url = https://github.com/textmate/prolog.tmbundle
[submodule "vendor/grammars/python-django.tmbundle"]
path = vendor/grammars/python-django.tmbundle
url = https://github.com/textmate/python-django.tmbundle
@@ -460,9 +418,6 @@
[submodule "vendor/grammars/llvm.tmbundle"]
path = vendor/grammars/llvm.tmbundle
url = https://github.com/whitequark/llvm.tmbundle
[submodule "vendor/grammars/sublime-nix"]
path = vendor/grammars/sublime-nix
url = https://github.com/wmertens/sublime-nix
[submodule "vendor/grammars/oz-tmbundle"]
path = vendor/grammars/oz-tmbundle
url = https://github.com/eregon/oz-tmbundle
@@ -521,24 +476,12 @@
[submodule "vendor/grammars/liquid.tmbundle"]
path = vendor/grammars/liquid.tmbundle
url = https://github.com/bastilian/validcode-textmate-bundles
[submodule "vendor/grammars/ats.sublime"]
path = vendor/grammars/ats.sublime
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/Modelica"]
path = vendor/grammars/Modelica
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
[submodule "vendor/grammars/sublime-apl"]
path = vendor/grammars/sublime-apl
url = https://github.com/StoneCypher/sublime-apl
[submodule "vendor/grammars/CLIPS-sublime"]
path = vendor/grammars/CLIPS-sublime
url = https://github.com/psicomante/CLIPS-sublime
[submodule "vendor/grammars/Creole"]
path = vendor/grammars/Creole
url = https://github.com/Siddley/Creole
[submodule "vendor/grammars/GDScript-sublime"]
path = vendor/grammars/GDScript-sublime
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/sublime-golo"]
path = vendor/grammars/sublime-golo
url = https://github.com/TypeUnsafe/sublime-golo
@@ -551,9 +494,6 @@
[submodule "vendor/grammars/G-Code"]
path = vendor/grammars/G-Code
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
[submodule "vendor/grammars/grace-tmbundle"]
path = vendor/grammars/grace-tmbundle
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/sublime-text-ox"]
path = vendor/grammars/sublime-text-ox
url = https://github.com/andreashetland/sublime-text-ox
@@ -563,9 +503,6 @@
[submodule "vendor/grammars/ec.tmbundle"]
path = vendor/grammars/ec.tmbundle
url = https://github.com/ecere/ec.tmbundle
[submodule "vendor/grammars/InnoSetup"]
path = vendor/grammars/InnoSetup
url = https://github.com/idleberg/InnoSetup-Sublime-Text
[submodule "vendor/grammars/gap-tmbundle"]
path = vendor/grammars/gap-tmbundle
url = https://github.com/dhowden/gap-tmbundle
@@ -587,9 +524,6 @@
[submodule "vendor/grammars/SublimeClarion"]
path = vendor/grammars/SublimeClarion
url = https://github.com/fushnisoft/SublimeClarion
[submodule "vendor/grammars/oracle.tmbundle"]
path = vendor/grammars/oracle.tmbundle
url = https://github.com/mulander/oracle.tmbundle.git
[submodule "vendor/grammars/BrightScript.tmbundle"]
path = vendor/grammars/BrightScript.tmbundle
url = https://github.com/cmink/BrightScript.tmbundle
@@ -599,18 +533,12 @@
[submodule "vendor/grammars/asciidoc.tmbundle"]
path = vendor/grammars/asciidoc.tmbundle
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
[submodule "vendor/grammars/sublime-text-pig-latin"]
path = vendor/grammars/sublime-text-pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/Lean.tmbundle"]
path = vendor/grammars/Lean.tmbundle
url = https://github.com/leanprover/Lean.tmbundle
[submodule "vendor/grammars/ampl"]
path = vendor/grammars/ampl
url = https://github.com/ampl/sublime-ampl
[submodule "vendor/grammars/openscad.tmbundle"]
path = vendor/grammars/openscad.tmbundle
url = https://github.com/tbuser/openscad.tmbundle
[submodule "vendor/grammars/sublime-varnish"]
path = vendor/grammars/sublime-varnish
url = https://github.com/brandonwamboldt/sublime-varnish
@@ -655,7 +583,118 @@
url = https://github.com/SRI-CSL/SMT.tmbundle.git
[submodule "vendor/grammars/language-crystal"]
path = vendor/grammars/language-crystal
url = https://github.com/k2b6s9j/language-crystal
url = https://github.com/atom-crystal/language-crystal
[submodule "vendor/grammars/language-xbase"]
path = vendor/grammars/language-xbase
url = https://github.com/hernad/atom-language-harbour
[submodule "vendor/grammars/language-ncl"]
path = vendor/grammars/language-ncl
url = https://github.com/rpavlick/language-ncl.git
[submodule "vendor/grammars/pawn-sublime-language"]
path = vendor/grammars/pawn-sublime-language
url = https://github.com/Southclaw/pawn-sublime-language.git
[submodule "vendor/grammars/atom-language-purescript"]
path = vendor/grammars/atom-language-purescript
url = https://github.com/purescript-contrib/atom-language-purescript
[submodule "vendor/grammars/vue-syntax-highlight"]
path = vendor/grammars/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight
[submodule "vendor/grammars/st2-zonefile"]
path = vendor/grammars/st2-zonefile
url = https://github.com/sixty4k/st2-zonefile
[submodule "vendor/grammars/sublimeprolog"]
path = vendor/grammars/sublimeprolog
url = https://github.com/alnkpa/sublimeprolog
[submodule "vendor/grammars/sublime-aspectj"]
path = vendor/grammars/sublime-aspectj
url = https://github.com/pchaigno/sublime-aspectj
[submodule "vendor/grammars/sublime-typescript"]
path = vendor/grammars/sublime-typescript
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
path = vendor/grammars/sublime-pony
url = https://github.com/CausalityLtd/sublime-pony
[submodule "vendor/grammars/X10"]
path = vendor/grammars/X10
url = https://github.com/x10-lang/x10-highlighting
[submodule "vendor/grammars/language-babel"]
path = vendor/grammars/language-babel
url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
path = vendor/grammars/UrWeb-Language-Definition
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/pschumm/Stata.tmbundle
[submodule "vendor/grammars/FreeMarker.tmbundle"]
path = vendor/grammars/FreeMarker.tmbundle
url = https://github.com/freemarker/FreeMarker.tmbundle
[submodule "vendor/grammars/MagicPython"]
path = vendor/grammars/MagicPython
url = https://github.com/MagicStack/MagicPython
[submodule "vendor/grammars/language-click"]
path = vendor/grammars/language-click
url = https://github.com/stenverbois/language-click.git
[submodule "vendor/grammars/language-maxscript"]
path = vendor/grammars/language-maxscript
url = https://github.com/Alhadis/language-maxscript
[submodule "vendor/grammars/language-renpy"]
path = vendor/grammars/language-renpy
url = https://github.com/williamd1k0/language-renpy.git
[submodule "vendor/grammars/language-inform7"]
path = vendor/grammars/language-inform7
url = https://github.com/erkyrath/language-inform7
[submodule "vendor/grammars/atom-language-stan"]
path = vendor/grammars/atom-language-stan
url = https://github.com/jrnold/atom-language-stan
[submodule "vendor/grammars/language-yang"]
path = vendor/grammars/language-yang
url = https://github.com/DzonyKalafut/language-yang.git
[submodule "vendor/grammars/perl6fe"]
path = vendor/grammars/perl6fe
url = https://github.com/MadcapJake/language-perl6fe.git
[submodule "vendor/grammars/language-less"]
path = vendor/grammars/language-less
url = https://github.com/atom/language-less.git
[submodule "vendor/grammars/language-povray"]
path = vendor/grammars/language-povray
url = https://github.com/c-lipka/language-povray
[submodule "vendor/grammars/sublime-terra"]
path = vendor/grammars/sublime-terra
url = https://github.com/pyk/sublime-terra
[submodule "vendor/grammars/SublimePuppet"]
path = vendor/grammars/SublimePuppet
url = https://github.com/russCloak/SublimePuppet
[submodule "vendor/grammars/sublimeassembly"]
path = vendor/grammars/sublimeassembly
url = https://github.com/Nessphoro/sublimeassembly
[submodule "vendor/grammars/monkey"]
path = vendor/grammars/monkey
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/assembly"]
path = vendor/grammars/assembly
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/boo"]
path = vendor/grammars/boo
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/logos"]
path = vendor/grammars/logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/pig-latin"]
path = vendor/grammars/pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/sourcepawn"]
path = vendor/grammars/sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/gdscript"]
path = vendor/grammars/gdscript
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/nesC"]
path = vendor/grammars/nesC
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/ats"]
path = vendor/grammars/ats
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/grace"]
path = vendor/grammars/grace
url = https://github.com/zmthy/grace-tmbundle
.travis.yml
@@ -1,6 +1,12 @@
language: ruby
sudo: false
addons:
  apt:
    packages:
    - libicu-dev
    - libicu48
before_install: script/travis/before_install
rvm:
- 1.9.3
- 2.0.0
- 2.1
- 2.2
CONTRIBUTING.md
@@ -12,7 +12,7 @@ We try only to add new extensions once they have some usage on GitHub. In most c

To add support for a new extension:

0. Add your extension to the language entry in [`languages.yml`][languages].
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

@@ -58,7 +58,7 @@ Syntax highlighting in GitHub is performed using TextMate-compatible grammars. T

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).
You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](https://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, we'll pick it up for GitHub in the next release of Linguist.

@@ -74,9 +74,9 @@ To run the tests:

    bundle exec rake test

Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](https://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Here's our current build status: [](http://travis-ci.org/github/linguist)
Here's our current build status: [](https://travis-ci.org/github/linguist)

## Releasing
Gemfile: 1 line changed
@@ -1,4 +1,3 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gemspec :name => "github-linguist-grammars"
gem 'byebug' if RUBY_VERSION >= '2.0'
LICENSE: 2 lines changed
@@ -1,4 +1,4 @@
Copyright (c) 2011-2015 GitHub, Inc.
Copyright (c) 2011-2016 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md: 20 lines changed
@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md

The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
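As an aside, the same per-language byte counts that back the stats bar can be computed locally with the gem itself. A minimal sketch, assuming the `rugged` gem and a local clone at a placeholder path (the `Linguist::Repository` API is the one used by `bin/git-linguist` later in this diff):

```ruby
require 'rugged'
require 'linguist'

# Open the repository and pick the commit to analyse.
repo   = Rugged::Repository.new('/path/to/repo')  # hypothetical path
commit = repo.head.target_id

# Linguist::Repository#languages returns a Hash of language name => bytes,
# the same data the stats bar percentages are derived from.
stats = Linguist::Repository.new(repo, commit).languages
total = stats.values.reduce(0, :+).to_f
stats.each { |lang, bytes| printf("%-12s %5.1f%%\n", lang, 100 * bytes / total) }
```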
## Overrides

@@ -33,9 +33,9 @@ $ cat .gitattributes
*.rb linguist-language=Java
```

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.

```
$ cat .gitattributes
@@ -59,6 +59,9 @@ Alternatively, you can use Vim or Emacs style modelines to set the language for

##### Vim
```
# Some examples of various styles:
vim: syntax=java
vim: set syntax=ruby:
vim: set filetype=prolog:
vim: set ft=cpp:
```

@@ -111,4 +114,9 @@ lib/linguist.rb

Please check out our [contributing guidelines](CONTRIBUTING.md).

##
## License

The language grammars included in this gem are covered by their repositories'
respective licenses. `grammars.yml` specifies the repository for each grammar.

All other files are covered by the MIT license, see `LICENSE`.
Rakefile: 10 lines changed
@@ -40,18 +40,14 @@ task :samples do
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", Yajl.dump(languages))
  `gem build github-linguist.gemspec`
  File.delete("lib/linguist/languages.json")
end

task :build_grammars_gem do
  rm_rf "grammars"
  sh "script/convert-grammars"
  sh "gem", "build", "github-linguist-grammars.gemspec"
end

namespace :benchmark do
  benchmark_path = "benchmark/results"

@@ -62,7 +58,7 @@ namespace :benchmark do

  corpus = File.expand_path(ENV["CORPUS"] || "samples")

  require 'linguist/language'
  require 'linguist'

  results = Hash.new
  Dir.glob("#{corpus}/**/*").each do |file|
bin/git-linguist (new executable file): 138 lines
@@ -0,0 +1,138 @@
#!/usr/bin/env ruby

require 'linguist'
require 'rugged'
require 'optparse'
require 'json'
require 'tmpdir'
require 'zlib'

class GitLinguist
  def initialize(path, commit_oid, incremental = true)
    @repo_path = path
    @commit_oid = commit_oid
    @incremental = incremental
  end

  def linguist
    if @commit_oid.nil?
      raise "git-linguist must be called with a specific commit OID to perform language computation"
    end
    repo = Linguist::Repository.new(rugged, @commit_oid)

    if @incremental && stats = load_language_stats
      old_commit_oid, old_stats = stats

      # A cache with NULL oid means that we want to freeze
      # these language stats in place and stop computing
      # them (for performance reasons)
      return old_stats if old_commit_oid == NULL_OID
      repo.load_existing_stats(old_commit_oid, old_stats)
    end

    result = yield repo

    save_language_stats(@commit_oid, repo.cache)
    result
  end

  def load_language_stats
    version, oid, stats = load_cache
    if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
      [oid, stats]
    end
  end

  def save_language_stats(oid, stats)
    cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
    write_cache(cache)
  end

  def clear_language_stats
    File.unlink(cache_file)
  rescue Errno::ENOENT
  end

  def disable_language_stats
    save_language_stats(NULL_OID, {})
  end

  protected
  NULL_OID = ("0" * 40).freeze

  LANGUAGE_STATS_CACHE = 'language-stats.cache'
  LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"

  def rugged
    @rugged ||= Rugged::Repository.bare(@repo_path)
  end

  def cache_file
    File.join(@repo_path, LANGUAGE_STATS_CACHE)
  end

  def write_cache(object)
    return unless File.directory? @repo_path

    begin
      tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
      File.open(tmp_path, "wb") do |f|
        marshal = Marshal.dump(object)
        f.write(Zlib::Deflate.deflate(marshal))
      end

      File.rename(tmp_path, cache_file)
    rescue => e
      (File.unlink(tmp_path) rescue nil)
      raise e
    end
  end

  def load_cache
    marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
    Marshal.load(marshal)
  rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
    nil
  end
end


def git_linguist(args)
  incremental = true
  commit = nil

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"

    opts.on("-f", "--force", "Force a full rescan") { incremental = false }
    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v }
  end

  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be run in a Git repository (#{Dir.pwd})" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
  when "stats"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.languages)
    end
  when "breakdown"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.breakdown_by_file)
    end
  when "dump-cache"
    puts JSON.dump(wrapper.load_language_stats)
  when "clear"
    wrapper.clear_language_stats
  when "disable"
    wrapper.disable_language_stats
  else
    $stderr.print(parser.help)
    exit 1
  end
end

git_linguist(ARGV)
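Since Git dispatches executables named `git-<name>` found on `PATH` as subcommands, the script above would typically be invoked as something like `git linguist --commit=$(git rev-parse HEAD) stats`, with `--force` bypassing the incremental `language-stats.cache` path and triggering a full rescan. These invocations are inferred from the option parser and usage banner above, not from separate documentation.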
github-linguist-grammars.gemspec (deleted)
@@ -1,14 +0,0 @@
require File.expand_path('../lib/linguist/version', __FILE__)

Gem::Specification.new do |s|
  s.name    = 'github-linguist-grammars'
  s.version = Linguist::VERSION
  s.summary = "Language grammars for use with github-linguist"

  s.authors  = "GitHub"
  s.homepage = "https://github.com/github/linguist"

  s.files = ['lib/linguist/grammars.rb'] + Dir['grammars/*']

  s.add_development_dependency 'plist', '~>3.1'
end
github-linguist.gemspec
@@ -10,8 +10,8 @@ Gem::Specification.new do |s|
  s.homepage = "https://github.com/github/linguist"
  s.license  = "MIT"

  s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
  s.executables << 'linguist'
  s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
  s.executables = ['linguist', 'git-linguist']

  s.add_dependency 'charlock_holmes', '~> 0.7.3'
  s.add_dependency 'escape_utils', '~> 1.1.0'
@@ -20,8 +20,11 @@ Gem::Specification.new do |s|

  s.add_development_dependency 'minitest', '>= 5.0'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'plist', '~>3.1'
  s.add_development_dependency 'pry'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'yajl-ruby'
  s.add_development_dependency 'color-proximity', '~> 0.2.1'
  s.add_development_dependency 'licensed'

end
grammars.yml (normal file → executable file): 131 lines changed
@@ -29,33 +29,27 @@ vendor/grammars/AutoHotkey/:
vendor/grammars/BrightScript.tmbundle/:
- source.brightauthorproject
- source.brightscript
vendor/grammars/CLIPS-sublime:
- source.clips
vendor/grammars/ColdFusion:
- source.cfscript
- source.cfscript.cfc
- text.cfml.basic
- text.html.cfm
vendor/grammars/Creole:
- text.html.creole
vendor/grammars/Docker.tmbundle:
- source.dockerfile
vendor/grammars/Elm.tmLanguage:
- source.elm
vendor/grammars/FreeMarker.tmbundle:
- text.html.ftl
vendor/grammars/G-Code/:
- source.LS
- source.MCPOST
- source.MOD
- source.apt
- source.gcode
vendor/grammars/GDScript-sublime/:
- source.gdscript
vendor/grammars/Handlebars:
- text.html.handlebars
vendor/grammars/IDL-Syntax:
- source.webidl
vendor/grammars/InnoSetup/:
- source.inno
vendor/grammars/Isabelle.tmbundle:
- source.isabelle.root
- source.isabelle.theory
@@ -67,6 +61,9 @@ vendor/grammars/Lean.tmbundle:
- source.lean
vendor/grammars/LiveScript.tmbundle:
- source.livescript
vendor/grammars/MagicPython:
- source.python
- source.regexp.python
vendor/grammars/Modelica/:
- source.modelica
vendor/grammars/NSIS:
@@ -92,7 +89,7 @@ vendor/grammars/Scalate.tmbundle:
- text.html.ssp
vendor/grammars/Slash.tmbundle:
- text.html.slash
vendor/grammars/Stata.tmbundle:
vendor/grammars/Stata.tmbundle/:
- source.mata
- source.stata
vendor/grammars/Stylus/:
@@ -101,12 +98,8 @@ vendor/grammars/Sublime-Coq:
- source.coq
vendor/grammars/Sublime-HTTP:
- source.httpspec
vendor/grammars/Sublime-Inform:
- source.Inform7
vendor/grammars/Sublime-Lasso:
- file.lasso
vendor/grammars/Sublime-Logos:
- source.logos
vendor/grammars/Sublime-Loom:
- source.loomscript
vendor/grammars/Sublime-Modula-2/:
@@ -131,19 +124,23 @@ vendor/grammars/SublimeBrainfuck:
vendor/grammars/SublimeClarion/:
- source.clarion
vendor/grammars/SublimePapyrus/:
- source.compiled-papyrus
- source.papyrus
- source.papyrus-assembly
- source.papyrus.skyrim
vendor/grammars/SublimePuppet/:
- source.puppet
vendor/grammars/SublimeXtend:
- source.xtend
vendor/grammars/TXL/:
- source.txl
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
- source.vbnet
vendor/grammars/Vala-TMBundle:
- source.vala
vendor/grammars/X10:
- source.x10
vendor/grammars/abap.tmbundle:
- source.abap
vendor/grammars/actionscript3-tmbundle:
@@ -171,23 +168,28 @@ vendor/grammars/asciidoc.tmbundle/:
vendor/grammars/asp.tmbundle:
- source.asp
- text.html.asp
vendor/grammars/assembly.tmbundle:
vendor/grammars/assembly/:
- objdump.x86asm
- source.x86asm
vendor/grammars/atom-fsharp/:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
- source.fsharp.fsx
vendor/grammars/atom-language-purescript/:
- source.purescript
vendor/grammars/atom-language-stan/:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
vendor/grammars/ats.sublime:
vendor/grammars/ats:
- source.ats
vendor/grammars/autoitv3-tmbundle:
- source.autoit.3
vendor/grammars/awk-sublime:
- source.awk
vendor/grammars/bison.tmbundle:
- source.bison
vendor/grammars/boo-sublime:
vendor/grammars/boo/:
- source.boo
vendor/grammars/bro-sublime:
- source.bro
@@ -257,13 +259,15 @@ vendor/grammars/fortran.tmbundle:
- source.fortran.modern
vendor/grammars/gap-tmbundle/:
- source.gap
vendor/grammars/gdscript/:
- source.gdscript
vendor/grammars/gettext.tmbundle:
- source.po
vendor/grammars/gnuplot-tmbundle:
- source.gnuplot
vendor/grammars/go-tmbundle:
- source.go
vendor/grammars/grace-tmbundle/:
vendor/grammars/grace:
- source.grace
vendor/grammars/gradle.tmbundle:
- source.groovy.gradle
@@ -293,8 +297,8 @@ vendor/grammars/io.tmbundle:
vendor/grammars/ioke-outdated:
- source.ioke
vendor/grammars/jade-tmbundle:
- source.jade
- source.pyjade
- text.jade
vendor/grammars/jasmin-sublime:
- source.jasmin
vendor/grammars/java.tmbundle:
@@ -310,6 +314,11 @@ vendor/grammars/json.tmbundle:
- source.json
vendor/grammars/kotlin-sublime-package:
- source.Kotlin
vendor/grammars/language-babel/:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-click/:
- source.click
vendor/grammars/language-clojure:
- source.clojure
vendor/grammars/language-coffee-script:
@@ -318,6 +327,7 @@ vendor/grammars/language-coffee-script:
vendor/grammars/language-crystal:
- source.crystal
vendor/grammars/language-csharp:
- source.cake
- source.cs
- source.csx
- source.nant-build
@@ -325,24 +335,39 @@ vendor/grammars/language-gfm:
- source.gfm
vendor/grammars/language-hy:
- source.hy
vendor/grammars/language-inform7:
- source.inform7
vendor/grammars/language-javascript:
- source.js
- source.js.regexp
- source.js.regexp.replacement
vendor/grammars/language-jsoniq/:
- source.jq
- source.xq
vendor/grammars/language-less/:
- source.css.less
vendor/grammars/language-maxscript:
- source.maxscript
vendor/grammars/language-ncl:
- source.ncl
vendor/grammars/language-povray:
- source.pov-ray sdl
vendor/grammars/language-python:
- source.python
- source.regexp.python
- text.python.console
- text.python.traceback
vendor/grammars/language-renpy:
- source.renpy
vendor/grammars/language-shellscript:
- source.shell
- text.shell-session
vendor/grammars/language-supercollider:
- source.supercollider
vendor/grammars/language-xbase:
- source.harbour
vendor/grammars/language-yaml:
- source.yaml
vendor/grammars/language-yang/:
- source.yang
vendor/grammars/latex.tmbundle:
- text.bibtex
- text.log.latex
@@ -350,8 +375,6 @@ vendor/grammars/latex.tmbundle:
- text.tex.latex
- text.tex.latex.beamer
- text.tex.latex.memoir
vendor/grammars/less.tmbundle:
- source.css.less
vendor/grammars/lilypond.tmbundle:
- source.lilypond
vendor/grammars/liquid.tmbundle:
@@ -360,6 +383,8 @@ vendor/grammars/lisp.tmbundle:
- source.lisp
vendor/grammars/llvm.tmbundle:
- source.llvm
vendor/grammars/logos:
- source.logos
vendor/grammars/logtalk.tmbundle:
- source.logtalk
vendor/grammars/lua.tmbundle:
@@ -379,13 +404,13 @@ vendor/grammars/mediawiki.tmbundle/:
- text.html.mediawiki
vendor/grammars/mercury-tmlanguage:
- source.mercury
vendor/grammars/monkey.tmbundle:
vendor/grammars/monkey/:
- source.monkey
vendor/grammars/moonscript-tmbundle:
- source.moonscript
vendor/grammars/nemerle.tmbundle:
- source.nemerle
vendor/grammars/nesC.tmbundle:
vendor/grammars/nesC:
- source.nesc
vendor/grammars/ninja.tmbundle:
- source.ninja
@@ -403,21 +428,25 @@ vendor/grammars/ooc.tmbundle:
- source.ooc
vendor/grammars/opa.tmbundle:
- source.opa
vendor/grammars/openscad.tmbundle/:
- source.scad
vendor/grammars/oracle.tmbundle:
- source.plsql.oracle
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
- source.oz
vendor/grammars/pascal.tmbundle:
- source.pascal
vendor/grammars/pawn-sublime-language/:
- source.pawn
vendor/grammars/perl.tmbundle/:
- source.perl
- source.perl.6
vendor/grammars/perl6fe:
- source.meta-info
- source.perl6fe
- source.regexp.perl6fe
vendor/grammars/php-smarty.tmbundle:
- text.html.smarty
vendor/grammars/php.tmbundle:
- text.html.php
vendor/grammars/pig-latin/:
- source.pig_latin
vendor/grammars/pike-textmate:
- source.pike
vendor/grammars/postscript.tmbundle:
@@ -426,12 +455,8 @@ vendor/grammars/powershell:
- source.powershell
vendor/grammars/processing.tmbundle:
- source.processing
vendor/grammars/prolog.tmbundle:
- source.prolog
vendor/grammars/protobuf-tmbundle:
- source.protobuf
vendor/grammars/puppet-textmate-bundle:
- source.puppet
vendor/grammars/python-django.tmbundle:
- source.python.django
- text.html.django
@@ -463,8 +488,12 @@ vendor/grammars/smali-sublime/smali.tmLanguage:
- source.smali
vendor/grammars/smalltalk-tmbundle:
- source.smalltalk
vendor/grammars/sourcepawn/:
- source.sp
vendor/grammars/sql.tmbundle:
- source.sql
vendor/grammars/st2-zonefile:
- text.zone_file
vendor/grammars/standard-ml.tmbundle:
- source.cm
- source.ml
@@ -472,10 +501,10 @@ vendor/grammars/sublime-MuPAD:
- source.mupad
vendor/grammars/sublime-apl/:
- source.apl
vendor/grammars/sublime-aspectj/:
- source.aspectj
vendor/grammars/sublime-befunge:
- source.befunge
vendor/grammars/sublime-better-typescript:
- source.ts
vendor/grammars/sublime-bsv:
- source.bsv
vendor/grammars/sublime-cirru:
@@ -485,8 +514,6 @@ vendor/grammars/sublime-glsl:
- source.glsl
vendor/grammars/sublime-golo/:
- source.golo
vendor/grammars/sublime-idris:
- source.idris
vendor/grammars/sublime-mask:
- source.mask
vendor/grammars/sublime-netlinx:
@@ -494,26 +521,27 @@ vendor/grammars/sublime-netlinx:
- source.netlinx.erb
vendor/grammars/sublime-nginx:
- source.nginx
vendor/grammars/sublime-nix:
- source.nix
vendor/grammars/sublime-opal/:
- source.opal
- source.opalsysdefs
vendor/grammars/sublime-pony:
- source.pony
vendor/grammars/sublime-robot-plugin:
- text.robot
vendor/grammars/sublime-rust:
- source.rust
vendor/grammars/sublime-sourcepawn:
- source.sp
vendor/grammars/sublime-spintools/:
- source.regexp.spin
- source.spin
vendor/grammars/sublime-tea:
- source.tea
vendor/grammars/sublime-terra:
- source.terra
vendor/grammars/sublime-text-ox/:
- source.ox
vendor/grammars/sublime-text-pig-latin/:
- source.pig_latin
vendor/grammars/sublime-typescript/:
- source.ts
- source.tsx
vendor/grammars/sublime-varnish:
- source.varnish.vcl
vendor/grammars/sublime_cobol:
@@ -524,6 +552,11 @@ vendor/grammars/sublime_cobol:
vendor/grammars/sublime_man_page_support:
- source.man
- text.groff
vendor/grammars/sublimeassembly/:
- source.assembly
vendor/grammars/sublimeprolog/:
- source.prolog
- source.prolog.eclipse
vendor/grammars/sublimetext-cuda-cpp:
- source.cuda-c++
vendor/grammars/swift.tmbundle:
@@ -540,8 +573,8 @@ vendor/grammars/turtle.tmbundle:
- source.turtle
vendor/grammars/verilog.tmbundle:
- source.verilog
vendor/grammars/x86-assembly-textmate-bundle:
- source.asm.x86
vendor/grammars/vue-syntax-highlight:
- text.html.vue
vendor/grammars/xc.tmbundle/:
- source.xc
vendor/grammars/xml.tmbundle:
@@ -1,5 +1,6 @@
require 'linguist/blob_helper'
require 'linguist/generated'
require 'linguist/grammars'
require 'linguist/heuristics'
require 'linguist/language'
require 'linguist/repository'
@@ -8,13 +9,91 @@ require 'linguist/shebang'
require 'linguist/version'

class << Linguist
  # Public: Detects the Language of the blob.
  #
  # blob - an object that includes the Linguist `BlobHelper` interface;
  #        see Linguist::LazyBlob and Linguist::FileBlob for examples
  #
  # Returns Language or nil.
  def detect(blob)
    # Bail early if the blob is binary or empty.
    return nil if blob.likely_binary? || blob.binary? || blob.empty?

    Linguist.instrument("linguist.detection", :blob => blob) do
      # Call each strategy until one candidate is returned.
      languages = []
      returning_strategy = nil

      STRATEGIES.each do |strategy|
        returning_strategy = strategy
        candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
          strategy.call(blob, languages)
        end
        if candidates.size == 1
          languages = candidates
          break
        elsif candidates.size > 1
          # More than one candidate was found, pass them to the next strategy.
          languages = candidates
        else
          # No candidates, try the next strategy
        end
      end

      Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)

      languages.first
    end
  end

  # Internal: The strategies used to detect the language of a file.
  #
  # A strategy is an object that has a `.call` method that takes two arguments:
  #
  #   blob      - An object that quacks like a blob.
  #   languages - An Array of candidate Language objects that were returned by the
  #               previous strategy.
  #
  # A strategy should return an Array of Language candidates.
  #
  # Strategies are called in turn until a single Language is returned.
  STRATEGIES = [
    Linguist::Strategy::Modeline,
    Linguist::Shebang,
    Linguist::Strategy::Filename,
    Linguist::Heuristics,
    Linguist::Classifier
  ]

  # Public: Set an instrumenter.
  #
  #   class CustomInstrumenter
  #     def instrument(name, payload = {})
  #       warn "Instrumenting #{name}: #{payload[:blob]}"
  #     end
  #   end
  #
  #   Linguist.instrumenter = CustomInstrumenter
  #
  # The instrumenter must conform to the `ActiveSupport::Notifications`
  # interface, which defines `#instrument` and accepts:
  #
  #   name    - the String name of the event (e.g. "linguist.detected")
  #   payload - a Hash of the exception context.
  attr_accessor :instrumenter

  # Internal: Perform instrumentation on a block
  #
  #   Linguist.instrument("linguist.dosomething", :blob => blob) do
  #     # logic to instrument here.
  #   end
  #
  def instrument(*args, &bk)
    if instrumenter
      instrumenter.instrument(*args, &bk)
    else
      yield if block_given?
    elsif block_given?
      yield
    end
  end

end
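A strategy here is anything with a `.call(blob, languages)` method returning an Array of candidates, so the chain is easy to extend in experiments. The sketch below is illustrative only: the `ByFileSize` module and its threshold are invented, and wiring a custom strategy in is not part of the public API.

    # Hypothetical strategy conforming to the `.call(blob, languages)` contract
    # documented above: it may narrow the candidate list, but never widens it.
    module ByFileSize
      def self.call(blob, languages)
        return languages if languages.empty?   # nothing to narrow; defer to later strategies
        blob.size > 1_000_000 ? [] : languages # invented rule, for illustration only
      end
    end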
73 lib/linguist/blob.rb Normal file
@@ -0,0 +1,73 @@
require 'linguist/blob_helper'

module Linguist
  # A Blob is a wrapper around the content of a file to make it quack
  # like a Grit::Blob. It provides the basic interface: `name`,
  # `data`, `path` and `size`.
  class Blob
    include BlobHelper

    # Public: Initialize a new Blob.
    #
    # path    - A path String (does not necessarily exist on the file system).
    # content - Content of the file.
    #
    # Returns a Blob.
    def initialize(path, content)
      @path = path
      @content = content
    end

    # Public: Filename
    #
    # Examples
    #
    #   Blob.new("/path/to/linguist/lib/linguist.rb", "").path
    #   # => "/path/to/linguist/lib/linguist.rb"
    #
    # Returns a String
    attr_reader :path

    # Public: File name
    #
    # Returns a String
    def name
      File.basename(@path)
    end

    # Public: File contents.
    #
    # Returns a String.
    def data
      @content
    end

    # Public: Get byte size
    #
    # Returns an Integer.
    def size
      @content.bytesize
    end

    # Public: Get file extension.
    #
    # Returns a String.
    def extension
      extensions.last || ""
    end

    # Public: Return an array of the file extensions
    #
    #   >> Linguist::Blob.new("app/views/things/index.html.erb", "").extensions
    #   => [".html.erb", ".erb"]
    #
    # Returns an Array
    def extensions
      _, *segments = name.downcase.split(".")

      segments.map.with_index do |segment, index|
        "." + segments[index..-1].join(".")
      end
    end
  end
end
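A quick illustration of the new in-memory blob (a sketch; the detected language assumes a stock grammar set):

    require 'linguist'

    blob = Linguist::Blob.new("app/views/things/index.html.erb", "<h1><%= @thing %></h1>\n")
    blob.name        # => "index.html.erb"
    blob.extension   # => ".erb"
    blob.extensions  # => [".html.erb", ".erb"]
    Linguist.detect(blob)  # => the HTML+ERB Language on a default install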
@@ -6,7 +6,7 @@ require 'yaml'

module Linguist
  # DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
  # like `Language.detect` over `Blob#language`. Functions are much easier to
  # like `Linguist.detect` over `Blob#language`. Functions are much easier to
  # cache and compose.
  #
  # Avoid adding additional bloat to this module.
@@ -325,7 +325,7 @@ module Linguist
  #
  # Returns a Language or nil if none is detected
  def language
    @language ||= Language.detect(self)
    @language ||= Linguist.detect(self)
  end

  # Internal: Get the TextMate compatible scope for the blob
@@ -13,11 +13,18 @@
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/
- ^[Ee]xamples/

## Documentation files ##

- (^|/)CHANGE(S|LOG)?(\.|$)
- (^|/)CONTRIBUTING(\.|$)
- (^|/)COPYING(\.|$)
- (^|/)INSTALL(\.|$)
- (^|/)LICEN[CS]E(\.|$)
- (^|/)[Ll]icen[cs]e(\.|$)
- (^|/)README(\.|$)
- (^|/)[Rr]eadme(\.|$)

# Samples folders
- ^[Ss]amples/
@@ -1,10 +1,11 @@
require 'linguist/blob_helper'
require 'linguist/blob'

module Linguist
  # A FileBlob is a wrapper around a File object to make it quack
  # like a Grit::Blob. It provides the basic interface: `name`,
  # `data`, `path` and `size`.
  class FileBlob
  class FileBlob < Blob
    include BlobHelper

    # Public: Initialize a new FileBlob from a path
@@ -18,20 +19,6 @@ module Linguist
      @path = base_path ? path.sub("#{base_path}/", '') : path
    end

    # Public: Filename
    #
    # Examples
    #
    #   FileBlob.new("/path/to/linguist/lib/linguist.rb").path
    #   # => "/path/to/linguist/lib/linguist.rb"
    #
    #   FileBlob.new("/path/to/linguist/lib/linguist.rb",
    #                "/path/to/linguist").path
    #   # => "lib/linguist.rb"
    #
    # Returns a String
    attr_reader :path

    # Public: Read file permissions
    #
    # Returns a String like '100644'
@@ -39,13 +26,6 @@ module Linguist
      File.stat(@fullpath).mode.to_s(8)
    end

    # Public: File name
    #
    # Returns a String
    def name
      File.basename(@fullpath)
    end

    # Public: Read file contents.
    #
    # Returns a String.
@@ -59,26 +39,5 @@ module Linguist
    def size
      File.size(@fullpath)
    end

    # Public: Get file extension.
    #
    # Returns a String.
    def extension
      extensions.last || ""
    end

    # Public: Return an array of the file extensions
    #
    #   >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
    #   => [".html.erb", ".erb"]
    #
    # Returns an Array
    def extensions
      basename, *segments = name.downcase.split(".")

      segments.map.with_index do |segment, index|
        "." + segments[index..-1].join(".")
      end
    end
  end
end
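The removed methods now live on the `Blob` superclass, so `FileBlob` keeps only the file-system specifics. Usage is unchanged, per the doc comments above:

    require 'linguist'

    blob = Linguist::FileBlob.new("/path/to/linguist/lib/linguist.rb", "/path/to/linguist")
    blob.path        # => "lib/linguist.rb"
    blob.name        # => "linguist.rb"
    blob.extensions  # => [".rb"] (inherited from Blob)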
@@ -71,7 +71,10 @@ module Linguist
      generated_jni_header? ||
      vcr_cassette? ||
      generated_module? ||
      generated_unity3d_meta?
      generated_unity3d_meta? ||
      generated_racc? ||
      generated_jflex? ||
      generated_grammarkit?
    end

    # Internal: Is the blob an Xcode file?
@@ -241,22 +244,26 @@ module Linguist
      return lines[0].include?("Code generated by")
    end

    PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']

    # Internal: Is the blob a C++, Java or Python source file generated by the
    # Protocol Buffer compiler?
    #
    # Returns true or false.
    def generated_protocol_buffer?
      return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
      return false unless PROTOBUF_EXTENSIONS.include?(extname)
      return false unless lines.count > 1

      return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
    end

    APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']

    # Internal: Is the blob generated by Apache Thrift compiler?
    #
    # Returns true or false
    def generated_apache_thrift?
      return false unless ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp'].include?(extname)
      return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
      return false unless lines.count > 1

      return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
@@ -355,5 +362,45 @@ module Linguist
      return false unless lines.count > 1
      return lines[0].include?("fileFormatVersion: ")
    end

    # Internal: Is this a Racc-generated file?
    #
    # A Racc-generated file contains:
    #   # This file is automatically generated by Racc x.y.z
    # on the third line.
    #
    # Return true or false
    def generated_racc?
      return false unless extname == '.rb'
      return false unless lines.count > 2
      return lines[2].start_with?("# This file is automatically generated by Racc")
    end

    # Internal: Is this a JFlex-generated file?
    #
    # A JFlex-generated file contains:
    #   /* The following code was generated by JFlex x.y.z on d/at/e ti:me */
    # on the first line.
    #
    # Return true or false
    def generated_jflex?
      return false unless extname == '.java'
      return false unless lines.count > 1
      return lines[0].start_with?("/* The following code was generated by JFlex ")
    end

    # Internal: Is this a GrammarKit-generated file?
    #
    # A GrammarKit-generated file typically contains:
    #   // This is a generated file. Not intended for manual editing.
    # on the first line. This is not always the case, as it's possible to
    # customize the class header.
    #
    # Return true or false
    def generated_grammarkit?
      return false unless extname == '.java'
      return false unless lines.count > 1
      return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
    end
  end
end
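All three new detectors follow the same cheap pattern: gate on the extension, then test one fixed line for a marker string. For example, a Racc header trips the check on its third line (the file content below is made up, but mirrors what Racc actually emits):

    require 'linguist'

    racc_output = <<~RUBY
      #
      # DO NOT MODIFY!!!!
      # This file is automatically generated by Racc 1.4.12
      #
    RUBY

    blob = Linguist::Blob.new("parser.rb", racc_output)
    blob.generated?  # => true, via generated_racc? checking lines[2]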
@@ -1,6 +1,3 @@
# Note: This file is included in the github-linguist-grammars gem, not the
# github-linguist gem.

module Linguist
  module Grammars
    # Get the path to the directory containing the language grammar JSON files.
@@ -13,11 +13,14 @@ module Linguist
  #   ])
  #
  # Returns an Array of languages, or empty if none matched or were inconclusive.
  def self.call(blob, languages)
  def self.call(blob, candidates)
    data = blob.data

    @heuristics.each do |heuristic|
      return Array(heuristic.call(data)) if heuristic.matches?(languages)
      if heuristic.matches?(blob.name)
        languages = Array(heuristic.call(data))
        return languages if languages.any? || languages.all? { |l| candidates.include?(l) }
      end
    end

    [] # No heuristics matched
@@ -30,7 +33,7 @@ module Linguist
  #
  # Examples
  #
  #   disambiguate "Perl", "Prolog" do |data|
  #   disambiguate ".pm" do |data|
  #     if data.include?("use strict")
  #       Language["Perl"]
  #     elsif /^[^#]+:-/.match(data)
@@ -38,22 +41,23 @@ module Linguist
  #     end
  #   end
  #
  def self.disambiguate(*languages, &heuristic)
    @heuristics << new(languages, &heuristic)
  def self.disambiguate(*extensions, &heuristic)
    @heuristics << new(extensions, &heuristic)
  end

  # Internal: Array of defined heuristics
  @heuristics = []

  # Internal
  def initialize(languages, &heuristic)
    @languages = languages
  def initialize(extensions, &heuristic)
    @extensions = extensions
    @heuristic = heuristic
  end

  # Internal: Check if this heuristic matches the candidate languages.
  def matches?(candidates)
    candidates.any? && candidates.all? { |l| @languages.include?(l.name) }
  def matches?(filename)
    filename = filename.downcase
    @extensions.any? { |ext| filename.end_with?(ext) }
  end

  # Internal: Perform the heuristic
@@ -62,99 +66,9 @@ module Linguist
  end

  # Common heuristics
  ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
  ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/

  disambiguate "BitBake", "BlitzBasic" do |data|
    if /^\s*; /.match(data) || data.include?("End Function")
      Language["BlitzBasic"]
    elsif /^\s*(# |include|require)\b/.match(data)
      Language["BitBake"]
    end
  end

  disambiguate "C#", "Smalltalk" do |data|
    if /![\w\s]+methodsFor: /.match(data)
      Language["Smalltalk"]
    elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
      Language["C#"]
    end
  end

  disambiguate "Objective-C", "C++", "C" do |data|
    if ObjectiveCRegex.match(data)
      Language["Objective-C"]
    elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
           /^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
      Language["C++"]
    end
  end

  disambiguate "Perl", "Perl6", "Prolog" do |data|
    if data.include?("use v6")
      Language["Perl6"]
    elsif data.match(/use strict|use\s+v?5\./)
      Language["Perl"]
    elsif /^[^#]+:-/.match(data)
      Language["Prolog"]
    end
  end

  disambiguate "ECL", "Prolog" do |data|
    if /^[^#]+:-/.match(data)
      Language["Prolog"]
    elsif data.include?(":=")
      Language["ECL"]
    end
  end

  disambiguate "IDL", "Prolog", "INI", "QMake" do |data|
    if /^[^#]+:-/.match(data)
      Language["Prolog"]
    elsif data.include?("last_client=")
      Language["INI"]
    elsif data.include?("HEADERS") && data.include?("SOURCES")
      Language["QMake"]
    elsif /^\s*function[ \w,]+$/.match(data)
      Language["IDL"]
    end
  end

  disambiguate "GAP", "Scilab" do |data|
    if (data.include?("gap> "))
      Language["GAP"]
    # Heads up - we don't usually write heuristics like this (with no regex match)
    else
      Language["Scilab"]
    end
  end

  disambiguate "Common Lisp", "OpenCL", "Cool" do |data|
    if /^\s*\((defun|in-package|defpackage) /i.match(data)
      Language["Common Lisp"]
    elsif /^class/x.match(data)
      Language["Cool"]
    elsif /\/\* |\/\/ |^\}/.match(data)
      Language["OpenCL"]
    end
  end

  disambiguate "Hack", "PHP" do |data|
    if data.include?("<?hh")
      Language["Hack"]
    elsif /<?[^h]/.match(data)
      Language["PHP"]
    end
  end

  disambiguate "Scala", "SuperCollider" do |data|
    if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
      Language["SuperCollider"]
    elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
      Language["Scala"]
    end
  end

  disambiguate "AsciiDoc", "AGS Script", "Public Key" do |data|
  disambiguate ".asc" do |data|
    if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
      Language["Public Key"]
    elsif /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
@@ -164,15 +78,75 @@ module Linguist
    end
  end

  disambiguate "FORTRAN", "Forth", "Formatted" do |data|
  disambiguate ".bb" do |data|
    if /^\s*; /.match(data) || data.include?("End Function")
      Language["BlitzBasic"]
    elsif /^\s*(# |include|require)\b/.match(data)
      Language["BitBake"]
    end
  end

  disambiguate ".ch" do |data|
    if /^\s*#\s*(if|ifdef|ifndef|define|command|xcommand|translate|xtranslate|include|pragma|undef)\b/i.match(data)
      Language["xBase"]
    end
  end

  disambiguate ".cl" do |data|
    if /^\s*\((defun|in-package|defpackage) /i.match(data)
      Language["Common Lisp"]
    elsif /^class/x.match(data)
      Language["Cool"]
    elsif /\/\* |\/\/ |^\}/.match(data)
      Language["OpenCL"]
    end
  end

  disambiguate ".cs" do |data|
    if /![\w\s]+methodsFor: /.match(data)
      Language["Smalltalk"]
    elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
      Language["C#"]
    end
  end

  disambiguate ".d" do |data|
    if /^module /.match(data)
      Language["D"]
    elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
      Language["DTrace"]
    elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
      Language["Makefile"]
    end
  end

  disambiguate ".ecl" do |data|
    if /^[^#]+:-/.match(data)
      Language["ECLiPSe"]
    elsif data.include?(":=")
      Language["ECL"]
    end
  end

  disambiguate ".for", ".f" do |data|
    if /^: /.match(data)
      Language["Forth"]
    elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
    elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
      Language["FORTRAN"]
    end
  end

  disambiguate "F#", "Forth", "GLSL", "Filterscript" do |data|
  disambiguate ".fr" do |data|
    if /^(: |also |new-device|previous )/.match(data)
      Language["Forth"]
    elsif /^\s*(import|module|package|data|type) /.match(data)
      Language["Frege"]
    else
      Language["Text"]
    end
  end

  disambiguate ".fs" do |data|
    if /^(: |new-device)/.match(data)
      Language["Forth"]
    elsif /^\s*(#light|import|let|module|namespace|open|type)/.match(data)
@@ -184,7 +158,48 @@ module Linguist
    end
  end

  disambiguate "Limbo", "M", "MUF", "Mathematica", "Matlab", "Mercury", "Objective-C" do |data|
  disambiguate ".gs" do |data|
    Language["Gosu"] if /^uses java\./.match(data)
  end

  disambiguate ".h" do |data|
    if ObjectiveCRegex.match(data)
      Language["Objective-C"]
    elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
           /^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
      Language["C++"]
    end
  end

  disambiguate ".l" do |data|
    if /\(def(un|macro)\s/.match(data)
      Language["Common Lisp"]
    elsif /^(%[%{}]xs|<.*>)/.match(data)
      Language["Lex"]
    elsif /^\.[a-z][a-z](\s|$)/i.match(data)
      Language["Groff"]
    elsif /^\((de|class|rel|code|data|must)\s/.match(data)
      Language["PicoLisp"]
    end
  end

  disambiguate ".ls" do |data|
    if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
      Language["LoomScript"]
    else
      Language["LiveScript"]
    end
  end

  disambiguate ".lsp", ".lisp" do |data|
    if /^\s*\((defun|in-package|defpackage) /i.match(data)
      Language["Common Lisp"]
    elsif /^\s*\(define /.match(data)
      Language["NewLisp"]
    end
  end

  disambiguate ".m" do |data|
    if ObjectiveCRegex.match(data)
      Language["Objective-C"]
    elsif data.include?(":- module")
@@ -202,46 +217,144 @@ module Linguist
    end
  end

  disambiguate "Gosu", "JavaScript" do |data|
    Language["Gosu"] if /^uses java\./.match(data)
  end

  disambiguate "LoomScript", "LiveScript" do |data|
    if /^\s*package\s*[\w\.\/\*\s]*\s*{/.match(data)
      Language["LoomScript"]
    else
      Language["LiveScript"]
  disambiguate ".ml" do |data|
    if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
      Language["OCaml"]
    elsif /=> |case\s+(\S+\s)+of/.match(data)
      Language["Standard ML"]
    end
  end

  disambiguate "Common Lisp", "NewLisp" do |data|
    if /^\s*\((defun|in-package|defpackage) /i.match(data)
      Language["Common Lisp"]
    elsif /^\s*\(define /.match(data)
      Language["NewLisp"]
    end
  end

  disambiguate "TypeScript", "XML" do |data|
    if data.include?("<TS ")
  disambiguate ".mod" do |data|
    if data.include?('<!ENTITY ')
      Language["XML"]
    elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
      Language["Modula-2"]
    else
      Language["TypeScript"]
      [Language["Linux Kernel Module"], Language["AMPL"]]
    end
  end

  disambiguate "Frege", "Forth", "Text" do |data|
    if /^(: |also |new-device|previous )/.match(data)
      Language["Forth"]
    elsif /^\s*(import|module|package|data|type) /.match(data)
      Language["Frege"]
  disambiguate ".ms" do |data|
    if /^[.'][a-z][a-z](\s|$)/i.match(data)
      Language["Groff"]
    elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
      Language["GAS"]
    else
      Language["MAXScript"]
    end
  end

  disambiguate ".n" do |data|
    if /^[.']/.match(data)
      Language["Groff"]
    elsif /^(module|namespace|using)\s/.match(data)
      Language["Nemerle"]
    end
  end

  disambiguate ".ncl" do |data|
    if data.include?("THE_TITLE")
      Language["Text"]
    end
  end

  disambiguate "PLSQL", "SQLPL", "PLpgSQL", "SQL" do |data|
    if /^\\i\b|AS \$\$|LANGUAGE '+plpgsql'+/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
  disambiguate ".nl" do |data|
    if /^(b|g)[0-9]+ /.match(data)
      Language["NL"]
    else
      Language["NewLisp"]
    end
  end

  disambiguate ".php" do |data|
    if data.include?("<?hh")
      Language["Hack"]
    elsif /<?[^h]/.match(data)
      Language["PHP"]
    end
  end

  disambiguate ".pl" do |data|
    if /^[^#]+:-/.match(data)
      Language["Prolog"]
    elsif /use strict|use\s+v?5\./.match(data)
      Language["Perl"]
    elsif /^(use v6|(my )?class|module)/.match(data)
      Language["Perl6"]
    end
  end

  disambiguate ".pm", ".t" do |data|
    if /use strict|use\s+v?5\./.match(data)
      Language["Perl"]
    elsif /^(use v6|(my )?class|module)/.match(data)
      Language["Perl6"]
    end
  end

  disambiguate ".pod" do |data|
    if /^=\w+$/.match(data)
      Language["Pod"]
    else
      Language["Perl"]
    end
  end

  disambiguate ".pro" do |data|
    if /^[^#]+:-/.match(data)
      Language["Prolog"]
    elsif data.include?("last_client=")
      Language["INI"]
    elsif data.include?("HEADERS") && data.include?("SOURCES")
      Language["QMake"]
    elsif /^\s*function[ \w,]+$/.match(data)
      Language["IDL"]
    end
  end

  disambiguate ".props" do |data|
    if /^(\s*)(<Project|<Import|<Property|<?xml|xmlns)/i.match(data)
      Language["XML"]
    elsif /\w+\s*=\s*/i.match(data)
      Language["INI"]
    end
  end

  disambiguate ".r" do |data|
    if /\bRebol\b/i.match(data)
      Language["Rebol"]
    elsif data.include?("<-")
      Language["R"]
    end
  end

  disambiguate ".rpy" do |data|
    if /(^(import|from|class|def)\s)/m.match(data)
      Language["Python"]
    else
      Language["Ren'Py"]
    end
  end

  disambiguate ".rs" do |data|
    if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
      Language["Rust"]
    elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
      Language["RenderScript"]
    end
  end

  disambiguate ".sc" do |data|
    if /\^(this|super)\./.match(data) || /^\s*(\+|\*)\s*\w+\s*{/.match(data) || /^\s*~\w+\s*=\./.match(data)
      Language["SuperCollider"]
    elsif /^\s*import (scala|java)\./.match(data) || /^\s*val\s+\w+\s*=/.match(data) || /^\s*class\b/.match(data)
      Language["Scala"]
    end
  end

  disambiguate ".sql" do |data|
    if /^\\i\b|AS \$\$|LANGUAGE '?plpgsql'?/i.match(data) || /SECURITY (DEFINER|INVOKER)/i.match(data) || /BEGIN( WORK| TRANSACTION)?;/i.match(data)
      #Postgres
      Language["PLpgSQL"]
    elsif /(alter module)|(language sql)|(begin( NOT)+ atomic)/i.match(data) || /signal SQLSTATE '[0-9]+'/i.match(data)
@@ -256,75 +369,20 @@ module Linguist
    end
  end

  disambiguate "D", "DTrace", "Makefile" do |data|
    if /^module /.match(data)
      Language["D"]
    elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data)
      Language["DTrace"]
    elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data)
      Language["Makefile"]
    end
  end

  disambiguate "OCaml", "Standard ML" do |data|
    if /(^\s*module)|let rec |match\s+(\S+\s)+with/.match(data)
      Language["OCaml"]
    elsif /=> |case\s+(\S+\s)+of/.match(data)
      Language["Standard ML"]
    end
  end

  disambiguate "XML", "Modula-2", "Linux Kernel Module", "AMPL" do |data|
    if data.include?('<!ENTITY ')
  disambiguate ".ts" do |data|
    if data.include?("<TS ")
      Language["XML"]
    elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
      Language["Modula-2"]
    else
      [Language["Linux Kernel Module"], Language["AMPL"]]
      Language["TypeScript"]
    end
  end

  disambiguate "NL", "NewLisp" do |data|
    if /^(b|g)[0-9]+ /.match(data)
      Language["NL"]
  disambiguate ".tst" do |data|
    if (data.include?("gap> "))
      Language["GAP"]
    # Heads up - we don't usually write heuristics like this (with no regex match)
    else
      Language["NewLisp"]
    end
  end

  disambiguate "Rust", "RenderScript" do |data|
    if /^(use |fn |mod |pub |macro_rules|impl|#!?\[)/.match(data)
      Language["Rust"]
    elsif /#include|#pragma\s+(rs|version)|__attribute__/.match(data)
      Language["RenderScript"]
    end
  end

  disambiguate "Common Lisp", "Lex", "Groff", "PicoLisp" do |data|
    if /\(def(un|macro)\s/.match(data)
      Language["Common Lisp"]
    elsif /^(%[%{}]xs|<.*>)/.match(data)
      Language["Lex"]
    elsif /^\.[a-z][a-z](\s|$)/i.match(data)
      Language["Groff"]
    elsif /^\((de|class|rel|code|data|must)\s/.match(data)
      Language["PicoLisp"]
    end
  end

  disambiguate "Groff", "Nemerle" do |data|
    if /^[.']/.match(data)
      Language["Groff"]
    elsif /^(module|namespace|using)\s/.match(data)
      Language["Nemerle"]
    end
  end

  disambiguate "GAS", "Groff" do |data|
    if /^[.'][a-z][a-z](\s|$)/i.match(data)
      Language["Groff"]
    elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
      Language["GAS"]
      Language["Scilab"]
    end
  end
  end
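Keying heuristics on extensions rather than candidate language names means each ambiguous extension is declared exactly once. A new rule would look like this (the `.foo` extension, both patterns, and both language names are invented for illustration):

    module Linguist
      class Heuristics
        # Hypothetical: pick between two languages that share ".foo".
        disambiguate ".foo" do |data|
          if /^#\s*foo-pragma\b/.match(data)
            Language["Foo"]   # invented name
          elsif data.include?("bar:")
            Language["Bar"]   # invented name
          end
        end
      end
    end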
@@ -87,14 +87,6 @@ module Linguist
      language
    end

    STRATEGIES = [
      Linguist::Strategy::Modeline,
      Linguist::Shebang,
      Linguist::Strategy::Filename,
      Linguist::Heuristics,
      Linguist::Classifier
    ]

    # Public: Detects the Language of the blob.
    #
    # blob - an object that includes the Linguist `BlobHelper` interface;
@@ -102,34 +94,8 @@ module Linguist
    #
    # Returns Language or nil.
    def self.detect(blob)
      # Bail early if the blob is binary or empty.
      return nil if blob.likely_binary? || blob.binary? || blob.empty?

      Linguist.instrument("linguist.detection", :blob => blob) do
        # Call each strategy until one candidate is returned.
        languages = []
        returning_strategy = nil

        STRATEGIES.each do |strategy|
          returning_strategy = strategy
          candidates = Linguist.instrument("linguist.strategy", :blob => blob, :strategy => strategy, :candidates => languages) do
            strategy.call(blob, languages)
          end
          if candidates.size == 1
            languages = candidates
            break
          elsif candidates.size > 1
            # More than one candidate was found, pass them to the next strategy.
            languages = candidates
          else
            # No candidates, try the next strategy
          end
        end

        Linguist.instrument("linguist.detected", :blob => blob, :strategy => returning_strategy, :language => languages.first)

        languages.first
      end
      warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
      Linguist.detect(blob)
    end

    # Public: Get all Languages
@@ -150,7 +116,8 @@ module Linguist
    #
    # Returns the Language or nil if none was found.
    def self.find_by_name(name)
      name && @name_index[name.downcase]
      return nil if name.to_s.empty?
      name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
    end

    # Public: Look up Language by one of its aliases.
@@ -164,7 +131,8 @@ module Linguist
    #
    # Returns the Language or nil if none was found.
    def self.find_by_alias(name)
      name && @alias_index[name.downcase]
      return nil if name.to_s.empty?
      name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
    end

    # Public: Look up Languages by filename.
@@ -240,7 +208,8 @@ module Linguist
    #
    # Returns the Language or nil if none was found.
    def self.[](name)
      name && @index[name.downcase]
      return nil if name.to_s.empty?
      name && (@index[name.downcase] || @index[name.split(',').first.downcase])
    end

    # Public: A List of popular languages
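The three lookups now share the same two guards: an empty name short-circuits to nil, and a miss is retried with everything after the first comma dropped. Expected behaviour on a stock install (illustrative):

    Linguist::Language[""]            # => nil, the new empty-name guard
    Linguist::Language["ruby"]        # => #<Linguist::Language name=Ruby>
    Linguist::Language["ruby, utf-8"] # => same Language, via the split(',').first fallback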
417 lib/linguist/languages.yml Normal file → Executable file
File diff suppressed because it is too large
@@ -4,7 +4,11 @@ require 'rugged'

module Linguist
  class LazyBlob
    GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
    GIT_ATTR = ['linguist-documentation',
                'linguist-language',
                'linguist-vendored',
                'linguist-generated']

    GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
    GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)

@@ -24,6 +28,7 @@ module Linguist
      @oid = oid
      @path = path
      @mode = mode
      @data = nil
    end

    def git_attributes
@@ -31,14 +36,6 @@ module Linguist
        name, GIT_ATTR, GIT_ATTR_FLAGS)
    end

    def vendored?
      if attr = git_attributes['linguist-vendored']
        return boolean_attribute(attr)
      else
        return super
      end
    end

    def documentation?
      if attr = git_attributes['linguist-documentation']
        boolean_attribute(attr)
@@ -47,6 +44,22 @@ module Linguist
      end
    end

    def generated?
      if attr = git_attributes['linguist-generated']
        boolean_attribute(attr)
      else
        super
      end
    end

    def vendored?
      if attr = git_attributes['linguist-vendored']
        return boolean_attribute(attr)
      else
        super
      end
    end

    def language
      return @language if defined?(@language)

@@ -67,11 +80,15 @@ module Linguist
      @size
    end

    def cleanup!
      @data.clear if @data
    end

    protected

    # Returns true if the attribute is present and not the string "false".
    def boolean_attribute(attr)
      attr != "false"
    def boolean_attribute(attribute)
      attribute != "false"
    end

    def load_blob!
@@ -30,6 +30,9 @@ module Linguist
      @repository = repo
      @commit_oid = commit_oid

      @old_commit_oid = nil
      @old_stats = nil

      raise TypeError, 'commit_oid must be a commit SHA1' unless commit_oid.is_a?(String)
    end

@@ -126,12 +129,13 @@ module Linguist
    end

    protected
    MAX_TREE_SIZE = 100_000

    def compute_stats(old_commit_oid, cache = nil)
      return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE

      old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree

      read_index

      diff = Rugged::Tree.diff(repository, old_tree, current_tree)

      # Clear file map and fetch full diff if any .gitattributes files are changed
@@ -157,8 +161,11 @@ module Linguist

        blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))

        next unless blob.include_in_language_stats?
        file_map[new] = [blob.language.group.name, blob.size]
        if blob.include_in_language_stats?
          file_map[new] = [blob.language.group.name, blob.size]
        end

        blob.cleanup!
      end
    end
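The `@old_commit_oid`/`@old_stats` initialisation backs the incremental stats path; typical use is unchanged (a sketch against a local checkout; the byte counts are illustrative, not real output):

    require 'rugged'
    require 'linguist'

    rugged  = Rugged::Repository.new(".")
    project = Linguist::Repository.new(rugged, rugged.head.target_id)
    project.language   # => "Ruby" (dominant language)
    project.languages  # => { "Ruby" => 46319, "Shell" => 205 } (illustrative)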
@@ -42,10 +42,10 @@ module Linguist
      return unless script

      # "python2.6" -> "python2"
      script.sub! /(\.\d+)$/, ''
      script.sub!(/(\.\d+)$/, '')

      # #! perl -> perl
      script.sub! /^#!\s*/, ''
      script.sub!(/^#!\s*/, '')

      # Check for multiline shebang hacks that call `exec`
      if script == 'sh' &&
@@ -1,8 +1,19 @@
module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
      VimModeline = /vim:\s*set\s*(?:ft|filetype)=(\w+):/i
      EMACS_MODELINE = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i

      # First form vim modeline
      # [text]{white}{vi:|vim:|ex:}[white]{options}
      # ex: 'vim: syntax=ruby'
      VIM_MODELINE_1 = /(?:vim|vi|ex):\s*(?:ft|filetype|syntax)=(\w+)\s?/i

      # Second form vim modeline (compatible with some versions of Vi)
      # [text]{white}{vi:|vim:|Vim:|ex:}[white]se[t] {options}:[text]
      # ex: 'vim set syntax=ruby:'
      VIM_MODELINE_2 = /(?:vim|vi|Vim|ex):\s*se(?:t)?.*\s(?:ft|filetype|syntax)=(\w+)\s?.*:/i

      MODELINES = [EMACS_MODELINE, VIM_MODELINE_1, VIM_MODELINE_2]

      # Public: Detects language based on Vim and Emacs modelines
      #
@@ -22,7 +33,7 @@ module Linguist
      #
      # Returns a String or nil
      def self.modeline(data)
        match = data.match(EmacsModeline) || data.match(VimModeline)
        match = MODELINES.map { |regex| data.match(regex) }.reject(&:nil?).first
        match[1] if match
      end
    end
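Splitting the Vim pattern in two closes the gap between the `vim: ft=x` and `vim: set ft=x:` forms; all three regexes are tried in order. Expected captures under the constants above:

    Linguist::Strategy::Modeline.modeline("/* vim: ft=c */")           # => "c"      (first Vim form)
    Linguist::Strategy::Modeline.modeline("# vim: set filetype=ruby:") # => "ruby"   (second Vim form)
    Linguist::Strategy::Modeline.modeline("-*- mode: python -*-")      # => "python" (Emacs form)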
@@ -86,17 +86,17 @@ module Linguist
        if s.peek(1) == "\""
          s.getch
        else
          s.skip_until(/[^\\]"/)
          s.skip_until(/(?<!\\)"/)
        end
      elsif s.scan(/'/)
        if s.peek(1) == "'"
          s.getch
        else
          s.skip_until(/[^\\]'/)
          s.skip_until(/(?<!\\)'/)
        end

      # Skip number literals
      elsif s.scan(/(0x)?\d(\d|\.)*/)
      elsif s.scan(/(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/)

      # SGML style brackets
      elsif token = s.scan(/<[^\s<>][^<>]*>/)
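The widened number pattern swallows hex digits, exponents and C-style suffixes so they no longer leak into the token stream as identifiers. Checking the regex on its own (copied verbatim from the scan above):

    NUMBER = /(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/

    %w[0xdeadBEEF 0xFFul 1.5e-3f 42].all? { |s| s.match(NUMBER).to_s == s }  # => true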
@@ -20,10 +20,17 @@
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)configure.ac$
- (^|/)config.guess$
- (^|/)config.sub$

# stuff autogenerated by autoconf - still C deps
- (^|/)aclocal.m4
- (^|/)libtool.m4
- (^|/)ltoptions.m4
- (^|/)ltsugar.m4
- (^|/)ltversion.m4
- (^|/)lt~obsolete.m4

# Linters
- cpplint.py

@@ -78,6 +85,9 @@
# Haxelib projects often contain a neko bytecode file named run.n
- run.n$

# Bootstrap Datepicker
- bootstrap-datepicker/

## Commonly Bundled JavaScript frameworks ##

# jQuery
@@ -88,6 +98,34 @@
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?\.(js|css)$
- (^|/)jquery\.(ui|effects)\.([^.]*)\.(js|css)$

# jQuery Gantt
- jquery.fn.gantt.js

# jQuery fancyBox
- jquery.fancybox.(js|css)

# Fuel UX
- fuelux.js

# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# Slick
- (^|/)slick\.\w+.js$

# Leaflet plugins
- (^|/)Leaflet\.Coordinates-\d+\.\d+\.\d+\.src\.js$
- leaflet.draw-src.js
- leaflet.draw.css
- Control.FullScreen.css
- Control.FullScreen.js
- leaflet.spin.js
- wicket-leaflet.js

# Sublime Text workspace files
- .sublime-project
- .sublime-workspace

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
@@ -122,7 +160,7 @@
- (^|/)Chart\.js$

# Codemirror
- (^|/)[Cc]ode[Mm]irror/(lib|mode|theme|addon|keymap)
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
- (^|/)shBrush([^.]*)\.js$
@@ -164,6 +202,11 @@

## Obj-C ##

# Xcode

- \.xctemplate/
- \.imageset/

# Carthage
- ^Carthage/

@@ -179,6 +222,10 @@
# Fabric
- Fabric.framework/

# git config files
- gitattributes$
- gitignore$
- gitmodules$

## Groovy ##

@@ -224,21 +271,9 @@
# Html5shiv
- (^|/)html5shiv\.js$

# Samples folders
- ^[Ss]amples/

# LICENSE, README, git config files
- ^COPYING$
- LICENSE$
- License$
- gitattributes$
- gitignore$
- gitmodules$
- ^README$
- ^readme$

# Test fixtures
- ^[Tt]ests?/fixtures/
- ^[Ss]pecs?/fixtures/

# PhoneGap/Cordova
- (^|/)cordova([^.]*)\.js$
@@ -274,3 +309,6 @@

# Android Google APIs
- (^|/)\.google_apis/

# Jenkins Pipeline
- ^Jenkinsfile$
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "4.5.9"
  VERSION = "4.8.1"
end
@@ -2,5 +2,6 @@
  "repository": "https://github.com/github/linguist",
  "dependencies": {
    "season": "~>5.0"
  }
  },
  "license": "MIT"
}
86 samples/C#/build.cake Normal file
@@ -0,0 +1,86 @@
///////////////////////////////////////////////////////////////////////////////
// ARGUMENTS
///////////////////////////////////////////////////////////////////////////////

var target = Argument<string>("target", "Default");
var configuration = Argument<string>("configuration", "Release");

///////////////////////////////////////////////////////////////////////////////
// GLOBAL VARIABLES
///////////////////////////////////////////////////////////////////////////////

var solutions = GetFiles("./**/*.sln");
var solutionPaths = solutions.Select(solution => solution.GetDirectory());

///////////////////////////////////////////////////////////////////////////////
// SETUP / TEARDOWN
///////////////////////////////////////////////////////////////////////////////

Setup(() =>
{
    // Executed BEFORE the first task.
    Information("Running tasks...");
});

Teardown(() =>
{
    // Executed AFTER the last task.
    Information("Finished running tasks.");
});

///////////////////////////////////////////////////////////////////////////////
// TASK DEFINITIONS
///////////////////////////////////////////////////////////////////////////////

Task("Clean")
    .Does(() =>
{
    // Clean solution directories.
    foreach(var path in solutionPaths)
    {
        Information("Cleaning {0}", path);
        CleanDirectories(path + "/**/bin/" + configuration);
        CleanDirectories(path + "/**/obj/" + configuration);
    }
});

Task("Restore")
    .Does(() =>
{
    // Restore all NuGet packages.
    foreach(var solution in solutions)
    {
        Information("Restoring {0}...", solution);
        NuGetRestore(solution);
    }
});

Task("Build")
    .IsDependentOn("Clean")
    .IsDependentOn("Restore")
    .Does(() =>
{
    // Build all solutions.
    foreach(var solution in solutions)
    {
        Information("Building {0}", solution);
        MSBuild(solution, settings =>
            settings.SetPlatformTarget(PlatformTarget.MSIL)
                .WithProperty("TreatWarningsAsErrors","true")
                .WithTarget("Build")
                .SetConfiguration(configuration));
    }
});

///////////////////////////////////////////////////////////////////////////////
// TARGETS
///////////////////////////////////////////////////////////////////////////////

Task("Default")
    .IsDependentOn("Build");

///////////////////////////////////////////////////////////////////////////////
// EXECUTION
///////////////////////////////////////////////////////////////////////////////

RunTarget(target);
@@ -1,3 +1,3 @@
Year,Make,Model,Length
1997,Ford,E350,2.34
2000,Mercury,Cougar,2.38
2000,Mercury,Cougar,2.38
6 samples/Charity/example.ch Normal file
@@ -0,0 +1,6 @@
%
% Some very badly written Charity
%

data LA(A) -> D = ss: A -> D
                | ff: -> D.
133 samples/Click/sr2.click Normal file
@@ -0,0 +1,133 @@
rates :: AvailableRates
elementclass sr2 {
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|


arp :: ARPTable();
lt :: LinkTable(IP $sr2_ip);


gw :: SR2GatewaySelector(ETHTYPE 0x062c,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
PERIOD 15,
GW $gateway);


gw -> SR2SetChecksum -> [0] output;

set_gw :: SR2SetGateway(SEL gw);


es :: SR2ETTStat(ETHTYPE 0x0641,
ETH $wireless_mac,
IP $sr2_ip,
PERIOD 30000,
TAU 300000,
ARP arp,
PROBES $probes,
ETT metric,
RT rates);


metric :: SR2ETTMetric(LT lt);


forwarder :: SR2Forwarder(ETHTYPE 0x0643,
IP $sr2_ip,
ETH $wireless_mac,
ARP arp,
LT lt);


querier :: SR2Querier(ETH $wireless_mac,
SR forwarder,
LT lt,
ROUTE_DAMPENING true,
TIME_BEFORE_SWITCH 5,
DEBUG true);


query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
DEBUG false);

query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
DEBUG true);


query_responder -> SR2SetChecksum -> [0] output;
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
query_forwarder [1] -> query_responder;

data_ck :: SR2SetChecksum()

input [1]
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
-)
-> querier
-> data_ck;


host_cl [1] -> [0] set_gw [0] -> querier;

forwarder[0]
-> dt ::DecIPTTL
-> data_ck
-> [2] output;


dt[1]
-> Print(ttl-error)
-> ICMPError($sr2_ip, timeexceeded, 0)
-> querier;


// queries
querier [1] -> [1] query_forwarder;
es -> SetTimestamp() -> [1] output;


forwarder[1] //ip packets to me
-> SR2StripHeader()
-> CheckIPHeader()
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
-)
-> [3] output;

from_gw_cl [1] -> [1] set_gw [1] -> [3] output;

input [0]
-> ncl :: Classifier(
12/0643 , //sr2_forwarder
12/0644 , //sr2
12/0645 , //replies
12/0641 , //sr2_es
12/062c , //sr2_gw
);


ncl[0] -> SR2CheckHeader() -> [0] forwarder;
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
ncl[2] -> SR2CheckHeader() -> query_responder;
ncl[3] -> es;
ncl[4] -> SR2CheckHeader() -> gw;

}



Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
Idle -> [1] s;
s[1] -> Discard;
s[2] -> Discard;
s[3] -> Discard;
142 samples/Click/thomer-nat.click Normal file
@@ -0,0 +1,142 @@
// This Click configuration implements a firewall and NAT, roughly based on the
// mazu-nat.click example.
//
// This example assumes there is one interface that is IP-aliased. In this
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
// respectively. There is a local network, 192.168.1.0/24, and an upstream
// gateway, 66.58.65.89. Traffic from the local network is NATed.
//
// Connections can be initiated from the NAT box itself, also.
//
// For bugs, suggestions, and, corrections, please email me.
//
// Author: Thomer M. Gil (click@thomer.com)

AddressInfo(
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
gw-addr 66.58.65.89 00:20:6f:14:54:c2
);


elementclass SniffGatewayDevice {
$device |
from :: FromDevice($device)
-> t1 :: Tee
-> output;
input -> q :: Queue(1024)
-> t2 :: PullTee
-> to :: ToDevice($device);
t1[1] -> ToHostSniffers;
t2[1] -> ToHostSniffers($device);
ScheduleInfo(from .1, to 1);
}


device :: SniffGatewayDevice(eth0);
arpq_in :: ARPQuerier(eth0-in) -> device;
ip_to_extern :: GetIPAddress(16)
-> CheckIPHeader
-> EtherEncap(0x800, eth0-ex, gw-addr)
-> device;
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
-> ToHost;
ip_to_intern :: GetIPAddress(16)
-> CheckIPHeader
-> arpq_in;


arp_class :: Classifier(
12/0806 20/0001, // [0] ARP requests
12/0806 20/0002, // [1] ARP replies to host
12/0800); // [2] IP packets

device -> arp_class;

// ARP crap
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
arp_class[1] -> arp_t :: Tee;
arp_t[0] -> ToHost;
arp_t[1] -> [1]arpq_in;


// IP packets
arp_class[2] -> Strip(14)
-> CheckIPHeader
-> ipclass :: IPClassifier(dst host eth0-ex,
dst host eth0-in,
src net eth0-in);

// Define pattern NAT
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);

// Rewriting rules for UDP/TCP packets
// output[0] rewritten to go into the wild
// output[1] rewritten to come back from the wild or no match
rw :: IPRewriter(pattern NAT 0 1,
pass 1);

// Rewriting rules for ICMP packets
irw :: ICMPPingRewriter(eth0-ex, -);
irw[0] -> ip_to_extern;
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
icmp_me_or_intern[0] -> ip_to_host;
icmp_me_or_intern[1] -> ip_to_intern;

// Rewriting rules for ICMP error packets
ierw :: ICMPRewriter(rw irw);
ierw[0] -> icmp_me_or_intern;
ierw[1] -> icmp_me_or_intern;


// Packets directed at eth0-ex.
// Send it through IPRewriter(pass). If there was a mapping, it will be
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
// eth0-ex.
ipclass[0] -> [1]rw;

// packets that were rewritten, heading into the wild world.
rw[0] -> ip_to_extern;

// packets that come back from the wild or are not part of an established
// connection.
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
dst net eth0-in);

// not established yet or returning packets for a connection that was
// established from this host itself.
established_class[0] ->
firewall :: IPClassifier(dst tcp port ssh,
dst tcp port smtp,
dst tcp port domain,
dst udp port domain,
icmp type echo-reply,
proto icmp,
port > 4095,
-);

firewall[0] -> ip_to_host; // ssh
firewall[1] -> ip_to_host; // smtp
firewall[2] -> ip_to_host; // domain (t)
firewall[3] -> ip_to_host; // domain (u)
firewall[4] -> [0]irw; // icmp reply
firewall[5] -> [0]ierw; // other icmp
firewall[6] -> ip_to_host; // port > 4095, probably for connection
// originating from host itself
firewall[7] -> Discard; // don't allow incoming for port <= 4095

// established connection
established_class[1] -> ip_to_intern;

// To eth0-in. Only accept from inside network.
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;

// Packets from eth0-in:net either stay on local network or go to the wild.
// Those that go into the wild need to go through the appropriate rewriting
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
inter_class[0] -> ip_to_intern;
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
icmp type echo);
ip_udp_class[0] -> [0]rw;
ip_udp_class[1] -> [0]irw;
17 samples/CoffeeScript/build.cake Normal file
@@ -0,0 +1,17 @@
fs = require 'fs'

{print} = require 'sys'
{spawn} = require 'child_process'

build = (callback) ->
  coffee = spawn 'coffee', ['-c', '-o', '.', '.']
  coffee.stderr.on 'data', (data) ->
    process.stderr.write data.toString()
  coffee.stdout.on 'data', (data) ->
    print data.toString()
  coffee.on 'exit', (code) ->
    callback?() if code is 0

task 'build', 'Build from source', ->
  build()
2 samples/Common Lisp/config.sexp Normal file
@@ -0,0 +1,2 @@
((exe_name hello)
 (link_order (world hello)))
103 samples/Common Lisp/rss.sexp Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
(:TURTLE
|
||||
|
||||
(:@PREFIX "rdf:" "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>")
|
||||
(:@PREFIX "owl:" "<http://www.w3.org/2002/07/owl#>")
|
||||
(:@PREFIX "dc:" "<http://purl.org/dc/elements/1.1/>")
|
||||
(:@PREFIX "xsd:" "<http://www.w3.org/2001/XMLSchema#>")
|
||||
(:@PREFIX "rdfs:" "<http://www.w3.org/2000/01/rdf-schema#>")
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/channel>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1="<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")
|
||||
(:OBJECTS
|
||||
(:QNAME "rdfs:Class")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "An RSS information channel.")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS
|
||||
(:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:QNAME "rdfs:label")
|
||||
(:OBJECTS
|
||||
(:STRING "Channel"))))
|
||||
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/description>")
|
||||
|
||||
(:PREDICATE-OBJECT-LIST
|
||||
(:URIREF #1#)
|
||||
(:OBJECTS
|
||||
(:QNAME "rdf:Property")))
|
||||
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A short text description of the subject.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Description")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:description"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/image>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS image.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Image"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/item>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS item.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Item"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/items>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING "Points to a list of rss:item elements that are members of the subject channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Items"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/link>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The URL to which an HTML rendering of the subject will link.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Link")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/name>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "The text input field's (variable) name.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Name"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/textinput>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdfs:Class")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment") (:OBJECTS (:STRING "An RSS text input.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Text Input"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/title>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS (:STRING "A descriptive title for the channel.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "Title")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:title"))))
|
||||
(:TRIPLES (:URIREF "<http://purl.org/rss/1.0/url>")
|
||||
(:PREDICATE-OBJECT-LIST (:URIREF #1#) (:OBJECTS (:QNAME "rdf:Property")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:comment")
|
||||
(:OBJECTS
|
||||
(:STRING
|
||||
"The URL of the image to used in the 'src' attribute of the channel's image tag when rendered as HTML.")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:isDefinedBy")
|
||||
(:OBJECTS (:URIREF "<http://purl.org/rss/1.0/>")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:label") (:OBJECTS (:STRING "URL")))
|
||||
(:PREDICATE-OBJECT-LIST (:QNAME "rdfs:subPropertyOf") (:OBJECTS (:QNAME "dc:identifier")))))
|
||||
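The `#1=` and `#1#` tokens in the sample above are Common Lisp reader labels: the first occurrence defines a shared datum (here the rdf:type URIREF) and every later `#1#` re-reads the very same object rather than a copy. A toy Python illustration of that sharing (hypothetical names, sketch only):

    rdf_type = '(:URIREF "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>")'  # like #1=
    channel = {"type": rdf_type}       # later uses, like #1#, reference the
    description = {"type": rdf_type}   # same object instead of repeating it
    assert channel["type"] is description["type"]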
samples/DNS Zone/sample.arpa (new file, 13 lines)
@@ -0,0 +1,13 @@
$ORIGIN 0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3.ip6.arpa.
$TTL 60
@ IN SOA ns root (
    2002042901 ; SERIAL
    7200       ; REFRESH
    600        ; RETRY
    36000000   ; EXPIRE
    120        ; MINIMUM
    )

  NS ns.example.com.

c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0 PTR sip01.example.com.
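Reverse-DNS zones for IPv6 list the address nibbles least-significant first, so the `$ORIGIN` and the PTR owner name above together encode a full address. A small decoding sketch (standard library only):

    def nibbles_to_hex_groups(labels: str) -> str:
        """Reverse ip6.arpa nibble labels and regroup them into hextets."""
        nibbles = labels.split(".")[::-1]
        return ":".join("".join(nibbles[i:i + 4]) for i in range(0, len(nibbles), 4))

    print(nibbles_to_hex_groups("0.0.0.c.2.1.0.3.0.0.2.1.e.f.f.3"))  # 3ffe:1200:3012:c000
    print(nibbles_to_hex_groups("c.a.7.e.d.7.e.f.f.f.0.2.8.0.a.0"))  # 0a08:20ff:fe7d:e7ac

Concatenated, the PTR record therefore maps 3ffe:1200:3012:c000:0a08:20ff:fe7d:e7ac back to sip01.example.com.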
samples/DNS Zone/sneaky.net.zone (new file, 12 lines)
@@ -0,0 +1,12 @@
$TTL 3d
@ IN SOA root.localhost. root.sneaky.net. (
    2015042907 ; serial
    3d         ; refresh
    1h         ; retry
    12d        ; expire
    2h         ; negative response TTL
    )
  IN NS root.localhost.
  IN NS localhost. ; secondary name server is preferably externally maintained

www IN A 3.141.59.26
samples/Erlang/elixir_parser.yrl (new file, 856 lines)
@@ -0,0 +1,856 @@
Nonterminals
  grammar expr_list
  expr container_expr block_expr access_expr
  no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr
  bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr
  unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr
  comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol
  add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol
  arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol
  type_op_eol rel_op_eol
  open_paren close_paren empty_paren eoe
  list list_args open_bracket close_bracket
  tuple open_curly close_curly
  bit_string open_bit close_bit
  map map_op map_close map_args map_expr struct_op
  assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc
  container_args_base container_args
  call_args_parens_expr call_args_parens_base call_args_parens parens_call
  call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr
  call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many
  call_args_no_parens_many_strict
  stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many
  kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw
  dot_op dot_alias dot_alias_container
  dot_identifier dot_op_identifier dot_do_identifier
  dot_paren_identifier dot_bracket_identifier
  do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list
  .

Terminals
  identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier
  paren_identifier do_identifier block_identifier
  fn 'end' aliases
  number atom atom_safe atom_unsafe bin_string list_string sigil
  dot_call_op op_identifier
  comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op
  type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op
  capture_op rel_op
  'true' 'false' 'nil' 'do' eol ';' ',' '.'
  '(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%'
  .

Rootsymbol grammar.

%% Two shift/reduce conflicts coming from call_args_parens.
Expect 2.

%% Changes in ops and precedence should be reflected in lib/elixir/lib/macro.ex.
%% Note that although the operator => in practice has lower precedence than all
%% others, its entry in the table only exists to support the %{user | foo => bar} syntax.
Left       5 do.
Right     10 stab_op_eol.     %% ->
Left      20 ','.
Nonassoc  30 capture_op_eol.  %% &
Left      40 in_match_op_eol. %% <-, \\ (allowed in matches along =)
Right     50 when_op_eol.     %% when
Right     60 type_op_eol.     %% ::
Right     70 pipe_op_eol.     %% |
Right     80 assoc_op_eol.    %% =>
Right     90 match_op_eol.    %% =
Left     130 or_op_eol.       %% ||, |||, or
Left     140 and_op_eol.      %% &&, &&&, and
Left     150 comp_op_eol.     %% ==, !=, =~, ===, !==
Left     160 rel_op_eol.      %% <, >, <=, >=
Left     170 arrow_op_eol.    %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|>
Left     180 in_op_eol.       %% in
Left     190 three_op_eol.    %% ^^^
Right    200 two_op_eol.      %% ++, --, .., <>
Left     210 add_op_eol.      %% +, -
Left     220 mult_op_eol.     %% *, /
Nonassoc 300 unary_op_eol.    %% +, -, !, ^, not, ~~~
Left     310 dot_call_op.
Left     310 dot_op.          %% .
Nonassoc 320 at_op_eol.       %% @
Nonassoc 330 dot_identifier.

%%% MAIN FLOW OF EXPRESSIONS

grammar -> eoe : nil.
grammar -> expr_list : to_block('$1').
grammar -> eoe expr_list : to_block('$2').
grammar -> expr_list eoe : to_block('$1').
grammar -> eoe expr_list eoe : to_block('$2').
grammar -> '$empty' : nil.

% Note expressions are in reverse order
expr_list -> expr : ['$1'].
expr_list -> expr_list eoe expr : ['$3'|'$1'].

expr -> matched_expr : '$1'.
expr -> no_parens_expr : '$1'.
expr -> unmatched_expr : '$1'.

%% In Elixir we have three main call syntaxes: with parentheses,
%% without parentheses and with do blocks. They are represented
%% in the AST as matched, no_parens and unmatched.
%%
%% Calls without parentheses are further divided according to how
%% problematic they are:
%%
%% (a) no_parens_one: a call with one unproblematic argument
%% (e.g. `f a` or `f g a` and similar) (includes unary operators)
%%
%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`)
%%
%% (c) no_parens_one_ambig: a call with one argument which is
%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b`
%% or `f g h a, b` and similar)
%%
%% Note, in particular, that no_parens_one_ambig expressions are
%% ambiguous and are interpreted such that the outer function has
%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather
%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig.
%%
%% The distinction is required because we can't, for example, have
%% a function call with a do block as argument inside another do
%% block call, unless there are parentheses:
%%
%%     if if true do true else false end do  #=> invalid
%%     if(if true do true else false end) do #=> valid
%%
%% Similarly, it is not possible to nest calls without parentheses
%% if their arity is more than 1:
%%
%%     foo a, bar b, c  #=> invalid
%%     foo(a, bar b, c) #=> invalid
%%     foo bar a, b     #=> valid
%%     foo a, bar(b, c) #=> valid
%%
%% So the different grammar rules need to take into account
%% if calls without parentheses are do blocks in particular
%% segments and act accordingly.
matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> no_parens_one_expr : '$1'.
matched_expr -> no_parens_zero_expr : '$1'.
matched_expr -> access_expr : '$1'.
matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2').

unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> block_expr : '$1'.

no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> no_parens_one_ambig_expr : '$1'.
no_parens_expr -> no_parens_many_expr : '$1'.

block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3').
block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4').
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').

matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
%% Warn for no parens subset
matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.

unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}.

no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
%% Warn for no parens subset
no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.

%% Allow when (and only when) with keywords
no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.

no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').

no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').

no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil).
no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil).

%% From this point on, we just have constructs that can be
%% used with the access syntax. Notice that (dot_)identifier
%% is not included in this list simply because the tokenizer
%% marks identifiers followed by brackets as bracket_identifier.
access_expr -> bracket_at_expr : '$1'.
access_expr -> bracket_expr : '$1'.
access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')).
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')).
access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' close_paren : build_stab([]).
access_expr -> empty_paren : nil.
access_expr -> number : ?exprs('$1').
access_expr -> list : element(1, '$1').
access_expr -> map : '$1'.
access_expr -> tuple : '$1'.
access_expr -> 'true' : ?id('$1').
access_expr -> 'false' : ?id('$1').
access_expr -> 'nil' : ?id('$1').
access_expr -> bin_string : build_bin_string('$1').
access_expr -> list_string : build_list_string('$1').
access_expr -> bit_string : '$1'.
access_expr -> sigil : build_sigil('$1').
access_expr -> max_expr : '$1'.

%% Aliases and properly formed calls. Used by map_expr.
max_expr -> atom : ?exprs('$1').
max_expr -> atom_safe : build_quoted_atom('$1', true).
max_expr -> atom_unsafe : build_quoted_atom('$1', false).
max_expr -> parens_call call_args_parens : build_identifier('$1', '$2').
max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3').
max_expr -> dot_alias : '$1'.

bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2').

bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1', nil), '$2').
bracket_expr -> access_expr bracket_arg : build_access('$1', '$2').

bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg :
  build_access(build_unary_op('$1', build_identifier('$2', nil)), '$3').
bracket_at_expr -> at_op_eol access_expr bracket_arg :
  build_access(build_unary_op('$1', '$2'), '$3').

%% Blocks

do_block -> do_eoe 'end' : [[{do, nil}]].
do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]].
do_block -> do_eoe block_list 'end' : [[{do, nil}|'$2']].
do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].

eoe -> eol : '$1'.
eoe -> ';' : '$1'.
eoe -> eol ';' : '$1'.

fn_eoe -> 'fn' : '$1'.
fn_eoe -> 'fn' eoe : '$1'.

do_eoe -> 'do' : '$1'.
do_eoe -> 'do' eoe : '$1'.

end_eoe -> 'end' : '$1'.
end_eoe -> eoe 'end' : '$2'.

block_eoe -> block_identifier : '$1'.
block_eoe -> block_identifier eoe : '$1'.

stab -> stab_expr : ['$1'].
stab -> stab eoe stab_expr : ['$3'|'$1'].

stab_eoe -> stab : '$1'.
stab_eoe -> stab eoe : '$1'.

%% Here, `element(1, Token)` is the stab operator,
%% while `element(2, Token)` is the expression.
stab_expr -> expr :
  '$1'.
stab_expr -> stab_op_eol_and_expr :
  build_op(element(1, '$1'), [], element(2, '$1')).
stab_expr -> empty_paren stab_op_eol_and_expr :
  build_op(element(1, '$2'), [], element(2, '$2')).
stab_expr -> call_args_no_parens_all stab_op_eol_and_expr :
  build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')).
stab_expr -> stab_parens_many stab_op_eol_and_expr :
  build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')).
stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr :
  build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')).

stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}.
stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}.

block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}.
block_item -> block_eoe : {?exprs('$1'), nil}.

block_list -> block_item : ['$1'].
block_list -> block_item block_list : ['$1'|'$2'].

%% Helpers

open_paren -> '(' : '$1'.
open_paren -> '(' eol : '$1'.
close_paren -> ')' : '$1'.
close_paren -> eol ')' : '$2'.

empty_paren -> open_paren ')' : '$1'.

open_bracket -> '[' : '$1'.
open_bracket -> '[' eol : '$1'.
close_bracket -> ']' : '$1'.
close_bracket -> eol ']' : '$2'.

open_bit -> '<<' : '$1'.
open_bit -> '<<' eol : '$1'.
close_bit -> '>>' : '$1'.
close_bit -> eol '>>' : '$2'.

open_curly -> '{' : '$1'.
open_curly -> '{' eol : '$1'.
close_curly -> '}' : '$1'.
close_curly -> eol '}' : '$2'.

% Operators

add_op_eol -> add_op : '$1'.
add_op_eol -> add_op eol : '$1'.
add_op_eol -> dual_op : '$1'.
add_op_eol -> dual_op eol : '$1'.

mult_op_eol -> mult_op : '$1'.
mult_op_eol -> mult_op eol : '$1'.

two_op_eol -> two_op : '$1'.
two_op_eol -> two_op eol : '$1'.

three_op_eol -> three_op : '$1'.
three_op_eol -> three_op eol : '$1'.

pipe_op_eol -> pipe_op : '$1'.
pipe_op_eol -> pipe_op eol : '$1'.

capture_op_eol -> capture_op : '$1'.
capture_op_eol -> capture_op eol : '$1'.

unary_op_eol -> unary_op : '$1'.
unary_op_eol -> unary_op eol : '$1'.
unary_op_eol -> dual_op : '$1'.
unary_op_eol -> dual_op eol : '$1'.

match_op_eol -> match_op : '$1'.
match_op_eol -> match_op eol : '$1'.

and_op_eol -> and_op : '$1'.
and_op_eol -> and_op eol : '$1'.

or_op_eol -> or_op : '$1'.
or_op_eol -> or_op eol : '$1'.

in_op_eol -> in_op : '$1'.
in_op_eol -> in_op eol : '$1'.

in_match_op_eol -> in_match_op : '$1'.
in_match_op_eol -> in_match_op eol : '$1'.

type_op_eol -> type_op : '$1'.
type_op_eol -> type_op eol : '$1'.

when_op_eol -> when_op : '$1'.
when_op_eol -> when_op eol : '$1'.

stab_op_eol -> stab_op : '$1'.
stab_op_eol -> stab_op eol : '$1'.

at_op_eol -> at_op : '$1'.
at_op_eol -> at_op eol : '$1'.

comp_op_eol -> comp_op : '$1'.
comp_op_eol -> comp_op eol : '$1'.

rel_op_eol -> rel_op : '$1'.
rel_op_eol -> rel_op eol : '$1'.

arrow_op_eol -> arrow_op : '$1'.
arrow_op_eol -> arrow_op eol : '$1'.

% Dot operator

dot_op -> '.' : '$1'.
dot_op -> '.' eol : '$1'.

dot_identifier -> identifier : '$1'.
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').

dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}.
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3').

dot_alias_container -> open_curly '}' : [].
dot_alias_container -> open_curly container_args close_curly : '$2'.

dot_op_identifier -> op_identifier : '$1'.
dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3').

dot_do_identifier -> do_identifier : '$1'.
dot_do_identifier -> matched_expr dot_op do_identifier : build_dot('$2', '$1', '$3').

dot_bracket_identifier -> bracket_identifier : '$1'.
dot_bracket_identifier -> matched_expr dot_op bracket_identifier : build_dot('$2', '$1', '$3').

dot_paren_identifier -> paren_identifier : '$1'.
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').

parens_call -> dot_paren_identifier : '$1'.
parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls

% Function calls with no parentheses

call_args_no_parens_expr -> matched_expr : '$1'.
call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').

call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1'].
call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1'].

call_args_no_parens_all -> call_args_no_parens_one : '$1'.
call_args_no_parens_all -> call_args_no_parens_ambig : '$1'.
call_args_no_parens_all -> call_args_no_parens_many : '$1'.

call_args_no_parens_one -> call_args_no_parens_kw : ['$1'].
call_args_no_parens_one -> matched_expr : ['$1'].

call_args_no_parens_ambig -> no_parens_expr : ['$1'].

call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3'].
call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1').
call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']).

call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'.
call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1').
call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1').

stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2'].
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'.

% Containers

container_expr -> matched_expr : '$1'.
container_expr -> unmatched_expr : '$1'.
container_expr -> no_parens_expr : throw_no_parens_container_strict('$1').

container_args_base -> container_expr : ['$1'].
container_args_base -> container_args_base ',' container_expr : ['$3'|'$1'].

container_args -> container_args_base : lists:reverse('$1').
container_args -> container_args_base ',' : lists:reverse('$1').
container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']).

% Function calls with parentheses

call_args_parens_expr -> matched_expr : '$1'.
call_args_parens_expr -> unmatched_expr : '$1'.
call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').

call_args_parens_base -> call_args_parens_expr : ['$1'].
call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3'|'$1'].

call_args_parens -> empty_paren : [].
call_args_parens -> open_paren no_parens_expr close_paren : ['$2'].
call_args_parens -> open_paren kw close_paren : ['$2'].
call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2').
call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']).

% KV

kw_eol -> kw_identifier : ?exprs('$1').
kw_eol -> kw_identifier eol : ?exprs('$1').
kw_eol -> kw_identifier_safe : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_safe eol : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false).
kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false).

kw_base -> kw_eol container_expr : [{'$1', '$2'}].
kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].

kw -> kw_base : reverse('$1').
kw -> kw_base ',' : reverse('$1').

call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}.
call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}.

call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].

% Lists

list_args -> kw : '$1'.
list_args -> container_args_base : reverse('$1').
list_args -> container_args_base ',' : reverse('$1').
list_args -> container_args_base ',' kw : reverse('$1', '$3').

list -> open_bracket ']' : build_list('$1', []).
list -> open_bracket list_args close_bracket : build_list('$1', '$2').

% Tuple

tuple -> open_curly '}' : build_tuple('$1', []).
tuple -> open_curly container_args close_curly : build_tuple('$1', '$2').

% Bitstrings

bit_string -> open_bit '>>' : build_bit('$1', []).
bit_string -> open_bit container_args close_bit : build_bit('$1', '$2').

% Map and structs

%% Allow unquote/@something/aliases inside maps and structs.
map_expr -> max_expr : '$1'.
map_expr -> dot_identifier : build_identifier('$1', nil).
map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').

assoc_op_eol -> assoc_op : '$1'.
assoc_op_eol -> assoc_op eol : '$1'.

assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> map_expr : '$1'.

assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.

assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.

assoc_base -> assoc_expr : ['$1'].
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].

assoc -> assoc_base : reverse('$1').
assoc -> assoc_base ',' : reverse('$1').

map_op -> '%{}' : '$1'.
map_op -> '%{}' eol : '$1'.

map_close -> kw close_curly : '$1'.
map_close -> assoc close_curly : '$1'.
map_close -> assoc_base ',' kw close_curly : reverse('$1', '$3').

map_args -> open_curly '}' : build_map('$1', []).
map_args -> open_curly map_close : build_map('$1', '$2').
map_args -> open_curly assoc_update close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', '$4').
map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []).

struct_op -> '%' : '$1'.

map -> map_op map_args : '$2'.
map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}.
map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}.

Erlang code.

-define(file(), get(elixir_parser_file)).
-define(id(Token), element(1, Token)).
-define(location(Token), element(2, Token)).
-define(exprs(Token), element(3, Token)).
-define(meta(Node), element(2, Node)).
-define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')).

%% The following directive is needed for (significantly) faster
%% compilation of the generated .erl file by the HiPE compiler
-compile([{hipe, [{regalloc, linear_scan}]}]).
-import(lists, [reverse/1, reverse/2]).

meta_from_token(Token, Counter) -> [{counter, Counter}|meta_from_token(Token)].
meta_from_token(Token) -> meta_from_location(?location(Token)).

meta_from_location({Line, Column, EndColumn})
  when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}].

%% Operators

build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
  {UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]};

build_op({_Kind, Location, Op}, Left, Right) ->
  {Op, meta_from_location(Location), [Left, Right]}.

build_unary_op({_Kind, Location, Op}, Expr) ->
  {Op, meta_from_location(Location), [Expr]}.

build_list(Marker, Args) ->
  {Args, ?location(Marker)}.

build_tuple(_Marker, [Left, Right]) ->
  {Left, Right};
build_tuple(Marker, Args) ->
  {'{}', meta_from_token(Marker), Args}.

build_bit(Marker, Args) ->
  {'<<>>', meta_from_token(Marker), Args}.

build_map(Marker, Args) ->
  {'%{}', meta_from_token(Marker), Args}.

build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
  {'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.

%% Blocks

build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
build_block([{unquote_splicing, _, Args}]=Exprs) when
    length(Args) =< 2 -> {'__block__', [], Exprs};
build_block([Expr]) -> Expr;
build_block(Exprs) -> {'__block__', [], Exprs}.

%% Dots

build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
  {'__aliases__', meta_from_token(Dot), Left ++ Right};

build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) ->
  throw_bad_atom(Token);

build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
  {'__aliases__', meta_from_token(Dot), [Other|Right]}.

build_dot_container(Dot, Left, Right) ->
  Meta = meta_from_token(Dot),
  {{'.', Meta, [Left, '{}']}, Meta, Right}.

build_dot(Dot, Left, Right) ->
  {'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}.

extract_identifier({Kind, _, Identifier}) when
    Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
    Kind == do_identifier; Kind == op_identifier ->
  Identifier.

%% Identifiers

build_nested_parens(Dot, Args1, Args2) ->
  Identifier = build_identifier(Dot, Args1),
  Meta = ?meta(Identifier),
  {Identifier, Meta, Args2}.

build_identifier({'.', Meta, _} = Dot, Args) ->
  FArgs = case Args of
    nil -> [];
    _ -> Args
  end,
  {Dot, Meta, FArgs};

build_identifier({op_identifier, Location, Identifier}, [Arg]) ->
  {Identifier, [{ambiguous_op, nil}|meta_from_location(Location)], [Arg]};

build_identifier({_, Location, Identifier}, Args) ->
  {Identifier, meta_from_location(Location), Args}.

%% Fn

build_fn(Op, [{'->', _, [_, _]}|_] = Stab) ->
  {fn, meta_from_token(Op), build_stab(Stab)};
build_fn(Op, _Stab) ->
  throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'").

%% Access

build_access(Expr, {List, Location}) ->
  Meta = meta_from_location(Location),
  {{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}.

%% Interpolation aware

build_sigil({sigil, Location, Sigil, Parts, Modifiers}) ->
  Meta = meta_from_location(Location),
  {list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}.

build_bin_string({bin_string, _Location, [H]}) when is_binary(H) ->
  H;
build_bin_string({bin_string, Location, Args}) ->
  {'<<>>', meta_from_location(Location), string_parts(Args)}.

build_list_string({list_string, _Location, [H]}) when is_binary(H) ->
  elixir_utils:characters_to_list(H);
build_list_string({list_string, Location, Args}) ->
  Meta = meta_from_location(Location),
  {{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.

build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) ->
  Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
build_quoted_atom({_, Location, Args}, Safe) ->
  Meta = meta_from_location(Location),
  {{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.

binary_to_atom_op(true)  -> binary_to_existing_atom;
binary_to_atom_op(false) -> binary_to_atom.

string_parts(Parts) ->
  [string_part(Part) || Part <- Parts].
string_part(Binary) when is_binary(Binary) ->
  Binary;
string_part({Location, Tokens}) ->
  Form = string_tokens_parse(Tokens),
  Meta = meta_from_location(Location),
  {'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.

string_tokens_parse(Tokens) ->
  case parse(Tokens) of
    {ok, Forms} -> Forms;
    {error, _} = Error -> throw(Error)
  end.

%% Keywords

build_stab([{'->', Meta, [Left, Right]}|T]) ->
  build_stab(Meta, T, Left, [Right], []);

build_stab(Else) ->
  build_block(Else).

build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
  H = {'->', Old, [Marker, build_block(reverse(Temp))]},
  build_stab(New, T, Left, [Right], [H|Acc]);

build_stab(Meta, [H|T], Marker, Temp, Acc) ->
  build_stab(Meta, T, Marker, [H|Temp], Acc);

build_stab(Meta, [], Marker, Temp, Acc) ->
  H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
  reverse([H|Acc]).

%% Every time the parser sees a (unquote_splicing())
%% it assumes that a block is being spliced, wrapping
%% the splicing in a __block__. But in the stab clause,
%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such
%% case, we don't actually want the block, since it is
%% an arg style call. unwrap_splice unwraps the splice
%% from such blocks.
unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
  Splice;

unwrap_splice(Other) -> Other.

unwrap_when(Args) ->
  case elixir_utils:split_last(Args) of
    {Start, {'when', Meta, [_, _] = End}} ->
      [{'when', Meta, Start ++ End}];
    {_, _} ->
      Args
  end.

to_block([One]) -> One;
to_block(Other) -> {'__block__', [], reverse(Other)}.

%% Warnings and errors

throw(Meta, Error, Token) ->
  Line =
    case lists:keyfind(line, 1, Meta) of
      {line, L} -> L;
      false -> 0
    end,
  throw({error, {Line, ?MODULE, [Error, Token]}}).

throw_bad_atom(Token) ->
  throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be "
    "part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'").

throw_no_parens_strict(Token) ->
  throw(meta_from_token(Token), "unexpected parentheses. If you are making a "
    "function call, do not insert spaces between the function name and the "
    "opening parentheses. Syntax error before: ", "'('").

throw_no_parens_many_strict(Node) ->
  throw(?meta(Node),
    "unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n"
    "This error happens when you have nested function calls without parentheses. "
    "For example:\n\n"
    "    one a, two b, c, d\n\n"
    "In the example above, we don't know if the parameters \"c\" and \"d\" apply "
    "to the function \"one\" or \"two\". You can solve this by explicitly adding "
    "parentheses:\n\n"
    "    one a, two(b, c, d)\n\n"
    "Elixir cannot compile otherwise. Syntax error before: ", "','").

throw_no_parens_container_strict(Node) ->
  throw(?meta(Node),
    "unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n"
    "This error may happen when you forget a comma in a list or other container:\n\n"
    "    [a, b c, d]\n\n"
    "Or when you have ambiguous calls:\n\n"
    "    [one, two three, four, five]\n\n"
    "In the example above, we don't know if the parameters \"four\" and \"five\" "
    "belong to the list or the function \"two\". You can solve this by explicitly "
    "adding parentheses:\n\n"
    "    [one, two(three, four), five]\n\n"
    "Elixir cannot compile otherwise. Syntax error before: ", "','").

throw_invalid_kw_identifier({_, _, do} = Token) ->
  throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'");
throw_invalid_kw_identifier({_, _, KW} = Token) ->
  throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':").

%% TODO: Make those warnings errors.
warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) ->
  elixir_errors:warn(Line, ?file(),
    "an expression is always required on the right side of ->. "
    "Please provide a value after ->").

warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_|_], [_|_]}) ->
  elixir_errors:warn(Line, ?file(),
    io_lib:format(
      "you are piping into a function call without parentheses, which may be ambiguous. "
      "Please wrap the function you are piping into in parentheses. For example:\n\n"
      "    foo 1 ~ts bar 2 ~ts baz 3\n\n"
      "Should be written as:\n\n"
      "    foo(1) ~ts bar(2) ~ts baz(3)\n",
      [Op, Op, Op, Op]
    )
  );
warn_pipe(_Token, _) ->
  ok.
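The comments in the grammar above pin down how `f g a, b` is resolved: the outer call always gets arity 1. In Elixir's quoted form every call is a `{name, metadata, args}` triple; a rough Python rendering of the two candidate parses (an illustration only, not the parser's real data structures) makes the chosen shape explicit:

    # f g a, b  ==>  f(g(a, b)): the outer call has arity 1 (the chosen parse)
    chosen   = ("f", [], [("g", [], ["a", "b"])])
    # f(g(a), b): the rejected reading would give the outer call arity 2
    rejected = ("f", [], [("g", [], ["a"]), "b"])
    assert len(chosen[2]) == 1 and len(rejected[2]) == 2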
samples/Erlang/lfe_scan.xrl (new file, 256 lines)
@@ -0,0 +1,256 @@
%% Copyright (c) 2008-2013 Robert Virding
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

%% File    : lfe_scan.xrl
%% Author  : Robert Virding
%% Purpose : Token definitions for Lisp Flavoured Erlang.

Definitions.
B = [01]
O = [0-7]
D = [0-9]
H = [0-9a-fA-F]
B36 = [0-9a-zA-Z]
U = [A-Z]
L = [a-z]
A = ({U}|{L})
DEL = [][()}{";\000-\s]
SYM = [^][()}{";\000-\s\177-\237]
SSYM = [^][()}{"|;#`',\000-\s\177-\237]
WS = ([\000-\s]|;[^\n]*)

Rules.
%% Bracketed comments using #| foo |#
#{D}*\|[^\|]*\|+([^#\|][^\|]*\|+)*# :
  block_comment(string:substr(TokenChars, 3)).

%% Separators
'  : {token,{'\'',TokenLine}}.
`  : {token,{'`',TokenLine}}.
,  : {token,{',',TokenLine}}.
,@ : {token,{',@',TokenLine}}.
\. : {token,{'.',TokenLine}}.
[][()}{] : {token,{list_to_atom(TokenChars),TokenLine}}.

#{D}*[bB]\( : {token,{'#B(',TokenLine}}.
#{D}*[mM]\( : {token,{'#M(',TokenLine}}.
#{D}*\( : {token,{'#(',TokenLine}}.
#{D}*\. : {token,{'#.',TokenLine}}.

#{D}*` : {token,{'#`',TokenLine}}.
#{D}*; : {token,{'#;',TokenLine}}.
#{D}*, : {token,{'#,',TokenLine}}.
#{D}*,@ : {token,{'#,@',TokenLine}}.

%% Characters
#{D}*\\(x{H}+|.) : char_token(skip_past(TokenChars, $\\, $\\), TokenLine).

%% Based numbers
#{D}*\*{SYM}+ : base_token(skip_past(TokenChars, $*, $*), 2, TokenLine).
#{D}*[bB]{SYM}+ : base_token(skip_past(TokenChars, $b, $B), 2, TokenLine).
#{D}*[oO]{SYM}+ : base_token(skip_past(TokenChars, $o, $O), 8, TokenLine).
#{D}*[dD]{SYM}+ : base_token(skip_past(TokenChars, $d, $D), 10, TokenLine).
#{D}*[xX]{SYM}+ : base_token(skip_past(TokenChars, $x, $X), 16, TokenLine).
#{D}*[rR]{SYM}+ :
  %% Scan over digit chars to get base.
  {Base,[_|Ds]} = base1(tl(TokenChars), 10, 0),
  base_token(Ds, Base, TokenLine).

%% String
"(\\x{H}+;|\\.|[^"\\])*" :
  %% Strip quotes.
  S = string:substr(TokenChars, 2, TokenLen - 2),
  {token,{string,TokenLine,chars(S)}}.
%% Binary string
#"(\\x{H}+;|\\.|[^"\\])*" :
  %% Strip quotes.
  S = string:substr(TokenChars, 3, TokenLen - 3),
  Bin = unicode:characters_to_binary(chars(S), utf8, utf8),
  {token,{binary,TokenLine,Bin}}.
%% Symbols
\|(\\x{H}+;|\\.|[^|\\])*\| :
  %% Strip quotes.
  S = string:substr(TokenChars, 2, TokenLen - 2),
  symbol_token(chars(S), TokenLine).
%% Funs
#'{SSYM}{SYM}*/{D}+ :
  %% Strip sharpsign single-quote.
  FunStr = string:substr(TokenChars, 3),
  {token,{'#\'',TokenLine,FunStr}}.
%% Atoms
[+-]?{D}+ :
  case catch {ok,list_to_integer(TokenChars)} of
    {ok,I} -> {token,{number,TokenLine,I}};
    _ -> {error,"illegal integer"}
  end.
[+-]?{D}+\.{D}+([eE][+-]?{D}+)? :
  case catch {ok,list_to_float(TokenChars)} of
    {ok,F} -> {token,{number,TokenLine,F}};
    _ -> {error,"illegal float"}
  end.
{SSYM}{SYM}* :
  symbol_token(TokenChars, TokenLine).
{WS}+ : skip_token.

Erlang code.
%% Copyright (c) 2008-2013 Robert Virding
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

%% File    : lfe_scan.erl
%% Author  : Robert Virding
%% Purpose : Token definitions for Lisp Flavoured Erlang.

-export([start_symbol_char/1,symbol_char/1]).

-import(string, [substr/2,substr/3]).

%% start_symbol_char(Char) -> true | false.
%% symbol_char(Char) -> true | false.
%%  Define start symbol chars and symbol chars.

start_symbol_char($#) -> false;
start_symbol_char($`) -> false;
start_symbol_char($') -> false;                 %'
start_symbol_char($,) -> false;
start_symbol_char($|) -> false;                 %Symbol quote character
start_symbol_char(C) -> symbol_char(C).

symbol_char($() -> false;
symbol_char($)) -> false;
symbol_char($[) -> false;
symbol_char($]) -> false;
symbol_char(${) -> false;
symbol_char($}) -> false;
symbol_char($") -> false;
symbol_char($;) -> false;
symbol_char(C) -> ((C > $\s) and (C =< $~)) orelse (C > $\240).

%% symbol_token(Chars, Line) -> {token,{symbol,Line,Symbol}} | {error,E}.
%%  Build a symbol from a list of legal characters, else error.

symbol_token(Cs, L) ->
  case catch {ok,list_to_atom(Cs)} of
    {ok,S} -> {token,{symbol,L,S}};
    _ -> {error,"illegal symbol"}
  end.

%% base_token(Chars, Base, Line) -> Integer.
%%  Convert a string of Base characters into a number. We only allow
%%  base between 2 and 36, and an optional sign character first.

base_token(_, B, _) when B < 2; B > 36 ->
  {error,"illegal number base"};
base_token([$+|Cs], B, L) -> base_token(Cs, B, +1, L);
base_token([$-|Cs], B, L) -> base_token(Cs, B, -1, L);
base_token(Cs, B, L) -> base_token(Cs, B, +1, L).

base_token(Cs, B, S, L) ->
  case base1(Cs, B, 0) of
    {N,[]} -> {token,{number,L,S*N}};
    {_,_} -> {error,"illegal based number"}
  end.

base1([C|Cs], Base, SoFar) when C >= $0, C =< $9, C < Base + $0 ->
  Next = SoFar * Base + (C - $0),
  base1(Cs, Base, Next);
base1([C|Cs], Base, SoFar) when C >= $a, C =< $z, C < Base + $a - 10 ->
  Next = SoFar * Base + (C - $a + 10),
  base1(Cs, Base, Next);
base1([C|Cs], Base, SoFar) when C >= $A, C =< $Z, C < Base + $A - 10 ->
  Next = SoFar * Base + (C - $A + 10),
  base1(Cs, Base, Next);
base1([C|Cs], _Base, SoFar) -> {SoFar,[C|Cs]};
base1([], _Base, N) -> {N,[]}.

-define(IS_UNICODE(C), ((C >= 0) and (C =< 16#10FFFF))).

%% char_token(InputChars, Line) -> {token,{number,L,N}} | {error,E}.
%%  Convert an input string into the corresponding character. For a
%%  sequence of hex characters we check that the resulting code is in
%%  the Unicode range.

char_token([$x,C|Cs], L) ->
  case base1([C|Cs], 16, 0) of
    {N,[]} when ?IS_UNICODE(N) -> {token,{number,L,N}};
    _ -> {error,"illegal character"}
  end;
char_token([C], L) -> {token,{number,L,C}}.

%% chars(InputChars) -> Chars.
%%  Convert an input string into the corresponding string characters.
%%  We know that the input string is correct.

chars([$\\,$x,C|Cs0]) ->
  case hex_char(C) of
    true ->
      case base1([C|Cs0], 16, 0) of
        {N,[$;|Cs1]} -> [N|chars(Cs1)];
        _Other -> [escape_char($x)|chars([C|Cs0])]
      end;
    false -> [escape_char($x)|chars([C|Cs0])]
  end;
chars([$\\,C|Cs]) -> [escape_char(C)|chars(Cs)];
chars([C|Cs]) -> [C|chars(Cs)];
chars([]) -> [].

hex_char(C) when C >= $0, C =< $9 -> true;
hex_char(C) when C >= $a, C =< $f -> true;
hex_char(C) when C >= $A, C =< $F -> true;
hex_char(_) -> false.

escape_char($b) -> $\b;                         %\b = BS
escape_char($t) -> $\t;                         %\t = TAB
escape_char($n) -> $\n;                         %\n = LF
escape_char($v) -> $\v;                         %\v = VT
escape_char($f) -> $\f;                         %\f = FF
escape_char($r) -> $\r;                         %\r = CR
escape_char($e) -> $\e;                         %\e = ESC
escape_char($s) -> $\s;                         %\s = SPC
escape_char($d) -> $\d;                         %\d = DEL
escape_char(C) -> C.

%% Block comment:
%%  Provide a sensible error when people attempt to include nested
%%  comments, because currently the parser cannot process them without
%%  a rebuild. But simply exploding on a '#|' is not going to be that
%%  helpful.

block_comment(TokenChars) ->
  %% Check we're not opening another comment block.
  case string:str(TokenChars, "#|") of
    0 -> skip_token;                            %% No nesting found
    _ -> {error, "illegal nested block comment"}
  end.

%% skip_until(String, Char1, Char2) -> String.
%% skip_past(String, Char1, Char2) -> String.

%% skip_until([C|_]=Cs, C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
%% skip_until([_|Cs], C1, C2) -> skip_until(Cs, C1, C2);
%% skip_until([], _, _) -> [].

skip_past([C|Cs], C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
skip_past([_|Cs], C1, C2) -> skip_past(Cs, C1, C2);
skip_past([], _, _) -> [].
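The scanner's base1/3 folds digit characters left to right, accepting 0-9, a-z and A-Z up to the requested base and handing back whatever it could not consume. A small Python sketch of the same fold (it assumes alphanumeric input, as the scanner's rules guarantee):

    def base1(chars: str, base: int):
        """Accumulate digits valid in `base`; return (value, leftover)."""
        n = 0
        for i, c in enumerate(chars):
            d = int(c, 36)           # digit value for 0-9, a-z, A-Z
            if d >= base:
                return n, chars[i:]  # stop at the first out-of-base char
            n = n * base + d
        return n, ""

    assert base1("1f", 16) == (31, "")    # like #x1f
    assert base1("101", 2) == (5, "")     # like #b101
    assert base1("12x", 10) == (12, "x")  # leftover signals a bad number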
samples/FLUX/gameserver.fx (new file, 54 lines)
@@ -0,0 +1,54 @@
typedef engine isEngineMessage;
typedef turn isTurnMessage;
typedef connect isConnectMessage;
typedef disconnect isDisconnectMessage;

ClientMessage(char* data) => ();
ParseMessage(char* data) => (int type, int client, char* data);
ReadMessage(int type, int client, char* data) => ();

ParseEngine(int type, int client, char* data) => (int client, int direction);
DoEngine(int client, int direction) => ();

ParseTurn(int type, int client, char* data) => (int client, int direction);
DoTurn(int client, int direction) => ();

ParseConnect(int type, int client, char* data)
  => (int client, char* host, int port);
DoConnect(int client, char* host, int port) => ();

ParseDisconnect(int type, int client, char* data) => (int client);
DoDisconnect(int client) => ();

UpdateBoard(ClientList clients) => (ClientList clients);
SendData(ClientList clients) => ();

DoUpdate(ClientList clients) => ();

DataTimer() => (ClientList clients);

GetClients() => (ClientList clients);

Wait(ClientList clients) => (ClientList clients);

Listen () => (char* data);

source Listen => ClientMessage;
source DataTimer => DoUpdate;

DataTimer = GetClients -> Wait;

DoUpdate = UpdateBoard -> SendData;

ClientMessage = ParseMessage -> ReadMessage;

ReadMessage:[engine, _, _] = ParseEngine -> DoEngine;
ReadMessage:[turn, _, _] = ParseTurn -> DoTurn;
ReadMessage:[connect, _, _] = ParseConnect -> DoConnect;
ReadMessage:[disconnect, _, _] = ParseDisconnect -> DoDisconnect;

atomic GetClients:{client_lock};
atomic DoConnect:{client_lock};
atomic DoDisconnect:{client_lock};
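The `ReadMessage:[engine, _, _] = ...` lines above are FLUX's predicate dispatch: each typedef names a predicate function, and the first clause whose predicates all hold routes the message into its pipeline. A loose Python analogue of that dispatch table (hypothetical handler names, sketch only):

    def is_engine(t): return t == "engine"   # stand-ins for the typedef predicates
    def is_turn(t):   return t == "turn"

    PIPELINES = [
        (is_engine, ["ParseEngine", "DoEngine"]),
        (is_turn,   ["ParseTurn", "DoTurn"]),
    ]

    def read_message(msg_type, client, data):
        for predicate, stages in PIPELINES:
            if predicate(msg_type):          # first matching clause wins
                return stages
        raise ValueError("no pipeline for " + msg_type)

    assert read_message("turn", 7, b"...") == ["ParseTurn", "DoTurn"]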
samples/FLUX/imageserver.fx (new file, 44 lines)
@@ -0,0 +1,44 @@
typedef xml TestXML;
typedef html TestHTML;

typedef inCache TestInCache;

Page (int socket) => ();

ReadRequest (int socket) => (int socket, bool close, image_tag *request);

CheckCache (int socket, bool close, image_tag *request)
  => (int socket, bool close, image_tag *request);

Handler (int socket, bool close, image_tag *request)
  => (int socket, bool close, image_tag *request);

Complete (int socket, bool close, image_tag *request) => ();

ReadInFromDisk (int socket, bool close, image_tag *request)
  => (int socket, bool close, image_tag *request, __u8 *rgb_data);

Write (int socket, bool close, image_tag *request)
  => (int socket, bool close, image_tag *request);

Compress(int socket, bool close, image_tag *request, __u8 *rgb_data)
  => (int socket, bool close, image_tag *request);

StoreInCache(int socket, bool close, image_tag *request)
  => (int socket, bool close, image_tag *request);

Listen ()
  => (int socket);

source Listen => Page;

Handler:[_, _, inCache]=;
Handler:[_, _, _] = ReadInFromDisk -> Compress -> StoreInCache;

Page = ReadRequest -> CheckCache -> Handler -> Write -> Complete;

atomic CheckCache:{cache};
atomic StoreInCache:{cache};
atomic Complete:{cache};

handle error ReadInFromDisk => FourOhFor;
151 samples/FLUX/mbittorrent.fx Normal file
@@ -0,0 +1,151 @@
typedef choke TestChoke;
typedef unchoke TestUnchoke;
typedef interested TestInterested;
typedef uninterested TestUninterested;
typedef request TestRequest;
typedef cancel TestCancel;
typedef piece TestPiece;
typedef bitfield TestBitfield;
typedef have TestHave;
typedef piececomplete TestPieceComplete;

CheckinWithTracker (torrent_data_t *tdata)
    => ();

SendRequestToTracker (torrent_data_t *tdata)
    => (torrent_data_t *tdata, int socket);

GetTrackerResponse (torrent_data_t *tdata, int socket)
    => ();

UpdateChokeList (torrent_data_t *tdata)
    => ();

PickChoked (torrent_data_t *tdata)
    => (torrent_data_t *tdata, chokelist_t clist);

SendChokeUnchoke (torrent_data_t *tdata, chokelist_t clist)
    => ();

SetupConnection (torrent_data_t *tdata, int socket)
    => ();

Handshake (torrent_data_t *tdata, int socket)
    => (torrent_data_t *tdata, client_data_t *client);

SendBitfield (torrent_data_t *tdata, client_data_t *client)
    => ();

Message (torrent_data_t *tdata, client_data_t *client)
    => ();

ReadMessage (torrent_data_t *tdata, client_data_t *client)
    => (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload);

HandleMessage (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

MessageDone (client_data_t *client)
    => ();

CompletePiece (torrent_data_t *tdata, client_data_t *client, int piece)
    => (torrent_data_t *tdata, client_data_t *client);

VerifyPiece (torrent_data_t *tdata, client_data_t *client, int piece)
    => (torrent_data_t *tdata, client_data_t *client, int piece);

SendHave (torrent_data_t *tdata, client_data_t *client, int piece)
    => (torrent_data_t *tdata, client_data_t *client);

SendUninterested (torrent_data_t *tdata, client_data_t *client)
    => (torrent_data_t *tdata, client_data_t *client);

Choke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

Cancel (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

Interested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

Uninterested (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

Bitfield (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

Unchoke (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (torrent_data_t *tdata, client_data_t *client);

SendRequest (torrent_data_t *tdata, client_data_t *client)
    => (client_data_t *client);

Have (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (torrent_data_t *tdata, client_data_t *client);

Piece (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (torrent_data_t *tdata, client_data_t *client, int piece);

Request (torrent_data_t *tdata, client_data_t *client, int type, int length, char *payload)
    => (client_data_t *client);

SendKeepAlives (torrent_data_t *tdata)
    => ();

GetClients ()
    => (int maxfd, fd_set *fds);

SelectSockets (int maxfd, fd_set *fds)
    => (fd_set *fds);

CheckSockets (fd_set *fds)
    => (torrent_data_t *tdata, client_data_t *client);

TrackerTimer ()
    => (torrent_data_t *tdata);

ChokeTimer ()
    => (torrent_data_t *tdata);

Connect ()
    => (torrent_data_t *tdata, int socket);

KeepAliveTimer ()
    => (torrent_data_t *tdata);

Listen ()
    => (torrent_data_t *tdata, client_data_t *client);

source TrackerTimer => CheckinWithTracker;
source ChokeTimer => UpdateChokeList;
source Connect => SetupConnection;
source Listen => Message;
source KeepAliveTimer => SendKeepAlives;

Listen = GetClients -> SelectSockets -> CheckSockets;
CheckinWithTracker = SendRequestToTracker -> GetTrackerResponse;
UpdateChokeList = PickChoked -> SendChokeUnchoke;
SetupConnection = Handshake -> SendBitfield;
Message = ReadMessage -> HandleMessage -> MessageDone;

CompletePiece:[_, _, piececomplete] = VerifyPiece -> SendHave -> SendUninterested;

HandleMessage:[_, _, choke, _, _] = Choke;
HandleMessage:[_, _, unchoke, _, _] = Unchoke -> SendRequest;
HandleMessage:[_, _, interested, _, _] = Interested;

HandleMessage:[_, _, uninterested, _, _] = Uninterested;
HandleMessage:[_, _, request, _, _] = Request;
HandleMessage:[_, _, cancel, _, _] = Cancel;
HandleMessage:[_, _, piece, _, _] = Piece -> CompletePiece -> SendRequest;
HandleMessage:[_, _, bitfield, _, _] = Bitfield;
HandleMessage:[_, _, have, _, _] = Have -> SendRequest;

atomic GetClients:{BigLock};
atomic CheckSockets:{BigLock};
atomic Message:{BigLock};
atomic CheckinWithTracker:{BigLock};
atomic UpdateChokeList:{BigLock};
atomic SetupConnection:{BigLock};
atomic SendKeepAlives:{BigLock};
38 samples/FLUX/test.fx Normal file
@@ -0,0 +1,38 @@
// concrete node signatures
Listen ()
    => (int socket);

ReadRequest (int socket)
    => (int socket, bool close, image_tag *request);

CheckCache (int socket, bool close, image_tag *request)
    => (int socket, bool close, image_tag *request);

// omitted for space:
// ReadInFromDisk, StoreInCache
Compress (int socket, bool close, image_tag *request, __u8 *rgb_data)
    => (int socket, bool close, image_tag *request);
Write (int socket, bool close, image_tag *request)
    => (int socket, bool close, image_tag *request);
Complete (int socket, bool close, image_tag *request) => ();

// source node
source Listen => Image;

// abstract node
Image = ReadRequest -> CheckCache -> Handler -> Write -> Complete;

// predicate type & dispatch
typedef hit TestInCache;
Handler:[_, _, hit] = ;
Handler:[_, _, _] =
    ReadInFromDisk -> Compress -> StoreInCache;

// error handler
handle error ReadInFromDisk => FourOhFor;

// atomicity constraints
atomic CheckCache:{cache};
atomic StoreInCache:{cache};
atomic Complete:{cache};

3608 samples/Formatted/NiAlH_jea.eam.fs Normal file
File diff suppressed because it is too large.
31 samples/FreeMarker/example.ftl Normal file
@@ -0,0 +1,31 @@
<#import "layout.ftl" as layout>

<#assign results = [
  {
    "title": "Example Result",
    "description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
  }
] />

<@layout.page title="FreeMarker Example">
  <#if results?size == 0>
    There were no results.
  <#else>
    <ul>
      <#list results as result>
        <li>
          <strong>${result.title}</strong>
          <p>${result.description}</p>
        </li>
      </#list>
    </ul>
  </#if>

  <#-- This is a FreeMarker comment -->
  <@currentTime />
</@layout.page>


<#macro currentTime>
  ${.now?string.full}
</#macro>
32 samples/FreeMarker/layout.ftl Normal file
@@ -0,0 +1,32 @@
<#ftl strip_text=true />

<#macro page title>
  <!doctype html>
  <html lang="${.lang}">
    <head>
      <title>${title}</title>
      <@metaTags />
    </head>
    <body>
      <#nested />
      <@footer />
    </body>
  </html>
</#macro>


<#---
  Default meta tags
-->
<#macro metaTags>
  <#compress>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width,initial-scale=1">
    <meta name="format-detection" content="telephone=no">
  </#compress>
</#macro>

<#macro footer>
  <p>This page is using FreeMarker v${.version}</p>
</#macro>
46 samples/Groovy/filenames/Jenkinsfile vendored Normal file
@@ -0,0 +1,46 @@
jettyUrl = 'http://localhost:8081/'

def servers

stage 'Dev'
node {
    checkout scm
    servers = load 'servers.groovy'
    mvn '-o clean package'
    dir('target') {stash name: 'war', includes: 'x.war'}
}

stage 'QA'
parallel(longerTests: {
    runTests(servers, 30)
}, quickerTests: {
    runTests(servers, 20)
})

stage name: 'Staging', concurrency: 1
node {
    servers.deploy 'staging'
}

input message: "Does ${jettyUrl}staging/ look good?"

stage name: 'Production', concurrency: 1
node {
    sh "wget -O - -S ${jettyUrl}staging/"
    echo 'Production server looks to be alive'
    servers.deploy 'production'
    echo "Deployed to ${jettyUrl}production/"
}

def mvn(args) {
    sh "${tool 'Maven 3.x'}/bin/mvn ${args}"
}

def runTests(servers, duration) {
    node {
        checkout scm
        servers.runWithServer {id ->
            mvn "-o -f sometests test -Durl=${jettyUrl}${id}/ -Dduration=${duration}"
        }
    }
}
6 samples/HCL/example.hcl Normal file
@@ -0,0 +1,6 @@
consul = "1.2.3.4"

// This is a comment
template "foo" {
  bar = "zip"
}
13 samples/HCL/example.tf Normal file
@@ -0,0 +1,13 @@
resource "aws_instance" "web" {
  // Copies the myapp.conf file to /etc/myapp.conf
  provisioner "file" {
    source = "conf/myapp.conf"
    destination = "/etc/myapp.conf"
  }

  // Copies the configs.d folder to /etc/configs.d
  provisioner "file" {
    source = "conf/configs.d"
    destination = "/etc"
  }
}
27 samples/HLSL/accelerated_surface_win.hlsl Normal file
@@ -0,0 +1,27 @@
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// To compile these two shaders:
// fxc /E pixelMain /T ps_2_0 accelerated_surface_win.hlsl
// fxc /E vertexMain /T vs_2_0 accelerated_surface_win.hlsl
//
// fxc is in the DirectX SDK.

struct Vertex {
  float4 position : POSITION;
  float2 texCoord : TEXCOORD0;
};

texture t;
sampler s;

// Passes a position and texture coordinate to the pixel shader.
Vertex vertexMain(Vertex input) {
  return input;
};

// Samples a texture at the given texture coordinate and returns the result.
float4 pixelMain(float2 texCoord : TEXCOORD0) : COLOR0 {
  return tex2D(s, texCoord);
};
105 samples/HLSL/corridor.fx Normal file
@@ -0,0 +1,105 @@
float4x4 matWorldView : WORLDVIEW;
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;

struct VS_INPUT {
  float4 Position : POSITION0;
  float3 Normal : NORMAL;
  float3 Tangent : TANGENT;
  float3 Binormal : BINORMAL;
  float2 TexCoord0 : TEXCOORD0;
  float2 TexCoord1 : TEXCOORD1;
};

struct VS_OUTPUT {
  float4 Position : POSITION0;
  float2 TexCoord0 : TEXCOORD0;
  float2 TexCoord1 : TEXCOORD1;
  float3x3 TangentToView : TEXCOORD2;
};

VS_OUTPUT vs_main(VS_INPUT input)
{
  VS_OUTPUT output;
  output.Position = mul(input.Position, matWorldViewProjection);
  output.TexCoord0 = input.TexCoord0 * 5;
  output.TexCoord1 = input.TexCoord1;
  output.TangentToView[0] = mul(float4(input.Tangent, 0), matWorldView).xyz;
  output.TangentToView[1] = mul(float4(input.Binormal, 0), matWorldView).xyz;
  output.TangentToView[2] = mul(float4(input.Normal, 0), matWorldView).xyz;
  return output;
}

struct PS_OUTPUT {
  float4 gbuffer0 : COLOR0;
  float4 gbuffer1 : COLOR1;
};

texture albedo_tex;
sampler albedo_samp = sampler_state {
  Texture = (albedo_tex);
  MipFilter = Linear;
  MinFilter = Linear;
  MagFilter = Linear;
  AddressU = Wrap;
  AddressV = Wrap;
  sRGBTexture = True;
};

texture normal_tex;
sampler normal_samp = sampler_state {
  Texture = (normal_tex);
  MipFilter = Linear;
  MinFilter = Linear;
  MagFilter = Linear;
  AddressU = Wrap;
  AddressV = Wrap;
  sRGBTexture = False;
};

texture specular_tex;
sampler specular_samp = sampler_state {
  Texture = (specular_tex);
  MipFilter = Linear;
  MinFilter = Linear;
  MagFilter = Linear;
  AddressU = Wrap;
  AddressV = Wrap;
  sRGBTexture = True;
};

texture ao_tex;
sampler ao_samp = sampler_state {
  Texture = (ao_tex);
  MipFilter = Linear;
  MinFilter = Linear;
  MagFilter = Linear;
  AddressU = Wrap;
  AddressV = Wrap;
  sRGBTexture = True;
};

PS_OUTPUT ps_main(VS_OUTPUT Input)
{
  PS_OUTPUT o;

  float3 tangentNormal = normalize(tex2D(normal_samp, Input.TexCoord0).xyz * 2 - 1);
  float3 eyeNormal = normalize(mul(tangentNormal, Input.TangentToView));

  float3 albedo = tex2D(albedo_samp, Input.TexCoord0).rgb;
  float ao = tex2D(ao_samp, Input.TexCoord1).r * 0.75;
  float spec = tex2D(specular_samp, Input.TexCoord0).r;

  o.gbuffer0 = float4(eyeNormal, spec * ao);
  o.gbuffer1 = float4(albedo, 1 - ao);
  return o;
}

technique mesh {
  pass Geometry {
    VertexShader = compile vs_3_0 vs_main();
    PixelShader = compile ps_3_0 ps_main();

    AlphaBlendEnable = False;
    ZWriteEnable = True;
  }
}
119 samples/HLSL/jellyfish.fx Normal file
@@ -0,0 +1,119 @@
float4x4 matWorldViewProjection : WORLDVIEWPROJECTION;
float4x4 matWorldView : WORLDVIEW;
float4x4 matWorld : WORLD;
float4x4 matView : VIEW;

uniform float4 vViewPosition;

struct VS_INPUT
{
  float3 Pos: POSITION;
  float3 Normal: NORMAL;
  float3 Tangent: TANGENT;
  float3 Binormal: BINORMAL;
};

struct VS_OUTPUT
{
  float4 Pos : POSITION;
  float3 reflection : TEXCOORD1;
  float3 refraction : TEXCOORD2;
  float fresnel : TEXCOORD3;
};

uniform float3 amt;
uniform float3 scale;
uniform float3 phase;

float3 deform(float3 p)
{
  float s = 3;
  float3 p2 = p * scale + phase;
  s += sin(p2.x) * amt.x;
  s += sin(p2.y) * amt.y;
  s += sin(p2.z) * amt.z;
  return p * s / 3;
}

VS_OUTPUT vs_main( VS_INPUT In )
{
  VS_OUTPUT Out;

  float3 pos = In.Pos;
  float3 norm = In.Normal;

  float3 p1 = pos + In.Tangent * 0.05;
  float3 p2 = pos + In.Binormal * 0.05;
  pos = deform(pos);
  p1 = deform(p1);
  p2 = deform(p2);

  p1 -= pos;
  p2 -= pos;
  norm = normalize(cross(p1, p2));

  float3 view = normalize(pos - vViewPosition.xyz);

  Out.Pos = mul(float4(pos, 1.0), matWorldViewProjection);
  Out.reflection = reflect(view, norm);
  Out.refraction = reflect(view, norm * 0.4f); /* fake, but who cares? */
  Out.fresnel = dot(view, norm);
  norm = mul(float4(norm, 0.0), matWorldViewProjection);

  return Out;
}

#define PS_INPUT VS_OUTPUT

#if 0
textureCUBE reflectionMap;
samplerCUBE reflectionMapSampler = sampler_state
{
  Texture = (reflectionMap);
  MipFilter = LINEAR;
  MinFilter = LINEAR;
  MagFilter = LINEAR;
};
#else
// textures
texture reflectionMap
<
  string type = "CUBE";
  string name = "test_cube.dds";
>;

samplerCUBE reflectionMapSampler = sampler_state
{
  Texture = (reflectionMap);
  MipFilter = LINEAR;
  MinFilter = LINEAR;
  MagFilter = LINEAR;
};
#endif

struct PS_OUTPUT
{
  float4 color : COLOR0;
};

PS_OUTPUT ps_main( PS_INPUT In )
{
  PS_OUTPUT Out;

  float4 reflection = texCUBE(reflectionMapSampler, normalize(In.reflection)) * 1.5;
  float4 refraction = texCUBE(reflectionMapSampler, normalize(In.refraction));
  float fresnel = In.fresnel;
  // float fresnel = abs(normalize(In.normal).z);
  Out.color = lerp(reflection, refraction, fresnel) * pow(1.0 - fresnel * 0.75, 1.0);

  return Out;
}

technique blur_ps_vs_2_0
{
  pass P0
  {
    VertexShader = compile vs_2_0 vs_main();
    PixelShader = compile ps_2_0 ps_main();
  }
}
41 samples/HLSL/noise.fx Normal file
@@ -0,0 +1,41 @@
float alpha = 1.f;

texture tex;
sampler tex_sampler = sampler_state
{
  Texture = (tex);
  MipFilter = LINEAR;
  MinFilter = LINEAR;
  MagFilter = LINEAR;

  AddressU = WRAP;
  AddressV = WRAP;
};

struct VS_OUTPUT
{
  float4 pos : POSITION;
  float2 tex : TEXCOORD1;
};

VS_OUTPUT vertex(float4 ipos : POSITION, float2 tex : TEXCOORD0)
{
  VS_OUTPUT Out;
  Out.pos = ipos;
  Out.tex = tex * 2;
  return Out;
}

float4 pixel(VS_OUTPUT In) : COLOR
{
  return tex2D(tex_sampler, In.tex) * alpha;
}

technique blur_ps_vs_2_0
{
  pass P0
  {
    VertexShader = compile vs_2_0 vertex();
    PixelShader = compile ps_2_0 pixel();
  }
}
26 samples/HTML+EEX/index.html.eex Normal file
@@ -0,0 +1,26 @@
<h1>Listing Books</h1>

<table>
  <tr>
    <th>Title</th>
    <th>Summary</th>
    <th></th>
    <th></th>
    <th></th>
  </tr>

<%= for book <- @books do %>
  <tr>
    <%# comment %>
    <td><%= book.title %></td>
    <td><%= book.content %></td>
    <td><%= link "Show", to: book_path(@conn, :show, book) %></td>
    <td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
    <td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
  </tr>
<% end %>
</table>

<br />

<%= link "New book", to: book_path(@conn, :new) %>
@@ -1,6 +1,74 @@
Version 1 of Trivial Extension by Andrew Plotkin begins here.
Version 2 of Trivial Extension by Andrew Plotkin begins here.

"This is the rubric of the extension."

"provided for the Linguist package by Andrew Plotkin"

[Note the two special quoted lines above.]

A cow is a kind of animal. A cow can be purple.

Understand "cow" as a cow.
Understand "purple" as a purple cow.

Check pushing a cow:
	instead say "Cow-tipping, at your age?[paragraph break]Inconceivable."

[Here are the possible levels of heading:]

Volume One

Text-line is always "A line of text."

Book 2

Part the third - indented headings still count

Chapter IV - not for release

[Heading labels are case-insensitive.]

section foobar

[A line beginning "Volume" that does not have blank lines before and after it is *not* a header line. So the following should all be part of section foobar. Sadly, the "Volume is..." line gets colored as a header, because Atom's regexp model can't recognize "thing with blank lines before and after"!]

Measure is a kind of value.
Volume is a measure. Length is a measure.
Area is a measure.

[And now some Inform 6 inclusions.]

To say em -- running on:
	(- style underline; -).
To say /em -- running on:
	(- style roman; -).

Include (-

! Inform 6 comments start with a ! mark and run to the end of the line.
Global cowcount;

[ inform6func arg;
	print "Here is some text; ", (address) 'dictword', ".^";
	cowcount++; ! increment this variable
];

Object i6cow
	with name 'cow' 'animal',
	with description "It looks like a cow.",
	has animate scenery;

-) after "Global Variables" in "Output.i6t".

Trivial Extension ends here.

---- DOCUMENTATION ----

Everything after the "---- DOCUMENTATION ----" line is documentation, so it should have the comment style.

However, tab-indented lines are sample Inform code within the documentation:

	Horns are a kind of thing. Every cow has horns.
	say "Moo[if the noun is purple] indigo[end if]."

So we need to allow for that.

@@ -2,11 +2,61 @@

Include Trivial Extension by Andrew Plotkin.

Volume 1 - overview

Chapter - setting the scene

The Kitchen is a room.

[This kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
[Comment: this kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]

Section - the kitchen table

The spicerack is a container in the Kitchen.

Table of Spices
Name	Flavor
"cinnamon"	5
"nutmeg"	4
"szechuan pepper"	8

The description of the spicerack is "It's mostly empty."

Chapter - a character

A purple cow called Gelett is in the Kitchen.

[This comment spans multiple lines..

...and this line contains [nested square[] brackets]...

...which is legal in Inform 7.]

Instead of examining Gelett:
	say "You'd rather see than be one."

Instead of examining Gelett:
	say "You'd rather see than be one."

Check smelling Gelett:
	say "This text contains several lines.

A blank line is displayed as a paragraph break,
but a simple line break is not.";
	stop the action.

Section - cow catching

Gelett has a number called the mooness.

Instead of taking Gelett:
	increment the mooness of Gelett;
	if the mooness of Gelett is one:
		say "Gelett moos once.";
	else:
		say "Gelett moos [mooness of Gelett in words] times.";

Volume 2 - the turn cycle

Every turn:
	say "A turn passes[one of][or] placidly[or] idly[or] tediously[at random]."

82 samples/JSON/geo.geojson Normal file
@@ -0,0 +1,82 @@
{
  "type": "FeatureCollection",
  "features": [
    {
      "type": "Feature",
      "properties": {
        "name": "Australia Post - North Ryde BC",
        "geo": [-33.787792, 151.13288],
        "streetAddress": "11 Waterloo Road",
        "addressLocality": "Macquarie Park",
        "addressRegion": "New South Wales",
        "addressCountry": "Australia",
        "postalCode": "2113"
      },
      "geometry": {
        "type": "Point",
        "coordinates": [151.13288, -33.787792, 0]
      }
    },


    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.8263884, 144.9105381],
        "streetAddress": "Level 3, 187 Todd Road",
        "addressLocality": "Port Melbourne",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3207"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.9097088901841, -37.82622654171794, 0],
            [144.9099724266943, -37.82679388891783, 0],
            [144.9110127325916, -37.82651526396403, 0],
            [144.9112227645738, -37.82655667152123, 0],
            [144.9113739439796, -37.82618552508767, 0],
            [144.9112740633105, -37.82615750100924, 0],
            [144.9111355846674, -37.82584493693527, 0],
            [144.9097088901841, -37.82622654171794, 0]
          ]
        ]
      }
    },


    {
      "type": "Feature",
      "properties": {
        "name": "George Weston Foods Limited",
        "geo": [-37.05202791502396, 144.2085614999388],
        "streetAddress": "67 Richards Road",
        "addressLocality": "Castlemaine",
        "addressRegion": "Victoria",
        "addressCountry": "Australia",
        "postalCode": "3450"
      },
      "geometry": {
        "type": "Polygon",
        "coordinates": [
          [
            [144.2052428913937, -37.04906391287216, 0],
            [144.205540392692, -37.05049727485623, 0],
            [144.2059800881858, -37.05066835966983, 0],
            [144.206490656024, -37.05279538900776, 0],
            [144.2064525845008, -37.05366195881602, 0],
            [144.2084322301922, -37.0538920493147, 0],
            [144.2084811895712, -37.05266519735124, 0],
            [144.2079784002005, -37.05041270555773, 0],
            [144.2074017905817, -37.04817406993293, 0],
            [144.2061363939852, -37.04834972871226, 0],
            [144.2052428913937, -37.04906391287216, 0]
          ]
        ]
      }
    }
  ]
}
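One detail worth noting in the records above: GeoJSON orders geometry coordinates as [longitude, latitude, elevation], which is why each feature's geometry reverses the order of its informal "geo" property ([latitude, longitude]). The minimal shape every feature in this file follows, shown here with hypothetical values:

{
  "type": "Feature",
  "properties": { "name": "Example place" },
  "geometry": { "type": "Point", "coordinates": [151.0, -33.8, 0] }
}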
1 samples/JSON/switzerland.topojson Normal file
File diff suppressed because one or more lines are too long.
23 samples/JSX/sample.jsx Normal file
@@ -0,0 +1,23 @@
'use strict';

const React = require('react')

module.exports = React.createClass({
  render: function() {
    let {feeds, log} = this.props;

    log.info(feeds);
    return <div className="feed-list">
      <h3>News Feed's</h3>
      <ul>
        {feeds.map(function(feed) {
          return <li key={feed.name} className={feed.fetched ? 'loaded' : 'loading'}>
            {feed.data && feed.data.length > 0 ?
              <span>{feed.name} <span className='light'>({feed.data.length})</span></span>
              : 'feed.name' }
          </li>
        })}
      </ul>
    </div>;
  }
});
625 samples/Java/GrammarKit.java Normal file
@@ -0,0 +1,625 @@
// This is a generated file. Not intended for manual editing.
package org.intellij.grammar.parser;

import com.intellij.lang.PsiBuilder;
import com.intellij.lang.PsiBuilder.Marker;
import static org.intellij.grammar.psi.BnfTypes.*;
import static org.intellij.grammar.parser.GeneratedParserUtilBase.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.lang.ASTNode;
import com.intellij.psi.tree.TokenSet;
import com.intellij.lang.PsiParser;
import com.intellij.lang.LightPsiParser;

@SuppressWarnings({"SimplifiableIfStatement", "UnusedAssignment"})
public class GrammarParser implements PsiParser, LightPsiParser {

  public ASTNode parse(IElementType t, PsiBuilder b) {
    parseLight(t, b);
    return b.getTreeBuilt();
  }

  public void parseLight(IElementType t, PsiBuilder b) {
    boolean r;
    b = adapt_builder_(t, b, this, EXTENDS_SETS_);
    Marker m = enter_section_(b, 0, _COLLAPSE_, null);
    if (t == BNF_ATTR) {
      r = attr(b, 0);
    }
    else if (t == BNF_ATTR_PATTERN) {
      r = attr_pattern(b, 0);
    }
    else if (t == BNF_ATTR_VALUE) {
      r = attr_value(b, 0);
    }
    else if (t == BNF_ATTRS) {
      r = attrs(b, 0);
    }
    else if (t == BNF_CHOICE) {
      r = choice(b, 0);
    }
    else if (t == BNF_EXPRESSION) {
      r = expression(b, 0);
    }
    else if (t == BNF_LITERAL_EXPRESSION) {
      r = literal_expression(b, 0);
    }
    else if (t == BNF_MODIFIER) {
      r = modifier(b, 0);
    }
    else if (t == BNF_PAREN_EXPRESSION) {
      r = paren_expression(b, 0);
    }
    else if (t == BNF_PREDICATE) {
      r = predicate(b, 0);
    }
    else if (t == BNF_PREDICATE_SIGN) {
      r = predicate_sign(b, 0);
    }
    else if (t == BNF_QUANTIFIED) {
      r = quantified(b, 0);
    }
    else if (t == BNF_QUANTIFIER) {
      r = quantifier(b, 0);
    }
    else if (t == BNF_REFERENCE_OR_TOKEN) {
      r = reference_or_token(b, 0);
    }
    else if (t == BNF_RULE) {
      r = rule(b, 0);
    }
    else if (t == BNF_SEQUENCE) {
      r = sequence(b, 0);
    }
    else if (t == BNF_STRING_LITERAL_EXPRESSION) {
      r = string_literal_expression(b, 0);
    }
    else {
      r = parse_root_(t, b, 0);
    }
    exit_section_(b, 0, m, t, r, true, TRUE_CONDITION);
  }

  protected boolean parse_root_(IElementType t, PsiBuilder b, int l) {
    return grammar(b, l + 1);
  }

  public static final TokenSet[] EXTENDS_SETS_ = new TokenSet[] {
    create_token_set_(BNF_LITERAL_EXPRESSION, BNF_STRING_LITERAL_EXPRESSION),
    create_token_set_(BNF_CHOICE, BNF_EXPRESSION, BNF_LITERAL_EXPRESSION, BNF_PAREN_EXPRESSION,
      BNF_PREDICATE, BNF_QUANTIFIED, BNF_REFERENCE_OR_TOKEN, BNF_SEQUENCE,
      BNF_STRING_LITERAL_EXPRESSION),
  };

  /* ********************************************************** */
  // id attr_pattern? '=' attr_value ';'?
  public static boolean attr(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr")) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, "<attr>");
    r = consumeToken(b, BNF_ID);
    p = r; // pin = 1
    r = r && report_error_(b, attr_1(b, l + 1));
    r = p && report_error_(b, consumeToken(b, BNF_OP_EQ)) && r;
    r = p && report_error_(b, attr_value(b, l + 1)) && r;
    r = p && attr_4(b, l + 1) && r;
    exit_section_(b, l, m, BNF_ATTR, r, p, attr_recover_until_parser_);
    return r || p;
  }

  // attr_pattern?
  private static boolean attr_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_1")) return false;
    attr_pattern(b, l + 1);
    return true;
  }

  // ';'?
  private static boolean attr_4(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_4")) return false;
    consumeToken(b, BNF_SEMICOLON);
    return true;
  }

  /* ********************************************************** */
  // '(' string ')'
  public static boolean attr_pattern(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_pattern")) return false;
    if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_PAREN);
    r = r && consumeToken(b, BNF_STRING);
    r = r && consumeToken(b, BNF_RIGHT_PAREN);
    exit_section_(b, m, BNF_ATTR_PATTERN, r);
    return r;
  }

  /* ********************************************************** */
  // !'}'
  static boolean attr_recover_until(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_recover_until")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_RIGHT_BRACE);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // (reference_or_token | literal_expression) !'='
  public static boolean attr_value(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<attr value>");
    r = attr_value_0(b, l + 1);
    r = r && attr_value_1(b, l + 1);
    exit_section_(b, l, m, BNF_ATTR_VALUE, r, false, null);
    return r;
  }

  // reference_or_token | literal_expression
  private static boolean attr_value_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = reference_or_token(b, l + 1);
    if (!r) r = literal_expression(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !'='
  private static boolean attr_value_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attr_value_1")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_OP_EQ);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '{' attr* '}'
  public static boolean attrs(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attrs")) return false;
    if (!nextTokenIs(b, BNF_LEFT_BRACE)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_LEFT_BRACE);
    p = r; // pin = 1
    r = r && report_error_(b, attrs_1(b, l + 1));
    r = p && consumeToken(b, BNF_RIGHT_BRACE) && r;
    exit_section_(b, l, m, BNF_ATTRS, r, p, null);
    return r || p;
  }

  // attr*
  private static boolean attrs_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "attrs_1")) return false;
    int c = current_position_(b);
    while (true) {
      if (!attr(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "attrs_1", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // '{' sequence ('|' sequence)* '}' | sequence choice_tail*
  public static boolean choice(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<choice>");
    r = choice_0(b, l + 1);
    if (!r) r = choice_1(b, l + 1);
    exit_section_(b, l, m, BNF_CHOICE, r, false, null);
    return r;
  }

  // '{' sequence ('|' sequence)* '}'
  private static boolean choice_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_BRACE);
    r = r && sequence(b, l + 1);
    r = r && choice_0_2(b, l + 1);
    r = r && consumeToken(b, BNF_RIGHT_BRACE);
    exit_section_(b, m, null, r);
    return r;
  }

  // ('|' sequence)*
  private static boolean choice_0_2(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0_2")) return false;
    int c = current_position_(b);
    while (true) {
      if (!choice_0_2_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "choice_0_2", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // '|' sequence
  private static boolean choice_0_2_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_0_2_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_OP_OR);
    r = r && sequence(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // sequence choice_tail*
  private static boolean choice_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_1")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = sequence(b, l + 1);
    r = r && choice_1_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // choice_tail*
  private static boolean choice_1_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_1_1")) return false;
    int c = current_position_(b);
    while (true) {
      if (!choice_tail(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "choice_1_1", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // '|' sequence
  static boolean choice_tail(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "choice_tail")) return false;
    if (!nextTokenIs(b, BNF_OP_OR)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_OP_OR);
    p = r; // pin = 1
    r = r && sequence(b, l + 1);
    exit_section_(b, l, m, null, r, p, null);
    return r || p;
  }

  /* ********************************************************** */
  // choice?
  public static boolean expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression")) return false;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<expression>");
    choice(b, l + 1);
    exit_section_(b, l, m, BNF_EXPRESSION, true, false, null);
    return true;
  }

  /* ********************************************************** */
  // (attrs | rule) *
  static boolean grammar(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "grammar")) return false;
    int c = current_position_(b);
    while (true) {
      if (!grammar_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "grammar", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // attrs | rule
  private static boolean grammar_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "grammar_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = attrs(b, l + 1);
    if (!r) r = rule(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // string_literal_expression | number
  public static boolean literal_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "literal_expression")) return false;
    if (!nextTokenIs(b, "<literal expression>", BNF_NUMBER, BNF_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<literal expression>");
    r = string_literal_expression(b, l + 1);
    if (!r) r = consumeToken(b, BNF_NUMBER);
    exit_section_(b, l, m, BNF_LITERAL_EXPRESSION, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // 'private' | 'external' | 'wrapped'
  public static boolean modifier(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "modifier")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<modifier>");
    r = consumeToken(b, "private");
    if (!r) r = consumeToken(b, "external");
    if (!r) r = consumeToken(b, "wrapped");
    exit_section_(b, l, m, BNF_MODIFIER, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // quantified | predicate
  static boolean option(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "option")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = quantified(b, l + 1);
    if (!r) r = predicate(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // '(' expression ')'
  public static boolean paren_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "paren_expression")) return false;
    if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, null);
    r = consumeToken(b, BNF_LEFT_PAREN);
    p = r; // pin = 1
    r = r && report_error_(b, expression(b, l + 1));
    r = p && consumeToken(b, BNF_RIGHT_PAREN) && r;
    exit_section_(b, l, m, BNF_PAREN_EXPRESSION, r, p, null);
    return r || p;
  }

  /* ********************************************************** */
  // predicate_sign simple
  public static boolean predicate(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "predicate")) return false;
    if (!nextTokenIs(b, "<predicate>", BNF_OP_NOT, BNF_OP_AND)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<predicate>");
    r = predicate_sign(b, l + 1);
    r = r && simple(b, l + 1);
    exit_section_(b, l, m, BNF_PREDICATE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '&' | '!'
  public static boolean predicate_sign(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "predicate_sign")) return false;
    if (!nextTokenIs(b, "<predicate sign>", BNF_OP_NOT, BNF_OP_AND)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<predicate sign>");
    r = consumeToken(b, BNF_OP_AND);
    if (!r) r = consumeToken(b, BNF_OP_NOT);
    exit_section_(b, l, m, BNF_PREDICATE_SIGN, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '[' expression ']' | simple quantifier?
  public static boolean quantified(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<quantified>");
    r = quantified_0(b, l + 1);
    if (!r) r = quantified_1(b, l + 1);
    exit_section_(b, l, m, BNF_QUANTIFIED, r, false, null);
    return r;
  }

  // '[' expression ']'
  private static boolean quantified_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_LEFT_BRACKET);
    r = r && expression(b, l + 1);
    r = r && consumeToken(b, BNF_RIGHT_BRACKET);
    exit_section_(b, m, null, r);
    return r;
  }

  // simple quantifier?
  private static boolean quantified_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_1")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple(b, l + 1);
    r = r && quantified_1_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // quantifier?
  private static boolean quantified_1_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantified_1_1")) return false;
    quantifier(b, l + 1);
    return true;
  }

  /* ********************************************************** */
  // '?' | '+' | '*'
  public static boolean quantifier(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "quantifier")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<quantifier>");
    r = consumeToken(b, BNF_OP_OPT);
    if (!r) r = consumeToken(b, BNF_OP_ONEMORE);
    if (!r) r = consumeToken(b, BNF_OP_ZEROMORE);
    exit_section_(b, l, m, BNF_QUANTIFIER, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // id
  public static boolean reference_or_token(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "reference_or_token")) return false;
    if (!nextTokenIs(b, BNF_ID)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_ID);
    exit_section_(b, m, BNF_REFERENCE_OR_TOKEN, r);
    return r;
  }

  /* ********************************************************** */
  // modifier* id '::=' expression attrs? ';'?
  public static boolean rule(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule")) return false;
    boolean r, p;
    Marker m = enter_section_(b, l, _NONE_, "<rule>");
    r = rule_0(b, l + 1);
    r = r && consumeToken(b, BNF_ID);
    r = r && consumeToken(b, BNF_OP_IS);
    p = r; // pin = 3
    r = r && report_error_(b, expression(b, l + 1));
    r = p && report_error_(b, rule_4(b, l + 1)) && r;
    r = p && rule_5(b, l + 1) && r;
    exit_section_(b, l, m, BNF_RULE, r, p, rule_recover_until_parser_);
    return r || p;
  }

  // modifier*
  private static boolean rule_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!modifier(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "rule_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // attrs?
  private static boolean rule_4(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_4")) return false;
    attrs(b, l + 1);
    return true;
  }

  // ';'?
  private static boolean rule_5(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_5")) return false;
    consumeToken(b, BNF_SEMICOLON);
    return true;
  }

  /* ********************************************************** */
  // !'{'
  static boolean rule_recover_until(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_recover_until")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !consumeToken(b, BNF_LEFT_BRACE);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // option +
  public static boolean sequence(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "sequence")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _COLLAPSE_, "<sequence>");
    r = option(b, l + 1);
    int c = current_position_(b);
    while (r) {
      if (!option(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "sequence", c)) break;
      c = current_position_(b);
    }
    exit_section_(b, l, m, BNF_SEQUENCE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // !(modifier* id '::=' ) reference_or_token | literal_expression | paren_expression
  static boolean simple(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0(b, l + 1);
    if (!r) r = literal_expression(b, l + 1);
    if (!r) r = paren_expression(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !(modifier* id '::=' ) reference_or_token
  private static boolean simple_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0_0(b, l + 1);
    r = r && reference_or_token(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // !(modifier* id '::=' )
  private static boolean simple_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NOT_, null);
    r = !simple_0_0_0(b, l + 1);
    exit_section_(b, l, m, null, r, false, null);
    return r;
  }

  // modifier* id '::='
  private static boolean simple_0_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = simple_0_0_0_0(b, l + 1);
    r = r && consumeToken(b, BNF_ID);
    r = r && consumeToken(b, BNF_OP_IS);
    exit_section_(b, m, null, r);
    return r;
  }

  // modifier*
  private static boolean simple_0_0_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "simple_0_0_0_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!modifier(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "simple_0_0_0_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // string
  public static boolean string_literal_expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "string_literal_expression")) return false;
    if (!nextTokenIs(b, BNF_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, BNF_STRING);
    exit_section_(b, m, BNF_STRING_LITERAL_EXPRESSION, r);
    return r;
  }

  final static Parser attr_recover_until_parser_ = new Parser() {
    public boolean parse(PsiBuilder b, int l) {
      return attr_recover_until(b, l + 1);
    }
  };
  final static Parser rule_recover_until_parser_ = new Parser() {
    public boolean parse(PsiBuilder b, int l) {
      return rule_recover_until(b, l + 1);
    }
  };
}
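Two conventions in the generated parser above are worth spelling out: `p = r; // pin = N` marks the point where a rule commits (past the pin it reports errors yet still returns success, so the caller keeps going), and the *_recover_until predicates make the builder skip tokens up to a synchronization token such as '{' or '}'. A hand-written sketch of the same idea in plain Java (hypothetical token names, not the GeneratedParserUtilBase API):

import java.util.List;

// Pin-and-recover sketch: attr ::= ID '=' VALUE ';' pinned after ID.
class PinSketch {
  private final List<String> toks;
  private int pos = 0;

  PinSketch(List<String> toks) { this.toks = toks; }

  private boolean eat(String t) {
    if (pos < toks.size() && toks.get(pos).equals(t)) { pos++; return true; }
    return false;
  }

  boolean attr() {
    int mark = pos;
    if (!eat("ID")) { pos = mark; return false; }      // before the pin: backtrack freely
    boolean ok = eat("=") && eat("VALUE") && eat(";"); // after the pin: committed
    if (!ok) {
      System.err.println("expected '= VALUE ;' at token " + pos);
      while (pos < toks.size() && !toks.get(pos).equals("}")) pos++; // recover until '}'
    }
    return true; // a pinned rule reports success so parsing continues past the error
  }

  public static void main(String[] args) {
    System.out.println(new PinSketch(List.of("ID", "=", "VALUE", ";")).attr()); // true
    System.out.println(new PinSketch(List.of("ID", "oops", "}")).attr());       // true, after recovery
  }
}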
482 samples/Java/JFlexLexer.java Normal file
@@ -0,0 +1,482 @@
/* The following code was generated by JFlex 1.4.3 on 28/01/16 11:27 */

package test;
import com.intellij.lexer.*;
import com.intellij.psi.tree.IElementType;
import static org.intellij.grammar.psi.BnfTypes.*;


/**
 * This class is a scanner generated by
 * <a href="http://www.jflex.de/">JFlex</a> 1.4.3
 * on 28/01/16 11:27 from the specification file
 * <tt>/home/abigail/code/intellij-grammar-kit-test/src/test/_GrammarLexer.flex</tt>
 */
public class _GrammarLexer implements FlexLexer {
  /** initial size of the lookahead buffer */
  private static final int ZZ_BUFFERSIZE = 16384;

  /** lexical states */
  public static final int YYINITIAL = 0;

  /**
   * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
   * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
   * at the beginning of a line
   * l is of the form l = 2*k, k a non negative integer
   */
  private static final int ZZ_LEXSTATE[] = {
    0, 0
  };

  /**
   * Translates characters to character classes
   */
  private static final String ZZ_CMAP_PACKED =
    "\11\0\1\1\1\1\1\0\1\1\1\1\22\0\1\1\101\0\1\13"+
    "\1\0\1\3\1\14\1\0\1\10\1\0\1\2\3\0\1\12\1\7"+
    "\3\0\1\6\1\4\1\5\1\11\uff8a\0";

  /**
   * Translates characters to character classes
   */
  private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);

  /**
   * Translates DFA states to action switch labels.
   */
  private static final int [] ZZ_ACTION = zzUnpackAction();

  private static final String ZZ_ACTION_PACKED_0 =
    "\1\0\1\1\1\2\3\1\1\3\10\0\1\4\1\5";

  private static int [] zzUnpackAction() {
    int [] result = new int[17];
    int offset = 0;
    offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
    return result;
  }

  private static int zzUnpackAction(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }


  /**
   * Translates a state to a row index in the transition table
   */
  private static final int [] ZZ_ROWMAP = zzUnpackRowMap();

  private static final String ZZ_ROWMAP_PACKED_0 =
    "\0\0\0\15\0\32\0\47\0\64\0\101\0\15\0\116"+
    "\0\133\0\150\0\165\0\202\0\217\0\234\0\251\0\15"+
    "\0\15";

  private static int [] zzUnpackRowMap() {
    int [] result = new int[17];
    int offset = 0;
    offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
    return result;
  }

  private static int zzUnpackRowMap(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int high = packed.charAt(i++) << 16;
      result[j++] = high | packed.charAt(i++);
    }
    return j;
  }

  /**
   * The transition table of the DFA
   */
  private static final int [] ZZ_TRANS = zzUnpackTrans();

  private static final String ZZ_TRANS_PACKED_0 =
    "\1\2\1\3\1\4\1\2\1\5\2\2\1\6\5\2"+
    "\16\0\1\3\16\0\1\7\16\0\1\10\20\0\1\11"+
    "\11\0\1\12\20\0\1\13\4\0\1\14\25\0\1\15"+
    "\10\0\1\16\21\0\1\17\10\0\1\20\12\0\1\21"+
    "\6\0";

  private static int [] zzUnpackTrans() {
    int [] result = new int[182];
    int offset = 0;
    offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
    return result;
  }

  private static int zzUnpackTrans(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      value--;
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }


  /* error codes */
  private static final int ZZ_UNKNOWN_ERROR = 0;
  private static final int ZZ_NO_MATCH = 1;
  private static final int ZZ_PUSHBACK_2BIG = 2;
  private static final char[] EMPTY_BUFFER = new char[0];
  private static final int YYEOF = -1;
  private static java.io.Reader zzReader = null; // Fake

  /* error messages for the codes above */
  private static final String ZZ_ERROR_MSG[] = {
    "Unkown internal scanner error",
    "Error: could not match input",
    "Error: pushback value was too large"
  };

  /**
   * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
   */
  private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

  private static final String ZZ_ATTRIBUTE_PACKED_0 =
    "\1\0\1\11\4\1\1\11\10\0\2\11";

  private static int [] zzUnpackAttribute() {
    int [] result = new int[17];
    int offset = 0;
    offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
    return result;
  }

  private static int zzUnpackAttribute(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }

  /** the current state of the DFA */
  private int zzState;

  /** the current lexical state */
  private int zzLexicalState = YYINITIAL;

  /** this buffer contains the current text to be matched and is
      the source of the yytext() string */
  private CharSequence zzBuffer = "";

  /** this buffer may contains the current text array to be matched when it is cheap to acquire it */
  private char[] zzBufferArray;

  /** the textposition at the last accepting state */
  private int zzMarkedPos;

  /** the textposition at the last state to be included in yytext */
  private int zzPushbackPos;

  /** the current text position in the buffer */
  private int zzCurrentPos;

  /** startRead marks the beginning of the yytext() string in the buffer */
  private int zzStartRead;

  /** endRead marks the last character in the buffer, that has been read
      from input */
  private int zzEndRead;

  /**
   * zzAtBOL == true <=> the scanner is currently at the beginning of a line
   */
  private boolean zzAtBOL = true;

  /** zzAtEOF == true <=> the scanner is at the EOF */
  private boolean zzAtEOF;

  /* user code: */
  public _GrammarLexer() {
    this((java.io.Reader)null);
  }


  /**
   * Creates a new scanner
   *
   * @param   in  the java.io.Reader to read input from.
   */
  public _GrammarLexer(java.io.Reader in) {
    this.zzReader = in;
  }


  /**
   * Unpacks the compressed character translation table.
   *
   * @param packed   the packed character translation table
   * @return         the unpacked character translation table
   */
  private static char [] zzUnpackCMap(String packed) {
    char [] map = new char[0x10000];
    int i = 0;  /* index in packed string  */
    int j = 0;  /* index in unpacked array */
    while (i < 52) {
      int  count = packed.charAt(i++);
      char value = packed.charAt(i++);
      do map[j++] = value; while (--count > 0);
    }
    return map;
  }

  public final int getTokenStart(){
    return zzStartRead;
  }

  public final int getTokenEnd(){
    return getTokenStart() + yylength();
  }

  public void reset(CharSequence buffer, int start, int end, int initialState){
    zzBuffer = buffer;
    zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
|
||||
zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
|
||||
zzCurrentPos = zzMarkedPos = zzStartRead = start;
|
||||
zzPushbackPos = 0;
|
||||
zzAtEOF = false;
|
||||
zzAtBOL = true;
|
||||
zzEndRead = end;
|
||||
yybegin(initialState);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refills the input buffer.
|
||||
*
|
||||
* @return <code>false</code>, iff there was new input.
|
||||
*
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
private boolean zzRefill() throws java.io.IOException {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the current lexical state.
|
||||
*/
|
||||
public final int yystate() {
|
||||
return zzLexicalState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Enters a new lexical state
|
||||
*
|
||||
* @param newState the new lexical state
|
||||
*/
|
||||
public final void yybegin(int newState) {
|
||||
zzLexicalState = newState;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the text matched by the current regular expression.
|
||||
*/
|
||||
public final CharSequence yytext() {
|
||||
return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the character at position <tt>pos</tt> from the
|
||||
* matched text.
|
||||
*
|
||||
* It is equivalent to yytext().charAt(pos), but faster
|
||||
*
|
||||
* @param pos the position of the character to fetch.
|
||||
* A value from 0 to yylength()-1.
|
||||
*
|
||||
* @return the character at position pos
|
||||
*/
|
||||
public final char yycharat(int pos) {
|
||||
return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer.charAt(zzStartRead+pos);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the length of the matched text region.
|
||||
*/
|
||||
public final int yylength() {
|
||||
return zzMarkedPos-zzStartRead;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Reports an error that occured while scanning.
|
||||
*
|
||||
* In a wellformed scanner (no or only correct usage of
|
||||
* yypushback(int) and a match-all fallback rule) this method
|
||||
* will only be called with things that "Can't Possibly Happen".
|
||||
* If this method is called, something is seriously wrong
|
||||
* (e.g. a JFlex bug producing a faulty scanner etc.).
|
||||
*
|
||||
* Usual syntax/scanner level error handling should be done
|
||||
* in error fallback rules.
|
||||
*
|
||||
* @param errorCode the code of the errormessage to display
|
||||
*/
|
||||
private void zzScanError(int errorCode) {
|
||||
String message;
|
||||
try {
|
||||
message = ZZ_ERROR_MSG[errorCode];
|
||||
}
|
||||
catch (ArrayIndexOutOfBoundsException e) {
|
||||
message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
|
||||
}
|
||||
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Pushes the specified amount of characters back into the input stream.
|
||||
*
|
||||
* They will be read again by then next call of the scanning method
|
||||
*
|
||||
* @param number the number of characters to be read again.
|
||||
* This number must not be greater than yylength()!
|
||||
*/
|
||||
public void yypushback(int number) {
|
||||
if ( number > yylength() )
|
||||
zzScanError(ZZ_PUSHBACK_2BIG);
|
||||
|
||||
zzMarkedPos -= number;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Resumes scanning until the next regular expression is matched,
|
||||
* the end of input is encountered or an I/O-Error occurs.
|
||||
*
|
||||
* @return the next token
|
||||
* @exception java.io.IOException if any I/O-Error occurs
|
||||
*/
|
||||
public IElementType advance() throws java.io.IOException {
|
||||
int zzInput;
|
||||
int zzAction;
|
||||
|
||||
// cached fields:
|
||||
int zzCurrentPosL;
|
||||
int zzMarkedPosL;
|
||||
int zzEndReadL = zzEndRead;
|
||||
CharSequence zzBufferL = zzBuffer;
|
||||
char[] zzBufferArrayL = zzBufferArray;
|
||||
char [] zzCMapL = ZZ_CMAP;
|
||||
|
||||
int [] zzTransL = ZZ_TRANS;
|
||||
int [] zzRowMapL = ZZ_ROWMAP;
|
||||
int [] zzAttrL = ZZ_ATTRIBUTE;
|
||||
|
||||
while (true) {
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
|
||||
zzAction = -1;
|
||||
|
||||
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
|
||||
|
||||
zzState = ZZ_LEXSTATE[zzLexicalState];
|
||||
|
||||
|
||||
zzForAction: {
|
||||
while (true) {
|
||||
|
||||
if (zzCurrentPosL < zzEndReadL)
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
else if (zzAtEOF) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
// store back cached positions
|
||||
zzCurrentPos = zzCurrentPosL;
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
boolean eof = zzRefill();
|
||||
// get translated positions and possibly new buffer
|
||||
zzCurrentPosL = zzCurrentPos;
|
||||
zzMarkedPosL = zzMarkedPos;
|
||||
zzBufferL = zzBuffer;
|
||||
zzEndReadL = zzEndRead;
|
||||
if (eof) {
|
||||
zzInput = YYEOF;
|
||||
break zzForAction;
|
||||
}
|
||||
else {
|
||||
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
|
||||
}
|
||||
}
|
||||
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
|
||||
if (zzNext == -1) break zzForAction;
|
||||
zzState = zzNext;
|
||||
|
||||
int zzAttributes = zzAttrL[zzState];
|
||||
if ( (zzAttributes & 1) == 1 ) {
|
||||
zzAction = zzState;
|
||||
zzMarkedPosL = zzCurrentPosL;
|
||||
if ( (zzAttributes & 8) == 8 ) break zzForAction;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// store back cached position
|
||||
zzMarkedPos = zzMarkedPosL;
|
||||
|
||||
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
|
||||
case 1:
|
||||
{ return com.intellij.psi.TokenType.BAD_CHARACTER;
|
||||
}
|
||||
case 6: break;
|
||||
case 4:
|
||||
{ return BNF_STRING;
|
||||
}
|
||||
case 7: break;
|
||||
case 5:
|
||||
{ return BNF_NUMBER;
|
||||
}
|
||||
case 8: break;
|
||||
case 3:
|
||||
{ return BNF_ID;
|
||||
}
|
||||
case 9: break;
|
||||
case 2:
|
||||
{ return com.intellij.psi.TokenType.WHITE_SPACE;
|
||||
}
|
||||
case 10: break;
|
||||
default:
|
||||
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
|
||||
zzAtEOF = true;
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
zzScanError(ZZ_NO_MATCH);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
19  samples/JavaScript/logo.jscad  Normal file
@@ -0,0 +1,19 @@
// title : OpenJSCAD.org Logo
// author : Rene K. Mueller
// license : MIT License
// revision : 0.003
// tags : Logo,Intersection,Sphere,Cube
// file : logo.jscad

function main() {
   return union(
      difference(
         cube({size: 3, center: true}),
         sphere({r:2, center: true})
      ),
      intersection(
         sphere({r: 1.3, center: true}),
         cube({size: 2.1, center: true})
      )
   ).translate([0,0,1.5]).scale(10);
}
210  samples/Jupyter Notebook/JupyterNotebook.ipynb  Normal file
File diff suppressed because one or more lines are too long
14069  samples/KiCad/tc14badge.brd  Normal file
File diff suppressed because it is too large
74  samples/LSL/LSL.lslp  Normal file
@@ -0,0 +1,74 @@
/*
Testing syntax highlighting
for the Linden Scripting Language
*/

integer someIntNormal = 3672;
integer someIntHex = 0x00000000;
integer someIntMath = PI_BY_TWO;

integer event = 5673;// 'event' is invalid.illegal

key someKeyTexture = TEXTURE_DEFAULT;
string someStringSpecial = EOF;

some_user_defined_function_without_return_type(string inputAsString)
{
    llSay(PUBLIC_CHANNEL, inputAsString);
}

string user_defined_function_returning_a_string(key inputAsKey)
{
    return (string)inputAsKey;
}

default
{
    state_entry()
    {
        key someKey = NULL_KEY;
        someKey = llGetOwner();

        string someString = user_defined_function_returning_a_string(someKey);

        some_user_defined_function_without_return_type(someString);
    }

    touch_start(integer num_detected)
    {
        list agentsInRegion = llGetAgentList(AGENT_LIST_REGION, []);
        integer numOfAgents = llGetListLength(agentsInRegion);

        integer index; // defaults to 0
        for (; index <= numOfAgents - 1; index++) // for each agent in region
        {
            llRegionSayTo(llList2Key(agentsInRegion, index), PUBLIC_CHANNEL, "Hello, Avatar!");
        }
    }

    touch_end(integer num_detected)
    {
        someIntNormal = 3672;
        someIntHex = 0x00000000;
        someIntMath = PI_BY_TWO;

        event = 5673;// 'event' is invalid.illegal

        someKeyTexture = TEXTURE_DEFAULT;
        someStringSpecial = EOF;

        llSetInventoryPermMask("some item", MASK_NEXT, PERM_ALL);// 'llSetInventoryPermMask' is reserved.godmode

        llWhisper(PUBLIC_CHANNEL, "Leaving \"default\" now...");
        state other;
    }
}

state other
{
    state_entry()
    {
        llWhisper(PUBLIC_CHANNEL, "Entered \"state other\", returning to \"default\" again...");
        state default;
    }
}
38  samples/M4/htmlgen.m4  Normal file
@@ -0,0 +1,38 @@
dnl Took from https://en.wikipedia.org/wiki/M4_(computer_language)
divert(-1)

M4 has multiple output queues that can be manipulated with the
`divert' macro. Valid queues range from 0 to 10, inclusive, with
the default queue being 0.

Calling the `divert' macro with an invalid queue causes text to be
discarded until another call. Note that even while output is being
discarded, quotes around `divert' and other macros are needed to
prevent expansion.

# Macros aren't expanded within comments, meaning that keywords such
# as divert and other built-ins may be used without consequence.

# HTML utility macro:

define(`H2_COUNT', 0)

# The H2_COUNT macro is redefined every time the H2 macro is used:

define(`H2',
       `define(`H2_COUNT', incr(H2_COUNT))<h2>H2_COUNT. $1</h2>')

divert(1)dnl
dnl
dnl The dnl macro causes m4 to discard the rest of the line, thus
dnl preventing unwanted blank lines from appearing in the output.
dnl
H2(First Section)
H2(Second Section)
H2(Conclusion)
dnl
divert(0)dnl
dnl
<HTML>
undivert(1)dnl One of the queues is being pushed to output.
</HTML>
172  samples/M4Sugar/ax_ruby_devel.m4  Normal file
@@ -0,0 +1,172 @@
# ===========================================================================
#   http://www.gnu.org/software/autoconf-archive/ax_ruby_devel.html
# ===========================================================================
#
# SYNOPSIS
#
#   AX_RUBY_DEVEL([version])
#
# DESCRIPTION
#
#   This macro checks for Ruby and tries to get the include path to
#   'ruby.h'. It provides the $(RUBY_CPPFLAGS) and $(RUBY_LDFLAGS) output
#   variables. It also exports $(RUBY_EXTRA_LIBS) for embedding Ruby in your
#   code.
#
#   You can search for some particular version of Ruby by passing a
#   parameter to this macro, for example "1.8.6".
#
# LICENSE
#
#   Copyright (c) 2008 Rafal Rzepecki <divided.mind@gmail.com>
#   Copyright (c) 2008 Sebastian Huber <sebastian-huber@web.de>
#   Copyright (c) 2008 Alan W. Irwin
#   Copyright (c) 2008 Rafael Laboissiere <rafael@laboissiere.net>
#   Copyright (c) 2008 Andrew Collier
#   Copyright (c) 2008 Matteo Settenvini <matteo@member.fsf.org>
#   Copyright (c) 2008 Horst Knorr <hk_classes@knoda.org>
#
#   This program is free software: you can redistribute it and/or modify it
#   under the terms of the GNU General Public License as published by the
#   Free Software Foundation, either version 3 of the License, or (at your
#   option) any later version.
#
#   This program is distributed in the hope that it will be useful, but
#   WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#   Public License for more details.
#
#   You should have received a copy of the GNU General Public License along
#   with this program. If not, see <http://www.gnu.org/licenses/>.
#
#   As a special exception, the respective Autoconf Macro's copyright owner
#   gives unlimited permission to copy, distribute and modify the configure
#   scripts that are the output of Autoconf when processing the Macro. You
#   need not follow the terms of the GNU General Public License when using
#   or distributing such scripts, even though portions of the text of the
#   Macro appear in them. The GNU General Public License (GPL) does govern
#   all other use of the material that constitutes the Autoconf Macro.
#
#   This special exception to the GPL applies to versions of the Autoconf
#   Macro released by the Autoconf Archive. When you make and distribute a
#   modified version of the Autoconf Macro, you may extend this special
#   exception to the GPL to apply to your modified version as well.

#serial 11

AC_DEFUN([AX_RUBY_DEVEL],[
    AC_REQUIRE([AX_WITH_RUBY])
    AS_IF([test -n "$1"], [AX_PROG_RUBY_VERSION([$1])])

    #
    # Check if you have mkmf, else fail
    #
    AC_MSG_CHECKING([for the mkmf Ruby package])
    ac_mkmf_result=`$RUBY -rmkmf -e ";" 2>&1`
    if test -z "$ac_mkmf_result"; then
        AC_MSG_RESULT([yes])
    else
        AC_MSG_RESULT([no])
        AC_MSG_ERROR([cannot import Ruby module "mkmf".
Please check your Ruby installation. The error was:
$ac_mkmf_result])
    fi

    #
    # Check for Ruby include path
    #
    AC_MSG_CHECKING([for Ruby include path])
    if test -z "$RUBY_CPPFLAGS"; then
        ruby_path=`$RUBY -rmkmf -e 'print Config::CONFIG[["archdir"]]'`
        if test -n "${ruby_path}"; then
            ruby_path="-I$ruby_path"
        fi
        RUBY_CPPFLAGS=$ruby_path
    fi
    AC_MSG_RESULT([$RUBY_CPPFLAGS])
    AC_SUBST([RUBY_CPPFLAGS])

    #
    # Check for Ruby library path
    #
    AC_MSG_CHECKING([for Ruby library path])
    if test -z "$RUBY_LDFLAGS"; then
        RUBY_LDFLAGS=`$RUBY -rmkmf -e 'print Config::CONFIG[["LIBRUBYARG_SHARED"]]'`
    fi
    AC_MSG_RESULT([$RUBY_LDFLAGS])
    AC_SUBST([RUBY_LDFLAGS])

    #
    # Check for site packages
    #
    AC_MSG_CHECKING([for Ruby site-packages path])
    if test -z "$RUBY_SITE_PKG"; then
        RUBY_SITE_PKG=`$RUBY -rmkmf -e 'print Config::CONFIG[["sitearchdir"]]'`
    fi
    AC_MSG_RESULT([$RUBY_SITE_PKG])
    AC_SUBST([RUBY_SITE_PKG])

    #
    # libraries which must be linked in when embedding
    #
    AC_MSG_CHECKING(ruby extra libraries)
    if test -z "$RUBY_EXTRA_LIBS"; then
        RUBY_EXTRA_LIBS=`$RUBY -rmkmf -e 'print Config::CONFIG[["SOLIBS"]]'`
    fi
    AC_MSG_RESULT([$RUBY_EXTRA_LIBS])
    AC_SUBST(RUBY_EXTRA_LIBS)

    #
    # linking flags needed when embedding
    # (is it even needed for Ruby?)
    #
    # AC_MSG_CHECKING(ruby extra linking flags)
    # if test -z "$RUBY_EXTRA_LDFLAGS"; then
    #     RUBY_EXTRA_LDFLAGS=`$RUBY -rmkmf -e 'print Config::CONFIG[["LINKFORSHARED"]]'`
    # fi
    # AC_MSG_RESULT([$RUBY_EXTRA_LDFLAGS])
    # AC_SUBST(RUBY_EXTRA_LDFLAGS)

    # this flags breaks ruby.h, and is sometimes defined by KDE m4 macros
    CFLAGS="`echo "$CFLAGS" | sed -e 's/-std=iso9899:1990//g;'`"
    #
    # final check to see if everything compiles alright
    #
    AC_MSG_CHECKING([consistency of all components of ruby development environment])
    AC_LANG_PUSH([C])
    # save current global flags
    ac_save_LIBS="$LIBS"
    LIBS="$ac_save_LIBS $RUBY_LDFLAGS"
    ac_save_CPPFLAGS="$CPPFLAGS"
    CPPFLAGS="$ac_save_CPPFLAGS $RUBY_CPPFLAGS"
    AC_TRY_LINK([
        #include <ruby.h>
    ],[
        ruby_init();
    ],[rubyexists=yes],[rubyexists=no])

    AC_MSG_RESULT([$rubyexists])

    if test ! "$rubyexists" = "yes"; then
        AC_MSG_ERROR([
Could not link test program to Ruby. Maybe the main Ruby library has been
installed in some non-standard library path. If so, pass it to configure,
via the LDFLAGS environment variable.
Example: ./configure LDFLAGS="-L/usr/non-standard-path/ruby/lib"
============================================================================
 ERROR!
 You probably have to install the development version of the Ruby package
 for your distribution. The exact name of this package varies among them.
============================================================================
        ])
        RUBY_VERSION=""
    fi
    AC_LANG_POP
    # turn back to default flags
    CPPFLAGS="$ac_save_CPPFLAGS"
    LIBS="$ac_save_LIBS"

    #
    # all done!
    #
])
143  samples/M4Sugar/filenames/configure.ac  Normal file
@@ -0,0 +1,143 @@
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.

AC_PREREQ(2.68)
AC_INIT([GARDEN],
        [1.0.9],
        [bubla@users.sourceforge.net])
AC_CONFIG_AUX_DIR([build-aux])
AM_INIT_AUTOMAKE([-Wall])
AC_CONFIG_SRCDIR([src/input.h])
AC_CONFIG_HEADERS([src/configure.h])
AC_CONFIG_MACRO_DIR([m4])

AC_ARG_ENABLE([debug],
        [AS_HELP_STRING([--enable-debug],
                [Builds the debug version of the library [[default = no]]]) ],
        [],
        [enable_debug="no"])

AS_IF([test "x$enable_debug" = "xyes"],
        [CFLAGS="${CFLAGS} -g -O0"])

# Checks for programs.
AC_PROG_CC
AC_PROG_LIBTOOL
LT_PROG_RC

AC_CANONICAL_HOST

dnl
dnl Check whether it makes sense to install a garden.desktop file
dnl
AC_CHECK_PROG([have_freedesktop],
        [update-desktop-database],
        [yes])

AM_CONDITIONAL([HAVE_FREEDESKTOP],
        [test "x$have_freedesktop" = "xyes"])

AM_CONDITIONAL([WANT_FREEDESKTOP],
        [test "x$enable_desktop_install" = "xyes"])

AC_ARG_ENABLE([desktop-install],
        [AS_HELP_STRING([--enable-desktop-install],
                [Whether you want to install the garden.desktop file if applicable. !!! DO NOT USE if you are a PACKAGER!!!])]
)

AS_CASE([$host],
        [*mingw* | *cygwin*],
        [AC_DEFINE([WINDOWS_VERSION],
                [1],
                [Define when building for Windows])
        windows_version="yes"])

AM_CONDITIONAL([WINDOWS_VERSION],
        [test "x$windows_version" = "xyes" ])

dnl now the datadir specification, that is useful if one does want to play without installing
AC_ARG_ENABLE([datadir],
        [AS_HELP_STRING([--enable-datadir=path-to-your-garden-datafiles],
                [Normally you dont have to use this, but it is handy when you want to play the game without installing it or if you want to use already installed data. In the first case, use for instance --enable-datadir=`pwd`/data (if it makes sense)])],
        [DATADIR_NAME="$enableval"],
        [DATADIR_NAME='$(datadir)/'garden])

# Sets the data subdirectory
AC_SUBST([DATADIR_NAME])

# Checks for libraries.
AC_CHECK_HEADER([allegro.h],
        [],
        [have_allegro="no"
        AC_MSG_ERROR([You don't even have Allegro headers... Get Allegro first!]) ])

AC_CHECK_LIB([m], [sin])

test "x$host_os" != "x$build_os" && CROSS_COMPILING="yes"

try_link_allegro ()
{
        LIBS_SAVE=$LIBS
        LIBS="$LIBS $1"
        AC_MSG_CHECKING([for Allegro using $1])
        AC_LINK_IFELSE([AC_LANG_PROGRAM([AC_INCLUDES_DEFAULT
                [ #include <allegro.h> ]],
                [allegro_init();])END_OF_MAIN() ],
                [have_allegro="yes"],
                [have_allegro="no"])
        AC_MSG_RESULT([$have_allegro])
        LIBS=$LIBS_SAVE
        AS_IF([test "x$have_allegro" == "xyes"],
                [return 0],
                [return 1])
}
# first check for 'official allegro'
# The official allegro does not support cross-compiling, though...
AM_PATH_ALLEGRO([4.2.0])

try_link_allegro "$allegro_LIBS"
AS_IF([test "x$?" == "x0"],
        [[LIBS="$LIBS $allegro_LIBS"
        ALLEGRO_LIB=`echo $allegro_LIBS | sed -e 's/.*-l\([^[:blank:]]*\).*/\1/'`
        have_allegro="yes"]])

ALLEGRO_RELEASE_LIBS="alleg alleg42 alleg44"
ALLEGRO_DEBUG_LIBS="alld42 alleg44-debug"
AS_IF([test "x$enable_debug" = "xyes"],
        [ALLEGRO_LIBS="$ALLEGRO_DEBUG_LIBS $ALLEGRO_RELEASE_LIBS"],
        [ALLEGRO_LIBS="$ALLEGRO_RELEASE_LIBS $ALLEGRO_DEBUG_LIBS"])

for lib in $ALLEGRO_LIBS
do
        ldflag="-l$lib"
        AS_IF([test "x$have_allegro" == "xyes"],
                [break])
        try_link_allegro $ldflag
        AS_IF([test "x$?" == "x0"],
                [LIBS="$LIBS $ldflag"
                ALLEGRO_LIB="$lib"
                have_allegro="yes"])
done

AS_IF([test "x$have_allegro" != "xyes"],
        [AC_MSG_ERROR([Unable to find Allegro game programming library 4.2, check out www.allegro.cc (or your distro repositories if you use a unix-like system)]) ])

# Checks for header files.
AC_CHECK_HEADERS([string.h sys/stat.h])

# Checks for typedefs, structures, and compiler characteristics.
AC_C_INLINE
AC_HEADER_STDBOOL

# Checks for library functions.

AC_SUBST([ALLEGRO_LIB])

AC_CONFIG_FILES([Makefile
        src/Makefile
        data/Makefile
        resources/Makefile
        docs/garden.doxyfile
        pkgs/w32/winstaller.nsi])

AC_OUTPUT
46  samples/M4Sugar/list.m4  Normal file
@@ -0,0 +1,46 @@
m4_define([m4_list_declare], [m4_do(
    [m4_define([$1_GET], [m4_expand([m4_list_nth([$1], $][1)])])],
    [m4_define([$1_FOREACH], [m4_foreach([item], [m4_dquote_elt(m4_list_contents([$1]))], m4_quote($][1))])],
)])

m4_define([m4_list_add], [m4_do(
    [m4_pushdef([_LIST_NAME], [[_LIST_$1]])],
    [m4_ifndef(_LIST_NAME,
        [m4_define(_LIST_NAME, m4_dquote(m4_escape([$2])))],
        [m4_define(_LIST_NAME, m4_dquote(m4_list_contents([$1]), m4_escape([$2])))],
    )],
    [m4_popdef([_LIST_NAME])],
)])

m4_define([m4_list_contents], [m4_do(
    [m4_pushdef([_LIST_NAME], [[_LIST_$1]])],
    [m4_ifndef(_LIST_NAME, [], m4_quote(_LIST_NAME))],
    [m4_popdef([_LIST_NAME])],
)])

m4_define([m4_list_nth], [m4_argn([$2], m4_list_contents([$1]))])

m4_define([m4_list_pop_front], [m4_do(
    [m4_pushdef([_LIST_NAME], [[_LIST_$1]])],
    [m4_car(m4_unquote(_LIST_NAME))],
    [m4_define(_LIST_NAME, m4_cdr(m4_unquote(_LIST_NAME)))],
    [m4_popdef([_LIST_NAME])],
)])

m4_define([m4_list_pop_back], [m4_do(
    [m4_pushdef([_LIST_NAME], [[_LIST_$1]])],
    [m4_define(_LIST_NAME, m4_dquote(m4_reverse(m4_unquote(_LIST_NAME))))],
    [m4_list_pop_front([$1])],
    [m4_define(_LIST_NAME, m4_dquote(m4_reverse(m4_unquote(_LIST_NAME))))],
    [m4_popdef([_LIST_NAME])],
)])

dnl
dnl $1: List name
dnl $2: What
dnl $3: If contains
dnl $4: If not
m4_define([m4_list_contains], [m4_do(
    [m4_foreach([item], m4_list_contents([$1]), m4_if(item, [$2], [[$3]], [[$4]]))]
)])
29  samples/MAXScript/macro-1.mcr  Normal file
@@ -0,0 +1,29 @@
-- Taken from an example from Autodesk's MAXScript reference:
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_84E24969_C175_4389_B9A6_3B2699B66785_htm

macroscript MoveToSurface
category: "HowTo"
(
    fn g_filter o = superclassof o == Geometryclass
    fn find_intersection z_node node_to_z = (
        local testRay = ray node_to_z.pos [0,0,-1]
        local nodeMaxZ = z_node.max.z
        testRay.pos.z = nodeMaxZ + 0.0001 * abs nodeMaxZ
        intersectRay z_node testRay
    )

    on isEnabled return selection.count > 0

    on Execute do (
        target_mesh = pickObject message:"Pick Target Surface:" filter:g_filter

        if isValidNode target_mesh then (
            undo "MoveToSurface" on (
                for i in selection do (
                    int_point = find_intersection target_mesh i
                    if int_point != undefined then i.pos = int_point.pos
                )--end i loop
            )--end undo
        )--end if
    )--end execute
)--end script
53  samples/MAXScript/macro-2.mcr  Normal file
@@ -0,0 +1,53 @@
-- Taken from an example from Autodesk's MAXScript reference:
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_0876DF46_FAA3_4131_838D_5739A67FF2C1_htm

macroscript FreeSpline category:"HowTo" tooltip:"FreeSpline" (
    local old_pos
    local new_spline
    local second_knot_set

    fn get_mouse_pos pen_pos old_pen_pos = (
        if old_pos == undefined then old_pos = old_pen_pos
        if distance pen_pos old_pos > 10 then
        (
            if second_knot_set then
                addKnot new_spline 1 #smooth #curve pen_pos
            else
            (
                setKnotPoint new_spline 1 2 pen_pos
                second_knot_set = true
            )
            old_pos = pen_pos
            updateShape new_spline
        )-- end if
    )-- end fn

    fn draw_new_line old_pen_pos = (
        pickPoint mouseMoveCallback:#(get_mouse_pos,old_pen_pos)
    )

    undo"Free Spline"on(
        new_spline = splineShape ()
        old_pen_pos = pickPoint ()

        if old_pen_pos == #RightClick then
            delete new_spline
        else
        (
            select new_spline
            new_spline.pos = old_pen_pos
            addNewSpline new_spline
            addKnot new_spline 1 #smooth #curve old_pen_pos
            addKnot new_spline 1 #smooth #curve old_pen_pos
            second_knot_set = false
            draw_new_line old_pen_pos
            q = querybox "Close Spline?" title:"Free Spline"
            if q then
            (
                close new_spline 1
                updateshape new_spline
            )
            select new_spline
        )--end else
    )--end undo
)--end script
64  samples/MAXScript/svg-renderer.ms  Normal file
@@ -0,0 +1,64 @@
-- Taken from a 3-part tutorial from Autodesk's MAXScript reference
-- Source: http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_6B5EDC11_A154_4AA7_A972_A11AC36949E9_htm

fn ColourToHex col = (
    local theComponents = #(bit.intAsHex col.r, bit.intAsHex col.g, bit.intAsHex col.b)
    local theValue = "#"
    for i in theComponents do
        theValue += (if i.count == 1 then "0" else "") + i
    theValue
)

local st = timestamp()
local theFileName = (getDir #userscripts + "\\PolygonRendering3.svg")
local theSVGfile = createFile theFileName
format "<svg xmlns=\"http://www.w3.org/2000/svg\"\n" to:theSVGfile
format "\t\txmlns:xlink=\"http://www.w3.org/1999/xlink\">\n" to:theSVGfile

local theViewTM = viewport.getTM()
theViewTM.row4 = [0,0,0]

local theViewTM2 = viewport.getTM()
local theViewSize = getViewSize()
local theViewScale = getViewSize()
theViewScale.x /= 1024.0
theViewScale.y /= 1024.0

local theStrokeThickness = 3

gw.setTransform (matrix3 1)
for o in Geometry where not o.isHiddenInVpt and classof o != TargetObject do (
    local theStrokeColour = white
    local theFillColour = o.wirecolor

    local theMesh = snapshotAsMesh o
    for f = 1 to theMesh.numfaces do (
        local theNormal = normalize (getFaceNormal theMesh f)

        if (theNormal*theViewTM).z > 0 do
        (
            local theFace = getFace theMesh f
            local v1 = gw.transPoint (getVert theMesh theFace.x)
            local v2 = gw.transPoint (getVert theMesh theFace.y)
            local v3 = gw.transPoint (getVert theMesh theFace.z)

            v1.x /= theViewScale.x
            v1.y /= theViewScale.y
            v2.x /= theViewScale.x
            v2.y /= theViewScale.y
            v3.x /= theViewScale.x
            v3.y /= theViewScale.y

            format "\t<polygon points='%,% %,% %,%' \n" v1.x v1.y v2.x v2.y v3.x v3.y to:theSVGfile
            format "\tstyle='stroke:%; fill:%; stroke-width:%'/>\n" (ColourToHex theStrokeColour) (ColourToHex theFillColour) theStrokeThickness to:theSVGfile
        )--end if normal positive
    )--end f loop
)--end o loop

format "</svg>\n" to:theSVGfile
close theSVGfile
local theSVGMap = VectorMap vectorFile:theFileName alphasource:0
local theBitmap = bitmap theViewSize.x theViewSize.y
renderMap theSVGMap into:theBitmap filter:true
display theBitmap
format "Render Time: % sec.\n" ((timestamp()-st)/1000.0)
22  samples/MAXScript/volume-calc.ms  Normal file
@@ -0,0 +1,22 @@
fn CalculateVolumeAndCentreOfMass obj =
(
    local Volume = 0.0
    local Centre = [0.0, 0.0, 0.0]
    local theMesh = snapshotasmesh obj
    local numFaces = theMesh.numfaces
    for i = 1 to numFaces do
    (
        local Face = getFace theMesh i
        local vert2 = getVert theMesh Face.z
        local vert1 = getVert theMesh Face.y
        local vert0 = getVert theMesh Face.x
        local dV = Dot (Cross (vert1 - vert0) (vert2 - vert0)) vert0
        Volume += dV
        Centre += (vert0 + vert1 + vert2) * dV
    )
    delete theMesh
    Volume /= 6
    Centre /= 24
    Centre /= Volume
    #(Volume, Centre)
)
31  samples/Makefile/filenames/Makefile.inc  Normal file
@@ -0,0 +1,31 @@
# $OpenBSD: Makefile.inc,v 1.2 2003/11/14 20:09:20 drahn Exp $
# $NetBSD: Makefile.inc,v 1.1 1996/09/30 16:34:59 ws Exp $

.if !defined(__stand_makefile_inc)
__stand_makefile_inc=1

KERN_AS=	library

S=$(.CURDIR)/../../../$(R)

.if !make(libdep) && !make(sadep) && !make(salibdir) && !make(kernlibdir) && !make(obj) && !defined(NOMACHINE)
.BEGIN:
	@([ -h machine ] || ln -s $(S)/arch/$(MACHINE)/include machine)
.endif

#
EXTRACFLAGS=	-msoft-float
REAL_VIRT?=	-v
ENTRY?=		_start

INCLUDES+=	-I. -I$(.OBJDIR) -I$(.CURDIR)/.. -I$(S)/arch -I$(S)
INCLUDES+=	-I$(S)/lib/libsa
DEFS+=		-DSTANDALONE
CFLAGS+=	$(INCLUDES) $(DEFS) $(EXTRACFLAGS)
CFLAGS+=	-fno-stack-protector
LDFLAGS?=	-X -N -Ttext $(RELOC) -e $(ENTRY)

cleandir:
	rm -rf lib machine

.endif
1  samples/Mathematica/TestArithmetic.mt  Normal file
@@ -0,0 +1 @@
Test[1 + 2, 3, TestID -> "One plus two"]
1  samples/Mathematica/TestString.mt  Normal file
@@ -0,0 +1 @@
Test["a" <> "b", "ab", TestID -> "Concat \"a\" and \"b\""]
5  samples/Mathematica/TestSuite.mt  Normal file
@@ -0,0 +1,5 @@
TestSuite[
  { "TestArithmetic.mt"
  , "TestString.mt"
  }
]
694  samples/MediaWiki/README.wiki  Normal file
@@ -0,0 +1,694 @@
= Name =

'''nginx_tcp_proxy_module''' - support TCP proxy with Nginx

= Installation =

Download the latest stable version of the release tarball of this module from [http://github.com/yaoweibin/nginx_tcp_proxy_module github]

Grab the nginx source code from [http://nginx.org/ nginx.org], for example, the version 1.2.1 (see nginx compatibility), and then build the source with this module:

<geshi lang="bash">
$ wget 'http://nginx.org/download/nginx-1.2.1.tar.gz'
$ tar -xzvf nginx-1.2.1.tar.gz
$ cd nginx-1.2.1/
$ patch -p1 < /path/to/nginx_tcp_proxy_module/tcp.patch

$ ./configure --add-module=/path/to/nginx_tcp_proxy_module

$ make
$ make install
</geshi>


= Synopsis =

<geshi lang="nginx">
http {

    server {
        listen 80;

        location /status {
            tcp_check_status;
        }
    }
}
</geshi>

<geshi lang="nginx">

#You can also include tcp_proxy.conf file individually

#include /path/to/tcp_proxy.conf;

tcp {

    upstream cluster {
        # simple round-robin
        server 192.168.0.1:80;
        server 192.168.0.2:80;

        check interval=3000 rise=2 fall=5 timeout=1000;

        #check interval=3000 rise=2 fall=5 timeout=1000 type=ssl_hello;

        #check interval=3000 rise=2 fall=5 timeout=1000 type=http;
        #check_http_send "GET / HTTP/1.0\r\n\r\n";
        #check_http_expect_alive http_2xx http_3xx;
    }

    server {
        listen 8888;

        proxy_pass cluster;
    }
}
</geshi>

= Description =

This module actually includes many modules: ngx_tcp_module, ngx_tcp_core_module, ngx_tcp_upstream_module, ngx_tcp_proxy_module, ngx_tcp_websocket_module, ngx_tcp_ssl_module, ngx_tcp_upstream_ip_hash_module. All these modules work together to support TCP proxying with Nginx. I also added other features: ip_hash, upstream server health checks, and a status monitor.

The motivation for writing these modules is Nginx's high performance and robustness. At first, I developed this module just for general TCP proxying. Now, this module is frequently used for websocket reverse proxying.

Note: you can't use the same listening port as the HTTP modules.

= Directives =

== ngx_tcp_module ==

=== tcp ===

'''syntax:''' ''tcp {...}''

'''default:''' ''none''

'''context:''' ''main''

'''description:''' All the tcp related directives are contained in the tcp block.


'''ngx_tcp_core_module'''

=== server ===

'''syntax:''' ''server {...}''

'''default:''' ''none''

'''context:''' ''tcp''

'''description:''' All the server-specific directives are contained in the server block.

=== listen ===

'''syntax:''' ''listen address:port [ bind | ssl | default]''

'''default:''' ''none''

'''context:''' ''server''

'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#listen listen]. The default parameter marks the default server when several server blocks share the same port.

=== access_log ===

'''syntax:''' ''access_log path [buffer=size] | off''

'''default:''' ''access_log logs/tcp_access.log''

'''context:''' ''tcp, server''

'''description:''' Set the access log. Each record's format is like this:

<pre>

log_time worker_process_pid client_ip host_ip accept_time upstream_ip bytes_read bytes_write

2011/08/02 06:19:07 [5972] 127.0.0.1 0.0.0.0:1982 2011/08/02 06:18:19 172.19.0.129:80 80 236305

</pre>

* ''log_time'': The current time when writing this log. The log action is called when the proxy session is closed.
* ''worker_process_pid'': the pid of the worker process
* ''client_ip'': the client ip
* ''host_ip'': the server ip and port
* ''accept_time'': the time when the server accepts the client's connection
* ''upstream_ip'': the upstream server's ip
* ''bytes_read'': the bytes read from the client
* ''bytes_write'': the bytes written to the client
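
For illustration, a hypothetical buffered form of this directive, assembled from the syntax above (the path and buffer size are placeholders):

<geshi lang="nginx">
access_log logs/tcp_access.log buffer=16k;
</geshi>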

=== allow ===

'''syntax:''' ''allow [ address | CIDR | all ]''

'''default:''' ''none''

'''context:''' ''server''

'''description:''' Directive grants access for the network or addresses indicated.

=== deny ===

'''syntax:''' ''deny [ address | CIDR | all ]''

'''default:''' ''none''

'''context:''' ''server''

'''description:''' Directive denies access for the network or addresses indicated.

=== so_keepalive ===

'''syntax:''' ''so_keepalive on|off''

'''default:''' ''off''

'''context:''' ''main, server''

'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#so_keepalive so_keepalive].

=== tcp_nodelay ===

'''syntax:''' ''tcp_nodelay on|off''

'''default:''' ''on''

'''context:''' ''main, server''

'''description:''' The same as [http://wiki.nginx.org/NginxHttpCoreModule#tcp_nodelay tcp_nodelay].

=== timeout ===

'''syntax:''' ''timeout milliseconds''

'''default:''' ''60000''

'''context:''' ''main, server''

'''description:''' set the timeout value for client connections.

=== server_name ===

'''syntax:''' ''server_name name''

'''default:''' ''The name of the host, obtained through gethostname()''

'''context:''' ''tcp, server''

'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#server_name server_name]. You can specify several server names in different server blocks with the same port. They can be used in the websocket module.

=== resolver ===

'''syntax:''' ''resolver address''

'''default:''' ''none''

'''context:''' ''tcp, server''

'''description:''' DNS server

=== resolver_timeout ===

'''syntax:''' ''resolver_timeout time''

'''default:''' ''30s''

'''context:''' ''tcp, server''

'''description:''' Resolver timeout in seconds.


== ngx_tcp_upstream_module ==

=== upstream ===

'''syntax:''' ''upstream {...}''

'''default:''' ''none''

'''context:''' ''tcp''

'''description:''' All the upstream directives are contained in this block. The upstream servers will be dispatched with round robin by default.

=== server ===

'''syntax:''' ''server name [parameters]''

'''default:''' ''none''

'''context:''' ''upstream''

'''description:''' Most of the parameters are the same as [http://wiki.nginx.org/NginxHttpUpstreamModule#server server]. Default port is 80.

=== check ===

'''syntax:''' ''check interval=milliseconds [fall=count] [rise=count] [timeout=milliseconds] [type=tcp|ssl_hello|smtp|mysql|pop3|imap]''

'''default:''' ''none, if parameters omitted, default parameters are interval=30000 fall=5 rise=2 timeout=1000''

'''context:''' ''upstream''

'''description:''' Add a health check for the upstream servers. The default check method is a simple tcp connect. A combined configuration sketch follows the parameter list below.

The parameters' meanings are:

* ''interval'': the check request's interval time.
* ''fall''(fall_count): After fall_count check failures, the server is marked down.
* ''rise''(rise_count): After rise_count check successes, the server is marked up.
* ''timeout'': the check request's timeout.
* ''type'': the check protocol type:
# ''tcp'' is a simple tcp socket connect that peeks one byte.
# ''ssl_hello'' sends a client ssl hello packet and receives the server ssl hello packet.
# ''http'' sends an http request packet, then receives and parses the http response to diagnose if the upstream server is alive.
# ''smtp'' sends an smtp request packet, then receives and parses the smtp response to diagnose if the upstream server is alive. A response beginning with '2' is considered OK.
# ''mysql'' connects to the mysql server and receives the greeting response to diagnose if the upstream server is alive.
# ''pop3'' receives and parses the pop3 response to diagnose if the upstream server is alive. A response beginning with '+' is considered OK.
# ''imap'' connects to the imap server and receives the greeting response to diagnose if the upstream server is alive.
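
For illustration, a minimal sketch combining the check parameters above with the http check directives documented below (server addresses are placeholders):

<geshi lang="nginx">
upstream cluster {
    server 192.168.0.1:80;
    server 192.168.0.2:80;

    check interval=3000 rise=2 fall=5 timeout=1000 type=http;
    check_http_send "GET / HTTP/1.0\r\n\r\n";
    check_http_expect_alive http_2xx http_3xx;
}
</geshi>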

=== check_http_send ===

'''syntax:''' ''check_http_send http_packet''

'''default:''' ''"GET / HTTP/1.0\r\n\r\n"''

'''context:''' ''upstream''

'''description:''' If the check type is http, the health checker sends this http packet to the upstream server.

=== check_http_expect_alive ===

'''syntax:''' ''check_http_expect_alive [ http_2xx | http_3xx | http_4xx | http_5xx ]''

'''default:''' ''http_2xx | http_3xx''

'''context:''' ''upstream''

'''description:''' These status codes indicate that the upstream server's http response is OK and the backend is alive.

=== check_smtp_send ===

'''syntax:''' ''check_smtp_send smtp_packet''

'''default:''' ''"HELO smtp.localdomain\r\n"''

'''context:''' ''upstream''

'''description:''' If the check type is smtp, the health checker sends this smtp packet to the upstream server.

=== check_smtp_expect_alive ===

'''syntax:''' ''check_smtp_expect_alive [smtp_2xx | smtp_3xx | smtp_4xx | smtp_5xx]''

'''default:''' ''smtp_2xx''

'''context:''' ''upstream''

'''description:''' These status codes indicate that the upstream server's smtp response is OK and the backend is alive.

=== check_shm_size ===

'''syntax:''' ''check_shm_size size''

'''default:''' ''(number_of_checked_upstream_blocks + 1) * pagesize''

'''context:''' ''tcp''

'''description:''' If you store hundreds of servers in one upstream block, the shared memory for health checking may not be enough; you can enlarge it with this directive.

=== tcp_check_status ===

'''syntax:''' ''tcp_check_status''

'''default:''' ''none''

'''context:''' ''location''

'''description:''' Display the health-checked servers' status over HTTP. This directive is set in the http block.

The table field meanings are:

* ''Index'': The server index in the check table
* ''Name'' : The upstream server name
* ''Status'': The marked status of the server.
* ''Busyness'': The number of connections currently connected to the server.
* ''Rise counts'': The number of successful checks
* ''Fall counts'': The number of failed checks
* ''Access counts'': The number of accesses to this server
* ''Check type'': The type of the check packet


'''ngx_tcp_upstream_busyness_module'''

=== busyness ===

'''syntax:''' ''busyness''

'''default:''' ''none''

'''context:''' ''upstream''

'''description:''' the upstream server will be dispatched according to the backend servers' busyness.


'''ngx_tcp_upstream_ip_hash_module'''

=== ip_hash ===

'''syntax:''' ''ip_hash''

'''default:''' ''none''

'''context:''' ''upstream''

'''description:''' the upstream server will be dispatched by ip_hash.
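
For illustration, a minimal sketch of an upstream block using ip_hash (addresses are placeholders):

<geshi lang="nginx">
upstream cluster {
    ip_hash;

    server 192.168.0.1:80;
    server 192.168.0.2:80;
}
</geshi>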

== ngx_tcp_proxy_module ==

=== proxy_pass ===

'''syntax:''' ''proxy_pass host:port''

'''default:''' ''none''

'''context:''' ''server''

'''description:''' proxy the request to the backend server. Default port is 80.

=== proxy_buffer ===

'''syntax:''' ''proxy_buffer size''

'''default:''' ''4k''

'''context:''' ''tcp, server''

'''description:''' set the size of the proxy buffer.

=== proxy_connect_timeout ===

'''syntax:''' ''proxy_connect_timeout milliseconds''

'''default:''' ''60000''

'''context:''' ''tcp, server''

'''description:''' set the timeout value for connecting to the backends.

=== proxy_read_timeout ===

'''syntax:''' ''proxy_read_timeout milliseconds''

'''default:''' ''60000''

'''context:''' ''tcp, server''

'''description:''' set the timeout value for reading from the backends.

=== proxy_send_timeout ===

'''syntax:''' ''proxy_send_timeout milliseconds''

'''default:''' ''60000''

'''context:''' ''tcp, server''

'''description:''' set the timeout value for sending to the backends.


== ngx_tcp_websocket_module ==

=== websocket_pass ===

'''syntax:''' ''websocket_pass [path] host:port''

'''default:''' ''none''

'''context:''' ''server''

'''description:''' proxy the websocket request to the backend server. Default port is 80. You can specify several different paths in the same server block, as in the sketch below.
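
For illustration, a minimal sketch of a server block proxying two websocket paths to different backends (paths and addresses are placeholders):

<geshi lang="nginx">
tcp {
    server {
        listen 8080;

        websocket_pass /chat 192.168.0.3:8080;
        websocket_pass /echo 192.168.0.4:8080;
    }
}
</geshi>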
|
||||
=== websocket_buffer ===
|
||||
|
||||
'''syntax:''' ''websocket_buffer size''
|
||||
|
||||
'''default:''' ''4k''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the size of proxy buffer.
|
||||
|
||||
=== websocket_connect_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_connect_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of connection to backends.
|
||||
|
||||
=== websocket_read_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_read_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of reading from backends. Your timeout will be the minimum of this and the *timeout* parameter, so if you want a long timeout for your websockets, make sure to set both paramaters.
|
||||
|
||||
=== websocket_send_timeout ===
|
||||
|
||||
'''syntax:''' ''websocket_send_timeout miliseconds''
|
||||
|
||||
'''default:''' ''60000''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
'''description:''' set the timeout value of sending to backends.
|
||||
|
||||
|
||||
== ngx_tcp_ssl_module ==
|
||||
|
||||
The default config file includes this ngx_tcp_ssl_module. If you want to just compile nginx without ngx_tcp_ssl_module, copy the ngx_tcp_proxy_module/config_without_ssl to ngx_tcp_proxy_module/config, reconfigrure and compile nginx.
|
||||
|
||||
=== ssl ===
|
||||
|
||||
'''syntax:''' ''ssl [on|off] ''
|
||||
|
||||
'''default:''' ''ssl off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
Enables SSL for a server.
|
||||
|
||||
=== ssl_certificate ===
|
||||
|
||||
'''syntax:''' ''ssl_certificate file''
|
||||
|
||||
'''default:''' ''ssl_certificate cert.pem''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the certificate, in PEM format. This file can contain also other certificates and the server private key.
|
||||
|
||||
=== ssl_certificate_key ===
|
||||
|
||||
'''syntax:''' ''ssl_certificate_key file''
|
||||
|
||||
'''default:''' ''ssl_certificate_key cert.pem''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the private key, in PEM format.
|
||||
|
||||
=== ssl_client_certificate ===
|
||||
|
||||
'''syntax:''' ''ssl_client_certificate file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the file containing the CA (root) certificate, in PEM format, that is used for validating client certificates.
|
||||
|
||||
=== ssl_dhparam ===
|
||||
|
||||
'''syntax:''' ''ssl_dhparam file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies a file containing Diffie-Hellman key agreement protocol cryptographic parameters, in PEM format, utilized for exchanging session keys between server and client.
|
||||
|
||||
=== ssl_ciphers ===
|
||||
|
||||
'''syntax:''' ''ssl_ciphers openssl_cipherlist_spec''
|
||||
|
||||
'''default:''' ''ssl_ciphers HIGH:!aNULL:!MD5''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive describes the list of cipher suites the server supports for establishing a secure connection. Cipher suites are specified in the [http://openssl.org/docs/apps/ciphers.html OpenSSL] cipherlist format, for example:
|
||||
|
||||
<geshi lang="nginx">
|
||||
ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2:+EXP;
|
||||
</geshi>
|
||||
|
||||
The complete cipherlist supported by the currently installed version of OpenSSL in your platform can be obtained by issuing the command:
|
||||
<pre>
|
||||
openssl ciphers
|
||||
</pre>
|
||||
|
||||
=== ssl_crl ===
|
||||
|
||||
'''syntax:''' ''ssl_crl file''
|
||||
|
||||
'''default:''' ''none''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive specifies the filename of a Certificate Revocation List, in PEM format, which is used to check the revocation status of certificates.
|
||||
|
||||
=== ssl_prefer_server_ciphers ===
|
||||
|
||||
'''syntax:''' ''ssl_prefer_server_ciphers [on|off] ''
|
||||
|
||||
'''default:''' ''ssl_prefer_server_ciphers off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
The server requires that the cipher suite list for protocols SSLv3 and TLSv1 are to be preferred over the client supported cipher suite list.
|
||||
|
||||
=== ssl_protocols ===
|
||||
|
||||
'''syntax:''' ''ssl_protocols [SSLv2] [SSLv3] [TLSv1] [TLSv1.1] [TLSv1.2]''
|
||||
|
||||
'''default:''' ''ssl_protocols SSLv3 TLSv1 TLSv1.1 TLSv1.2''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive enables the protocol versions specified.
|
||||
|
||||
=== ssl_verify_client ===
|
||||
|
||||
'''syntax:''' ''ssl_verify_client on|off|optional''
|
||||
|
||||
'''default:''' ''ssl_verify_client off''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive enables the verification of the client identity. Parameter 'optional' checks the client identity using its certificate in case it was made available to the server.
|
||||
|
||||
=== ssl_verify_depth ===
|
||||
|
||||
'''syntax:''' ''ssl_verify_depth number''
|
||||
|
||||
'''default:''' ''ssl_verify_depth 1''
|
||||
|
||||
'''context:''' ''tcp, server''
|
||||
|
||||
This directive sets how deep the server should go in the client provided certificate chain in order to verify the client identity.
|
||||
|
||||
=== ssl_session_cache ===

'''syntax:''' ''ssl_session_cache off|none|builtin:size and/or shared:name:size''

'''default:''' ''ssl_session_cache off''

'''context:''' ''tcp, server''

The directive sets the types and sizes of the caches used to store SSL sessions.

The cache types are:

* off -- Hard off: nginx tells the client explicitly that sessions cannot be reused.
* none -- Soft off: nginx tells the client that sessions can be reused, but never actually reuses them. This is a workaround for some mail clients, as ssl_session_cache may be used in the mail proxy as well as in the HTTP server.
* builtin -- the OpenSSL builtin cache, used inside one worker process only. The cache size is specified as a number of sessions. Note: there appears to be a memory fragmentation issue with this method; please take that into consideration when using it. See "References" below.
* shared -- the cache is shared between all worker processes. The size of the cache is specified in bytes: a 1 MB cache can contain roughly 4000 sessions. Each shared cache must be given an arbitrary name. A shared cache with a given name can be used in several virtual hosts.

It's possible to use both types of cache — builtin and shared — simultaneously, for example:

<geshi lang="nginx">
ssl_session_cache builtin:1000 shared:SSL:10m;
</geshi>

Bear in mind, however, that using only the shared cache, i.e., without the builtin cache, should be more efficient.

=== ssl_session_timeout ===

'''syntax:''' ''ssl_session_timeout time''

'''default:''' ''ssl_session_timeout 5m''

'''context:''' ''tcp, server''

This directive defines the maximum time during which the client can re-use the previously negotiated cryptographic parameters of the secure session that is stored in the SSL cache.

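Putting the directives above together, a minimal sketch of a TLS-tuned ''server'' block might look like this (certificate setup omitted; names and sizes are illustrative, not recommendations):

<geshi lang="nginx">
tcp {
    server {
        listen 443;

        # certificate/key directives omitted for brevity
        ssl_ciphers               HIGH:!aNULL:!MD5;
        ssl_prefer_server_ciphers on;
        ssl_protocols             TLSv1 TLSv1.1 TLSv1.2;
        ssl_session_cache         shared:SSL:10m;
        ssl_session_timeout       10m;
    }
}
</geshi>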
= Compatibility =

* My test bed is 0.7.65+

= Notes =

The http_response_parse.rl and smtp_response_parse.rl files are [http://www.complang.org/ragel/ ragel] scripts; you can edit a script and recompile it like this:

<geshi lang="bash">
$ ragel -G2 http_response_parse.rl
$ ragel -G2 smtp_response_parse.rl
</geshi>

= TODO =

* refactor this module to make it more extensible for third-party modules
* header manipulation like the HTTP module's proxy_set_header
* built-in variable support
* custom log format
* syslog support
* FTP/IRC proxying

= Known Issues =

* This module can't share a listening port with the HTTP module.

= Changelog =

== v0.2.0 ==

* add ssl proxy module
* add websocket proxy module
* add upstream busyness module
* add tcp access log module

== v0.19 ==

* add many check methods

== v0.1 ==

* first release

= Authors =

Weibin Yao (姚伟斌) ''yaoweibin at gmail dot com''

= Copyright & License =
|
||||
|
||||
This README template copy from [http://github.com/agentzh agentzh].
|
||||
|
||||
I borrowed a lot of code from upstream and mail module from the nginx 0.7.* core. This part of code is copyrighted by Igor Sysoev. And the health check part is borrowed the design of Jack Lindamood's healthcheck module [http://github.com/cep21/healthcheck_nginx_upstreams healthcheck_nginx_upstreams];
|
||||
|
||||
This module is licensed under the BSD license.
|
||||
|
||||
Copyright (C) 2013 by Weibin Yao <yaoweibin@gmail.com>.
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
samples/Metal/ITMVisualisationEngine.metal (new file, 99 lines)
@@ -0,0 +1,99 @@
// Copyright 2014 Isis Innovation Limited and the authors of InfiniTAM

#include <metal_stdlib>

#include "../../DeviceAgnostic/ITMSceneReconstructionEngine.h"
#include "../../DeviceAgnostic/ITMVisualisationEngine.h"
#include "ITMVisualisationEngine_Metal.h"

using namespace metal;

kernel void genericRaycastVH_device(DEVICEPTR(Vector4f) *pointsRay [[ buffer(0) ]],
                                    const CONSTPTR(ITMVoxel) *voxelData [[ buffer(1) ]],
                                    const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(2) ]],
                                    const CONSTPTR(Vector2f) *minmaxdata [[ buffer(3) ]],
                                    const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
                                    uint2 threadIdx [[ thread_position_in_threadgroup ]],
                                    uint2 blockIdx [[ threadgroup_position_in_grid ]],
                                    uint2 blockDim [[ threads_per_threadgroup ]])
{
    int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;

    if (x >= params->imgSize.x || y >= params->imgSize.y) return;

    int locId = x + y * params->imgSize.x;
    int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;

    castRay<ITMVoxel, ITMVoxelIndex>(pointsRay[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
                                     params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
}

kernel void genericRaycastVGMissingPoints_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
                                                 const CONSTPTR(int) *fwdProjMissingPoints [[ buffer(1) ]],
                                                 const CONSTPTR(ITMVoxel) *voxelData [[ buffer(2) ]],
                                                 const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(3) ]],
                                                 const CONSTPTR(Vector2f) *minmaxdata [[ buffer(4) ]],
                                                 const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(5) ]],
                                                 uint2 threadIdx [[ thread_position_in_threadgroup ]],
                                                 uint2 blockIdx [[ threadgroup_position_in_grid ]],
                                                 uint2 blockDim [[ threads_per_threadgroup ]])
{
    int pointId = threadIdx.x + blockIdx.x * blockDim.x;

    if (pointId >= params->imgSize.z) return;

    int locId = fwdProjMissingPoints[pointId];
    int y = locId / params->imgSize.x, x = locId - y * params->imgSize.x;
    int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;

    castRay<ITMVoxel, ITMVoxelIndex>(forwardProjection[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
                                     params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
}

kernel void renderICP_device(const CONSTPTR(Vector4f) *pointsRay [[ buffer(0) ]],
                             DEVICEPTR(Vector4f) *pointsMap [[ buffer(1) ]],
                             DEVICEPTR(Vector4f) *normalsMap [[ buffer(2) ]],
                             DEVICEPTR(Vector4u) *outRendering [[ buffer(3) ]],
                             const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
                             uint2 threadIdx [[ thread_position_in_threadgroup ]],
                             uint2 blockIdx [[ threadgroup_position_in_grid ]],
                             uint2 blockDim [[ threads_per_threadgroup ]])
{
    int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;

    if (x >= params->imgSize.x || y >= params->imgSize.y) return;

    processPixelICP<false>(outRendering, pointsMap, normalsMap, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
}

kernel void renderForward_device(DEVICEPTR(Vector4u) *outRendering [[ buffer(0) ]],
                                 const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
                                 const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
                                 uint2 threadIdx [[ thread_position_in_threadgroup ]],
                                 uint2 blockIdx [[ threadgroup_position_in_grid ]],
                                 uint2 blockDim [[ threads_per_threadgroup ]])
{
    int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;

    if (x >= params->imgSize.x || y >= params->imgSize.y) return;

    processPixelForwardRender<false>(outRendering, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
}

kernel void forwardProject_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
                                  const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
                                  const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
                                  uint2 threadIdx [[ thread_position_in_threadgroup ]],
                                  uint2 blockIdx [[ threadgroup_position_in_grid ]],
                                  uint2 blockDim [[ threads_per_threadgroup ]])
{
    int x = (threadIdx.x + blockIdx.x * blockDim.x), y = (threadIdx.y + blockIdx.y * blockDim.y);

    if (x >= params->imgSize.x || y >= params->imgSize.y) return;

    int locId = x + y * params->imgSize.x;
    Vector4f pixel = pointsRay[locId];

    int locId_new = forwardProjectPixel(pixel * params->voxelSizes.x, params->M, params->projParams, params->imgSize.xy);
    if (locId_new >= 0) forwardProjection[locId_new] = pixel;
}

samples/NCL/PrnOscPat_driver.ncl (new file, 109 lines)
@@ -0,0 +1,109 @@
undef("PrnOscPat_driver")
function PrnOscPat_driver(eof[*][*][*]:numeric, eof_ts[*][*]:numeric, kPOP[1]:integer)
; =================================================================
; compute Principal Oscillation Patterns (POPs)
; =================================================================
local dim_ts, dim_eof, neof, ntim, nlat, mlon, dnam_ts, dnam_eof, neof, j \
    , cov0, cov1, cov0_inverse, A, z, Z, pr, pi, zr, zi, mean, stdev \
    , evlr, eigi, eigr
begin

  dim_ts  = dimsizes(eof_ts)     ; (neof,ntim)
  dim_eof = dimsizes(eof)        ; (neof,nlat,mlon)

  ntim    = dim_ts(1)
  neof    = dim_eof(0)
  nlat    = dim_eof(1)
  mlon    = dim_eof(2)

  dnam_ts = getvardims(eof_ts)   ; dimension names
  dnam_eof= getvardims(eof)      ; used at end for meta data

; =================================================================
; lag-0 and lag-1 matrices
; =================================================================

  if (get_ncl_version().eq."6.1.2") then       ; bug in 6.1.2
      cov0 = covcorm(eof_ts,(/1,0/))           ; lag-0 covariance matrix
  else
      cov0 = covcorm(eof_ts,(/0,1/))           ; lag-0 covariance matrix (n x n)
  end if
; either
  cov1 = covcorm_xy(eof_ts, eof_ts, (/0,1,0/)) ; lag-1
;cov1 = covcorm_xy(eof_ts(:,0:ntim-2) \        ; alternative, brute force
;                 ,eof_ts(:,1:ntim-1), (/0,0,0/))
;printVarSummary(cov1)

; =================================================================
; matrix A contains information for evolution of the POP system.
; POPs are eigenvectors of A.
; =================================================================

  cov0_inverse = inverse_matrix(cov0)
  A = cov1#inverse_matrix(cov0)                ; [*][*] => neof x neof

; =================================================================
; NCL 6.1.1 of dgeevx: evlr(2,2,N,N) ; (left(0)/right(1), real(0)/imag(1),:,:)
; Eigenvalues are returned as attributes: eigi = evlr@eigi ; eigr = evlr@eigr
; =================================================================

  evlr = dgeevx_lapack(A, "B", "V", "V", "B", False)

; =================================================================
; POP time series from eigenvalues and right eigenvectors
; =================================================================
;PR = (/ evlr(1,0,:,:) /)        ; right ev (1), real part (0)
;PI = (/ evlr(1,1,:,:) /)        ; right ev (1), imag part (1)
; kPOP is what we want; use right eigenvector
  pr = (/ evlr(1,0,kPOP-1,:) /)  ; right ev (1), real part (0), row 'kPOP-1'
  pi = (/ evlr(1,1,kPOP-1,:) /)  ; right ev (1), imag part (1), row 'kPOP-1'

  z = inverse_matrix( (/ (/sum(pr*pr), sum(pr*pi)/) \
                       , (/sum(pr*pi), sum(pi*pi)/) /))#(/pr,pi/)#eof_ts

; complex conjugate
  z = (/z(0,:), -z(1,:)/)        ; real & imag series
  z = dim_rmvmean_n(z,1)
  mean = dim_avg_n(z,1)          ; calculate mean
  stdev= dim_stddev_n(z,1)       ; calculate stdev
  z = dim_standardize_n(z,1,1)   ; standardize time series

  z!0 = "nPOP"                   ; add meta data
  z!1 = dnam_ts(1)
  z&nPOP = (/0,1/)
  z&$dnam_ts(1)$ = eof_ts&$dnam_ts(1)$
  z@stdev = stdev
  z@mean = mean
  z@long_name = "POP timeseries"
;printVarSummary(z)

; =================================================================
; POP spatial patterns
; =================================================================

  zr = pr(0)*eof(0,:,:)          ; construct POP spatial domain
  zi = pi(0)*eof(0,:,:)
  do j=1,neof-1
     zr = zr + pr(j)*eof(j,:,:)
     zi = zi + pi(j)*eof(j,:,:)
  end do

  Z = (/zr*stdev(0), -zi*stdev(1)/)  ; scale patterns by time series stdev

  Z!0 = "nPOP"                   ; add meta data
  Z!1 = dnam_eof(1)
  Z!2 = dnam_eof(2)

  Z&nPOP = (/0,1/)
  Z&$dnam_eof(1)$ = eof&$dnam_eof(1)$
  Z&$dnam_eof(2)$ = eof&$dnam_eof(2)$
  Z@long_name = "POP pattern"
;printVarSummary(Z)

; =================================================================
; return POP time series and POP spatial patterns as a
; variable of type 'list' which contains 2 variables
; =================================================================

  return( [/z, Z/] )             ; this is type "list"
end

samples/NCL/WRF_static_2.ncl (new file, 115 lines)
@@ -0,0 +1,115 @@
;*************************************************
; WRF static: panel different variables
;************************************************
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/wrf/WRF_contributed.ncl"
begin
;************************************************
; open file and read in data
;************************************************
  f = addfile("static.wrfsi.nc", "r")
;************************************************
; Read variables
;************************************************
  use    = f->use(0,0,:,:)   ; land use dominant category
  stl    = f->stl(0,0,:,:)   ; top layer (0-30cm) dom cat soiltype
  sbl    = f->sbl(0,0,:,:)   ; bottom layer (30-90cm) dom cat soiltype
  lat2d  = f->lat(0,0,:,:)
  lon2d  = f->lon(0,0,:,:)
  lsMask = f->lnd(0,0,:,:)   ; land (1) water (0) mask

;************************************************
; Use mask function to set all ocean areas to _FillValue
;************************************************
  use = mask(use,lsMask,1)
  stl = mask(stl,lsMask,1)
  sbl = mask(sbl,lsMask,1)

;************************************************
; Associate 2D coordinates with variables for plotting
;************************************************
  use@lat2d = lat2d
  use@lon2d = lon2d
  stl@lat2d = lat2d
  stl@lon2d = lon2d
  sbl@lat2d = lat2d
  sbl@lon2d = lon2d

;************************************************
; The file should be examined via: ncdump -v grid_type static.wrsi
; This will print the projection type; then enter it below.
;************************************************
  projection = "mercator"

;************************************************
; create plots
;************************************************
  wks = gsn_open_wks("ps" ,"WRF_static")          ; ps,pdf,x11,ncgm,eps
  gsn_define_colormap(wks ,"BlAqGrYeOrReVi200")   ; choose colormap

  res                       = True                ; plot mods desired
  res@gsnSpreadColors       = True                ; use full range of colormap
  res@cnFillOn              = True                ; color plot desired
  res@cnLinesOn             = False               ; turn off contour lines
  res@cnLineLabelsOn        = False               ; turn off contour labels
  res@cnLevelSpacingF       = 1                   ; manually specify interval
  res@cnFillMode            = "RasterFill"        ; activate raster mode
  res@lbLabelAutoStride     = True                ; let NCL figure lb stride

;************************************************
; Turn on lat / lon labeling
;************************************************
;;res@pmTickMarkDisplayMode = "Always"            ; turn on tickmarks

  dimll = dimsizes(lat2d)
  nlat  = dimll(0)
  mlon  = dimll(1)

  res@mpProjection          = projection
  res@mpLimitMode           = "Corners"
  res@mpLeftCornerLatF      = lat2d(0,0)
  res@mpLeftCornerLonF      = lon2d(0,0)
  res@mpRightCornerLatF     = lat2d(nlat-1,mlon-1)
  res@mpRightCornerLonF     = lon2d(nlat-1,mlon-1)

  res@mpCenterLonF          = f->LoV              ; set center longitude

  if (projection.eq."LambertConformal") then
     res@mpLambertParallel1F = f->Latin1
     res@mpLambertParallel2F = f->Latin2
     res@mpLambertMeridianF  = f->LoV
  end if

  res@mpFillOn              = False                       ; turn off map fill
  res@mpOutlineDrawOrder    = "PostDraw"                  ; draw continental outline last
  res@mpOutlineBoundarySets = "GeophysicalAndUSStates"    ; state boundaries

;;res@tfDoNDCOverlay        = True                ; True only for 'native' grid
  res@gsnAddCyclic          = False               ; data are not cyclic

;************************************************
; allocate array for 3 plots
;************************************************
  plts = new (3,"graphic")

;************************************************
; Tell NCL not to draw or advance frame for individual plots
;************************************************
  res@gsnDraw  = False      ; (a) do not draw
  res@gsnFrame = False      ; (b) do not advance 'frame'

  plts(0) = gsn_csm_contour_map(wks,use,res)
  plts(1) = gsn_csm_contour_map(wks,stl,res)
  plts(2) = gsn_csm_contour_map(wks,sbl,res)
;************************************************
; create panel: panel plots have their own set of resources
;************************************************
  resP                  = True                   ; modify the panel plot
  resP@txString         = "Land Use and Soil Type"
  resP@gsnMaximize      = True                   ; maximize panel area
  resP@gsnPanelRowSpec  = True                   ; specify 1 top, 2 lower level
  gsn_panel(wks,plts,(/1,2/),resP)               ; now draw as one plot

end

samples/NCL/WRF_track_1.ncl (new file, 160 lines)
@@ -0,0 +1,160 @@
;********************************************************
; Plot storm tracks from wrfout files.
;********************************************************
;
; JUN-18-2005
; So-Young Ha (MMM/NCAR)
; SEP-01-2006
; Slightly modified by Mary Haley to add some extra comments.
; ===========================================

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/wrf/WRF_contributed.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/wrf/WRFUserARW.ncl"


begin

; DATES
  date  = (/1512,1600,1612,1700,1712,1800,1812,1900/)
  ndate = dimsizes(date)

  sdate = sprinti("%4.0i",date)

; Experiment name (for legend)
  EXP  = (/"EXP_I"/)   ; (/"EXP_I","EXP_II","EXP_III"/)
  nexp = dimsizes(EXP)

; To get lat/lon info.

  a = addfile("wrfout_d01_2003-07-15_00:00:00.nc","r")

  lat2d = a->XLAT(0,:,:)
  lon2d = a->XLONG(0,:,:)
  dimll = dimsizes(lat2d)
  nlat  = dimll(0)
  mlon  = dimll(1)

; Sea Level Pressure
  slp  = wrf_user_getvar(a,"slp",0)
  dims = dimsizes(slp)

; Array for track
  time = new(ndate,string)
  imin = new(ndate,integer)
  jmin = new(ndate,integer)
  smin = new(ndate,integer)

; =======
; ndate
; =======
  fs = systemfunc("ls wrfout*00")
  nfs= dimsizes(fs)
  if(nfs .ne. ndate) then
    print("Check input data:"+nfs+" .ne. "+ndate)
  end if

  do ifs=0,nfs-1
    f = addfile(fs(ifs)+".nc","r")
    time(ifs) = wrf_user_list_times(f)
;   print(time(ifs))
    slp2d = wrf_user_getvar(f,"slp",0)

; We need to convert 2-D array to 1-D array to find the minima.
    slp1d     = ndtooned(slp2d)
    smin(ifs) = minind(slp1d)

; Convert the index for 1-D array back to the indices for 2-D array.
    minij     = ind_resolve(ind(slp1d.eq.min(slp2d)),dims)
    imin(ifs) = minij(0,0)
    jmin(ifs) = minij(0,1)

;   print(time(ifs)+" : "+min(slp2d)+" ("+imin(ifs)+","+jmin(ifs)+")")
  end do
;

; Graphics section

  wks=gsn_open_wks("ps","track")                 ; Open PS file.
  gsn_define_colormap(wks,"BlGrYeOrReVi200")     ; Change color map.

  res             = True
  res@gsnDraw     = False                        ; Turn off draw.
  res@gsnFrame    = False                        ; Turn off frame advance.
  res@gsnMaximize = True                         ; Maximize plot in frame.

  res@tiMainString = "Hurricane Isabel"          ; Main title

  WRF_map_c(a,res,0)                             ; Set up map resources
                                                 ; (plot options)
  plot = gsn_csm_map(wks,res)                    ; Create a map.

; Set up resources for polymarkers.
  gsres               = True
  gsres@gsMarkerIndex = 16                       ; filled dot
;gsres@gsMarkerSizeF = 0.005                     ; default - 0.007
  cols = (/5,160,40/)

; Set up resources for polylines.
  res_lines                  = True
  res_lines@gsLineThicknessF = 3.                ; 3x as thick

  dot  = new(ndate,graphic)   ; Make sure each gsn_add_polyxxx call
  line = new(ndate,graphic)   ; is assigned to a unique variable.

; Loop through each date and add polylines to the plot.
  do i = 0,ndate-2
    res_lines@gsLineColor = cols(0)
    xx=(/lon2d(imin(i),jmin(i)),lon2d(imin(i+1),jmin(i+1))/)
    yy=(/lat2d(imin(i),jmin(i)),lat2d(imin(i+1),jmin(i+1))/)
    line(i) = gsn_add_polyline(wks,plot,xx,yy,res_lines)
  end do

  lon1d = ndtooned(lon2d)
  lat1d = ndtooned(lat2d)

; Loop through each date and add polymarkers to the plot.
  do i = 0,ndate-1
    print("dot:"+lon1d(smin(i))+","+lat1d(smin(i)))
    gsres@gsMarkerColor = cols(0)
    dot(i)=gsn_add_polymarker(wks,plot,lon1d(smin(i)),lat1d(smin(i)),gsres)
  end do

; Date (Legend)
  txres               = True
  txres@txFontHeightF = 0.015
  txres@txFontColor   = cols(0)

  txid1 = new(ndate,graphic)
; Loop through each date and draw a text string on the plot.
  do i = 0, ndate-1
    txres@txJust = "CenterRight"
    ix = smin(i) - 4
    print("Eye:"+ix)
    if(i.eq.1) then
      txres@txJust = "CenterLeft"
      ix = ix + 8
    end if
    txid1(i) = gsn_add_text(wks,plot,sdate(i),lon1d(ix),lat1d(ix),txres)
  end do

; Add marker and text for legend. (Or you can just use "pmLegend" instead.)
  txres@txJust = "CenterLeft"

  txid2 = new(nexp,graphic)
  pmid2 = new(nexp,graphic)
  do i = 0,nexp-1
    gsres@gsMarkerColor = cols(i)
    txres@txFontColor   = cols(i)
    ii = ((/129,119,109/))   ; ilat
    jj = ((/110,110,110/))   ; jlon
    ji = ii*mlon+jj          ; col x row
    pmid2(i) = gsn_add_polymarker(wks,plot,lon1d(ji(i)),lat1d(ji(i)),gsres)
    txid2(i) = gsn_add_text(wks,plot,EXP(i),lon1d(ji(i)+5),lat1d(ji(i)),txres)
  end do

  draw(plot)
  frame(wks)
end

samples/NCL/cru_8.ncl (new file, 129 lines)
@@ -0,0 +1,129 @@
;*****************************************************
; cru_8.ncl
;
; Concepts illustrated:
;   - Plotting CRU (Climate Research Unit)/ BADC data
;   - Selecting a sub-period
;   - calculating a climatology
;   - Drawing raster contours; very basic graphics
;
;*****************************************************

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"    ; not needed 6.20 onward
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"

; create references (pointers) to the files

  diri = "./"
  fcld = addfile(diri+"cru_ts3.21.1901.2012.cld.dat.nc", "r")
  fdtr = addfile(diri+"cru_ts3.21.1901.2012.dtr.dat.nc", "r")
  ffrs = addfile(diri+"cru_ts3.21.1901.2012.frs.dat.nc", "r")
  fpet = addfile(diri+"cru_ts3.21.1901.2012.pet.dat.nc", "r")
  fpre = addfile(diri+"cru_ts3.21.1901.2012.pre.dat.nc", "r")
  ftmn = addfile(diri+"cru_ts3.21.1901.2012.tmn.dat.nc", "r")
  ftmp = addfile(diri+"cru_ts3.21.1901.2012.tmp.dat.nc", "r")
  ftmx = addfile(diri+"cru_ts3.21.1901.2012.tmx.dat.nc", "r")
  fvap = addfile(diri+"cru_ts3.21.1901.2012.vap.dat.nc", "r")
  fwet = addfile(diri+"cru_ts3.21.1901.2012.wet.dat.nc", "r")

; specify start & last dates (arbitrary)

  ymStrt = 199101
  ymLast = 200012

; get index values of start/last dates

  time   = fcld->time
  yyyymm = cd_calendar(time, -1)

  ntStrt = ind(yyyymm.eq.ymStrt)   ; index values
  ntLast = ind(yyyymm.eq.ymLast)

; read time segment

  cld = fcld->cld(ntStrt:ntLast,:,:)
  dtr = fdtr->dtr(ntStrt:ntLast,:,:)
  frs = ffrs->frs(ntStrt:ntLast,:,:)
  pet = fpet->pet(ntStrt:ntLast,:,:)
  pre = fpre->pre(ntStrt:ntLast,:,:)
  tmn = ftmn->tmn(ntStrt:ntLast,:,:)
  tmp = ftmp->tmp(ntStrt:ntLast,:,:)
  tmx = ftmx->tmx(ntStrt:ntLast,:,:)
  vap = fvap->vap(ntStrt:ntLast,:,:)
  wet = fwet->wet(ntStrt:ntLast,:,:)

  printVarSummary(cld)   ; [time | 120] x [lat | 360] x [lon | 720]

; calculate monthly climatologies

  cldclm = clmMonTLL(cld)
  dtrclm = clmMonTLL(dtr)
  frsclm = clmMonTLL(frs)
  petclm = clmMonTLL(pet)
  preclm = clmMonTLL(pre)
  tmnclm = clmMonTLL(tmn)
  tmpclm = clmMonTLL(tmp)
  tmxclm = clmMonTLL(tmx)
  vapclm = clmMonTLL(vap)
  wetclm = clmMonTLL(wet)


  printVarSummary(cldclm)   ; [month | 12] x [lat | 360] x [lon | 720]

;************************************
; create plots ... very simple
;************************************

  nt     = 6
  month  = "July"
  yrStrt = ymStrt/100
  yrLast = ymLast/100
  title  = month+": "+yrStrt+"-"+yrLast

  wks = gsn_open_wks("ps","cru")           ; open a ps file
  gsn_define_colormap(wks,"ncl_default")   ; choose colormap; not needed 6.20 onward
  plot = new(2,graphic)                    ; create graphic array

  res              = True
  res@cnFillOn     = True                  ; turn on color fill; not needed 6.20 onward
  res@cnFillMode   = "RasterFill"          ; Raster Mode
  res@cnLinesOn    = False                 ; Turn off contour lines

  res@gsnDraw      = False                 ; do not draw picture
  res@gsnFrame     = False                 ; do not advance frame
  res@lbOrientation = "Vertical"           ; vertical label bar

  resp             = True
  resp@gsnMaximize = True                  ; make ps, eps, pdf large

  resp@txString    = title+": CLD, FRS"
  plot(0)=gsn_csm_contour_map_ce(wks,cldclm(nt,:,:),res)
  plot(1)=gsn_csm_contour_map_ce(wks,frsclm(nt,:,:),res)
  gsn_panel(wks,plot,(/2,1/),resp)

  resp@txString    = title+": PET, VAP"
  plot(0)=gsn_csm_contour_map_ce(wks,petclm(nt,:,:),res)
  plot(1)=gsn_csm_contour_map_ce(wks,vapclm(nt,:,:),res)
  gsn_panel(wks,plot,(/2,1/),resp)

  resp@txString    = title+": TMN, TMX"
  plot(0)=gsn_csm_contour_map_ce(wks,tmnclm(nt,:,:),res)
  plot(1)=gsn_csm_contour_map_ce(wks,tmxclm(nt,:,:),res)
  gsn_panel(wks,plot,(/2,1/),resp)

  resp@txString    = title+": TMP, DTR"
  plot(0)=gsn_csm_contour_map_ce(wks,tmpclm(nt,:,:),res)
  plot(1)=gsn_csm_contour_map_ce(wks,dtrclm(nt,:,:),res)
  gsn_panel(wks,plot,(/2,1/),resp)

  resp@txString    = title+": WET, PRE"
  plot(0)=gsn_csm_contour_map_ce(wks,wetclm(nt,:,:),res)

;colors = (/ ... /)
;res@cnFillPalette        = colors            ; optional: distinct colors for categories
  res@cnLevelSelectionMode = "ExplicitLevels"  ; use unequal spacing
  res@cnLevels = (/2.0,10,25,37.5,50,75,100,125,150,175,200,300,400,500,750/)

  plot(1)=gsn_csm_contour_map_ce(wks,preclm(nt,:,:),res)
  gsn_panel(wks,plot,(/2,1/),resp)

samples/NCL/gsn_csm_xy2_time_series_inputs.ncl (new file, 20 lines)
@@ -0,0 +1,20 @@
;******************** Inputs Regarding Input and Output Data *************************************

;netCDFFilePath = "NULL-MYD04_L2.051-MIL2ASAE.0022-AERONET_AOD_L2.2-20112106165049.nc"
;outputFilePath = "plot-output"

;******************* Inputs Regarding Data Structure ***********************************************

;lPlotVariablesList = "mean_AERONET_AOD_L2_2_AOD0558intrp_Ames,mean_MIL2ASAE_0022_AOD0866b_Ames"
;rPlotVariablesList = "medn_MYD04_L2_051_AOD0550dpbl_l_Ames"

;xDimName = "time"
;xDimSize = 365

;******************* Inputs Regarding the View Annotations ****************************************

;title = "MAPSS Time Series"
;yLAxisLabel = "Mean AOD"
;yRAxisLabel = "Median AOD"

;*******************END INPUTS ********************************************************************

samples/NCL/hdf4sds_7.ncl (new file, 128 lines)
@@ -0,0 +1,128 @@
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
;**************************************************************
; User Input
;***************************************************************
; INPUT
  diri = "./"   ; input directory
  fili = "wv_LV3_MET08_20050102_12345678_L00013712E00013712.hdf"

  pltDir = "./"   ; directory for plot output
  sfx    = get_file_suffix(fili,1)
;pltName = sfx@fBase   ; output graphic name
  pltName = "hdf4sds"
  pltType = "ps"

;***************************************************************
; End User Input
;***************************************************************

;***************************************************************
; Open SEVIRI L3 'wv' HDF file
;***************************************************************
; Note the rather unusual data format: flag *prepended* to data value
;***************************************************************
; integer twc_lv3 ( fakeDim0, fakeDim1 )
;    long_name : total water vapour column + flag
;    units : fmmmm
;    format : I4
;    valid_range : ( 10000, 38000 )
;    _FillValue : -99
;    legend_01 : f = flag
;    legend_02 : f = 1 averaged level 2 values
;    legend_03 : f = 2 interpolated from averaged level 2 values
;    legend_04 : f = 3 gaps filled with NVAP climatology
;    legend_05 : mmmm = water vapour column in mm * 100. as integer
;    legend_06 : Example: 11025 means: flag = 1, 10.25 mm water vapour column
;    min_lat : -74.75
;    max_lat : 61.75
;    min_lon : -75.25
;    max_lon : 75.25
;    dlat : 0.5
;    dlon : 0.5
;---------------------------------------------------------------

  f   = addfile (diri+fili, "r")
  ifx = f->twc_lv3   ; fmmmm (integer)
  printVarSummary(ifx)

  flag = ifx/10000          ; extract flag
  ix   = ifx - flag*10000   ; extract mmmm
  x    = ix*0.01            ; scale

; create meta data for 'x'

  dimx = dimsizes(x)
  nlat = dimx(0)   ; grid size x(nlat,mlon)
  mlon = dimx(1)

  lat = fspan(ifx@min_lat, ifx@max_lat, nlat)
  lat@units = "degrees_north"
  lon = fspan(ifx@min_lon, ifx@max_lon, mlon)
  lon@units = "degrees_east"

  x!0 = "lat"
  x!1 = "lon"
  x&lat = lat
  x&lon = lon
  x@long_name = "SEVIRI: Total Water Vapor"
  x@units     = "mm"

  delete( [/ifx, ix/] )   ; no longer needed

;***************************************************************
; Create plot
;***************************************************************
  wks = gsn_open_wks(pltType, pltDir+pltName)

  plot = new (2, "graphic")

  res                      = True           ; plot mods desired
  res@gsnAddCyclic         = False          ; data not global
  res@gsnDraw              = False
  res@gsnFrame             = False

  res@cnFillOn             = True           ; turn on color fill
  res@cnLinesOn            = False          ; turn off contour lines
  res@cnFillMode           = "RasterFill"   ; Raster Mode
  res@cnLinesOn            = False          ; Turn off contour lines
  res@cnLineLabelsOn       = False          ; Turn off contour line labels
  res@cnMissingValFillColor= "background"   ; "foreground"

  res@mpCenterLonF         = 0.5*(min(x&lon) + max(x&lon))
  res@mpMinLatF            = min(x&lat)
  res@mpMaxLatF            = max(x&lat)
  res@mpMinLonF            = min(x&lon)
  res@mpMaxLonF            = max(x&lon)

;res@lbOrientation        = "Vertical"

  plot(0) = gsn_csm_contour_map_ce(wks,x, res)

; plot flag

  copy_VarCoords(x, flag)
  flag@long_name = "Flag"
  flag@units     = "1=avg(L2), 2=int(L2), 3=NVAP"
  print(flag&lat+"   "+flag(:,{30}))

  res@cnLevelSelectionMode = "ManualLevels"   ; set manual contour levels
  res@cnMinLevelValF       = 2                ; set min contour level
  res@cnMaxLevelValF       = 3                ; one less than max
  res@cnLevelSpacingF      = 1                ; set contour spacing

  res@lbLabelStrings       = ispan(1,3,1)     ; 1, 2, 3
  res@lbLabelPosition      = "Center"         ; label position
  res@lbLabelAlignment     = "BoxCenters"

  res@gsnLeftString        = ""
  res@gsnRightString       = ""
  res@gsnCenterString      = "flag: 1=avg(L2), 2=int(L2), 3=NVAP"

  plot(1) = gsn_csm_contour_map_ce(wks,flag, res)

  resP             = True   ; modify the panel plot
  resP@txString    = fili
  resP@gsnMaximize = True
  gsn_panel(wks,plot,(/1,2/),resP)   ; now draw as one plot

samples/NCL/mask_12.ncl (new file, 125 lines)
@@ -0,0 +1,125 @@
;----------------------------------------------------------------------
; mask_12.ncl
;
; Concepts illustrated:
;   - Using a worldwide shapefile to create a land/ocean mask
;   - Masking a data array based on a geographical area
;   - Attaching shapefile polylines to a map plot
;   - Attaching lat/lon points to a map using gsn_coordinates
;----------------------------------------------------------------------
; Downloaded GSHHS shapefiles from:
;
;  http://www.ngdc.noaa.gov/mgg/shorelines/data/gshhg/latest/
;
; Used the "coarsest" one: "GSHHS_shp/c/GSHHS_c_L1.shp".
;----------------------------------------------------------------------

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
load "./shapefile_mask_data.ncl"

;----------------------------------------------------------------------
; Main code
;----------------------------------------------------------------------
begin
  WRITE_MASK = True
  DEBUG      = False

;---Read data to plot and mask
  dir        = "$NCARG_ROOT/lib/ncarg/data/cdf/"
  cdf_prefix = "uv300"
  cdf_file   = dir + cdf_prefix + ".nc"
  fin        = addfile(cdf_file,"r")
  u          = fin->U(1,:,:)
;
; Create a mask array the same size as "u", using
; lat/lon data read off a shapefile.
;
  shpfile = "GSHHS_shp/c/GSHHS_c_L1.shp"
  opt             = True
  opt@return_mask = True

  land_mask = shapefile_mask_data(u,shpfile,opt)

;---Mask "u" against land and ocean.
  u_land_mask  = where(land_mask.eq.1,u,u@_FillValue)
  u_ocean_mask = where(land_mask.eq.0,u,u@_FillValue)
  copy_VarMeta(u,u_land_mask)
  copy_VarMeta(u,u_ocean_mask)

;---Start the graphics
  wks = gsn_open_wks("ps","mask")

  res = True

  res@gsnMaximize     = True    ; maximize plot in frame
  res@gsnDraw         = False   ; don't draw plot yet
  res@gsnFrame        = False   ; don't advance frame yet

  res@cnFillOn        = True
  res@cnLineLabelsOn  = False
  res@cnLinesOn       = False

;---Make sure both plots have same contour levels
  mnmxint = nice_mnmxintvl(min(u),max(u),25,False)
  res@cnLevelSelectionMode = "ManualLevels"
  res@cnMinLevelValF       = mnmxint(0)
  res@cnMaxLevelValF       = mnmxint(1)
  res@cnLevelSpacingF      = mnmxint(2)

  res@lbLabelBarOn    = False
  res@gsnAddCyclic    = False

  res@mpFillOn        = False
  res@mpOutlineOn     = False

  res@gsnRightString  = ""
  res@gsnLeftString   = ""

;---Create plot of original data and attach shapefile outlines
  res@tiMainString = "Original data with shapefile outlines"
  map_data = gsn_csm_contour_map(wks,u,res)
  dum1     = gsn_add_shapefile_polylines(wks,map_data,shpfile,False)

;---Create plots of masked data
  res@tiMainString = "Original data masked against land"
  map_land_mask  = gsn_csm_contour_map(wks,u_land_mask,res)
  res@tiMainString = "Original data masked against ocean"
  map_ocean_mask = gsn_csm_contour_map(wks,u_ocean_mask,res)

  if(DEBUG) then
    mkres = True
;   mkres@gsMarkerSizeF = 0.007
    mkres@gsnCoordsAttach = True
    gsn_coordinates(wks,map_data,u,mkres)
    mkres@gsnCoordsNonMissingColor = "yellow"
    mkres@gsnCoordsMissingColor    = "black"
    gsn_coordinates(wks,map_land_mask,u_land_mask,mkres)
    gsn_coordinates(wks,map_ocean_mask,u_ocean_mask,mkres)
  end if

;---Add shapefile outlines
  dum2 = gsn_add_shapefile_polylines(wks,map_land_mask,shpfile,False)
  dum3 = gsn_add_shapefile_polylines(wks,map_ocean_mask,shpfile,False)

;---Draw all three plots on one page
  pres                  = True
  pres@gsnMaximize      = True
  pres@gsnPanelLabelBar = True
  gsn_panel(wks,(/map_data,map_land_mask,map_ocean_mask/),(/3,1/),pres)

  if(WRITE_MASK) then
    delete(fin)   ; Close file before we open again.
;
; Make copy of file so we don't overwrite original.
; This is not necessary, but it's safer.
;
    new_cdf_file = cdf_prefix + "_with_mask.nc"
    system("/bin/cp " + cdf_file + " " + new_cdf_file)
    finout = addfile(new_cdf_file,"w")
    filevardef(finout, "land_mask", typeof(land_mask), (/ "lat", "lon" /) )
    finout->land_mask = (/land_mask/)
  end if
end

samples/NCL/mcsst_1.ncl (new file, 115 lines)
@@ -0,0 +1,115 @@
;*****************************************************
; mcsst_1.ncl
;
; Concepts illustrated:
;   - Plotting NAVO MCSST data
;   - Using fbindirread to read in fortran binary data
;   - Converting "byte" data to "float"
;   - Adding meta data (attributes and coordinates) to a variable
;   - Adding gray to an existing color map
;   - Spanning all but the last two colors in a color map for contour fill
;   - Drawing raster contours
;
;*****************************************************
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
;***************************************
; type of data available on file
;***************************************
; ipar=0 Weekly Binned Sea Surface Temperature
; ipar=1 Number of Points in Bin
; ipar=2 Weekly Binned Sea Surface Temperature Anomaly
; ipar=3 Interpolated Sea Surface Temperature
; ipar=4 Interpolated Sea Surface Temperature Anomaly
;***************************************
begin
  ipar  = 3
  fname = "2001311d18N16.dat"
  tmp   = fbindirread(fname,ipar,(/1024,2048/),"byte")
;***************************************
; convert to float and then change to true SST
;***************************************
  xslope = 0.15
  if(ipar.eq.4.or.ipar.eq.2)then   ; anom has different intercept
    yint = -20.0
  end if
  if(ipar.eq.3.or.ipar.eq.0)then
    yint = -3.0
  end if
  sst = new((/1024,2048/),"float")   ; create float var
  sst = tmp*xslope+yint              ; convert to float
  delete(tmp)                        ; delete unnecessary array
;***************************************
; assign missing values. The original missing value was zero, but since it was
; not assigned in NCL, it was not recognized. The new missing values are
; listed below. These will be changed later.
;***************************************
  if(ipar.eq.4)then
    sst@_FillValue = -20
  end if
  if(ipar.eq.3.or.ipar.eq.0)then
    sst@_FillValue = -3
  end if
;***************************************
; create coordinate variables
;***************************************
  nlat = 1024
  dy   = 180./nlat
  lat  = (90. -(ispan(0,1023,1)*dy))-dy/2
  lat!0 = "lat"
  lat&lat = lat
  lat@units = "degrees_north"

  nlon = 2048
  dx   = 360./nlon
  lon  = (ispan(0,2047,1)*dx)+dx/2-180.   ; note -180. added by sjm to align
  lon!0 = "lon"
  lon&lon = lon
  lon@units = "degrees_east"
;***************************************
; fill out the netCDF data model
;***************************************
  sst!0 = "lat"                    ; name dimensions
  sst!1 = "lon"                    ; ditto
  sst   = sst(::-1,:)              ; reverse lat orientation
  sst@long_name = "NAVO MCSST"     ; assign long_name
  sst@units     = "deg C"          ; assign units
  sst&lat = lat                    ; assign lat cv
  sst&lon = lon                    ; assign lon cv
  sst@_FillValue = -999.           ; assign missing value
;***************************************
; get year and day from filename
;***************************************
  res   = True                     ; plot mods desired
  title = stringtochar(fname)      ; parse file name to get date
  year  = title(0:3)
  jday  = title(4:6)
  res@gsnCenterString = year+" "+jday   ; create center string
;***************************************
; create plot
;***************************************
  wks = gsn_open_wks("ps","mcsst")             ; open workstation (plot destination)
  gsn_define_colormap(wks,"BlGrYeOrReVi200")   ; choose colormap
;
; This will not be necessary in V6.1.0 and later. Named colors can
; be used without having to first add them to the color map.
;
  d = NhlNewColor(wks,0.8,0.8,0.8)             ; add gray to colormap


  res@cnFillOn            = True         ; turn on color
  res@gsnSpreadColors     = True         ; use full range of colormap
  res@gsnSpreadColorStart = 2            ; start at color 2
  res@gsnSpreadColorEnd   = -3           ; don't use added gray
  res@cnLinesOn           = False        ; no contour lines
  res@cnFillDrawOrder     = "PreDraw"    ; draw contours before continents
  res@gsnMaximize         = True         ; maximize plot


; For a grid this size, it is better to use raster mode. It will be
; significantly faster, and will not go over NCL's 16mb default plot size.
  res@cnFillMode          = "RasterFill" ; turn on raster mode

  plot = gsn_csm_contour_map_ce(wks,sst,res)   ; contour the variable

end

samples/NCL/primero.ncl (new file, 3 lines)
@@ -0,0 +1,3 @@
val=102
a=val/4.
print(a)

samples/NCL/topo_9.ncl (new file, 172 lines)
@@ -0,0 +1,172 @@
;----------------------------------------------------------------------
; topo_9.ncl
;
; Concepts illustrated:
;   - Recreating a jpeg topographic image as an NCL map object
;   - Zooming in on a jpeg image
;   - Drawing a box around an area of interest on a map
;   - Attaching polylines to a map
;   - Using "overlay" to overlay multiple contour plots
;   - Using more than 256 colors per frame
;   - Using functions for cleaner code
;----------------------------------------------------------------------
; NOTE: This example will only work with NCL V6.1.0 and later.
;
; This script recreates a JPEG image that was converted to a NetCDF
; file with color separated bands using the open source tool
; "gdal_translate":
;
;   gdal_translate -ot Int16 -of netCDF EarthMap_2500x1250.jpg \
;                  EarthMap_2500x1250.nc
;----------------------------------------------------------------------
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"

;----------------------------------------------------------------------
; This function imports a JPEG image that's on the whole globe,
; and recreates it as an NCL map object that is zoomed in on the
; southern tip of Africa.
;----------------------------------------------------------------------
undef("recreate_jpeg_image")
function recreate_jpeg_image(wks,minlat,maxlat,minlon,maxlon)
begin
  orig_jpg_filename = "EarthMap_2500x1250.jpg"
  nc_filename       = "EarthMap_2500x1250.nc"

;--You could use a system call to do the NetCDF conversion
; cmd = "gdal_translate -ot Int16 -of netCDF " + jpeg_filename + \
;       " " + nc_filename)
; system(cmd)

;---Read the three bands of data
  f = addfile(nc_filename,"r")
  Band1 = where(f->Band1.gt.255, 255, f->Band1)   ; red channel
  Band2 = where(f->Band2.gt.255, 255, f->Band2)   ; green channel
  Band3 = where(f->Band3.gt.255, 255, f->Band3)   ; blue channel

  band_dims = dimsizes(Band3)
  nlat      = band_dims(0)
  nlon      = band_dims(1)
  print("dimensions of image = " + nlat + " x " + nlon)

;
; Add lat/lon data so we can overlay on a map, and/or
; overlay contours. We know the image is global,
; cylindrical equidistant, and centered about lon=0.
;
  lat = fspan( -90, 90,nlat)
  lon = fspan(-180,180,nlon)
  lat@units = "degrees_north"
  lon@units = "degrees_east"

  Band1!0 = "lat"
  Band1!1 = "lon"
  Band2!0 = "lat"
  Band2!1 = "lon"
  Band3!0 = "lat"
  Band3!1 = "lon"
  Band1&lat = lat
  Band1&lon = lon
  Band2&lat = lat
  Band2&lon = lon
  Band3&lat = lat
  Band3&lon = lon

  res = True

  res@gsnMaximize = True

  res@gsnFrame = False   ; Don't draw or advance
  res@gsnDraw  = False   ; frame yet.

  res@cnFillOn   = True
  res@cnFillMode = "RasterFill"   ; Raster fill can be faster

  res@cnLevelSelectionMode  = "EqualSpacedLevels"
  res@cnMaxLevelCount       = 254
  res@cnFillBackgroundColor = (/ 1., 1., 1., 1./)

  res@cnLinesOn      = False   ; Turn off contour lines
  res@cnLineLabelsOn = False   ; Turn off contour labels
  res@cnInfoLabelOn  = False   ; Turn off info label
  res@lbLabelBarOn   = False   ; Turn off labelbar
  res@gsnRightString = ""      ; Turn off subtitles
  res@gsnLeftString  = ""
  res@pmTickMarkDisplayMode = "Always"

;---Construct RGBA colormaps...
  ramp   = fspan(0., 1., 255)
  reds   = new((/255, 4/), float)
  greens = new((/255, 4/), float)
  blues  = new((/255, 4/), float)

  reds   = 0
  greens = 0
  blues  = 0

  reds(:,0)   = ramp
  greens(:,1) = ramp
  blues(:,2)  = ramp

; The red contour map is plotted fully opaque; the green and blue
; are plotted completely transparent. When overlain, the colors
; combine (rather magically).
  reds(:,3)   = 1.
  greens(:,3) = 0
  blues(:,3)  = 0

  res@cnFillColors = greens
  greenMap = gsn_csm_contour(wks, Band2, res)

  res@cnFillColors = blues
  blueMap = gsn_csm_contour(wks, Band3, res)

;---This will be our base, so make it a map plot.
  res@cnFillColors = reds
  res@gsnAddCyclic = False

  res@mpFillOn     = False

;---Zoom in on area of interest
  res@mpMinLatF = minlat
  res@mpMaxLatF = maxlat
  res@mpMinLonF = minlon
  res@mpMaxLonF = maxlon

  redMap = gsn_csm_contour_map(wks, Band1, res)

;---Overlay everything to create the topo map
  overlay(redMap, greenMap)
  overlay(redMap, blueMap)

  return(redMap)
end

;----------------------------------------------------------------------
; Main code
;----------------------------------------------------------------------
begin
;---Recreating jpeg images only works for X11 and PNG.
  wks = gsn_open_wks("png","topo")

;---Southern part of Africa
  minlat = -40
  maxlat =   5
  minlon =  10
  maxlon =  40

  map = recreate_jpeg_image(wks,minlat,maxlat,minlon,maxlon)

;---Overlay a red box
  lonbox = (/ 15, 35, 35, 15, 15/)
  latbox = (/-30,-30,-10,-10,-30/)

  lnres                  = True
  lnres@gsLineColor      = "red"   ; red box
  lnres@gsLineThicknessF = 4.0     ; make box thicker
  box = gsn_add_polyline(wks,map,lonbox,latbox,lnres)

  draw(map)   ; Drawing the map will draw the red box
  frame(wks)

end

samples/NCL/traj_3.ncl (new file, 120 lines)
@@ -0,0 +1,120 @@
;*************************************************
; traj_3.ncl
;*************************************************
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
external TRAJ "./particle.so"
;*************************************************
begin

  path = "./data.asc"
  data = asciiread(path,(/500,6/),"float")
;*************************************************
; some parameters
;*************************************************
  np  = 1
  nq  = 500
  ncor= 8
  xrot  = new((/np,nq/),float)
  yrot  = new((/np,nq/),float)
  xaxis = new(ncor,float)
  yaxis = new(ncor,float)
;**************************************************
; convert data into rotated format
;**************************************************
  TRAJ::particle(path,xrot,yrot,nq,np,xaxis,yaxis,ncor)
;**************************************************
; create plot
;**************************************************
  wks = gsn_open_wks("ps","traj")   ; Open a ps file

  xyres                = True
  xyres@gsnFrame       = False   ; don't advance the frame
  xyres@gsnDraw        = False   ; don't draw individual plots
  xyres@tmXTBorderOn   = False   ; don't draw top axis
  xyres@tmXBBorderOn   = False   ; don't draw bottom axis
  xyres@tmYRBorderOn   = False   ; don't draw right axis
  xyres@tmYLBorderOn   = False   ; don't draw left axis
  xyres@tmXTOn         = False   ; don't draw top-axis tick marks
  xyres@tmXBOn         = False   ; don't draw bottom-axis tick marks
  xyres@tmYROn         = False   ; don't draw right-axis tick marks
  xyres@tmYLOn         = False   ; don't draw left-axis tick marks

  xyres@xyLineColors     = (/"red"/)   ; set the line color to red
  xyres@xyLineThicknessF = 4.0         ; 4 times the line thickness

  xyres@trXMaxF = 15000    ; choose range of axis even though
  xyres@trXMinF = -10000   ; we don't see them
  xyres@trYMaxF = 1000
  xyres@trYMinF = -1000

  plot = gsn_xy(wks,xrot,yrot,xyres)   ; Draw trajectory
;**********************************************
; create arrays needed for the bounding box
;**********************************************
  a1 = new(5,float)
  b1 = new(5,float)
  a2 = new(5,float)
  b2 = new(5,float)
  a3 = new(2,float)
  b3 = new(2,float)
  a4 = new(2,float)
  b4 = new(2,float)
  a5 = new(2,float)
  b5 = new(2,float)
  a6 = new(2,float)
  b6 = new(2,float)
  a0 = new(2,float)
  b0 = new(2,float)
;**********************************************
; determine values of each bounding line from information
; returned from particle.f
;**********************************************
  a1(0:3) = xaxis(:3)
  b1(0:3) = yaxis(:3)
  a1(4)   = xaxis(0)
  b1(4)   = yaxis(0)

  a2(0:3) = xaxis(4:)
  b2(0:3) = yaxis(4:)
  a2(4)   = xaxis(4)
  b2(4)   = yaxis(4)

  a3 = xaxis(0:4:4)
  b3 = yaxis(0:4:4)
  a4 = xaxis(1:5:4)
  b4 = yaxis(1:5:4)

  a5 = xaxis(2:6:4)
  b5 = yaxis(2:6:4)
  a6 = xaxis(3:7:4)
  b6 = yaxis(3:7:4)

  a0(0) = xaxis(3)
  b0(0) = yaxis(3)
  a0(1) = xrot(0,0)
  b0(1) = yrot(0,0)
;***************************************************************
; create bounding box by drawing multiple xy plots on top of
; each other, each with their individual axis turned off.
;***************************************************************
  xyres@xyLineColors     = (/"black"/)   ; line color
  xyres@xyLineThicknessF = 1.0           ; regular line thickness

  bottom = gsn_xy(wks,a1,b1,xyres)   ; Draw the bottom bounding box.
  top    = gsn_xy(wks,a2,b2,xyres)   ; Draw the top bounding box.
  side1  = gsn_xy(wks,a3,b3,xyres)   ; Draw a side line.
  side2  = gsn_xy(wks,a4,b4,xyres)   ; Draw a side line.
  side3  = gsn_xy(wks,a5,b5,xyres)   ; Draw a side line.
  side4  = gsn_xy(wks,a6,b6,xyres)   ; Draw a side line.
;***************************************************************
; now draw a large brown line to represent the chimney
;***************************************************************
  xyres@xyLineColors     = (/"brown"/)   ; chimney color
  xyres@xyLineThicknessF = 9.0           ; thick line
  xyres@tiMainString     = "Pollutant Trajectory in a 3D Volume"
  chimney = gsn_xy(wks,a0,b0,xyres)      ; Draw the chimney.

  draw(wks)
  frame(wks)

end

167
samples/NCL/tsdiagram_1.ncl
Normal file
167
samples/NCL/tsdiagram_1.ncl
Normal file
@@ -0,0 +1,167 @@
|
||||
; Read potential temp (TEMP), salinity (SALT)
; Compute potential density (PD) for specified range PD(t,s)
; (use ncl function based on Yeager's algorithm for rho computation)
; Assumes annual and zonally averaged input data set (i.e., one time slice)
; Used K.Lindsay's "za" for zonal avg -- already binned into basins
; Plots temp vs salt (scatter plot), pd overlay

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl"

begin
; ================================> ; PARAMETERS
case = "PHC2_gx1v3"
ocnfile = "za_PHC2_T_S_gx1v3.nc"

depth_min = 14895.82 ; in cm, depth of first layer to be included
depth_max = 537499.9
;
; plot limits
;
smincn = 32.5
smaxcn = 37.0
tmincn = -2.
tmaxcn = 22.
;
; Choose basin index
;
; 0 = global 1 = southern ocean 2 = pacific 3 = indian 6 = atlantic
; 8 = labrador 9 = GIN 10 = arctic
;
bi = 2

;=====> basin check

if(bi.lt.0.or.bi.gt.10) then
print("basin index "+ bi + " not supported")
exit
end if

if(bi.eq.0) then
basin = "Global"
blab = "global"
end if
if(bi.eq.1) then
basin = "Southern Ocean"
blab = "so"
end if
if(bi.eq.2) then
basin = "Pacific Ocean"
blab = "pacific"
end if
if(bi.eq.3) then
basin = "Indian Ocean"
blab = "indian"
end if
if(bi.eq.6) then
basin = "Atlantic Ocean"
blab = "atlantic"
end if
if(bi.eq.8) then
basin = "Labrador Sea"
blab = "lab"
end if
if(bi.eq.9) then
basin = "GIN Sea"
blab = "gin"
end if
if(bi.eq.10) then
basin = "Arctic Ocean"
blab = "arctic"
end if

;=====> initial resource settings

wks = gsn_open_wks("ps","tsdiagram") ; Open a PostScript file

;===== data
focn = addfile(ocnfile, "r")
salt = focn->SALT(0,:,{depth_min:depth_max},:) ;(basins, z_t, lat_t)
temp = focn->TEMP(0,:,{depth_min:depth_max},:)

;==== section out the chosen basin
temp_ba = temp(bi,:,:)
salt_ba = salt(bi,:,:)

;===== put into scatter array format
tdata_ba = ndtooned(temp_ba)
sdata_ba = ndtooned(salt_ba)

ydata = tdata_ba
xdata = sdata_ba

;============== compute potential density (PD), using rho_mwjf
;
; for potential density, depth = 0. (i.e. density as if brought to surface)
;
;===========================================================================
; WARNING: T-S diagrams use POTENTIAL DENSITY... if depth is set to something
; other than 0, then you will be plotting density contours computed for the
; specified depth layer.
;===========================================================================

depth = 0. ;in meters
tspan = fspan(tmincn,tmaxcn,51)
sspan = fspan(smincn,smaxcn,51)

; the more points the better... using Yeager's numbers

t_range = conform_dims((/51,51/),tspan,0)
s_range = conform_dims((/51,51/),sspan,1)

pd = rho_mwjf(t_range,s_range,depth)
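; (note: rho_mwjf returns density in g/cm^3; it is rescaled to sigma units, kg/m^3, below)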

pd!0 = "temp"
pd!1 = "salt"
pd&temp = tspan
pd&salt = sspan
pd = 1000.*(pd-1.) ; Put into kg/m3 pot den units

; printVarSummary(pd)
; printVarInfo(pd,"rho_mwjf")

;=================Graphics

;--- scatter plot
res = True
res@gsnMaximize = True
res@gsnDraw = False
res@gsnFrame = False

res@xyMarkLineModes = "Markers"
res@xyMarkers = 16
res@xyMarkerColors = "black"
res@pmLegendDisplayMode = "Never"
res@txFontHeightF = 0.01
res@tiMainString = case + " ANN AVG: T-S Diagram"
res@tiXAxisString = salt@units
res@tiXAxisFontHeightF = 0.02
res@tiYAxisString = temp@units
res@tiYAxisFontHeightF = 0.02
res@trXMinF = smincn
res@trXMaxF = smaxcn
res@trYMinF = tmincn
res@trYMaxF = tmaxcn
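; depth_min/depth_max are in cm; divide by 100 to label the layer range in meters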
res@gsnRightString = depth_min/100. + "-"+depth_max/100. +"m"
res@gsnLeftString = basin

plot = gsn_csm_xy(wks,xdata,ydata,res)

;----- pd overlay
resov = True
resov@gsnDraw = False
resov@gsnFrame = False
resov@cnLevelSelectionMode = "AutomaticLevels"
resov@cnInfoLabelOn = False
resov@cnLineLabelPlacementMode = "Constant"
resov@cnLineLabelFontHeightF = 0.02

plotpd = gsn_csm_contour(wks,pd,resov)
overlay(plot,plotpd)

draw(plot)
frame(wks)

end
141 samples/NCL/unique_9.ncl Normal file
@@ -0,0 +1,141 @@
;************************************
; unique_9.ncl
;
; Concepts illustrated:
; - Drawing raster contours over a map
; - Creating a topography plot using raster contours
; - Reading data from binary files
; - Manually creating lat/lon coordinate arrays
; - Customizing a labelbar for a contour plot
;************************************
; This example generates a topo map over
; the area of Trinidad, Colorado.
;************************************
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"

begin

wks = gsn_open_wks("ps","unique")

;----------------- read the west binary data -------------------------
binfile = "trinidad-w.bin"

quad_name = fbinrecread(binfile,0,60,"character")

map_cornersW = fbinrecread(binfile,1,4,"double")

lonW = fbinrecread(binfile,2,(/1201/),"double")

latW = fbinrecread(binfile,3,(/1201/),"double")

minmax_elevW = fbinrecread(binfile,4,2,"double")

tmpW = fbinrecread(binfile,5,(/1201,1201/),"integer")

;----------------- read the east binary data -------------------------
binfile = "trinidad-e.bin"

quad_name = fbinrecread(binfile,0,60,"character")

map_cornersE = fbinrecread(binfile,1,4,"double")

lonE = fbinrecread(binfile,2,(/1201/),"double")

latE = fbinrecread(binfile,3,(/1201/),"double")

minmax_elevE = fbinrecread(binfile,4,2,"double")

tmpE = fbinrecread(binfile,5,(/1201,1201/),"integer")

;----------------------------------------------------------------------
min_elev = min((/minmax_elevW(0),minmax_elevE(0)/))*3.28
max_elev = max((/minmax_elevW(1),minmax_elevE(1)/))*3.28
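; (elevations in the binary files are in meters; 3.28 converts them to feet)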

lat = new(1201,"double")
lat = latW
lat!0 = "lat"
lat&lat = latW ; same as latE
lat@long_name = "latitude"
lat@units = "degrees_north"

lon = new(2401,"double")
lon(0:1200) = lonW
lon(1201:2400) = lonE(1:1200)
lon!0 = "lon"
lon&lon = lon
lon@long_name = "longitude"
lon@units = "degrees_east"

data = new((/1201,2401/),"float") ; (lat,lon)
data!0 = "lat"
data&lat = lat
data!1 = "lon"
data&lon = lon
data(:,0:1200) = (/tmpW*3.28/) ; convert to feet
data(:,1201:2400) = (/tmpE(:,1:1200)*3.28/) ; convert to feet
;-------------------------------------------------------------

;
; Define colormap.
;
cmap = (/(/1.00, 1.00, 1.00/),(/0.00, 0.00, 0.00/), \
(/0.51, 0.13, 0.94/),(/0.00, 0.00, 0.59/), \
(/0.00, 0.00, 0.80/),(/0.25, 0.41, 0.88/), \
(/0.12, 0.56, 1.00/),(/0.00, 0.75, 1.00/), \
(/0.63, 0.82, 1.00/),(/0.82, 0.96, 1.00/), \
(/1.00, 1.00, 0.78/),(/1.00, 0.88, 0.20/), \
(/1.00, 0.67, 0.00/),(/1.00, 0.43, 0.00/), \
(/1.00, 0.00, 0.00/),(/0.78, 0.00, 0.00/), \
(/0.63, 0.14, 0.14/),(/1.00, 0.41, 0.70/)/)

gsn_define_colormap(wks,cmap)

res = True
res@gsnMaximize = True
res@gsnAddCyclic = False

; map plot resources
res@mpFillOn = False
res@mpLimitMode = "Corners"
res@mpDataBaseVersion = "Ncarg4_1"
res@mpOutlineBoundarySets = "AllBoundaries"
res@mpLeftCornerLonF = map_cornersW(0)
res@mpLeftCornerLatF = map_cornersW(1)
res@mpRightCornerLonF = map_cornersE(2)
res@mpRightCornerLatF = map_cornersE(3)

; contour resources
res@cnFillOn = True
res@cnLinesOn = False
res@cnFillMode = "RasterFill"
res@cnLevelSelectionMode = "ExplicitLevels"
res@cnLevels = (/ 5000., 6000., 7000., 8000., 8500., 9000., \
9500.,10000.,10500.,11000.,11500.,12000., \
12500.,13000.,13500./)
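; (15 explicit levels give 16 bins, one for each of the 16 fill colors at
; colormap indices 2-17; indices 0 and 1 are the background and foreground)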

; tickmark resources
res@pmTickMarkDisplayMode = "Always"
res@tmXBLabelFontHeightF = 0.010

; labelbar resources
res@pmLabelBarWidthF = 0.60
res@txFontHeightF = 0.012
res@lbTitleString = "elevation above mean sea level (feet)"
res@lbTitleFontHeightF = 0.012
res@lbLabelFontHeightF = 0.008
res@lbTitleOffsetF = -0.27
res@lbBoxMinorExtentF = 0.15
res@pmLabelBarOrthogonalPosF = -.05

; title resources
res@tiMainString = "USGS DEM TRINIDAD (1 x 2 degrees)"
res@tiMainOffsetYF = -0.02 ; Move title down towards graphic.
res@tiMainFontHeightF = 0.015
res@gsnLeftString = "Min Elevation: "+min_elev
res@gsnRightString = "Max Elevation: "+max_elev
res@gsnCenterString = "Scale 1:250,000"

plot = gsn_csm_contour_map(wks,data,res)

end
131 samples/NCL/viewport_4.ncl Normal file
@@ -0,0 +1,131 @@
; ***********************************************
; viewport_4.ncl
;
; Concepts illustrated:
; - Drawing an XY plot with multiple curves
; - Using drawNDCGrid to draw a nicely labeled NDC grid
; - Changing the size/shape of an XY plot using viewport resources
; - Drawing two XY plots on the same page using viewport resources
; - Drawing polylines, polymarkers, and text in NDC space
; - Using "getvalues" to retrieve resource values
; - Maximizing plots after they've been created
; ***********************************************
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl"

;********************************************************************
; Draw a box around the viewport of the given object.
;********************************************************************
procedure draw_vp_box(wks,plot)
local vpx, vpy, vpw, vph, xbox, ybox, lnres, mkres, txres
begin

; Retrieve the viewport values of the drawable object.
getvalues plot
"vpXF" : vpx
"vpYF" : vpy
"vpWidthF" : vpw
"vpHeightF" : vph
end getvalues
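; (vpXF/vpYF locate the top-left corner of the viewport in NDC space;
; vpWidthF/vpHeightF give its width and height)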

; Set up some marker resources.
mkres = True
mkres@gsMarkerIndex = 16 ; filled dot
mkres@gsMarkerSizeF = 0.02 ; larger than default
mkres@gsMarkerColor = "Red"

; Draw a single marker at the vpXF/vpYF location.
gsn_polymarker_ndc(wks,vpx,vpy,mkres)


; Set up some text resources.
txres = True
txres@txJust = "BottomLeft"
txres@txFontHeightF = 0.018
txres@txFontColor = "Blue"
txres@txBackgroundFillColor = "white"

gsn_text_ndc(wks,"(vpXF="+vpx+", vpYF="+vpy+")",vpx,vpy+0.02,txres)
; Set up some line resources.
lnres = True
lnres@gsLineColor = "Red" ; line color
lnres@gsLineThicknessF = 2.0 ; twice as thick as the default

; Draw lines indicating the width and height
xline = (/vpx, vpx+vpw/)
yline = (/vpy-0.05,vpy-0.05/)
gsn_polyline_ndc(wks,xline,yline,lnres)

xline = (/vpx+0.05,vpx+0.05/)
yline = (/vpy,vpy-vph/)
gsn_polyline_ndc(wks,xline,yline,lnres)

txres@txJust = "CenterCenter"
gsn_text_ndc(wks,"vpWidthF = " + vpw,vpx+vpw/2.,vpy-0.05,txres)

txres@txAngleF = 90.
gsn_text_ndc(wks,"vpHeightF = " + vph,vpx+0.05,vpy-vph/2.,txres)
end

;********************************************************************
; Main code
;********************************************************************
begin
;************************************************
; read in data
;************************************************
f = addfile ("$NCARG_ROOT/lib/ncarg/data/cdf/uv300.nc","r")
u = f->U ; get u data
;************************************************
; plotting parameters
;************************************************
wks = gsn_open_wks ("ps","viewport") ; open workstation

res = True ; plot mods desired

res@gsnFrame = False ; don't advance frame yet

res@vpWidthF = 0.8 ; set width and height
res@vpHeightF = 0.3

; First plot
res@tiMainString = "Plot 1"

res@vpXF = 0.15
res@vpYF = 0.9 ; Higher on the page

plot1 = gsn_csm_xy (wks,u&lat,u(0,:,{82}),res) ; create plot

; Second plot
res@tiMainString = "Plot 2"

res@vpXF = 0.15 ; Same X location as first plot
res@vpYF = 0.4 ; Lower on the page

plot2 = gsn_csm_xy (wks,u&lat,u(0,:,{3}),res) ; create plot

; Advance the frame
frame(wks)

; Now draw the two plots with illustrations.

drawNDCGrid(wks) ; Draw helpful grid lines showing NDC square.

draw(plot1) ; Draw the two plots
draw(plot2)

draw_vp_box(wks,plot1) ; Draw boxes around the two viewports.
draw_vp_box(wks,plot2)

frame(wks) ; Advance the frame.


;
; Uncomment the next two lines if you want to maximize these plots for
; PS or PDF output.
;
; psres = True
; maximize_output(wks,psres) ; calls draw and frame for you
end
120 samples/NCL/weather_sym_6.ncl Normal file
@@ -0,0 +1,120 @@
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
|
||||
|
||||
begin
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;
|
||||
; Example of plotting station model data over a map
|
||||
; illustrating how the wind barb directions are adjusted
|
||||
; for the map projection.
|
||||
;
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;
|
||||
; City names.
|
||||
;
|
||||
cities = (/ "NCAR", "Seattle", "San Francisco", \
|
||||
"Los Angeles", "Billings", "El Paso", \
|
||||
"Houston", "Kansas City", "Minneapolis", \
|
||||
"Chicago", "Detroit", "Atlanta", \
|
||||
"Miami", "New York", "Eugene", \
|
||||
"Boise", "Salt Lake", "Phoenix", \
|
||||
"Albuquerque", "Bismarck", "Tulsa", \
|
||||
"Dallas", "Little Rock", "Lexington", \
|
||||
"Charlotte", "Norfolk", "Bangor" \
|
||||
/)
|
||||
city_lats = (/ 40.0, 47.6, 37.8, \
|
||||
34.1, 45.8, 31.8, \
|
||||
29.8, 39.1, 45.0, \
|
||||
41.9, 42.3, 33.8, \
|
||||
25.8, 40.8, 44.1, \
|
||||
43.6, 40.7, 33.5, \
|
||||
35.1, 46.7, 36.0, \
|
||||
32.8, 34.7, 38.1, \
|
||||
35.2, 36.8, 44.8 \
|
||||
/)
|
||||
city_lons = (/ -105.0, -122.3, -122.4, \
|
||||
-118.3, -108.5, -106.5, \
|
||||
-095.3, -094.1, -093.8, \
|
||||
-087.6, -083.1, -084.4, \
|
||||
-080.2, -074.0, -123.1, \
|
||||
-116.2, -111.9, -112.1, \
|
||||
-106.6, -100.8, -096.0, \
|
||||
-096.8, -092.3, -084.1, \
|
||||
-080.8, -076.3, -068.8 \
|
||||
/)
|
||||
;
|
||||
; Station model data for the 27 cities.
|
||||
;
|
||||
imdat = (/"11000000751126021360300004955054054600007757087712", \
|
||||
"11103100011104021080300004959055050600517043080369", \
|
||||
"11206200031102021040300004963056046601517084081470", \
|
||||
"11309300061000021020300004967057042602017125082581", \
|
||||
"11412400091002021010300004971058038602517166083592", \
|
||||
"11515500121004020000300004975050034603017207084703", \
|
||||
"11618600151006020030300004979051030603507248085814", \
|
||||
"11721700181008020050300004983052026604007289086925", \
|
||||
"11824800211009020070300004987053022604507323087036", \
|
||||
"11927900241011020110300004991054018605017364088147", \
|
||||
"11030000271013020130300004995055014605517405089258", \
|
||||
"11133100301015020170300004999056010606017446080369", \
|
||||
"11236200331017020200300004000057006606517487081470", \
|
||||
"11339300361019020230300004004058002607017528082581", \
|
||||
"11442400391021020250300004008050000607517569083692", \
|
||||
"11545500421023020270300004012051040608017603084703", \
|
||||
"11648600451025020290300004017052008608517644085814", \
|
||||
"11751700481027020310300004021053012609017685086925", \
|
||||
"11854800511029020330300004025054016609507726087036", \
|
||||
"11958900541031020360300004029055018610007767088147", \
|
||||
"11060000571033020380300004033056030610507808089258", \
|
||||
"11163100601035020410300004037057034611007849080369", \
|
||||
"11266200631037020430300004041058043611507883081470", \
|
||||
"11369300661039020470300004045050041612007924082581", \
|
||||
"11472400691041020500300004048051025612507965083692", \
|
||||
"11575500721043020530300004051052022613507996084703", \
|
||||
"11678600751048021580300004055053013614007337085814" \
|
||||
/)
|
||||
|
||||
;
|
||||
; Define a color map and open a workstation.
|
||||
;
|
||||
cmap = (/ \
|
||||
(/ 1., 1., 1. /), \ ; color index 0 - white
|
||||
(/ 0., 0., 0. /) \ ; color index 1 - black
|
||||
/)
|
||||
wks = gsn_open_wks("ps","weather_sym")
|
||||
gsn_define_colormap(wks,cmap)
|
||||
|
||||
;
|
||||
; Draw a world map.
|
||||
;
|
||||
mpres = True
|
||||
mpres@gsnFrame = False
|
||||
mpres@mpSatelliteDistF = 1.3
|
||||
mpres@mpOutlineBoundarySets = "USStates"
|
||||
mpres@mpCenterLatF = 40.
|
||||
mpres@mpCenterLonF = -97.
|
||||
mpres@mpCenterRotF = 35.
|
||||
map = gsn_map(wks,"Satellite",mpres)
|
||||
|
||||
;
|
||||
; Scale the station model plot (all aspects of the station
|
||||
; model plots are scaled as per the size of the wind barb).
|
||||
;
|
||||
wmsetp("wbs",0.018)
|
||||
;
|
||||
; In the middle of Nebraska, draw a wind barb for a north wind
|
||||
; with a magnitude of 15 knots.
|
||||
;
|
||||
wmbarbmap(wks,42.,-99.,0.,-15.)
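; (arguments are workstation, lat, lon, u, v; u=0, v=-15 is a
; 15-knot wind blowing from the north)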

;
; Draw the station model data at the selected cities. The call
; to wmsetp informs wmstnm that the wind barbs will be drawn over
; a map. To illustrate the adjustment for plotting the model
; data over a map, all winds are from the north.
;
wmsetp("ezf",1)
wmstnm(wks,city_lats,city_lons,imdat)

frame(wks)

end
151 samples/NCL/xy_29.ncl Normal file
@@ -0,0 +1,151 @@
; xy_29.ncl
;
; Concepts illustrated:
; - Reading data from an ASCII file with headers
; - Creating a separate procedure to create a specific plot
; - Attaching polymarkers to an XY plot
;
; This script was originally from Dr. Birgit Hassler (NOAA)
;****************************************************

load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"

;************************************************
; Plot Procedure
;************************************************
procedure plotTCOPolym(pltName[1]:string, pltType[1]:string, filName[1]:string \
,xTitle[1]:string , yTitle[1]:string \
,year[*]:numeric, y[*]:numeric)
local wks, res, ntim, gsres, MarkerCol, OldYear, i, xmarker, ymarker

begin
wks = gsn_open_wks(pltType,pltName)
gsn_define_colormap(wks,"default")

res = True
res@gsnMaximize = True ; make "ps", "eps", "pdf" large

res@vpHeightF = 0.5 ; change aspect ratio of plot
res@vpWidthF = 0.75
res@vpXF = 0.15 ; start plot at x ndc coord
res@tiXAxisString = xTitle
res@tiYAxisString = yTitle
res@tiMainString = filName

ntim = dimsizes(year)
res@trXMinF = year(0)-1
res@trXMaxF = year(ntim-1)+1

res@gsnDraw = False
res@gsnFrame = False
res@xyMarkLineMode = "markers"
res@xyMarker = 16
res@xyMarkerColor = "Background"
plot = gsn_csm_xy (wks,year,y,res) ; create plot framework

; add different color polymarkers for each year
gsres = True
MarkerCol = 2
OldYear = year(0)

do i=0,ntim-1
xmarker = year(i)
ymarker = y(i)

if (i.gt.0) then
if (year(i).gt.OldYear) then
MarkerCol = MarkerCol+1
end if
OldYear = year(i)
end if

gsres@gsMarkerColor = MarkerCol
gsres@gsMarkerIndex = 16
;gsres@gsMarkerSizeF = 15.0
; add (attach) polymarkers to existing plot object
plot@$unique_string("dum")$ = gsn_add_polymarker(wks,plot,xmarker,ymarker,gsres)
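; (attaching each marker id as a uniquely named plot attribute keeps the
; returned graphic alive; reusing one variable would overwrite earlier markers)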
end do

draw(plot)
frame(wks)
end

;***********************************************************
; MAIN
;***********************************************************
pltType = "ps" ; "ps", "eps", "png", "x11"

; read multiple ascii file names
;;fili = "Southpole_TCOTimeSeries_11.dat"

diri = "./"
fili = systemfunc("cd "+diri+" ; ls *TCOT*dat")
print(fili)

nfil = dimsizes(fili)

nhead= 4 ; number of header lines on ascii file(s)
ncol = 4 ; year, month, day, O3

do nf=0,nfil-1
sfx = get_file_suffix(fili(nf), 0) ; sfx = ".dat"
filx = sfx@fBase ; filx= "Southpole_TCOTimeSeries_11"
; read ascii files
data = readAsciiTable(diri+fili(nf), ncol, "float", nhead)
dimd = dimsizes(data)
ntim = dimd(0) ; # rows

year = toint( data(:,0) ) ; user decision ... convert to integer
mon = toint( data(:,1) )
day = toint( data(:,2) )

hour = new (ntim, "integer", "No_FillValue")
mn = new (ntim, "integer", "No_FillValue")
sec = new (ntim, "double" , "No_FillValue")
hour = 0
mn = 0
sec = 0d0
; create COARDS/udunits time variable
;;tunits = "days since 1900-01-01 00:00:0.0"
tunits = "days since "+year(0)+"-"+mon(0)+"-"+day(0)+" 00:00:0.0"
time = cd_inv_calendar(year,mon,day,hour,mn,sec,tunits, 0)
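; (cd_inv_calendar converts the year/month/day/hour/minute/second arrays
; into fractional days since the base date given in tunits)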
time!0 = "time"
time&time = time
;printVarSummary(time)

; create a Gregorian 'date' variable
date = year*10000 + mon*100 + day
date!0 = "time"
date@units = "yyyymmdd"
date&time = time
;printVarSummary(date)

O3 = data(:,3)
O3@long_name = "total column ozone"
O3@units = "DU"

O3!0 = "time"
O3&time = time
;printVarSummary(O3)
;print(" ")
;print(date+" "+time+" "+O3)

; plot
yTitle = O3@long_name
year@long_name = "YEAR"

plotTCOPolym (filx, pltType, fili(nf), year@long_name, yTitle, year, O3)

delete(time) ; delete ... size (# rows) may change in the next file
delete(date)
delete(year)
delete(mon )
delete(day )
delete(mn )
delete(sec )
delete(O3 )
delete(data)
end do
16 samples/Objective-C/Siesta.h Normal file
@@ -0,0 +1,16 @@
//
// Siesta.h
// Siesta
//
// Created by Paul on 2015/6/14.
// Copyright © 2015 Bust Out Solutions. MIT license.
//

#import <UIKit/UIKit.h>

//! Project version number for Siesta.
FOUNDATION_EXPORT double SiestaVersionNumber;

//! Project version string for Siesta.
FOUNDATION_EXPORT const unsigned char SiestaVersionString[];
8399 samples/PAWN/fixes.inc Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff