Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.

Compare view: 558 commits. The file changes in this range are shown below.
**.gitignore** (vendored): 2 changes (old and new lines interleaved)

```
@@ -4,3 +4,5 @@ benchmark/
lib/linguist/samples.json
/grammars
/node_modules
test/fixtures/ace_modes.json
/vendor/gems/
```
**.gitmodules** (vendored, new file): 629 lines added

```
@@ -0,0 +1,629 @@
[submodule "vendor/grammars/go-tmbundle"]
path = vendor/grammars/go-tmbundle
url = https://github.com/AlanQuatermain/go-tmbundle
[submodule "vendor/grammars/PHP-Twig.tmbundle"]
path = vendor/grammars/PHP-Twig.tmbundle
url = https://github.com/Anomareh/PHP-Twig.tmbundle
[submodule "vendor/grammars/sublime-cirru"]
path = vendor/grammars/sublime-cirru
url = https://github.com/Cirru/sublime-cirru
[submodule "vendor/grammars/Sublime-Logos"]
path = vendor/grammars/Sublime-Logos
url = https://github.com/Cykey/Sublime-Logos
[submodule "vendor/grammars/SublimeBrainfuck"]
path = vendor/grammars/SublimeBrainfuck
url = https://github.com/Drako/SublimeBrainfuck
[submodule "vendor/grammars/awk-sublime"]
path = vendor/grammars/awk-sublime
url = https://github.com/JohnNilsson/awk-sublime
[submodule "vendor/grammars/Sublime-SQF-Language"]
path = vendor/grammars/Sublime-SQF-Language
url = https://github.com/JonBons/Sublime-SQF-Language
[submodule "vendor/grammars/SCSS.tmbundle"]
path = vendor/grammars/SCSS.tmbundle
url = https://github.com/MarioRicalde/SCSS.tmbundle
[submodule "vendor/grammars/Sublime-REBOL"]
path = vendor/grammars/Sublime-REBOL
url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-Inform"]
path = vendor/grammars/Sublime-Inform
url = https://github.com/PogiNate/Sublime-Inform
[submodule "vendor/grammars/autoitv3-tmbundle"]
path = vendor/grammars/autoitv3-tmbundle
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
[submodule "vendor/grammars/Sublime-VimL"]
path = vendor/grammars/Sublime-VimL
url = https://github.com/SalGnt/Sublime-VimL
[submodule "vendor/grammars/boo-sublime"]
path = vendor/grammars/boo-sublime
url = https://github.com/Shammah/boo-sublime
[submodule "vendor/grammars/ColdFusion"]
path = vendor/grammars/ColdFusion
url = https://github.com/SublimeText/ColdFusion
[submodule "vendor/grammars/NSIS"]
path = vendor/grammars/NSIS
url = https://github.com/SublimeText/NSIS
[submodule "vendor/grammars/NimLime"]
path = vendor/grammars/NimLime
url = https://github.com/Varriount/NimLime
[submodule "vendor/grammars/gradle.tmbundle"]
path = vendor/grammars/gradle.tmbundle
url = https://github.com/alkemist/gradle.tmbundle
[submodule "vendor/grammars/Sublime-Loom"]
path = vendor/grammars/Sublime-Loom
url = https://github.com/ambethia/Sublime-Loom
[submodule "vendor/grammars/VBDotNetSyntax"]
path = vendor/grammars/VBDotNetSyntax
url = https://github.com/angryant0007/VBDotNetSyntax
[submodule "vendor/grammars/cool-tmbundle"]
path = vendor/grammars/cool-tmbundle
url = https://github.com/anunayk/cool-tmbundle
[submodule "vendor/grammars/Docker.tmbundle"]
path = vendor/grammars/Docker.tmbundle
url = https://github.com/asbjornenge/Docker.tmbundle
[submodule "vendor/grammars/jasmin-sublime"]
path = vendor/grammars/jasmin-sublime
url = https://github.com/atmarksharp/jasmin-sublime
[submodule "vendor/grammars/language-clojure"]
path = vendor/grammars/language-clojure
url = https://github.com/atom/language-clojure
[submodule "vendor/grammars/language-coffee-script"]
path = vendor/grammars/language-coffee-script
url = https://github.com/atom/language-coffee-script
[submodule "vendor/grammars/language-csharp"]
path = vendor/grammars/language-csharp
url = https://github.com/atom/language-csharp
[submodule "vendor/grammars/language-gfm"]
path = vendor/grammars/language-gfm
url = https://github.com/atom/language-gfm
[submodule "vendor/grammars/language-javascript"]
path = vendor/grammars/language-javascript
url = https://github.com/atom/language-javascript
[submodule "vendor/grammars/language-python"]
path = vendor/grammars/language-python
url = https://github.com/atom/language-python
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
[submodule "vendor/grammars/sublime-sourcepawn"]
path = vendor/grammars/sublime-sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/Sublime-Lasso"]
path = vendor/grammars/Sublime-Lasso
url = https://github.com/bfad/Sublime-Lasso
[submodule "vendor/grammars/chapel-tmbundle"]
path = vendor/grammars/chapel-tmbundle
url = https://github.com/bholt/chapel-tmbundle
[submodule "vendor/grammars/sublime-nginx"]
path = vendor/grammars/sublime-nginx
url = https://github.com/brandonwamboldt/sublime-nginx
[submodule "vendor/grammars/bro-sublime"]
path = vendor/grammars/bro-sublime
url = https://github.com/bro/bro-sublime
[submodule "vendor/grammars/sublime_man_page_support"]
path = vendor/grammars/sublime_man_page_support
url = https://github.com/carsonoid/sublime_man_page_support
[submodule "vendor/grammars/sublime-MuPAD"]
path = vendor/grammars/sublime-MuPAD
url = https://github.com/ccreutzig/sublime-MuPAD
[submodule "vendor/grammars/nesC.tmbundle"]
path = vendor/grammars/nesC.tmbundle
url = https://github.com/cdwilson/nesC.tmbundle
[submodule "vendor/grammars/haxe-sublime-bundle"]
path = vendor/grammars/haxe-sublime-bundle
url = https://github.com/clemos/haxe-sublime-bundle
[submodule "vendor/grammars/cucumber-tmbundle"]
path = vendor/grammars/cucumber-tmbundle
url = https://github.com/cucumber/cucumber-tmbundle
[submodule "vendor/grammars/Handlebars"]
path = vendor/grammars/Handlebars
url = https://github.com/daaain/Handlebars
[submodule "vendor/grammars/powershell"]
path = vendor/grammars/powershell
url = https://github.com/SublimeText/PowerShell
[submodule "vendor/grammars/jade-tmbundle"]
path = vendor/grammars/jade-tmbundle
url = https://github.com/davidrios/jade-tmbundle
[submodule "vendor/grammars/elixir-tmbundle"]
path = vendor/grammars/elixir-tmbundle
url = https://github.com/elixir-lang/elixir-tmbundle
[submodule "vendor/grammars/sublime-glsl"]
path = vendor/grammars/sublime-glsl
url = https://github.com/euler0/sublime-glsl
[submodule "vendor/grammars/fancy-tmbundle"]
path = vendor/grammars/fancy-tmbundle
url = https://github.com/fancy-lang/fancy-tmbundle
[submodule "vendor/grammars/fsharpbinding"]
path = vendor/grammars/fsharpbinding
url = https://github.com/fsharp/fsharpbinding
[submodule "vendor/grammars/monkey.tmbundle"]
path = vendor/grammars/monkey.tmbundle
url = https://github.com/gingerbeardman/monkey.tmbundle
[submodule "vendor/grammars/dart-sublime-bundle"]
path = vendor/grammars/dart-sublime-bundle
url = https://github.com/guillermooo/dart-sublime-bundle
[submodule "vendor/grammars/sublimetext-cuda-cpp"]
path = vendor/grammars/sublimetext-cuda-cpp
url = https://github.com/harrism/sublimetext-cuda-cpp
[submodule "vendor/grammars/pike-textmate"]
path = vendor/grammars/pike-textmate
url = https://github.com/hww3/pike-textmate
[submodule "vendor/grammars/ceylon-sublimetext"]
path = vendor/grammars/ceylon-sublimetext
url = https://github.com/jeancharles-roger/ceylon-sublimetext
[submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"]
path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL
url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL
[submodule "vendor/grammars/sublime-rust"]
path = vendor/grammars/sublime-rust
url = https://github.com/jhasse/sublime-rust
[submodule "vendor/grammars/sublime-befunge"]
path = vendor/grammars/sublime-befunge
url = https://github.com/johanasplund/sublime-befunge
[submodule "vendor/grammars/RDoc.tmbundle"]
path = vendor/grammars/RDoc.tmbundle
url = https://github.com/joshaven/RDoc.tmbundle
[submodule "vendor/grammars/Textmate-Gosu-Bundle"]
path = vendor/grammars/Textmate-Gosu-Bundle
url = https://github.com/jpcamara/Textmate-Gosu-Bundle
[submodule "vendor/grammars/jquery-tmbundle"]
path = vendor/grammars/jquery-tmbundle
url = https://github.com/kswedberg/jquery-tmbundle
[submodule "vendor/grammars/fish-tmbundle"]
path = vendor/grammars/fish-tmbundle
url = https://github.com/l15n/fish-tmbundle
[submodule "vendor/grammars/sublime-idris"]
path = vendor/grammars/sublime-idris
url = https://github.com/laughedelic/sublime-idris
[submodule "vendor/grammars/sublime-better-typescript"]
path = vendor/grammars/sublime-better-typescript
url = https://github.com/lavrton/sublime-better-typescript
[submodule "vendor/grammars/moonscript-tmbundle"]
path = vendor/grammars/moonscript-tmbundle
url = https://github.com/leafo/moonscript-tmbundle
[submodule "vendor/grammars/Isabelle.tmbundle"]
path = vendor/grammars/Isabelle.tmbundle
url = https://github.com/lsf37/Isabelle.tmbundle
[submodule "vendor/grammars/x86-assembly-textmate-bundle"]
path = vendor/grammars/x86-assembly-textmate-bundle
url = https://github.com/lunixbochs/x86-assembly-textmate-bundle
[submodule "vendor/grammars/Alloy.tmbundle"]
path = vendor/grammars/Alloy.tmbundle
url = https://github.com/macekond/Alloy.tmbundle
[submodule "vendor/grammars/opa.tmbundle"]
path = vendor/grammars/opa.tmbundle
url = https://github.com/mads379/opa.tmbundle
[submodule "vendor/grammars/scala.tmbundle"]
path = vendor/grammars/scala.tmbundle
url = https://github.com/mads379/scala.tmbundle
[submodule "vendor/grammars/mako-tmbundle"]
path = vendor/grammars/mako-tmbundle
url = https://github.com/marconi/mako-tmbundle
[submodule "vendor/grammars/gnuplot-tmbundle"]
path = vendor/grammars/gnuplot-tmbundle
url = https://github.com/mattfoster/gnuplot-tmbundle
[submodule "vendor/grammars/idl.tmbundle"]
path = vendor/grammars/idl.tmbundle
url = https://github.com/mgalloy/idl.tmbundle
[submodule "vendor/grammars/protobuf-tmbundle"]
path = vendor/grammars/protobuf-tmbundle
url = https://github.com/michaeledgar/protobuf-tmbundle
[submodule "vendor/grammars/Sublime-Coq"]
path = vendor/grammars/Sublime-Coq
url = https://github.com/mkolosick/Sublime-Coq
[submodule "vendor/grammars/Agda.tmbundle"]
path = vendor/grammars/Agda.tmbundle
url = https://github.com/mokus0/Agda.tmbundle
[submodule "vendor/grammars/Julia.tmbundle"]
path = vendor/grammars/Julia.tmbundle
url = https://github.com/nanoant/Julia.tmbundle
[submodule "vendor/grammars/assembly.tmbundle"]
path = vendor/grammars/assembly.tmbundle
url = https://github.com/nanoant/assembly.tmbundle
[submodule "vendor/grammars/ooc.tmbundle"]
path = vendor/grammars/ooc.tmbundle
url = https://github.com/nilium/ooc.tmbundle
[submodule "vendor/grammars/LiveScript.tmbundle"]
path = vendor/grammars/LiveScript.tmbundle
url = https://github.com/paulmillr/LiveScript.tmbundle
[submodule "vendor/grammars/sublime-tea"]
path = vendor/grammars/sublime-tea
url = https://github.com/pferruggiaro/sublime-tea
[submodule "vendor/grammars/puppet-textmate-bundle"]
path = vendor/grammars/puppet-textmate-bundle
url = https://github.com/puppet-textmate-bundle/puppet-textmate-bundle
[submodule "vendor/grammars/abap.tmbundle"]
path = vendor/grammars/abap.tmbundle
url = https://github.com/pvl/abap.tmbundle
[submodule "vendor/grammars/mercury-tmlanguage"]
path = vendor/grammars/mercury-tmlanguage
url = https://github.com/sebgod/mercury-tmlanguage
[submodule "vendor/grammars/mathematica-tmbundle"]
path = vendor/grammars/mathematica-tmbundle
url = https://github.com/shadanan/mathematica-tmbundle
[submodule "vendor/grammars/sublime-robot-plugin"]
path = vendor/grammars/sublime-robot-plugin
url = https://github.com/shellderp/sublime-robot-plugin
[submodule "vendor/grammars/actionscript3-tmbundle"]
path = vendor/grammars/actionscript3-tmbundle
url = https://github.com/honzabrecka/actionscript3-tmbundle
[submodule "vendor/grammars/Sublime-QML"]
path = vendor/grammars/Sublime-QML
url = https://github.com/skozlovf/Sublime-QML
[submodule "vendor/grammars/Slash.tmbundle"]
path = vendor/grammars/Slash.tmbundle
url = https://github.com/slash-lang/Slash.tmbundle
[submodule "vendor/grammars/factor"]
path = vendor/grammars/factor
url = https://github.com/slavapestov/factor
[submodule "vendor/grammars/ruby-slim.tmbundle"]
path = vendor/grammars/ruby-slim.tmbundle
url = https://github.com/slim-template/ruby-slim.tmbundle
[submodule "vendor/grammars/SublimeXtend"]
path = vendor/grammars/SublimeXtend
url = https://github.com/staltz/SublimeXtend
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/statatmbundle/Stata.tmbundle
[submodule "vendor/grammars/Vala-TMBundle"]
path = vendor/grammars/Vala-TMBundle
url = https://github.com/technosophos/Vala-TMBundle
[submodule "vendor/grammars/ant.tmbundle"]
path = vendor/grammars/ant.tmbundle
url = https://github.com/textmate/ant.tmbundle
[submodule "vendor/grammars/antlr.tmbundle"]
path = vendor/grammars/antlr.tmbundle
url = https://github.com/textmate/antlr.tmbundle
[submodule "vendor/grammars/apache.tmbundle"]
path = vendor/grammars/apache.tmbundle
url = https://github.com/textmate/apache.tmbundle
[submodule "vendor/grammars/applescript.tmbundle"]
path = vendor/grammars/applescript.tmbundle
url = https://github.com/textmate/applescript.tmbundle
[submodule "vendor/grammars/asp.tmbundle"]
path = vendor/grammars/asp.tmbundle
url = https://github.com/textmate/asp.tmbundle
[submodule "vendor/grammars/bison.tmbundle"]
path = vendor/grammars/bison.tmbundle
url = https://github.com/textmate/bison.tmbundle
[submodule "vendor/grammars/capnproto.tmbundle"]
path = vendor/grammars/capnproto.tmbundle
url = https://github.com/textmate/capnproto.tmbundle
[submodule "vendor/grammars/cmake.tmbundle"]
path = vendor/grammars/cmake.tmbundle
url = https://github.com/textmate/cmake.tmbundle
[submodule "vendor/grammars/cpp-qt.tmbundle"]
path = vendor/grammars/cpp-qt.tmbundle
url = https://github.com/textmate/cpp-qt.tmbundle
[submodule "vendor/grammars/css.tmbundle"]
path = vendor/grammars/css.tmbundle
url = https://github.com/textmate/css.tmbundle
[submodule "vendor/grammars/d.tmbundle"]
path = vendor/grammars/d.tmbundle
url = https://github.com/textmate/d.tmbundle
[submodule "vendor/grammars/diff.tmbundle"]
path = vendor/grammars/diff.tmbundle
url = https://github.com/textmate/diff.tmbundle
[submodule "vendor/grammars/dylan.tmbundle"]
path = vendor/grammars/dylan.tmbundle
url = https://github.com/textmate/dylan.tmbundle
[submodule "vendor/grammars/eiffel.tmbundle"]
path = vendor/grammars/eiffel.tmbundle
url = https://github.com/textmate/eiffel.tmbundle
[submodule "vendor/grammars/erlang.tmbundle"]
path = vendor/grammars/erlang.tmbundle
url = https://github.com/textmate/erlang.tmbundle
[submodule "vendor/grammars/fortran.tmbundle"]
path = vendor/grammars/fortran.tmbundle
url = https://github.com/textmate/fortran.tmbundle
[submodule "vendor/grammars/gettext.tmbundle"]
path = vendor/grammars/gettext.tmbundle
url = https://github.com/textmate/gettext.tmbundle
[submodule "vendor/grammars/graphviz.tmbundle"]
path = vendor/grammars/graphviz.tmbundle
url = https://github.com/textmate/graphviz.tmbundle
[submodule "vendor/grammars/groovy.tmbundle"]
path = vendor/grammars/groovy.tmbundle
url = https://github.com/textmate/groovy.tmbundle
[submodule "vendor/grammars/haskell.tmbundle"]
path = vendor/grammars/haskell.tmbundle
url = https://github.com/textmate/haskell.tmbundle
[submodule "vendor/grammars/html.tmbundle"]
path = vendor/grammars/html.tmbundle
url = https://github.com/textmate/html.tmbundle
[submodule "vendor/grammars/ini.tmbundle"]
path = vendor/grammars/ini.tmbundle
url = https://github.com/textmate/ini.tmbundle
[submodule "vendor/grammars/desktop.tmbundle"]
path = vendor/grammars/desktop.tmbundle
url = https://github.com/Mailaender/desktop.tmbundle.git
[submodule "vendor/grammars/io.tmbundle"]
path = vendor/grammars/io.tmbundle
url = https://github.com/textmate/io.tmbundle
[submodule "vendor/grammars/java.tmbundle"]
path = vendor/grammars/java.tmbundle
url = https://github.com/textmate/java.tmbundle
[submodule "vendor/grammars/javadoc.tmbundle"]
path = vendor/grammars/javadoc.tmbundle
url = https://github.com/textmate/javadoc.tmbundle
[submodule "vendor/grammars/javascript-objective-j.tmbundle"]
path = vendor/grammars/javascript-objective-j.tmbundle
url = https://github.com/textmate/javascript-objective-j.tmbundle
[submodule "vendor/grammars/json.tmbundle"]
path = vendor/grammars/json.tmbundle
url = https://github.com/textmate/json.tmbundle
[submodule "vendor/grammars/latex.tmbundle"]
path = vendor/grammars/latex.tmbundle
url = https://github.com/textmate/latex.tmbundle
[submodule "vendor/grammars/less.tmbundle"]
path = vendor/grammars/less.tmbundle
url = https://github.com/textmate/less.tmbundle
[submodule "vendor/grammars/lilypond.tmbundle"]
path = vendor/grammars/lilypond.tmbundle
url = https://github.com/textmate/lilypond.tmbundle
[submodule "vendor/grammars/lisp.tmbundle"]
path = vendor/grammars/lisp.tmbundle
url = https://github.com/textmate/lisp.tmbundle
[submodule "vendor/grammars/logtalk.tmbundle"]
path = vendor/grammars/logtalk.tmbundle
url = https://github.com/textmate/logtalk.tmbundle
[submodule "vendor/grammars/lua.tmbundle"]
path = vendor/grammars/lua.tmbundle
url = https://github.com/textmate/lua.tmbundle
[submodule "vendor/grammars/make.tmbundle"]
path = vendor/grammars/make.tmbundle
url = https://github.com/textmate/make.tmbundle
[submodule "vendor/grammars/matlab.tmbundle"]
path = vendor/grammars/matlab.tmbundle
url = https://github.com/textmate/matlab.tmbundle
[submodule "vendor/grammars/maven.tmbundle"]
path = vendor/grammars/maven.tmbundle
url = https://github.com/textmate/maven.tmbundle
[submodule "vendor/grammars/nemerle.tmbundle"]
path = vendor/grammars/nemerle.tmbundle
url = https://github.com/textmate/nemerle.tmbundle
[submodule "vendor/grammars/ninja.tmbundle"]
path = vendor/grammars/ninja.tmbundle
url = https://github.com/textmate/ninja.tmbundle
[submodule "vendor/grammars/objective-c.tmbundle"]
path = vendor/grammars/objective-c.tmbundle
url = https://github.com/textmate/objective-c.tmbundle
[submodule "vendor/grammars/ocaml.tmbundle"]
path = vendor/grammars/ocaml.tmbundle
url = https://github.com/textmate/ocaml.tmbundle
[submodule "vendor/grammars/pascal.tmbundle"]
path = vendor/grammars/pascal.tmbundle
url = https://github.com/textmate/pascal.tmbundle
[submodule "vendor/grammars/perl.tmbundle"]
path = vendor/grammars/perl.tmbundle
url = https://github.com/textmate/perl.tmbundle
[submodule "vendor/grammars/php-smarty.tmbundle"]
path = vendor/grammars/php-smarty.tmbundle
url = https://github.com/textmate/php-smarty.tmbundle
[submodule "vendor/grammars/php.tmbundle"]
path = vendor/grammars/php.tmbundle
url = https://github.com/textmate/php.tmbundle
[submodule "vendor/grammars/postscript.tmbundle"]
path = vendor/grammars/postscript.tmbundle
url = https://github.com/textmate/postscript.tmbundle
[submodule "vendor/grammars/processing.tmbundle"]
path = vendor/grammars/processing.tmbundle
url = https://github.com/textmate/processing.tmbundle
[submodule "vendor/grammars/prolog.tmbundle"]
path = vendor/grammars/prolog.tmbundle
url = https://github.com/textmate/prolog.tmbundle
[submodule "vendor/grammars/python-django.tmbundle"]
path = vendor/grammars/python-django.tmbundle
url = https://github.com/textmate/python-django.tmbundle
[submodule "vendor/grammars/r.tmbundle"]
path = vendor/grammars/r.tmbundle
url = https://github.com/textmate/r.tmbundle
[submodule "vendor/grammars/restructuredtext.tmbundle"]
path = vendor/grammars/restructuredtext.tmbundle
url = https://github.com/textmate/restructuredtext.tmbundle
[submodule "vendor/grammars/ruby-haml.tmbundle"]
path = vendor/grammars/ruby-haml.tmbundle
url = https://github.com/textmate/ruby-haml.tmbundle
[submodule "vendor/grammars/ruby-on-rails-tmbundle"]
path = vendor/grammars/ruby-on-rails-tmbundle
url = https://github.com/textmate/ruby-on-rails-tmbundle
[submodule "vendor/grammars/scheme.tmbundle"]
path = vendor/grammars/scheme.tmbundle
url = https://github.com/textmate/scheme.tmbundle
[submodule "vendor/grammars/scilab.tmbundle"]
path = vendor/grammars/scilab.tmbundle
url = https://github.com/textmate/scilab.tmbundle
[submodule "vendor/grammars/sql.tmbundle"]
path = vendor/grammars/sql.tmbundle
url = https://github.com/textmate/sql.tmbundle
[submodule "vendor/grammars/standard-ml.tmbundle"]
path = vendor/grammars/standard-ml.tmbundle
url = https://github.com/textmate/standard-ml.tmbundle
[submodule "vendor/grammars/swift.tmbundle"]
path = vendor/grammars/swift.tmbundle
url = https://github.com/textmate/swift.tmbundle
[submodule "vendor/grammars/tcl.tmbundle"]
path = vendor/grammars/tcl.tmbundle
url = https://github.com/textmate/tcl.tmbundle
[submodule "vendor/grammars/text.tmbundle"]
path = vendor/grammars/text.tmbundle
url = https://github.com/textmate/text.tmbundle
[submodule "vendor/grammars/textile.tmbundle"]
path = vendor/grammars/textile.tmbundle
url = https://github.com/textmate/textile.tmbundle
[submodule "vendor/grammars/textmate.tmbundle"]
path = vendor/grammars/textmate.tmbundle
url = https://github.com/textmate/textmate.tmbundle
[submodule "vendor/grammars/thrift.tmbundle"]
path = vendor/grammars/thrift.tmbundle
url = https://github.com/textmate/thrift.tmbundle
[submodule "vendor/grammars/toml.tmbundle"]
path = vendor/grammars/toml.tmbundle
url = https://github.com/textmate/toml.tmbundle
[submodule "vendor/grammars/verilog.tmbundle"]
path = vendor/grammars/verilog.tmbundle
url = https://github.com/textmate/verilog.tmbundle
[submodule "vendor/grammars/xml.tmbundle"]
path = vendor/grammars/xml.tmbundle
url = https://github.com/textmate/xml.tmbundle
[submodule "vendor/grammars/smalltalk-tmbundle"]
path = vendor/grammars/smalltalk-tmbundle
url = https://github.com/tomas-stefano/smalltalk-tmbundle
[submodule "vendor/grammars/ioke-outdated"]
path = vendor/grammars/ioke-outdated
url = https://github.com/vic/ioke-outdated
[submodule "vendor/grammars/kotlin-sublime-package"]
path = vendor/grammars/kotlin-sublime-package
url = https://github.com/vkostyukov/kotlin-sublime-package
[submodule "vendor/grammars/c.tmbundle"]
path = vendor/grammars/c.tmbundle
url = https://github.com/textmate/c.tmbundle
[submodule "vendor/grammars/zephir-sublime"]
path = vendor/grammars/zephir-sublime
url = https://github.com/vmg/zephir-sublime
[submodule "vendor/grammars/llvm.tmbundle"]
path = vendor/grammars/llvm.tmbundle
url = https://github.com/whitequark/llvm.tmbundle
[submodule "vendor/grammars/sublime-nix"]
path = vendor/grammars/sublime-nix
url = https://github.com/wmertens/sublime-nix
[submodule "vendor/grammars/ada.tmbundle"]
path = vendor/grammars/ada.tmbundle
url = https://github.com/aroben/ada.tmbundle
branch = better-with-highlighting
[submodule "vendor/grammars/oz-tmbundle"]
path = vendor/grammars/oz-tmbundle
url = https://github.com/eregon/oz-tmbundle
[submodule "vendor/grammars/ebundles"]
path = vendor/grammars/ebundles
url = https://github.com/ericzou/ebundles
[submodule "vendor/grammars/sublime-mask"]
path = vendor/grammars/sublime-mask
url = https://github.com/tenbits/sublime-mask
[submodule "vendor/grammars/sublime_cobol"]
path = vendor/grammars/sublime_cobol
url = https://bitbucket.org/bitlang/sublime_cobol
[submodule "vendor/grammars/ruby.tmbundle"]
path = vendor/grammars/ruby.tmbundle
url = https://github.com/aroben/ruby.tmbundle
branch = pl
[submodule "vendor/grammars/IDL-Syntax"]
path = vendor/grammars/IDL-Syntax
url = https://github.com/andik/IDL-Syntax
[submodule "vendor/grammars/sas.tmbundle"]
path = vendor/grammars/sas.tmbundle
url = https://github.com/rpardee/sas.tmbundle
[submodule "vendor/grammars/atom-salt"]
path = vendor/grammars/atom-salt
url = https://github.com/saltstack/atom-salt
[submodule "vendor/grammars/Scalate.tmbundle"]
path = vendor/grammars/Scalate.tmbundle
url = https://github.com/scalate/Scalate.tmbundle
[submodule "vendor/grammars/Elm.tmLanguage"]
path = vendor/grammars/Elm.tmLanguage
url = https://github.com/deadfoxygrandpa/Elm.tmLanguage
[submodule "vendor/grammars/sublime-bsv"]
path = vendor/grammars/sublime-bsv
url = https://github.com/thotypous/sublime-bsv
[submodule "vendor/grammars/Sublime-HTTP"]
path = vendor/grammars/Sublime-HTTP
url = https://github.com/httpspec/sublime-highlighting
[submodule "vendor/grammars/sass-textmate-bundle"]
path = vendor/grammars/sass-textmate-bundle
url = https://github.com/nathos/sass-textmate-bundle
[submodule "vendor/grammars/carto-atom"]
path = vendor/grammars/carto-atom
url = https://github.com/yohanboniface/carto-atom
[submodule "vendor/grammars/Sublime-Nit"]
path = vendor/grammars/Sublime-Nit
url = https://github.com/R4PaSs/Sublime-Nit
[submodule "vendor/grammars/language-hy"]
path = vendor/grammars/language-hy
url = https://github.com/rwtolbert/language-hy
[submodule "vendor/grammars/Racket"]
path = vendor/grammars/Racket
url = https://github.com/soegaard/racket-highlight-for-github
[submodule "vendor/grammars/turtle.tmbundle"]
path = vendor/grammars/turtle.tmbundle
url = https://github.com/peta/turtle.tmbundle
[submodule "vendor/grammars/liquid.tmbundle"]
path = vendor/grammars/liquid.tmbundle
url = https://github.com/bastilian/validcode-textmate-bundles
[submodule "vendor/grammars/ats.sublime"]
path = vendor/grammars/ats.sublime
url = https://github.com/steinwaywhw/ats-mode-sublimetext
[submodule "vendor/grammars/Modelica"]
path = vendor/grammars/Modelica
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
[submodule "vendor/grammars/sublime-apl"]
path = vendor/grammars/sublime-apl
url = https://github.com/StoneCypher/sublime-apl
[submodule "vendor/grammars/CLIPS-sublime"]
path = vendor/grammars/CLIPS-sublime
url = https://github.com/psicomante/CLIPS-sublime
[submodule "vendor/grammars/Creole"]
path = vendor/grammars/Creole
url = https://github.com/Siddley/Creole
[submodule "vendor/grammars/GDScript-sublime"]
path = vendor/grammars/GDScript-sublime
url = https://github.com/beefsack/GDScript-sublime
[submodule "vendor/grammars/sublime-golo"]
path = vendor/grammars/sublime-golo
url = https://github.com/TypeUnsafe/sublime-golo
[submodule "vendor/grammars/JSyntax"]
path = vendor/grammars/JSyntax
url = https://github.com/bcj/JSyntax
[submodule "vendor/grammars/TXL"]
path = vendor/grammars/TXL
url = https://github.com/MikeHoffert/Sublime-Text-TXL-syntax
[submodule "vendor/grammars/G-Code"]
path = vendor/grammars/G-Code
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
[submodule "vendor/grammars/grace-tmbundle"]
path = vendor/grammars/grace-tmbundle
url = https://github.com/zmthy/grace-tmbundle
[submodule "vendor/grammars/sublime-text-ox"]
path = vendor/grammars/sublime-text-ox
url = https://github.com/andreashetland/sublime-text-ox
[submodule "vendor/grammars/AutoHotkey"]
path = vendor/grammars/AutoHotkey
url = https://github.com/ahkscript/SublimeAutoHotkey
[submodule "vendor/grammars/ec.tmbundle"]
path = vendor/grammars/ec.tmbundle
url = https://github.com/ecere/ec.tmbundle
[submodule "vendor/grammars/InnoSetup"]
path = vendor/grammars/InnoSetup
url = https://github.com/idleberg/InnoSetup-Sublime-Text
[submodule "vendor/grammars/gap-tmbundle"]
path = vendor/grammars/gap-tmbundle
url = https://github.com/dhowden/gap-tmbundle
[submodule "vendor/grammars/SublimePapyrus"]
path = vendor/grammars/SublimePapyrus
url = https://github.com/Kapiainen/SublimePapyrus
[submodule "vendor/grammars/sublime-spintools"]
path = vendor/grammars/sublime-spintools
url = https://github.com/bitbased/sublime-spintools
[submodule "vendor/grammars/PogoScript.tmbundle"]
path = vendor/grammars/PogoScript.tmbundle
url = https://github.com/featurist/PogoScript.tmbundle
[submodule "vendor/grammars/sublime-opal"]
path = vendor/grammars/sublime-opal
url = https://github.com/artifactz/sublime-opal
[submodule "vendor/grammars/mediawiki.tmbundle"]
path = vendor/grammars/mediawiki.tmbundle
url = https://github.com/textmate/mediawiki.tmbundle
[submodule "vendor/grammars/BrightScript.tmbundle"]
path = vendor/grammars/BrightScript.tmbundle
url = https://github.com/cmink/BrightScript.tmbundle
[submodule "vendor/grammars/Stylus"]
path = vendor/grammars/Stylus
url = https://github.com/billymoon/Stylus
[submodule "vendor/grammars/asciidoc.tmbundle"]
path = vendor/grammars/asciidoc.tmbundle
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
[submodule "vendor/grammars/sublime-text-pig-latin"]
path = vendor/grammars/sublime-text-pig-latin
url = https://github.com/goblindegook/sublime-text-pig-latin
```
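Each entry above registers one grammar repository as a git submodule under `vendor/grammars/`. As a rough sketch of how a single entry is typically produced (the commands come from the CONTRIBUTING.md changes later in this compare view; the `go-tmbundle` repository is used purely as an illustration):

```
$ git submodule add https://github.com/AlanQuatermain/go-tmbundle vendor/grammars/go-tmbundle
$ script/convert-grammars --add vendor/grammars/go-tmbundle
```

Running `git submodule add` is what writes the `[submodule]`, `path`, and `url` lines into `.gitmodules`, which is how a 629-line file like the one above accumulates.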
**.travis.yml**: 11 changes (old and new lines interleaved)

```
@@ -1,9 +1,5 @@
before_install:
- git fetch origin master:master
- git fetch origin v2.0.0:v2.0.0
- git fetch origin test/attributes:test/attributes
- git fetch origin test/master:test/master
- sudo apt-get install libicu-dev -y
sudo: false
before_install: script/travis/before_install
rvm:
- 1.9.3
- 2.0.0
@@ -11,3 +7,6 @@ rvm:
- 2.2
notifications:
disabled: true
git:
submodules: false
cache: bundler
```
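Read as Travis CI configuration, the post-change file plausibly nests as sketched below. This is an inference, not part of the diff: the old/new split follows the hunk counts (`-1,9 +1,5` and `-11,3 +7,6`), the indentation is standard YAML nesting, and one `rvm` entry between the two hunks is not shown in this view.

```yaml
sudo: false
before_install: script/travis/before_install
rvm:
- 1.9.3
- 2.0.0
# (one rvm entry between the two hunks is not shown in this view)
- 2.2
notifications:
  disabled: true
git:
  submodules: false
cache: bundler
```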
**CONTRIBUTING.md** (old and new lines interleaved)

```
@@ -1,31 +1,81 @@
## Contributing
# Contributing

The majority of contributions won't need to touch any Ruby code at all. The [master language list][languages] is just a YAML configuration file.
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. The majority of contributions won't need to touch any Ruby code at all.

Almost all bug fixes or new language additions should come with some additional code samples. Just drop them under [`samples/`][samples] in the correct subdirectory and our test suite will automatically test them. In most cases you shouldn't need to add any new assertions.
## Adding a language

### My code is detected as the wrong language
We try only to add languages once they have some usage on GitHub. In most cases we prefer that languages be in use in hundreds of repositories before supporting them in Linguist.

This can usually be solved either by adding a new filename or file name extension to the language's entry in [`languages.yml`][languages] or adding more [samples][samples] for your language to the repository to make Linguist's classifier smarter.
To add support for a new language:

### Syntax highlighting looks wrong
0. Add an entry for your language to [`languages.yml`][languages].
0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution.
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

Assuming your code is being detected as the right language (see above), in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream.
In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

You can also try to fix the bug yourself and submit a Pull Request. [This piece from TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://lightshow.githubapp.com).
0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try and avoid false positives!

## Fixing a misclassified language

Most languages are detected by their file extension defined in [languages.yml][languages]. For disambiguating between files with common extensions, linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.

## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [languages.yml][languages] is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, please let us know and we'll pick it up for GitHub.

### I want to add support for the `X` programming language
## Testing

Great! You'll need to:
For development you are going to want to checkout out the source. To get it, clone the repo and run [Bundler](http://gembundler.com/) to install its dependencies.

0. Add an entry for your language to [`languages.yml`][languages].
0. Add a grammar for your language to [`grammars.yml`][grammars] by running `script/download-grammars --add URL`. Please only add grammars that have a license that permits redistribution.
0. Add samples for your language to the [samples directory][samples].
git clone https://github.com/github/linguist.git
cd linguist/
script/bootstrap

We try only to add languages once they have some usage on GitHub, so please note in-the-wild usage examples in your pull request. In most cases we prefer that languages already be in use in hundreds of repositories before supporting them in Linguist.
To run the tests:

bundle exec rake test

Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Here's our current build status: [](http://travis-ci.org/github/linguist)

## Releasing

If you are the current maintainer of this gem:

0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
0. Make sure your local dependencies are up to date: `script/bootstrap`
0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
0. Ensure that samples are updated: `bundle exec rake samples`
0. Ensure that tests are green: `bundle exec rake test`
0. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
0. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
0. Build a local gem: `bundle exec rake build_gem`
0. Test the gem:
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
0. Install the new gem locally
0. Test behavior locally, branch deploy, whatever needs to happen
0. Merge github/linguist PR
0. Tag and push: `git tag vx.xx.xx; git push --tags`
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`

[grammars]: /grammars.yml
[languages]: /lib/linguist/languages.yml
[samples]: /samples
[new-issue]: https://github.com/github/linguist/issues/new
```
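Both versions of the "add a language" instructions above start with an entry in `languages.yml`. As a purely illustrative sketch (the language name, extension, and scope below are placeholders, and the field names are taken from memory of existing entries in that file, so they may not be exhaustive), such an entry looks roughly like:

```yaml
# Hypothetical entry; name, extension, and scope are placeholders.
MyLang:
  type: programming
  extensions:
  - .mylang
  tm_scope: source.mylang
  ace_mode: text
```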
**Gemfile**: 1 change (old and new lines interleaved)

```
@@ -1,5 +1,4 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gemspec :name => "github-linguist-grammars"
gem 'test-unit', require: false if RUBY_VERSION >= '2.2'
gem 'byebug' if RUBY_VERSION >= '2.0'
```
**LICENSE**: 2 changes (old and new lines interleaved)

```
@@ -1,4 +1,4 @@
Copyright (c) 2011-2014 GitHub, Inc.
Copyright (c) 2011-2015 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
```
224
README.md
224
README.md
@@ -1,45 +1,80 @@
|
||||
# Linguist
|
||||
|
||||
We use this library at GitHub to detect blob languages, ignore binary files, suppress generated files in diffs, and generate language breakdown graphs.
|
||||
[issues]: https://github.com/github/linguist/issues
|
||||
[new-issue]: https://github.com/github/linguist/issues/new
|
||||
|
||||
Tips for filing issues and creating pull requests can be found in [`CONTRIBUTING.md`](/CONTRIBUTING.md).
|
||||
This library is used on GitHub.com to detect blob languages, ignore binary or vendored files, suppress generated files in diffs, and generate language breakdown graphs.
|
||||
|
||||
## Features
|
||||
See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md) before filing an issue or creating a pull request.
|
||||
|
||||
### Language detection
|
||||
## Troubleshooting
|
||||
|
||||
Linguist defines a list of all languages known to GitHub in a [yaml file](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).
|
||||
### My repository is detected as the wrong language
|
||||
|
||||
Most languages are detected by their file extension. For disambiguating between files with common extensions, we first apply some common-sense heuristics to pick out obvious languages. After that, we use a
|
||||
[statistical
|
||||
classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb).
|
||||
This process can help us tell the difference between, for example, `.h` files which could be either C, C++, or Obj-C.
|
||||

|
||||
|
||||
```ruby
|
||||
The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
|
||||
|
||||
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
|
||||
0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
|
||||
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
|
||||
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
|
||||
|
||||
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
|
||||
## Overrides
|
||||
|
||||
Linguist supports a number of different custom overrides strategies for language definitions and vendored paths.
|
||||
|
||||
### Using gitattributes
|
||||
|
||||
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
```
|
||||
|
||||
See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/lib/linguist/language.rb) and [lib/linguist/languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).
|
||||
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
|
||||
|
||||
### Syntax Highlighting
|
||||
Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
|
||||
Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use.
|
||||
```
|
||||
$ cat .gitattributes
|
||||
special-vendored-path/* linguist-vendored
|
||||
jquery.js linguist-vendored=false
|
||||
```
|
||||
|
||||
Every language in `languages.yml` is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting. **When adding a new language to Linguist, please add its corresponding scope too (assuming there's an existing TextMate bundle, Sublime Text package, or Atom package) so syntax highlighting works for it**.
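As a quick illustration (the exact value is whatever `languages.yml` declares, so treat the output here as an assumption about that file), the scope is exposed on the `Language` object:

```ruby
require 'linguist'

# tm_scope is read from languages.yml for each language entry.
Linguist::Language["Ruby"].tm_scope  # => "source.ruby"
```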
|
||||
Similar to vendored files, Linguist excludes documentation files from your project's language stats. (Unlike vendored files, documentation files are displayed in diffs on github.com.) [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.
|
||||
|
||||
### Stats
|
||||
Use the `linguist-documentation` attribute to mark or unmark paths as documentation.
|
||||
|
||||
The Language stats bar that you see on every repository is built by aggregating the languages of each file in that repository. The top language in the graph determines the project's primary language.
|
||||
```
|
||||
$ cat .gitattributes
|
||||
project-docs/* linguist-documentation
|
||||
docs/formatter.rb linguist-documentation=false
|
||||
```
|
||||
|
||||
The repository stats API, accessed through `#languages`, can be used on a directory:
|
||||
### Using Emacs and Vim modelines
|
||||
|
||||
***API UPDATE***
|
||||
Alternatively, you can use Vim and Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com.
|
||||
|
||||
Since [Version 3.0.0](https://github.com/github/linguist/releases/tag/v3.0.0) Linguist expects a git repository (in the form of a [Rugged::Repository](https://github.com/libgit2/rugged#repositories)) to be passed when initializing `Linguist::Repository`.
|
||||
```
|
||||
Vim
|
||||
vim: set filetype=prolog:
|
||||
vim: set ft=cpp:
|
||||
|
||||
Emacs
|
||||
-*- mode: php;-*-
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Install the gem:
|
||||
|
||||
```
|
||||
$ gem install github-linguist
|
||||
```
|
||||
|
||||
Then use it in your application:
|
||||
|
||||
```ruby
|
||||
require 'rugged'
|
||||
@@ -51,146 +86,27 @@ project.language #=> "Ruby"
|
||||
project.languages #=> { "Ruby" => 119387 }
|
||||
```
|
||||
|
||||
These stats are also printed out by the `linguist` binary. You can use the
|
||||
These stats are also printed out by the `linguist` executable. You can use the
|
||||
`--breakdown` flag, and the binary will also output the breakdown of files by language.
|
||||
|
||||
You can try running `linguist` on the root directory in this repository itself:
|
||||
|
||||
$ bundle exec linguist --breakdown
|
||||
```
|
||||
$ bundle exec linguist --breakdown
|
||||
|
||||
100.00% Ruby
|
||||
100.00% Ruby
|
||||
|
||||
Ruby:
|
||||
Gemfile
|
||||
Rakefile
|
||||
bin/linguist
|
||||
github-linguist.gemspec
|
||||
lib/linguist.rb
|
||||
lib/linguist/blob_helper.rb
|
||||
lib/linguist/classifier.rb
|
||||
lib/linguist/file_blob.rb
|
||||
lib/linguist/generated.rb
|
||||
lib/linguist/heuristics.rb
|
||||
lib/linguist/language.rb
|
||||
lib/linguist/lazy_blob.rb
|
||||
lib/linguist/md5.rb
|
||||
lib/linguist/repository.rb
|
||||
lib/linguist/samples.rb
|
||||
lib/linguist/tokenizer.rb
|
||||
lib/linguist/version.rb
|
||||
test/test_blob.rb
|
||||
test/test_classifier.rb
|
||||
test/test_heuristics.rb
|
||||
test/test_language.rb
|
||||
test/test_md5.rb
|
||||
test/test_pedantic.rb
|
||||
test/test_repository.rb
|
||||
test/test_samples.rb
|
||||
test/test_tokenizer.rb
|
||||
|
||||
#### Ignore vendored files
|
||||
|
||||
Checking other code into your git repo is a common practice. But this often inflates your project's language stats and may even cause your project to be labeled as another language. We are able to identify some of these files and directories and exclude them.
|
||||
|
||||
```ruby
|
||||
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
|
||||
Ruby:
|
||||
Gemfile
|
||||
Rakefile
|
||||
bin/linguist
|
||||
github-linguist.gemspec
|
||||
lib/linguist.rb
|
||||
…
|
||||
```
|
||||
|
||||
See [Linguist::BlobHelper#vendored?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb) and [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml).
|
||||
## Contributing
|
||||
|
||||
#### Generated file detection
|
||||
Please check out our [contributing guidelines](CONTRIBUTING.md).
|
||||
|
||||
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in diffs.
|
||||
|
||||
```ruby
|
||||
Linguist::FileBlob.new("underscore.min.js").generated? # => true
|
||||
```
|
||||
|
||||
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).
|
||||
|
||||
## Overrides
|
||||
|
||||
Linguist supports custom overrides for language definitions and vendored paths. Add a `.gitattributes` file to your project using the keys `linguist-language` and `linguist-vendored` with the standard git-style path matchers for the files you want to override.
|
||||
|
||||
Please note that the overrides currently only affect the language statistics for a repository and not the syntax-highlighting of files.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
|
||||
$ linguist --breakdown
|
||||
100.00% Java
|
||||
|
||||
Java:
|
||||
ruby_file.rb
|
||||
```
|
||||
|
||||
By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
special-vendored-path/* linguist-vendored
|
||||
jquery.js linguist-vendored=false
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
GitHub.com is usually running the latest version of the `github-linguist` gem that is released on [RubyGems.org](http://rubygems.org/gems/github-linguist).
|
||||
|
||||
But for development you are going to want to check out the source. To get it, clone the repo and run [Bundler](http://gembundler.com/) to install its dependencies.
|
||||
|
||||
git clone https://github.com/github/linguist.git
|
||||
cd linguist/
|
||||
bundle install
|
||||
|
||||
To run the tests:
|
||||
|
||||
bundle exec rake test
|
||||
|
||||
### A note on language extensions
|
||||
|
||||
Linguist has a number of methods available for identifying the language of a particular file. The initial lookup is based upon the extension of the file; possible file extensions are defined in an array called `extensions`. Take a look at this example for `Perl`:
|
||||
|
||||
```
|
||||
Perl:
|
||||
type: programming
|
||||
ace_mode: perl
|
||||
color: "#0298c3"
|
||||
extensions:
|
||||
- .pl
|
||||
- .PL
|
||||
- .perl
|
||||
- .ph
|
||||
- .plx
|
||||
- .pm
|
||||
- .pod
|
||||
- .psgi
|
||||
interpreters:
|
||||
- perl
|
||||
```
|
||||
Any of the extensions defined are valid, but the first in this array should be the most popular.
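For illustration only, assuming a file with that name exists on disk, the extension lookup then drives detection (`.perl` appears only in Perl's list, so it is unambiguous):

```ruby
require 'linguist'

Linguist::FileBlob.new("hello.perl").language.name  # expected: "Perl"
```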
|
||||
|
||||
### Testing
|
||||
|
||||
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
|
||||
|
||||
Here's our current build status, which is hopefully green: [](http://travis-ci.org/github/linguist)
|
||||
|
||||
### Releasing
|
||||
|
||||
If you are the current maintainer of this gem:
|
||||
|
||||
0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
|
||||
0. Make sure your local dependencies are up to date: `bundle install`
|
||||
0. Ensure that samples are updated: `bundle exec rake samples`
|
||||
0. Ensure that tests are green: `bundle exec rake test`
|
||||
0. Bump gem version in `lib/linguist/version.rb`. For example, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
|
||||
0. Make a PR to github/linguist. For example, [#1238](https://github.com/github/linguist/pull/1238).
|
||||
0. Build a local gem: `bundle exec rake build_gem`
|
||||
0. Testing:
|
||||
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
||||
0. Install the new gem locally
|
||||
0. Test behavior locally, branch deploy, whatever needs to happen
|
||||
0. Merge github/linguist PR
|
||||
0. Tag and push: `git tag vx.xx.xx; git push --tags`
|
||||
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`
|
||||
##
|
||||
|
||||
21 Rakefile
@@ -3,13 +3,14 @@ require 'rake/clean'
|
||||
require 'rake/testtask'
|
||||
require 'yaml'
|
||||
require 'yajl'
|
||||
require 'open-uri'
|
||||
|
||||
task :default => :test
|
||||
|
||||
Rake::TestTask.new
|
||||
|
||||
# Extend test task to check for samples
|
||||
task :test => :check_samples
|
||||
# Extend test task to check for samples and fetch latest Ace modes
|
||||
task :test => [:check_samples, :fetch_ace_modes]
|
||||
|
||||
desc "Check that we have samples.json generated"
|
||||
task :check_samples do
|
||||
@@ -18,6 +19,20 @@ task :check_samples do
|
||||
end
|
||||
end
|
||||
|
||||
desc "Fetch the latest Ace modes from its GitHub repository"
|
||||
task :fetch_ace_modes do
|
||||
ACE_FIXTURE_PATH = File.join('test', 'fixtures', 'ace_modes.json')
|
||||
|
||||
File.delete(ACE_FIXTURE_PATH) if File.exist?(ACE_FIXTURE_PATH)
|
||||
|
||||
begin
|
||||
ace_github_modes = open("https://api.github.com/repos/ajaxorg/ace/contents/lib/ace/mode").read
|
||||
File.write(ACE_FIXTURE_PATH, ace_github_modes)
|
||||
rescue OpenURI::HTTPError, SocketError
|
||||
# no internet? no problem.
|
||||
end
|
||||
end
|
||||
|
||||
task :samples do
|
||||
require 'linguist/samples'
|
||||
json = Yajl.dump(Linguist::Samples.data, :pretty => true)
|
||||
@@ -33,7 +48,7 @@ end
|
||||
|
||||
task :build_grammars_gem do
|
||||
rm_rf "grammars"
|
||||
sh "script/download-grammars"
|
||||
sh "script/convert-grammars"
|
||||
sh "gem", "build", "github-linguist-grammars.gemspec"
|
||||
end
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ Gem::Specification.new do |s|
|
||||
s.add_dependency 'mime-types', '>= 1.19'
|
||||
s.add_dependency 'rugged', '~> 0.22.0b4'
|
||||
|
||||
s.add_development_dependency 'minitest', '>= 5.0'
|
||||
s.add_development_dependency 'mocha'
|
||||
s.add_development_dependency 'pry'
|
||||
s.add_development_dependency 'rake'
|
||||
|
||||
737 grammars.yml
@@ -9,8 +9,6 @@ http://svn.textmate.org/trunk/Review/Bundles/Forth.tmbundle:
|
||||
- source.forth
|
||||
http://svn.textmate.org/trunk/Review/Bundles/Parrot.tmbundle:
|
||||
- source.parrot.pir
|
||||
http://svn.textmate.org/trunk/Review/Bundles/Ruby%20Sass.tmbundle:
|
||||
- source.sass
|
||||
http://svn.textmate.org/trunk/Review/Bundles/SecondLife%20LSL.tmbundle:
|
||||
- source.lsl
|
||||
http://svn.textmate.org/trunk/Review/Bundles/VHDL.tmbundle:
|
||||
@@ -20,403 +18,510 @@ http://svn.textmate.org/trunk/Review/Bundles/XQuery.tmbundle:
|
||||
https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
|
||||
- source.systemverilog
|
||||
- source.ucfconstraints
|
||||
https://bitbucket.org/bitlang/sublime_cobol/raw/b0e9c44ac5f7a2fb553421aa986b35854cbfda4a/COBOL.tmLanguage:
|
||||
- source.cobol
|
||||
https://fan.googlecode.com/hg-history/Build%201.0.55/adm/tools/textmate/Fan.tmbundle/Syntaxes/Fan.tmLanguage:
|
||||
- source.fan
|
||||
https://github.com/AlanQuatermain/go-tmbundle:
|
||||
- source.go
|
||||
https://github.com/Anomareh/PHP-Twig.tmbundle:
|
||||
- text.html.twig
|
||||
https://github.com/Cirru/sublime-cirru/raw/master/Cirru.tmLanguage:
|
||||
- source.cirru
|
||||
https://github.com/Cykey/Sublime-Logos:
|
||||
- source.logos
|
||||
https://github.com/Drako/SublimeBrainfuck/raw/master/Brainfuck.tmLanguage:
|
||||
- source.bf
|
||||
https://github.com/JohnNilsson/awk-sublime/raw/master/AWK.tmLanguage:
|
||||
- source.awk
|
||||
https://github.com/JonBons/Sublime-SQF-Language:
|
||||
- source.sqf
|
||||
https://github.com/MarioRicalde/SCSS.tmbundle:
|
||||
- source.scss
|
||||
https://github.com/Oldes/Sublime-REBOL:
|
||||
- source.rebol
|
||||
https://github.com/PogiNate/Sublime-Inform:
|
||||
- source.Inform7
|
||||
https://github.com/Red-Nova-Technologies/autoitv3-tmbundle:
|
||||
- source.autoit.3
|
||||
https://github.com/SalGnt/Sublime-VimL:
|
||||
- source.viml
|
||||
https://github.com/Shammah/boo-sublime/raw/master/Boo.tmLanguage:
|
||||
- source.boo
|
||||
https://github.com/SublimeText/ColdFusion:
|
||||
vendor/grammars/Agda.tmbundle:
|
||||
- source.agda
|
||||
vendor/grammars/Alloy.tmbundle:
|
||||
- source.alloy
|
||||
vendor/grammars/AutoHotkey/:
|
||||
- source.ahk
|
||||
vendor/grammars/BrightScript.tmbundle/:
|
||||
- source.brightauthorproject
|
||||
- source.brightscript
|
||||
vendor/grammars/CLIPS-sublime:
|
||||
- source.clips
|
||||
vendor/grammars/ColdFusion:
|
||||
- source.cfscript
|
||||
- source.cfscript.cfc
|
||||
- text.cfml.basic
|
||||
- text.html.cfm
|
||||
https://github.com/SublimeText/NSIS:
|
||||
- source.nsis
|
||||
https://github.com/Varriount/NimLime:
|
||||
- source.nimrod
|
||||
- source.nimrod_filter
|
||||
- source.nimrodcfg
|
||||
https://github.com/alkemist/gradle.tmbundle:
|
||||
- source.groovy.gradle
|
||||
https://github.com/ambethia/Sublime-Loom:
|
||||
- source.loomscript
|
||||
https://github.com/angryant0007/VBDotNetSyntax:
|
||||
- source.vbnet
|
||||
https://github.com/anunayk/cool-tmbundle:
|
||||
- source.cool
|
||||
https://github.com/aroben/ada.tmbundle/raw/c45eed4d5f98fe3bcbbffbb9e436601ab5bbde4b/Syntaxes/Ada.plist:
|
||||
- source.ada
|
||||
https://github.com/aroben/ruby.tmbundle@4636a3023153c3034eb6ffc613899ba9cf33b41f:
|
||||
- source.ruby
|
||||
- text.html.erb
|
||||
https://github.com/asbjornenge/Docker.tmbundle:
|
||||
vendor/grammars/Creole:
|
||||
- text.html.creole
|
||||
vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
https://github.com/atom/language-clojure:
|
||||
- source.clojure
|
||||
https://github.com/atom/language-coffee-script:
|
||||
- source.coffee
|
||||
- source.litcoffee
|
||||
https://github.com/atom/language-csharp:
|
||||
- source.cs
|
||||
- source.csx
|
||||
- source.nant-build
|
||||
https://github.com/atom/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
https://github.com/atom/language-python:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
- text.python.traceback
|
||||
https://github.com/atom/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
https://github.com/austinwagner/sublime-sourcepawn:
|
||||
- source.sp
|
||||
https://github.com/bfad/Sublime-Lasso:
|
||||
- file.lasso
|
||||
https://github.com/bholt/chapel-tmbundle:
|
||||
- source.chapel
|
||||
https://github.com/brandonwamboldt/sublime-nginx:
|
||||
- source.nginx
|
||||
https://github.com/bro/bro-sublime:
|
||||
- source.bro
|
||||
https://github.com/carsonoid/sublime_man_page_support/raw/master/man-groff.tmLanguage:
|
||||
- text.groff
|
||||
https://github.com/ccreutzig/sublime-MuPAD:
|
||||
- source.mupad
|
||||
https://github.com/cdwilson/nesC.tmbundle:
|
||||
- source.nesc
|
||||
https://github.com/christophevg/racket-tmbundle:
|
||||
vendor/grammars/Elm.tmLanguage:
|
||||
- source.elm
|
||||
vendor/grammars/G-Code/:
|
||||
- source.LS
|
||||
- source.MCPOST
|
||||
- source.MOD
|
||||
- source.apt
|
||||
- source.gcode
|
||||
vendor/grammars/GDScript-sublime/:
|
||||
- source.gdscript
|
||||
vendor/grammars/Handlebars:
|
||||
- text.html.handlebars
|
||||
vendor/grammars/IDL-Syntax:
|
||||
- source.webidl
|
||||
vendor/grammars/InnoSetup/:
|
||||
- source.inno
|
||||
vendor/grammars/Isabelle.tmbundle:
|
||||
- source.isabelle.root
|
||||
- source.isabelle.theory
|
||||
vendor/grammars/JSyntax/:
|
||||
- source.j
|
||||
vendor/grammars/Julia.tmbundle:
|
||||
- source.julia
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/Modelica/:
|
||||
- source.modelica
|
||||
vendor/grammars/NSIS:
|
||||
- source.nsis
|
||||
vendor/grammars/NimLime:
|
||||
- source.nim
|
||||
- source.nim_filter
|
||||
- source.nimcfg
|
||||
vendor/grammars/PHP-Twig.tmbundle:
|
||||
- text.html.twig
|
||||
vendor/grammars/PogoScript.tmbundle/:
|
||||
- source.pogoscript
|
||||
vendor/grammars/RDoc.tmbundle:
|
||||
- text.rdoc
|
||||
vendor/grammars/Racket:
|
||||
- source.racket
|
||||
https://github.com/clemos/haxe-sublime-bundle:
|
||||
vendor/grammars/SCSS.tmbundle:
|
||||
- source.scss
|
||||
vendor/grammars/Scalate.tmbundle:
|
||||
- source.scaml
|
||||
- text.html.ssp
|
||||
vendor/grammars/Slash.tmbundle:
|
||||
- text.html.slash
|
||||
vendor/grammars/Stata.tmbundle:
|
||||
- source.mata
|
||||
- source.stata
|
||||
vendor/grammars/Stylus/:
|
||||
- source.stylus
|
||||
vendor/grammars/Sublime-Coq:
|
||||
- source.coq
|
||||
vendor/grammars/Sublime-HTTP:
|
||||
- source.httpspec
|
||||
vendor/grammars/Sublime-Inform:
|
||||
- source.Inform7
|
||||
vendor/grammars/Sublime-Lasso:
|
||||
- file.lasso
|
||||
vendor/grammars/Sublime-Logos:
|
||||
- source.logos
|
||||
vendor/grammars/Sublime-Loom:
|
||||
- source.loomscript
|
||||
vendor/grammars/Sublime-Nit:
|
||||
- source.nit
|
||||
vendor/grammars/Sublime-QML:
|
||||
- source.qml
|
||||
vendor/grammars/Sublime-REBOL:
|
||||
- source.rebol
|
||||
vendor/grammars/Sublime-SQF-Language:
|
||||
- source.sqf
|
||||
vendor/grammars/Sublime-Text-2-OpenEdge-ABL:
|
||||
- source.abl
|
||||
vendor/grammars/Sublime-VimL:
|
||||
- source.viml
|
||||
vendor/grammars/SublimeBrainfuck:
|
||||
- source.bf
|
||||
vendor/grammars/SublimePapyrus/:
|
||||
- source.compiled-papyrus
|
||||
- source.papyrus
|
||||
- source.papyrus-assembly
|
||||
vendor/grammars/SublimeXtend:
|
||||
- source.xtend
|
||||
vendor/grammars/TXL/:
|
||||
- source.txl
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
- source.vbnet
|
||||
vendor/grammars/Vala-TMBundle:
|
||||
- source.vala
|
||||
vendor/grammars/abap.tmbundle:
|
||||
- source.abap
|
||||
vendor/grammars/actionscript3-tmbundle:
|
||||
- source.actionscript.3
|
||||
- text.html.asdoc
|
||||
- text.xml.flex-config
|
||||
vendor/grammars/ada.tmbundle:
|
||||
- source.ada
|
||||
vendor/grammars/ant.tmbundle:
|
||||
- text.xml.ant
|
||||
vendor/grammars/antlr.tmbundle:
|
||||
- source.antlr
|
||||
vendor/grammars/apache.tmbundle:
|
||||
- source.apache-config
|
||||
- source.apache-config.mod_perl
|
||||
vendor/grammars/applescript.tmbundle:
|
||||
- source.applescript
|
||||
vendor/grammars/asciidoc.tmbundle/:
|
||||
- text.html.asciidoc
|
||||
vendor/grammars/asp.tmbundle:
|
||||
- source.asp
|
||||
- text.html.asp
|
||||
vendor/grammars/assembly.tmbundle:
|
||||
- objdump.x86asm
|
||||
- source.x86asm
|
||||
vendor/grammars/atom-salt:
|
||||
- source.python.salt
|
||||
- source.yaml.salt
|
||||
vendor/grammars/ats.sublime:
|
||||
- source.ats
|
||||
vendor/grammars/autoitv3-tmbundle:
|
||||
- source.autoit.3
|
||||
vendor/grammars/awk-sublime:
|
||||
- source.awk
|
||||
vendor/grammars/bison.tmbundle:
|
||||
- source.bison
|
||||
vendor/grammars/boo-sublime:
|
||||
- source.boo
|
||||
vendor/grammars/bro-sublime:
|
||||
- source.bro
|
||||
vendor/grammars/c.tmbundle:
|
||||
- source.c
|
||||
- source.c++
|
||||
- source.c.platform
|
||||
vendor/grammars/capnproto.tmbundle:
|
||||
- source.capnp
|
||||
vendor/grammars/carto-atom:
|
||||
- source.css.mss
|
||||
vendor/grammars/ceylon-sublimetext:
|
||||
- module.ceylon
|
||||
- source.ceylon
|
||||
vendor/grammars/chapel-tmbundle:
|
||||
- source.chapel
|
||||
vendor/grammars/cmake.tmbundle:
|
||||
- source.cache.cmake
|
||||
- source.cmake
|
||||
vendor/grammars/cool-tmbundle:
|
||||
- source.cool
|
||||
vendor/grammars/cpp-qt.tmbundle:
|
||||
- source.c++.qt
|
||||
- source.qmake
|
||||
vendor/grammars/css.tmbundle:
|
||||
- source.css
|
||||
vendor/grammars/cucumber-tmbundle:
|
||||
- source.ruby.rspec.cucumber.steps
|
||||
- text.gherkin.feature
|
||||
vendor/grammars/d.tmbundle:
|
||||
- source.d
|
||||
vendor/grammars/dart-sublime-bundle:
|
||||
- source.dart
|
||||
- source.pubspec
|
||||
- text.dart-doccomments
|
||||
vendor/grammars/desktop.tmbundle:
|
||||
- source.desktop
|
||||
vendor/grammars/diff.tmbundle:
|
||||
- source.diff
|
||||
vendor/grammars/dylan.tmbundle:
|
||||
- source.dylan
|
||||
- source.lid
|
||||
- source.makegen
|
||||
vendor/grammars/ebundles/Bundles/MSDOS batch file.tmbundle:
|
||||
- source.dosbatch
|
||||
vendor/grammars/ec.tmbundle/:
|
||||
- source.c.ec
|
||||
vendor/grammars/eiffel.tmbundle:
|
||||
- source.eiffel
|
||||
vendor/grammars/elixir-tmbundle:
|
||||
- source.elixir
|
||||
- text.elixir
|
||||
- text.html.elixir
|
||||
vendor/grammars/erlang.tmbundle:
|
||||
- source.erlang
|
||||
- text.html.erlang.yaws
|
||||
vendor/grammars/factor:
|
||||
- source.factor
|
||||
- text.html.factor
|
||||
vendor/grammars/fancy-tmbundle:
|
||||
- source.fancy
|
||||
vendor/grammars/fish-tmbundle:
|
||||
- source.fish
|
||||
vendor/grammars/fortran.tmbundle:
|
||||
- source.fortran
|
||||
- source.fortran.modern
|
||||
vendor/grammars/fsharpbinding:
|
||||
- source.fsharp
|
||||
vendor/grammars/gap-tmbundle/:
|
||||
- source.gap
|
||||
vendor/grammars/gettext.tmbundle:
|
||||
- source.po
|
||||
vendor/grammars/gnuplot-tmbundle:
|
||||
- source.gnuplot
|
||||
vendor/grammars/go-tmbundle:
|
||||
- source.go
|
||||
vendor/grammars/grace-tmbundle/:
|
||||
- source.grace
|
||||
vendor/grammars/gradle.tmbundle:
|
||||
- source.groovy.gradle
|
||||
vendor/grammars/graphviz.tmbundle:
|
||||
- source.dot
|
||||
vendor/grammars/groovy.tmbundle:
|
||||
- source.groovy
|
||||
vendor/grammars/haskell.tmbundle:
|
||||
- source.haskell
|
||||
- text.tex.latex.haskell
|
||||
vendor/grammars/haxe-sublime-bundle:
|
||||
- source.erazor
|
||||
- source.haxe.2
|
||||
- source.hss.1
|
||||
- source.hxml
|
||||
- source.nmml
|
||||
https://github.com/cucumber/cucumber-tmbundle:
|
||||
- source.ruby.rspec.cucumber.steps
|
||||
- text.gherkin.feature
|
||||
https://github.com/daaain/Handlebars/raw/master/Handlebars.tmLanguage:
|
||||
- text.html.handlebars
|
||||
https://github.com/davidpeckham/powershell.tmbundle:
|
||||
- source.powershell
|
||||
https://github.com/davidrios/jade-tmbundle:
|
||||
- source.jade
|
||||
- source.pyjade
|
||||
https://github.com/elixir-lang/elixir-tmbundle:
|
||||
- source.elixir
|
||||
- text.elixir
|
||||
- text.html.elixir
|
||||
https://github.com/ericzou/ebundles/raw/master/Bundles/MSDOS%20batch%20file.tmbundle/Syntaxes/MSDOS%20batch%20file.tmLanguage:
|
||||
- source.dosbatch
|
||||
https://github.com/euler0/sublime-glsl/raw/master/GLSL.tmLanguage:
|
||||
- source.glsl
|
||||
https://github.com/fancy-lang/fancy-tmbundle:
|
||||
- source.fancy
|
||||
https://github.com/fsharp/fsharpbinding:
|
||||
- source.fsharp
|
||||
https://github.com/gingerbeardman/monkey.tmbundle:
|
||||
- source.monkey
|
||||
https://github.com/guillermooo/dart-sublime-bundle/raw/master/Dart.tmLanguage:
|
||||
- source.dart
|
||||
https://github.com/harrism/sublimetext-cuda-cpp/raw/master/cuda-c%2B%2B.tmLanguage:
|
||||
- source.cuda-c++
|
||||
https://github.com/hww3/pike-textmate:
|
||||
- source.pike
|
||||
https://github.com/jeancharles-roger/ceylon-sublimetext/raw/master/Ceylon.tmLanguage:
|
||||
- source.ceylon
|
||||
https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL:
|
||||
- source.abl
|
||||
https://github.com/jhasse/sublime-rust:
|
||||
- source.rust
|
||||
https://github.com/johanasplund/sublime-befunge/raw/master/Befunge-93.tmLanguage:
|
||||
- source.befunge
|
||||
https://github.com/joshaven/RDoc.tmbundle:
|
||||
- text.rdoc
|
||||
https://github.com/jpcamara/Textmate-Gosu-Bundle/raw/master/Gosu.tmbundle/Syntaxes/Gosu.tmLanguage:
|
||||
- source.gosu.2
|
||||
https://github.com/kswedberg/jquery-tmbundle:
|
||||
- source.js.jquery
|
||||
https://github.com/laughedelic/sublime-idris/raw/master/Idris.tmLanguage:
|
||||
- source.idris
|
||||
https://github.com/lavrton/sublime-better-typescript:
|
||||
- source.ts
|
||||
https://github.com/leafo/moonscript-tmbundle:
|
||||
- source.moonscript
|
||||
https://github.com/lsf37/Isabelle.tmbundle:
|
||||
- source.isabelle.theory
|
||||
https://github.com/lunixbochs/x86-assembly-textmate-bundle:
|
||||
- source.asm.x86
|
||||
https://github.com/macekond/Alloy.tmbundle:
|
||||
- source.alloy
|
||||
https://github.com/mads379/opa.tmbundle:
|
||||
- source.opa
|
||||
https://github.com/mads379/scala.tmbundle:
|
||||
- source.sbt
|
||||
- source.scala
|
||||
https://github.com/marconi/mako-tmbundle:
|
||||
- text.html.mako
|
||||
https://github.com/mattfoster/gnuplot-tmbundle:
|
||||
- source.gnuplot
|
||||
https://github.com/mgalloy/idl.tmbundle:
|
||||
vendor/grammars/html.tmbundle:
|
||||
- text.html.basic
|
||||
vendor/grammars/idl.tmbundle:
|
||||
- source.idl
|
||||
- source.idl-dlm
|
||||
- text.idl-idldoc
|
||||
https://github.com/michaeledgar/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
https://github.com/mkolosick/Sublime-Coq/raw/master/Coq.tmLanguage:
|
||||
- source.coq
|
||||
https://github.com/mokus0/Agda.tmbundle:
|
||||
- source.agda
|
||||
https://github.com/nanoant/Julia.tmbundle:
|
||||
- source.julia
|
||||
https://github.com/nanoant/assembly.tmbundle/raw/master/Syntaxes/objdump%20C%2B%2B.tmLanguage:
|
||||
- objdump.x86asm
|
||||
https://github.com/nilium/ooc.tmbundle:
|
||||
- source.ooc
|
||||
https://github.com/paulmillr/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
https://github.com/pferruggiaro/sublime-tea:
|
||||
- source.tea
|
||||
https://github.com/puppet-textmate-bundle/puppet-textmate-bundle:
|
||||
- source.puppet
|
||||
https://github.com/pvl/abap.tmbundle:
|
||||
- source.abap
|
||||
https://github.com/scalate/Scalate.tmbundle:
|
||||
- source.scaml
|
||||
- text.html.ssp
|
||||
https://github.com/shadanan/mathematica-tmbundle:
|
||||
- source.mathematica
|
||||
https://github.com/shellderp/sublime-robot-plugin:
|
||||
- text.robot
|
||||
https://github.com/simongregory/actionscript3-tmbundle:
|
||||
- source.actionscript.3
|
||||
- text.html.asdoc
|
||||
- text.xml.flex-config
|
||||
https://github.com/skozlovf/Sublime-QML:
|
||||
- source.qml
|
||||
https://github.com/slash-lang/Slash.tmbundle:
|
||||
- text.html.slash
|
||||
https://github.com/slavapestov/factor/raw/master/misc/Factor.tmbundle/Syntaxes/Factor.tmLanguage:
|
||||
- source.factor
|
||||
https://github.com/slim-template/ruby-slim.tmbundle:
|
||||
- text.slim
|
||||
https://github.com/staltz/SublimeXtend:
|
||||
- source.xtend
|
||||
https://github.com/statatmbundle/Stata.tmbundle:
|
||||
- source.mata
|
||||
- source.stata
|
||||
https://github.com/technosophos/Vala-TMBundle:
|
||||
- source.vala
|
||||
https://github.com/textmate/ant.tmbundle:
|
||||
- text.xml.ant
|
||||
https://github.com/textmate/antlr.tmbundle:
|
||||
- source.antlr
|
||||
https://github.com/textmate/apache.tmbundle:
|
||||
- source.apache-config
|
||||
- source.apache-config.mod_perl
|
||||
https://github.com/textmate/applescript.tmbundle:
|
||||
- source.applescript
|
||||
https://github.com/textmate/asp.tmbundle:
|
||||
- source.asp
|
||||
- text.html.asp
|
||||
https://github.com/textmate/bison.tmbundle:
|
||||
- source.bison
|
||||
https://github.com/textmate/c.tmbundle:
|
||||
- source.c
|
||||
- source.c++
|
||||
- source.c.platform
|
||||
https://github.com/textmate/capnproto.tmbundle:
|
||||
- source.capnp
|
||||
https://github.com/textmate/cmake.tmbundle:
|
||||
- source.cache.cmake
|
||||
- source.cmake
|
||||
https://github.com/textmate/cpp-qt.tmbundle:
|
||||
- source.c++.qt
|
||||
- source.qmake
|
||||
https://github.com/textmate/css.tmbundle:
|
||||
- source.css
|
||||
https://github.com/textmate/d.tmbundle:
|
||||
- source.d
|
||||
https://github.com/textmate/diff.tmbundle:
|
||||
- source.diff
|
||||
https://github.com/textmate/dylan.tmbundle:
|
||||
- source.dylan
|
||||
- source.lid
|
||||
- source.makegen
|
||||
https://github.com/textmate/eiffel.tmbundle:
|
||||
- source.eiffel
|
||||
https://github.com/textmate/erlang.tmbundle:
|
||||
- source.erlang
|
||||
- text.html.erlang.yaws
|
||||
https://github.com/textmate/fortran.tmbundle:
|
||||
- source.fortran
|
||||
- source.fortran.modern
|
||||
https://github.com/textmate/gettext.tmbundle:
|
||||
- source.po
|
||||
https://github.com/textmate/graphviz.tmbundle:
|
||||
- source.dot
|
||||
https://github.com/textmate/groovy.tmbundle:
|
||||
- source.groovy
|
||||
https://github.com/textmate/haskell.tmbundle:
|
||||
- source.haskell
|
||||
- text.tex.latex.haskell
|
||||
https://github.com/textmate/html.tmbundle:
|
||||
- text.html.basic
|
||||
https://github.com/textmate/ini.tmbundle:
|
||||
vendor/grammars/ini.tmbundle:
|
||||
- source.ini
|
||||
https://github.com/textmate/io.tmbundle:
|
||||
vendor/grammars/io.tmbundle:
|
||||
- source.io
|
||||
https://github.com/textmate/java.tmbundle:
|
||||
vendor/grammars/ioke-outdated:
|
||||
- source.ioke
|
||||
vendor/grammars/jade-tmbundle:
|
||||
- source.jade
|
||||
- source.pyjade
|
||||
vendor/grammars/jasmin-sublime:
|
||||
- source.jasmin
|
||||
vendor/grammars/java.tmbundle:
|
||||
- source.java
|
||||
- source.java-properties
|
||||
- text.html.jsp
|
||||
- text.junit-test-report
|
||||
https://github.com/textmate/javadoc.tmbundle:
|
||||
vendor/grammars/javadoc.tmbundle:
|
||||
- text.html.javadoc
|
||||
https://github.com/textmate/javascript-objective-j.tmbundle:
|
||||
vendor/grammars/javascript-objective-j.tmbundle:
|
||||
- source.js.objj
|
||||
https://github.com/textmate/json.tmbundle:
|
||||
vendor/grammars/jquery-tmbundle:
|
||||
- source.js.jquery
|
||||
vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
https://github.com/textmate/latex.tmbundle:
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-clojure:
|
||||
- source.clojure
|
||||
vendor/grammars/language-coffee-script:
|
||||
- source.coffee
|
||||
- source.litcoffee
|
||||
vendor/grammars/language-csharp:
|
||||
- source.cs
|
||||
- source.csx
|
||||
- source.nant-build
|
||||
vendor/grammars/language-gfm:
|
||||
- source.gfm
|
||||
vendor/grammars/language-hy:
|
||||
- source.hy
|
||||
vendor/grammars/language-javascript:
|
||||
- source.js
|
||||
- source.js.regexp
|
||||
vendor/grammars/language-python:
|
||||
- source.python
|
||||
- source.regexp.python
|
||||
- text.python.console
|
||||
- text.python.traceback
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/latex.tmbundle:
|
||||
- text.bibtex
|
||||
- text.log.latex
|
||||
- text.tex
|
||||
- text.tex.latex
|
||||
- text.tex.latex.beamer
|
||||
- text.tex.latex.memoir
|
||||
https://github.com/textmate/less.tmbundle:
|
||||
vendor/grammars/less.tmbundle:
|
||||
- source.css.less
|
||||
https://github.com/textmate/lilypond.tmbundle:
|
||||
vendor/grammars/lilypond.tmbundle:
|
||||
- source.lilypond
|
||||
https://github.com/textmate/lisp.tmbundle:
|
||||
vendor/grammars/liquid.tmbundle:
|
||||
- text.html.liquid
|
||||
vendor/grammars/lisp.tmbundle:
|
||||
- source.lisp
|
||||
https://github.com/textmate/logtalk.tmbundle:
|
||||
vendor/grammars/llvm.tmbundle:
|
||||
- source.llvm
|
||||
vendor/grammars/logtalk.tmbundle:
|
||||
- source.logtalk
|
||||
https://github.com/textmate/lua.tmbundle:
|
||||
vendor/grammars/lua.tmbundle:
|
||||
- source.lua
|
||||
https://github.com/textmate/make.tmbundle:
|
||||
vendor/grammars/make.tmbundle:
|
||||
- source.makefile
|
||||
https://github.com/textmate/markdown.tmbundle:
|
||||
- text.html.markdown
|
||||
https://github.com/textmate/matlab.tmbundle:
|
||||
vendor/grammars/mako-tmbundle:
|
||||
- text.html.mako
|
||||
vendor/grammars/mathematica-tmbundle:
|
||||
- source.mathematica
|
||||
vendor/grammars/matlab.tmbundle:
|
||||
- source.matlab
|
||||
- source.octave
|
||||
https://github.com/textmate/maven.tmbundle:
|
||||
vendor/grammars/maven.tmbundle:
|
||||
- text.xml.pom
|
||||
https://github.com/textmate/nemerle.tmbundle:
|
||||
vendor/grammars/mediawiki.tmbundle/:
|
||||
- text.html.mediawiki
|
||||
vendor/grammars/mercury-tmlanguage:
|
||||
- source.mercury
|
||||
vendor/grammars/monkey.tmbundle:
|
||||
- source.monkey
|
||||
vendor/grammars/moonscript-tmbundle:
|
||||
- source.moonscript
|
||||
vendor/grammars/nemerle.tmbundle:
|
||||
- source.nemerle
|
||||
https://github.com/textmate/ninja.tmbundle:
|
||||
vendor/grammars/nesC.tmbundle:
|
||||
- source.nesc
|
||||
vendor/grammars/ninja.tmbundle:
|
||||
- source.ninja
|
||||
https://github.com/textmate/objective-c.tmbundle:
|
||||
vendor/grammars/objective-c.tmbundle:
|
||||
- source.objc
|
||||
- source.objc++
|
||||
- source.objc.platform
|
||||
- source.strings
|
||||
https://github.com/textmate/ocaml.tmbundle:
|
||||
vendor/grammars/ocaml.tmbundle:
|
||||
- source.camlp4.ocaml
|
||||
- source.ocaml
|
||||
- source.ocamllex
|
||||
- source.ocamlyacc
|
||||
https://github.com/textmate/pascal.tmbundle:
|
||||
vendor/grammars/ooc.tmbundle:
|
||||
- source.ooc
|
||||
vendor/grammars/opa.tmbundle:
|
||||
- source.opa
|
||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
|
||||
- source.oz
|
||||
vendor/grammars/pascal.tmbundle:
|
||||
- source.pascal
|
||||
https://github.com/textmate/perl.tmbundle:
|
||||
vendor/grammars/perl.tmbundle:
|
||||
- source.perl
|
||||
https://github.com/textmate/php-smarty.tmbundle:
|
||||
vendor/grammars/php-smarty.tmbundle:
|
||||
- source.smarty
|
||||
https://github.com/textmate/php.tmbundle:
|
||||
vendor/grammars/php.tmbundle:
|
||||
- text.html.php
|
||||
https://github.com/textmate/postscript.tmbundle:
|
||||
vendor/grammars/pike-textmate:
|
||||
- source.pike
|
||||
vendor/grammars/postscript.tmbundle:
|
||||
- source.postscript
|
||||
https://github.com/textmate/processing.tmbundle:
|
||||
vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
https://github.com/textmate/prolog.tmbundle:
|
||||
vendor/grammars/prolog.tmbundle:
|
||||
- source.prolog
|
||||
https://github.com/textmate/python-django.tmbundle:
|
||||
vendor/grammars/protobuf-tmbundle:
|
||||
- source.protobuf
|
||||
vendor/grammars/puppet-textmate-bundle:
|
||||
- source.puppet
|
||||
vendor/grammars/python-django.tmbundle:
|
||||
- source.python.django
|
||||
- text.html.django
|
||||
https://github.com/textmate/r.tmbundle:
|
||||
vendor/grammars/r.tmbundle:
|
||||
- source.r
|
||||
- text.tex.latex.rd
|
||||
https://github.com/textmate/restructuredtext.tmbundle:
|
||||
vendor/grammars/restructuredtext.tmbundle:
|
||||
- text.restructuredtext
|
||||
https://github.com/textmate/ruby-haml.tmbundle:
|
||||
vendor/grammars/ruby-haml.tmbundle:
|
||||
- text.haml
|
||||
https://github.com/textmate/ruby-on-rails-tmbundle:
|
||||
vendor/grammars/ruby-on-rails-tmbundle:
|
||||
- source.js.erb.rails
|
||||
- source.ruby.rails
|
||||
- source.ruby.rails.rjs
|
||||
- source.sql.ruby
|
||||
- text.html.erb.rails
|
||||
https://github.com/textmate/scheme.tmbundle:
|
||||
vendor/grammars/ruby-slim.tmbundle:
|
||||
- text.slim
|
||||
vendor/grammars/ruby.tmbundle:
|
||||
- source.ruby
|
||||
- text.html.erb
|
||||
vendor/grammars/sas.tmbundle:
|
||||
- source.SASLog
|
||||
- source.sas
|
||||
vendor/grammars/sass-textmate-bundle:
|
||||
- source.sass
|
||||
vendor/grammars/scala.tmbundle:
|
||||
- source.sbt
|
||||
- source.scala
|
||||
vendor/grammars/scheme.tmbundle:
|
||||
- source.scheme
|
||||
https://github.com/textmate/scilab.tmbundle:
|
||||
vendor/grammars/scilab.tmbundle:
|
||||
- source.scilab
|
||||
https://github.com/textmate/sql.tmbundle:
|
||||
vendor/grammars/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
https://github.com/textmate/standard-ml.tmbundle:
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
- source.cm
|
||||
- source.ml
|
||||
https://github.com/textmate/swift.tmbundle:
|
||||
vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
- source.ts
|
||||
vendor/grammars/sublime-bsv:
|
||||
- source.bsv
|
||||
vendor/grammars/sublime-cirru:
|
||||
- source.cirru
|
||||
vendor/grammars/sublime-glsl:
|
||||
- source.essl
|
||||
- source.glsl
|
||||
vendor/grammars/sublime-golo/:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-idris:
|
||||
- source.idris
|
||||
vendor/grammars/sublime-mask:
|
||||
- source.mask
|
||||
vendor/grammars/sublime-nginx:
|
||||
- source.nginx
|
||||
vendor/grammars/sublime-nix:
|
||||
- source.nix
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
- source.rust
|
||||
vendor/grammars/sublime-sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sublime-spintools/:
|
||||
- source.regexp.spin
|
||||
- source.spin
|
||||
vendor/grammars/sublime-tea:
|
||||
- source.tea
|
||||
vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime_cobol:
|
||||
- source.acucobol
|
||||
- source.cobol
|
||||
- source.opencobol
|
||||
vendor/grammars/sublime_man_page_support:
|
||||
- source.man
|
||||
- text.groff
|
||||
vendor/grammars/sublimetext-cuda-cpp:
|
||||
- source.cuda-c++
|
||||
vendor/grammars/swift.tmbundle:
|
||||
- source.swift
|
||||
https://github.com/textmate/tcl.tmbundle:
|
||||
vendor/grammars/tcl.tmbundle:
|
||||
- source.tcl
|
||||
- text.html.tcl
|
||||
https://github.com/textmate/text.tmbundle:
|
||||
vendor/grammars/text.tmbundle:
|
||||
- text.plain
|
||||
https://github.com/textmate/textile.tmbundle:
|
||||
vendor/grammars/textile.tmbundle:
|
||||
- text.html.textile
|
||||
https://github.com/textmate/textmate.tmbundle:
|
||||
vendor/grammars/textmate.tmbundle:
|
||||
- source.regexp.oniguruma
|
||||
- source.tm-properties
|
||||
https://github.com/textmate/thrift.tmbundle:
|
||||
vendor/grammars/thrift.tmbundle:
|
||||
- source.thrift
|
||||
https://github.com/textmate/toml.tmbundle:
|
||||
vendor/grammars/toml.tmbundle:
|
||||
- source.toml
|
||||
https://github.com/textmate/verilog.tmbundle:
|
||||
vendor/grammars/turtle.tmbundle:
|
||||
- source.sparql
|
||||
- source.turtle
|
||||
vendor/grammars/verilog.tmbundle:
|
||||
- source.verilog
|
||||
https://github.com/textmate/xml.tmbundle:
|
||||
vendor/grammars/x86-assembly-textmate-bundle:
|
||||
- source.asm.x86
|
||||
vendor/grammars/xml.tmbundle:
|
||||
- text.xml
|
||||
- text.xml.xsl
|
||||
https://github.com/textmate/yaml.tmbundle:
|
||||
- source.yaml
|
||||
https://github.com/tomas-stefano/smalltalk-tmbundle:
|
||||
- source.smalltalk
|
||||
https://github.com/vic/ioke-outdated/raw/master/share/TextMate/Ioke.tmbundle/Syntaxes/Ioke.tmLanguage:
|
||||
- source.ioke
|
||||
https://github.com/vkostyukov/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
https://github.com/vmg/zephir-sublime:
|
||||
vendor/grammars/zephir-sublime:
|
||||
- source.php.zephir
|
||||
https://github.com/whitequark/llvm.tmbundle:
|
||||
- source.llvm
|
||||
https://github.com/wmertens/sublime-nix:
|
||||
- source.nix
|
||||
https://raw.githubusercontent.com/eregon/oz-tmbundle/master/Syntaxes/Oz.tmLanguage:
|
||||
- source.oz
|
||||
|
||||
@@ -236,6 +236,21 @@ module Linguist
|
||||
name =~ VendoredRegexp ? true : false
|
||||
end
|
||||
|
||||
documentation_paths = YAML.load_file(File.expand_path("../documentation.yml", __FILE__))
|
||||
DocumentationRegexp = Regexp.new(documentation_paths.join('|'))
|
||||
|
||||
# Public: Is the blob in a documentation directory?
|
||||
#
|
||||
# Documentation files are ignored by language statistics.
|
||||
#
|
||||
# See "documentation.yml" for a list of documentation conventions that match
|
||||
# this pattern.
|
||||
#
|
||||
# Return true or false
|
||||
def documentation?
|
||||
name =~ DocumentationRegexp ? true : false
|
||||
end
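A small sketch of the new predicate against the defaults in `documentation.yml` (the paths are invented; only the blob's name is consulted):

```ruby
require 'linguist'

Linguist::FileBlob.new("docs/getting-started.md").documentation?  # => true, matches ^docs?/
Linguist::FileBlob.new("lib/linguist.rb").documentation?          # => false
```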
|
||||
|
||||
# Public: Get each line of data
|
||||
#
|
||||
# Requires Blob#data
|
||||
@@ -317,5 +332,15 @@ module Linguist
|
||||
def tm_scope
|
||||
language && language.tm_scope
|
||||
end
|
||||
|
||||
DETECTABLE_TYPES = [:programming, :markup].freeze
|
||||
|
||||
# Internal: Should this blob be included in repository language statistics?
|
||||
def include_in_language_stats?
|
||||
!vendored? &&
|
||||
!documentation? &&
|
||||
!generated? &&
|
||||
language && DETECTABLE_TYPES.include?(language.type)
|
||||
end
|
||||
end
|
||||
end
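Putting those pieces together, a hedged illustration of the new stats filter (run from a checkout of this repository so `lib/linguist.rb` exists; the other two paths only need to match by name):

```ruby
require 'linguist'

Linguist::FileBlob.new("vendor/plugins/foo.rb").include_in_language_stats?  # => false, vendored
Linguist::FileBlob.new("docs/README.md").include_in_language_stats?         # => false, documentation
Linguist::FileBlob.new("lib/linguist.rb").include_in_language_stats?        # => true, Ruby is :programming
```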
|
||||
|
||||
19 lib/linguist/documentation.yml (new file)
@@ -0,0 +1,19 @@
|
||||
# Documentation files and directories are excluded from language
|
||||
# statistics.
|
||||
#
|
||||
# Lines in this file are Regexps that are matched against the file
|
||||
# pathname.
|
||||
#
|
||||
# Please add additional test coverage to
|
||||
# `test/test_blob.rb#test_documentation` if you make any changes.
|
||||
|
||||
## Documentation Conventions ##
|
||||
|
||||
- ^docs?/
|
||||
- ^Documentation/
|
||||
|
||||
- (^|/)CONTRIBUTING(\.|$)
|
||||
- (^|/)COPYING(\.|$)
|
||||
- (^|/)INSTALL(\.|$)
|
||||
- (^|/)LICEN[CS]E(\.|$)
|
||||
- (^|/)README(\.|$)
|
||||
@@ -51,20 +51,20 @@ module Linguist
|
||||
#
|
||||
# Return true or false
|
||||
def generated?
|
||||
minified_files? ||
|
||||
compiled_coffeescript? ||
|
||||
xcode_file? ||
|
||||
generated_parser? ||
|
||||
generated_net_docfile? ||
|
||||
generated_net_designer_file? ||
|
||||
generated_postscript? ||
|
||||
generated_protocol_buffer? ||
|
||||
generated_jni_header? ||
|
||||
composer_lock? ||
|
||||
node_modules? ||
|
||||
godeps? ||
|
||||
vcr_cassette? ||
|
||||
generated_by_zephir?
|
||||
generated_by_zephir? ||
|
||||
minified_files? ||
|
||||
compiled_coffeescript? ||
|
||||
generated_parser? ||
|
||||
generated_net_docfile? ||
|
||||
generated_postscript? ||
|
||||
generated_protocol_buffer? ||
|
||||
generated_jni_header? ||
|
||||
vcr_cassette?
|
||||
end
|
||||
|
||||
# Internal: Is the blob an Xcode file?
|
||||
|
||||
@@ -53,7 +53,7 @@ module Linguist
|
||||
|
||||
# Internal: Check if this heuristic matches the candidate languages.
|
||||
def matches?(candidates)
|
||||
candidates.all? { |l| @languages.include?(l.name) }
|
||||
candidates.any? && candidates.all? { |l| @languages.include?(l.name) }
|
||||
end
|
||||
|
||||
# Internal: Perform the heuristic
|
||||
@@ -61,11 +61,30 @@ module Linguist
|
||||
@heuristic.call(data)
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
|
||||
disambiguate "BitBake", "BlitzBasic" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
elsif /^\s*(# |include|require)\b/.match(data)
|
||||
Language["BitBake"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "C#", "Smalltalk" do |data|
|
||||
if /![\w\s]+methodsFor: /.match(data)
|
||||
Language["Smalltalk"]
|
||||
elsif /^\s*namespace\s*[\w\.]+\s*{/.match(data) || /^\s*\/\//.match(data)
|
||||
Language["C#"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Objective-C", "C++", "C" do |data|
|
||||
if (/@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/.match(data))
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[^@]class\s+\w+/.match(data) || /^[^@](private|public|protected):$/.match(data) || /std::.+$/.match(data))
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
@@ -73,7 +92,7 @@ module Linguist
|
||||
disambiguate "Perl", "Perl6", "Prolog" do |data|
|
||||
if data.include?("use v6")
|
||||
Language["Perl6"]
|
||||
elsif data.include?("use strict")
|
||||
elsif data.match(/use strict|use\s+v?5\./)
|
||||
Language["Perl"]
|
||||
elsif data.include?(":-")
|
||||
Language["Prolog"]
|
||||
@@ -96,6 +115,15 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "GAP", "Scilab" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "OpenCL", "Cool" do |data|
|
||||
if data.include?("(defun ")
|
||||
Language["Common Lisp"]
|
||||
@@ -122,14 +150,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "AsciiDoc", "AGS Script" do |data|
|
||||
Language["AsciiDoc"] if /^=+(\s|\n)/.match(data)
|
||||
disambiguate "AsciiDoc", "AGS Script", "Public Key" do |data|
|
||||
if /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
elsif /^-----BEGIN/.match(data)
|
||||
Language["Public Key"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "FORTRAN", "Forth" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| subroutine\s)/i.match(data)
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
@@ -137,13 +171,27 @@ module Linguist
|
||||
disambiguate "F#", "Forth", "GLSL" do |data|
|
||||
if /^(: |new-device)/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^(#light|import|let|module|namespace|open|type)/.match(data)
|
||||
elsif /^\s*(#light|import|let|module|namespace|open|type)/.match(data)
|
||||
Language["F#"]
|
||||
elsif /^(#include|#pragma|precision|uniform|varying|void)/.match(data)
|
||||
elsif /^\s*(#include|#pragma|precision|uniform|varying|void)/.match(data)
|
||||
Language["GLSL"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "M", "Mathematica", "Matlab", "Mercury", "Objective-C" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif data.include?(":- module")
|
||||
Language["Mercury"]
|
||||
elsif /^\s*;/.match(data)
|
||||
Language["M"]
|
||||
elsif /^\s*\(\*/.match(data)
|
||||
Language["Mathematica"]
|
||||
elsif /^\s*%/.match(data)
|
||||
Language["Matlab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Gosu", "JavaScript" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
@@ -156,5 +204,30 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "NewLisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "TypeScript", "XML" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Frege", "Forth", "Text" do |data|
|
||||
if /^(: |also |new-device|previous )/.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^\s*(import|module|package|data|type) /.match(data)
|
||||
Language["Frege"]
|
||||
else
|
||||
Language["Text"]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
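For context, these heuristics run as one strategy in the detection pipeline. A hand-driven, hedged example (the header file is hypothetical and must exist on disk, since the heuristic inspects its contents):

```ruby
require 'linguist'

blob = Linguist::FileBlob.new("ui_view.h")  # hypothetical Objective-C header
candidates = [Linguist::Language["C"], Linguist::Language["C++"], Linguist::Language["Objective-C"]]

# Returns the narrowed list, e.g. [#<Language name="Objective-C">] when the data
# matches ObjectiveCRegex above, or [] if no heuristic applies.
Linguist::Heuristics.call(blob, candidates)
```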
|
||||
|
||||
@@ -11,6 +11,7 @@ require 'linguist/samples'
|
||||
require 'linguist/file_blob'
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/strategy/filename'
|
||||
require 'linguist/strategy/modeline'
|
||||
require 'linguist/shebang'
|
||||
|
||||
module Linguist
|
||||
@@ -31,13 +32,6 @@ module Linguist
|
||||
# Valid Languages types
|
||||
TYPES = [:data, :markup, :programming, :prose]
|
||||
|
||||
# Names of non-programming languages that we will still detect
|
||||
#
|
||||
# Returns an array
|
||||
def self.detectable_markup
|
||||
["CSS", "Less", "Sass", "SCSS", "Stylus", "TeX"]
|
||||
end
|
||||
|
||||
# Detect languages by a specific type
|
||||
#
|
||||
# type - A symbol that exists within TYPES
|
||||
@@ -94,8 +88,9 @@ module Linguist
|
||||
end
|
||||
|
||||
STRATEGIES = [
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Strategy::Modeline,
|
||||
Linguist::Shebang,
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Heuristics,
|
||||
Linguist::Classifier
|
||||
]
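Roughly, an ordered list like this is consumed by asking each strategy in turn to narrow the candidate languages. The loop below is only a sketch of that idea, not the code in `language.rb`:

```ruby
require 'linguist'

# Sketch only: every strategy responds to .call(blob, candidates).
blob = Linguist::FileBlob.new("bin/linguist")
strategies = [Linguist::Strategy::Modeline, Linguist::Shebang,
              Linguist::Strategy::Filename, Linguist::Heuristics, Linguist::Classifier]

candidates = []
strategies.each do |strategy|
  narrowed = strategy.call(blob, candidates)
  candidates = narrowed unless narrowed.empty?
  break if candidates.size == 1
end
candidates.first  # best guess Language, or nil if nothing matched
```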
|
||||
@@ -155,7 +150,7 @@ module Linguist
|
||||
# Language.find_by_alias('cpp')
|
||||
# # => #<Language name="C++">
|
||||
#
|
||||
# Returns the Lexer or nil if none was found.
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
name && @alias_index[name.downcase]
|
||||
end
|
||||
@@ -219,7 +214,7 @@ module Linguist
|
||||
end
|
||||
|
||||
|
||||
# Public: Look up Language by its name or lexer.
|
||||
# Public: Look up Language by its name.
|
||||
#
|
||||
# name - The String name of the Language
|
||||
#
|
||||
@@ -243,7 +238,7 @@ module Linguist
|
||||
#
|
||||
# This list is configured in "popular.yml".
|
||||
#
|
||||
# Returns an Array of Lexers.
|
||||
# Returns an Array of Languages.
|
||||
def self.popular
|
||||
@popular ||= all.select(&:popular?).sort_by { |lang| lang.name.downcase }
|
||||
end
|
||||
@@ -255,7 +250,7 @@ module Linguist
|
||||
#
|
||||
# This list is created from all the languages not listed in "popular.yml".
|
||||
#
|
||||
# Returns an Array of Lexers.
|
||||
# Returns an Array of Languages.
|
||||
def self.unpopular
|
||||
@unpopular ||= all.select(&:unpopular?).sort_by { |lang| lang.name.downcase }
|
||||
end
|
||||
@@ -269,8 +264,12 @@ module Linguist
|
||||
|
||||
# Public: A List of languages compatible with Ace.
|
||||
#
|
||||
# TODO: Remove this method in a 5.x release. Every language now needs an ace_mode
|
||||
# key, so this function isn't doing anything unique anymore.
|
||||
#
|
||||
# Returns an Array of Languages.
|
||||
def self.ace_modes
|
||||
warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set."
|
||||
@ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase }
|
||||
end
|
||||
|
||||
@@ -371,11 +370,6 @@ module Linguist
|
||||
# Returns the name String
|
||||
attr_reader :search_term
|
||||
|
||||
# Public: Get Lexer
|
||||
#
|
||||
# Returns the Lexer
|
||||
attr_reader :lexer
|
||||
|
||||
# Public: Get the name of a TextMate-compatible scope
|
||||
#
|
||||
# Returns the scope
|
||||
@@ -491,16 +485,6 @@ module Linguist
|
||||
@searchable
|
||||
end
|
||||
|
||||
# Public: Highlight syntax of text
|
||||
#
|
||||
# text - String of code to be highlighted
|
||||
# options - A Hash of options (defaults to {})
|
||||
#
|
||||
# Returns html String
|
||||
def colorize(text, options = {})
|
||||
lexer.highlight(text, options)
|
||||
end
|
||||
|
||||
# Public: Return name as String representation
|
||||
def to_s
|
||||
name
|
||||
@@ -544,7 +528,7 @@ module Linguist
|
||||
|
||||
if extnames = extensions[name]
|
||||
extnames.each do |extname|
|
||||
if !options['extensions'].include?(extname)
|
||||
if !options['extensions'].index { |x| x.end_with? extname }
|
||||
warn "#{name} has a sample with extension (#{extname}) that isn't explicitly defined in languages.yml" unless extname == '.script!'
|
||||
options['extensions'] << extname
|
||||
end
|
||||
@@ -576,7 +560,6 @@ module Linguist
|
||||
:color => options['color'],
|
||||
:type => options['type'],
|
||||
:aliases => options['aliases'],
|
||||
:lexer => options['lexer'],
|
||||
:tm_scope => options['tm_scope'],
|
||||
:ace_mode => options['ace_mode'],
|
||||
:wrap => options['wrap'],
|
||||
|
||||
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ require 'rugged'
|
||||
|
||||
module Linguist
|
||||
class LazyBlob
|
||||
GIT_ATTR = ['linguist-language', 'linguist-vendored']
|
||||
GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
|
||||
GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
|
||||
GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
|
||||
|
||||
@@ -37,6 +37,14 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def documentation?
|
||||
if attr = git_attributes['linguist-documentation']
|
||||
boolean_attribute(attr)
|
||||
else
|
||||
super
|
||||
end
|
||||
end
|
||||
|
||||
def language
|
||||
return @language if defined?(@language)
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
# This file should only be edited by GitHub staff
|
||||
|
||||
- ActionScript
|
||||
- Bash
|
||||
- C
|
||||
- C#
|
||||
- C++
|
||||
@@ -27,3 +26,4 @@
|
||||
- SQL
|
||||
- Scala
|
||||
- Scheme
|
||||
- Shell
|
||||
|
||||
@@ -156,13 +156,8 @@ module Linguist
|
||||
|
||||
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
|
||||
|
||||
# Skip vendored or generated blobs
|
||||
next if blob.vendored? || blob.generated? || blob.language.nil?
|
||||
|
||||
# Only include programming languages and acceptable markup languages
|
||||
if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
end
|
||||
next unless blob.include_in_language_stats?
|
||||
file_map[new] = [blob.language.group.name, blob.size]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@@ -34,10 +34,6 @@ module Linguist
|
||||
Dir.entries(ROOT).sort!.each do |category|
|
||||
next if category == '.' || category == '..'
|
||||
|
||||
# Skip text and binary for now
|
||||
# Possibly reconsider this later
|
||||
next if category == 'Text' || category == 'Binary'
|
||||
|
||||
dirname = File.join(ROOT, category)
|
||||
Dir.entries(dirname).each do |filename|
|
||||
next if filename == '.' || filename == '..'
|
||||
|
||||
@@ -18,23 +18,32 @@ module Linguist
    #
    # Returns a String or nil
    def self.interpreter(data)
      lines = data.lines
      return unless match = /^#! ?(.*)$/.match(lines.first)
      shebang = data.lines.first

      tokens = match[1].split(' ')
      script = tokens.first.split('/').last
      # First line must start with #!
      return unless shebang && shebang.start_with?("#!")

      # Get the parts of the shebang without the #!
      tokens = shebang.sub(/^#!\s*/, '').strip.split(' ')

      # There was nothing after the #!
      return if tokens.empty?

      # Get the name of the interpreter
      script = File.basename(tokens.first)

      # Get next argument if interpreter was /usr/bin/env
      script = tokens[1] if script == 'env'

      # If script has an invalid shebang, we might get here
      # Interpreter was /usr/bin/env with no arguments
      return unless script

      # "python2.6" -> "python2"
      script.sub! $1, '' if script =~ /(\.\d+)$/
      script.sub! /(\.\d+)$/, ''

      # Check for multiline shebang hacks that call `exec`
      if script == 'sh' &&
        lines.first(5).any? { |l| l.match(/exec (\w+).+\$0.+\$@/) }
        data.lines.first(5).any? { |l| l.match(/exec (\w+).+\$0.+\$@/) }
        script = $1
      end
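A usage sketch for the rewritten interpreter parser; the inputs are invented, the results follow the comments in the hunk, and it is assumed the method lives on the library's shebang helper (Linguist::Shebang):

    Linguist::Shebang.interpreter("#!/usr/bin/env python2.7\nprint 1\n")  # => "python2"
    Linguist::Shebang.interpreter("#! /bin/bash\necho hi\n")              # => "bash"
    Linguist::Shebang.interpreter("#!\n")                                 # => nil, nothing after the #!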
30  lib/linguist/strategy/modeline.rb  Normal file
@@ -0,0 +1,30 @@
module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*mode:\s*(\w+);?\s*-\*-/i
      VimModeline = /\/\*\s*vim:\s*set\s*(?:ft|filetype)=(\w+):\s*\*\//i

      # Public: Detects language based on Vim and Emacs modelines
      #
      # blob - An object that quacks like a blob.
      #
      # Examples
      #
      #   Modeline.call(FileBlob.new("path/to/file"))
      #
      # Returns an Array with one Language if the blob has a Vim or Emacs modeline
      # that matches a Language name or alias. Returns an empty array if no match.
      def self.call(blob, _ = nil)
        Array(Language.find_by_alias(modeline(blob.data)))
      end

      # Public: Get the modeline from the first n-lines of the file
      #
      # Returns a String or nil
      def self.modeline(data)
        match = data.match(EmacsModeline) || data.match(VimModeline)
        match[1] if match
      end
    end
  end
end
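A usage sketch for the new Modeline strategy; the path and file contents are made up:

    # Suppose path/to/query.sql starts with:  /* vim: set filetype=sql: */
    blob = Linguist::FileBlob.new("path/to/query.sql")
    Linguist::Strategy::Modeline.call(blob)
    # => [#<Linguist::Language name="SQL">], or [] when no modeline matches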
@@ -33,7 +33,8 @@ module Linguist
      ['<!--', '-->'], # XML
      ['{-', '-}'],    # Haskell
      ['(*', '*)'],    # Coq
      ['"""', '"""']   # Python
      ['"""', '"""'],  # Python
      ["'''", "'''"]   # Python
    ]

    START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
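For context, the delimiter pairs above are compiled into comment-matching regexps further down in the tokenizer. A minimal sketch of the idea, as an illustration rather than the tokenizer's actual constant:

    # Build a "start of multi-line comment" regexp from the delimiter pairs.
    pairs = [['"""', '"""'], ["'''", "'''"]]
    start_re = Regexp.compile(pairs.map { |start, _| Regexp.escape(start) }.join("|"))
    start_re =~ %q('''a Python docstring-style block)   # => 0 (match at start)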
@@ -32,6 +32,7 @@

# Erlang bundles
- ^rebar$
- erlang.mk

# Go dependencies
- Godeps/_workspace/
@@ -39,24 +40,27 @@
# Minified JavaScript and CSS
- (\.|-)min\.(js|css)$

# Stylesheets imported from packages
- ([^\s]*)import\.(css|less|scss|styl)$

# Bootstrap css and js
- (^|/)bootstrap([^.]*)\.(js|css)$
- (^|/)bootstrap([^.]*)\.(js|css|less|scss|styl)$
- (^|/)custom\.bootstrap([^\s]*)(js|css|less|scss|styl)$

# Font Awesome
- font-awesome.css
- (^|/)font-awesome\.(css|less|scss|styl)$

# Foundation css
- foundation.css
- (^|/)foundation\.(css|less|scss|styl)$

# Normalize.css
- normalize.css
- (^|/)normalize\.(css|less|scss|styl)$

# Bourbon SCSS
- (^|/)[Bb]ourbon/.*\.css$
- (^|/)[Bb]ourbon/.*\.scss$
# Bourbon css
- (^|/)[Bb]ourbon/.*\.(css|less|scss|styl)$

# Animate.css
- animate.css
- (^|/)animate\.(css|less|scss|styl)$

# Vendored dependencies
- third[-_]?party/
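These vendor patterns are consumed by joining them into one regexp that file paths are matched against. A sketch of that usage; the sample paths are made up and the loading code is an assumption about how vendor.yml is read, not a quote from the library:

    require 'yaml'

    patterns    = YAML.load_file("lib/linguist/vendor.yml")
    vendored_re = Regexp.new(patterns.join("|"))
    vendored_re =~ "assets/css/bootstrap-theme.less"   # matches, now that less/scss/styl are covered
    vendored_re =~ "app/stylesheets/site.scss"         # nil: not a vendored path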
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "4.2.0"
  VERSION = "4.4.1"
end
@@ -1,6 +1,6 @@
{
  "repository": "https://github.com/github/linguist",
  "dependencies": {
    "season": "~>3.0"
    "season": "~>5.0"
  }
}
@@ -1,215 +0,0 @@
|
||||
%{
|
||||
#include "./../ATEXT/atextfun.hats"
|
||||
%}
|
||||
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
|
||||
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
|
||||
<title>EFFECTIVATS-DiningPhil2</title>
|
||||
#patscode_style()
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<h1>
|
||||
Effective ATS: Dining Philosophers
|
||||
</h1>
|
||||
|
||||
In this article, I present an implementation of a slight variant of the
|
||||
famous problem of 5-Dining-Philosophers by Dijkstra that makes simple but
|
||||
convincing use of linear types.
|
||||
|
||||
<h2>
|
||||
The Original Problem
|
||||
</h2>
|
||||
|
||||
There are five philosophers sitting around a table and there are also 5
|
||||
forks placed on the table such that each fork is located between the left
|
||||
hand of a philosopher and the right hand of another philosopher. Each
|
||||
philosopher does the following routine repeatedly: thinking and dining. In
|
||||
order to dine, a philosopher needs to first acquire two forks: one located
|
||||
on his left-hand side and the other on his right-hand side. After
|
||||
finishing dining, a philosopher puts the two acquired forks onto the table:
|
||||
one on his left-hand side and the other on his right-hand side.
|
||||
|
||||
<h2>
|
||||
A Variant of the Original Problem
|
||||
</h2>
|
||||
|
||||
The following twist is added to the original version:
|
||||
|
||||
<p>
|
||||
|
||||
After a fork is used, it becomes a "dirty" fork and needs to be put in a
|
||||
tray for dirty forks. There is a cleaner who cleans dirty forks and then
|
||||
puts them back on the table.
|
||||
|
||||
<h2>
|
||||
Channels for Communication
|
||||
</h2>
|
||||
|
||||
A channel is just a shared queue of fixed capacity. The following two
|
||||
functions are for inserting an element into and taking an element out of a
|
||||
given channel:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun{a:vt0p} channel_insert (channel (a), a): void
|
||||
fun{a:vt0p} channel_takeout (chan: channel (a)): (a)
|
||||
")</pre>
|
||||
|
||||
If [channel_insert] is called on a channel that is full, then the caller is
|
||||
blocked until an element is taken out of the channel. If [channel_takeout]
|
||||
is called on a channel that is empty, then the caller is blocked until an
|
||||
element is inserted into the channel.
|
||||
|
||||
<h2>
|
||||
A Channel for Each Fork
|
||||
</h2>
|
||||
|
||||
Forks are resources given a linear type. Each fork is initially stored in a
|
||||
channel, which can be obtained by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun fork_changet (n: nphil): channel(fork)
|
||||
")</pre>
|
||||
|
||||
where the type [nphil] is defined to be [natLt(5)] (for natural numbers
|
||||
less than 5). The channels for storing forks are chosen to be of capacity
|
||||
2. The reason that channels of capacity 2 are chosen to store at most one
|
||||
element (in each of them) is to guarantee that these channels can never be
|
||||
full (so that there is no attempt made to send signals to awake callers
|
||||
supposedly being blocked due to channels being full).
|
||||
|
||||
|
||||
<h2>
|
||||
A Channel for the Fork Tray
|
||||
</h2>
|
||||
|
||||
A tray for storing "dirty" forks is also a channel, which can be obtained
|
||||
by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun forktray_changet ((*void*)): channel(fork)
|
||||
")</pre>
|
||||
|
||||
The capacity chosen for the channel is 6 (instead of 5) so that it can
|
||||
never become full (as there are only 5 forks in total).
|
||||
|
||||
<h2>
|
||||
Philosopher Loop
|
||||
</h2>
|
||||
|
||||
Each philosopher is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
phil_loop (n) = let
|
||||
//
|
||||
val () = phil_think (n)
|
||||
//
|
||||
val nl = phil_left (n) // = n
|
||||
val nr = phil_right (n) // = (n+1) % 5
|
||||
//
|
||||
val ch_lfork = fork_changet (nl)
|
||||
val ch_rfork = fork_changet (nr)
|
||||
//
|
||||
val lf = channel_takeout (ch_lfork)
|
||||
val () = println! ("phil_loop(", n, ") picks left fork")
|
||||
//
|
||||
val () = randsleep (2) // sleep up to 2 seconds
|
||||
//
|
||||
val rf = channel_takeout (ch_rfork)
|
||||
val () = println! ("phil_loop(", n, ") picks right fork")
|
||||
//
|
||||
val () = phil_dine (n, lf, rf)
|
||||
//
|
||||
val ch_forktray = forktray_changet ()
|
||||
val () = channel_insert (ch_forktray, lf) // left fork to dirty tray
|
||||
val () = channel_insert (ch_forktray, rf) // right fork to dirty tray
|
||||
//
|
||||
in
|
||||
phil_loop (n)
|
||||
end // end of [phil_loop]
|
||||
')</pre>
|
||||
|
||||
It should be straightforward to follow the code for [phil_loop].
|
||||
|
||||
<h2>
|
||||
Fork Cleaner Loop
|
||||
</h2>
|
||||
|
||||
A cleaner is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_loop () = let
|
||||
//
|
||||
val ch = forktray_changet ()
|
||||
val f0 = channel_takeout (ch) // [f0] is dirty
|
||||
//
|
||||
val () = cleaner_wash (f0) // washes dirty [f0]
|
||||
val () = cleaner_return (f0) // puts back cleaned [f0]
|
||||
//
|
||||
in
|
||||
cleaner_loop ()
|
||||
end // end of [cleaner_loop]
|
||||
')</pre>
|
||||
|
||||
The function [cleaner_return] first finds out the number of a given fork
|
||||
and then uses the number to locate the channel for storing the fork. Its
|
||||
actual implementation is given as follows:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_return (f) =
|
||||
{
|
||||
val n = fork_get_num (f)
|
||||
val ch = fork_changet (n)
|
||||
val () = channel_insert (ch, f)
|
||||
}
|
||||
')</pre>
|
||||
|
||||
It should now be straightforward to follow the code for [cleaner_loop].
|
||||
|
||||
<h2>
|
||||
Testing
|
||||
</h2>
|
||||
|
||||
The entire code of this implementation is stored in the following files:
|
||||
|
||||
<pre>
|
||||
DiningPhil2.sats
|
||||
DiningPhil2.dats
|
||||
DiningPhil2_fork.dats
|
||||
DiningPhil2_thread.dats
|
||||
</pre>
|
||||
|
||||
There is also a Makefile available for compiling the ATS source code into
|
||||
an executable for testing. One should be able to encounter a deadlock after
|
||||
running the simulation for a while.
|
||||
|
||||
<hr size="2">
|
||||
|
||||
This article is written by <a href="http://www.cs.bu.edu/~hwxi/">Hongwei Xi</a>.
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
%{
|
||||
implement main () = fprint_filsub (stdout_ref, "main_atxt.txt")
|
||||
%}
|
||||
2841  samples/Assembly/forth.nasm  Normal file (File diff suppressed because it is too large)
25  samples/BitBake/gstreamer-libav.bb  Normal file
@@ -0,0 +1,25 @@
|
||||
include gstreamer1.0-libav.inc
|
||||
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
|
||||
file://COPYING.LIB;md5=6762ed442b3822387a51c92d928ead0d \
|
||||
file://ext/libav/gstav.h;beginline=1;endline=18;md5=a752c35267d8276fd9ca3db6994fca9c \
|
||||
file://gst-libs/ext/libav/LICENSE;md5=23a54f59b82572c203a559346e89ed57 \
|
||||
file://gst-libs/ext/libav/COPYING.GPLv2;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
|
||||
file://gst-libs/ext/libav/COPYING.GPLv3;md5=d32239bcb673463ab874e80d47fae504 \
|
||||
file://gst-libs/ext/libav/COPYING.LGPLv2.1;md5=bd7a443320af8c812e4c18d1b79df004 \
|
||||
file://gst-libs/ext/libav/COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02"
|
||||
|
||||
SRC_URI = " \
|
||||
http://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz \
|
||||
file://0001-Disable-yasm-for-libav-when-disable-yasm.patch \
|
||||
"
|
||||
SRC_URI[md5sum] = "86540dee14d31daf976eb2713f2294f3"
|
||||
SRC_URI[sha256sum] = "585eb7971006100ad771a852e07bd2f3e23bcc6eb0b1253a40b5a0e40e4e7418"
|
||||
|
||||
LIBAV_EXTRA_CONFIGURE_COMMON_ARG = "--target-os=linux \
|
||||
--cc='${CC}' --as='${CC}' --ld='${CC}' --nm='${NM}' --ar='${AR}' \
|
||||
--ranlib='${RANLIB}' \
|
||||
${GSTREAMER_1_0_DEBUG}"
|
||||
|
||||
S = "${WORKDIR}/gst-libav-${PV}"
|
||||
|
||||
13  samples/BitBake/qtbase-native.bb  Normal file
@@ -0,0 +1,13 @@
|
||||
require qt5-git.inc
|
||||
require ${PN}.inc
|
||||
|
||||
do_install_append() {
|
||||
# for modules which are still using syncqt and call qtPrepareTool(QMAKE_SYNCQT, syncqt)
|
||||
# e.g. qt3d, qtwayland
|
||||
ln -sf syncqt.pl ${D}${OE_QMAKE_PATH_QT_BINS}/syncqt
|
||||
}
|
||||
|
||||
QT_MODULE_BRANCH = "release"
|
||||
# v5.2.1 + 168 commits
|
||||
SRCREV = "08cbbde61778276ccdda73d89fd64d02c623779f"
|
||||
|
||||
26  samples/C#/AssemblyInfo.cs  Normal file
@@ -0,0 +1,26 @@
|
||||
using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
// Information about this assembly is defined by the following attributes.
|
||||
// Change them to the values specific to your project.
|
||||
|
||||
[assembly: AssemblyTitle("Simple")]
|
||||
[assembly: AssemblyDescription("")]
|
||||
[assembly: AssemblyConfiguration("")]
|
||||
[assembly: AssemblyCompany("")]
|
||||
[assembly: AssemblyProduct("")]
|
||||
[assembly: AssemblyCopyright("")]
|
||||
[assembly: AssemblyTrademark("")]
|
||||
[assembly: AssemblyCulture("")]
|
||||
|
||||
// The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}".
|
||||
// The form "{Major}.{Minor}.*" will automatically update the build and revision,
|
||||
// and "{Major}.{Minor}.{Build}.*" will update just the revision.
|
||||
|
||||
[assembly: AssemblyVersion("1.0.*")]
|
||||
|
||||
// The following attributes are used to specify the signing key for the assembly,
|
||||
// if desired. See the Mono documentation for more information about signing.
|
||||
|
||||
//[assembly: AssemblyDelaySign(false)]
|
||||
//[assembly: AssemblyKeyFile("")]
|
||||
20  samples/C#/BsonPropertyValue.cs  Normal file
@@ -0,0 +1,20 @@
|
||||
using System;
|
||||
|
||||
namespace MongoDB.Serialization.Descriptors
|
||||
{
|
||||
internal class BsonPropertyValue
|
||||
{
|
||||
public bool IsDictionary { get; private set; }
|
||||
|
||||
public Type Type { get; private set; }
|
||||
|
||||
public object Value { get; private set; }
|
||||
|
||||
public BsonPropertyValue(Type type, object value, bool isDictionary)
|
||||
{
|
||||
Type = type;
|
||||
Value = value;
|
||||
IsDictionary = isDictionary;
|
||||
}
|
||||
}
|
||||
}
|
||||
153  samples/C#/MongoExpressionVisitor.cs  Normal file
@@ -0,0 +1,153 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Linq;
|
||||
using System.Linq.Expressions;
|
||||
|
||||
namespace MongoDB.Linq.Expressions
|
||||
{
|
||||
internal class MongoExpressionVisitor : ExpressionVisitor
|
||||
{
|
||||
protected override Expression Visit(Expression exp)
|
||||
{
|
||||
if (exp == null)
|
||||
return null;
|
||||
switch ((MongoExpressionType)exp.NodeType)
|
||||
{
|
||||
case MongoExpressionType.Collection:
|
||||
return VisitCollection((CollectionExpression)exp);
|
||||
case MongoExpressionType.Field:
|
||||
return VisitField((FieldExpression)exp);
|
||||
case MongoExpressionType.Projection:
|
||||
return VisitProjection((ProjectionExpression)exp);
|
||||
case MongoExpressionType.Select:
|
||||
return VisitSelect((SelectExpression)exp);
|
||||
case MongoExpressionType.Aggregate:
|
||||
return VisitAggregate((AggregateExpression)exp);
|
||||
case MongoExpressionType.AggregateSubquery:
|
||||
return VisitAggregateSubquery((AggregateSubqueryExpression)exp);
|
||||
case MongoExpressionType.Scalar:
|
||||
return VisitScalar((ScalarExpression)exp);
|
||||
default:
|
||||
return base.Visit(exp);
|
||||
}
|
||||
}
|
||||
|
||||
protected virtual Expression VisitAggregate(AggregateExpression aggregate)
|
||||
{
|
||||
var exp = Visit(aggregate.Argument);
|
||||
if (exp != aggregate.Argument)
|
||||
return new AggregateExpression(aggregate.Type, aggregate.AggregateType, exp, aggregate.Distinct);
|
||||
|
||||
return aggregate;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitAggregateSubquery(AggregateSubqueryExpression aggregateSubquery)
|
||||
{
|
||||
Expression e = Visit(aggregateSubquery.AggregateAsSubquery);
|
||||
ScalarExpression subquery = (ScalarExpression)e;
|
||||
if (subquery != aggregateSubquery.AggregateAsSubquery)
|
||||
return new AggregateSubqueryExpression(aggregateSubquery.GroupByAlias, aggregateSubquery.AggregateInGroupSelect, subquery);
|
||||
return aggregateSubquery;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitCollection(CollectionExpression collection)
|
||||
{
|
||||
return collection;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitField(FieldExpression field)
|
||||
{
|
||||
var e = Visit(field.Expression);
|
||||
if (field.Expression != e)
|
||||
field = new FieldExpression(e, field.Alias, field.Name);
|
||||
|
||||
return field;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitProjection(ProjectionExpression projection)
|
||||
{
|
||||
var source = (SelectExpression)Visit(projection.Source);
|
||||
var projector = Visit(projection.Projector);
|
||||
if (source != projection.Source || projector != projection.Projector)
|
||||
return new ProjectionExpression(source, projector, projection.Aggregator);
|
||||
return projection;
|
||||
}
|
||||
|
||||
protected ReadOnlyCollection<OrderExpression> VisitOrderBy(ReadOnlyCollection<OrderExpression> orderBys)
|
||||
{
|
||||
if (orderBys != null)
|
||||
{
|
||||
List<OrderExpression> alternate = null;
|
||||
for (int i = 0, n = orderBys.Count; i < n; i++)
|
||||
{
|
||||
OrderExpression expr = orderBys[i];
|
||||
Expression e = this.Visit(expr.Expression);
|
||||
if (alternate == null && e != expr.Expression)
|
||||
alternate = orderBys.Take(i).ToList();
|
||||
if (alternate != null)
|
||||
alternate.Add(new OrderExpression(expr.OrderType, e));
|
||||
}
|
||||
if (alternate != null)
|
||||
return alternate.AsReadOnly();
|
||||
}
|
||||
return orderBys;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitScalar(ScalarExpression scalar)
|
||||
{
|
||||
SelectExpression select = (SelectExpression)Visit(scalar.Select);
|
||||
if (select != scalar.Select)
|
||||
return new ScalarExpression(scalar.Type, select);
|
||||
return scalar;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitSelect(SelectExpression select)
|
||||
{
|
||||
var from = VisitSource(select.From);
|
||||
var where = Visit(select.Where);
|
||||
var groupBy = Visit(select.GroupBy);
|
||||
var orderBy = VisitOrderBy(select.OrderBy);
|
||||
var skip = Visit(select.Skip);
|
||||
var take = Visit(select.Take);
|
||||
var fields = VisitFieldDeclarationList(select.Fields);
|
||||
if (from != select.From || where != select.Where || orderBy != select.OrderBy || groupBy != select.GroupBy || skip != select.Skip || take != select.Take || fields != select.Fields)
|
||||
return new SelectExpression(select.Alias, fields, from, where, orderBy, groupBy, select.IsDistinct, skip, take);
|
||||
return select;
|
||||
}
|
||||
|
||||
protected virtual Expression VisitSource(Expression source)
|
||||
{
|
||||
return Visit(source);
|
||||
}
|
||||
|
||||
protected virtual Expression VisitSubquery(SubqueryExpression subquery)
|
||||
{
|
||||
switch ((MongoExpressionType)subquery.NodeType)
|
||||
{
|
||||
case MongoExpressionType.Scalar:
|
||||
return VisitScalar((ScalarExpression)subquery);
|
||||
}
|
||||
return subquery;
|
||||
}
|
||||
|
||||
protected virtual ReadOnlyCollection<FieldDeclaration> VisitFieldDeclarationList(ReadOnlyCollection<FieldDeclaration> fields)
|
||||
{
|
||||
if (fields == null)
|
||||
return fields;
|
||||
|
||||
List<FieldDeclaration> alternate = null;
|
||||
for (int i = 0, n = fields.Count; i < n; i++)
|
||||
{
|
||||
var f = fields[i];
|
||||
var e = Visit(f.Expression);
|
||||
if (f.Expression != e && alternate == null)
|
||||
alternate = fields.Take(i).ToList();
|
||||
if (alternate != null)
|
||||
alternate.Add(new FieldDeclaration(f.Name, e));
|
||||
}
|
||||
if (alternate != null)
|
||||
return alternate.AsReadOnly();
|
||||
return fields;
|
||||
}
|
||||
}
|
||||
}
|
||||
98  samples/C++/Entity.h  Normal file
@@ -0,0 +1,98 @@
|
||||
/**
|
||||
* @file Entity.h
|
||||
* @page EntityPage Entity
|
||||
* @brief represent an entity in the game
|
||||
* @author vinz243
|
||||
* @version 0.1.0
|
||||
* This file represents an Entity in the game system
|
||||
* This parent type is a static entity which is shown and loaded into the Physics engine but never updated
|
||||
*/
|
||||
|
||||
#ifndef ENTITY_H
|
||||
#define ENTITY_H
|
||||
|
||||
#include "base.h"
|
||||
/// @namespace Whitedrop
|
||||
namespace Whitedrop {
|
||||
/** @class Entity
|
||||
* This parent type is a static entity which is shown and loaded into the Physics engine but never updated
|
||||
*/
|
||||
class Entity {
|
||||
public:
|
||||
/**
|
||||
* @brief Create static entity
|
||||
* @details creates a static entity instance according to the mesh and the id, the position
|
||||
* This needs to be attached to a World after!
|
||||
* The material name is not the file name but the material name!
|
||||
* @ref WorldPage
|
||||
* @param mesh the name of the mesh for the object, file must be in media/meshes
|
||||
* @param id an unique identifier for the object, shortest as possible
|
||||
* @param dimensions an Ogre::Vector3 which contains the dimensions in meter
|
||||
* @param position the Vector3 which contains it position
|
||||
* @param material the material name
|
||||
*/
|
||||
Entity(std::string mesh, std::string id, Ogre::Vector3 dimensions, Ogre::Vector3 position, std::string material);
|
||||
/**
|
||||
* @brief The copy constructor
|
||||
* @details A copy constr
|
||||
*
|
||||
* @param ref the Entity to be copied from
|
||||
*/
|
||||
Entity(const Entity &ref);
|
||||
|
||||
/**
|
||||
* @brief The assignement operator
|
||||
* @details
|
||||
*
|
||||
* @param ent the entity to be copied
|
||||
*/
|
||||
Entity& operator=(const Entity ent);
|
||||
|
||||
/**
|
||||
* @brief destrctor
|
||||
* @details
|
||||
*/
|
||||
virtual ~Entity(void);
|
||||
|
||||
/**
|
||||
* @brief a constance type of the entity
|
||||
* @details depends of the class.
|
||||
* May contain STATIC, DYNAMIC or ETHERAL
|
||||
*/
|
||||
const std::string type = "STATIC";
|
||||
|
||||
/**
|
||||
* @brief Attach the entity to specified sceneManager
|
||||
* @details This creates the OgreEntity using sceneMgr,
|
||||
* set material, create a Node with name as `<id>_n`,
|
||||
* scale it to match dimensions and translate the node to pos
|
||||
* @param sceneMgr the scene manager to use
|
||||
*/
|
||||
virtual void setup(Ogre::SceneManager* sceneMgr);
|
||||
|
||||
/**
|
||||
* @brief the update method
|
||||
* @details this method should be called on each world update.
|
||||
* Even though the method is necessary declared, the main impl of
|
||||
* a static entity should be empty since it is not updated by physics
|
||||
* However, a Dynamic entity should implement this function in order to:
|
||||
* 1) Get from the physics engine the actor position in the physic world
|
||||
* 2) Update the OgreEntity position and rotation from the previous actor
|
||||
* @return whether it was successful or not, if falsey engine should stop
|
||||
*/
|
||||
virtual bool update(void);
|
||||
|
||||
protected:
|
||||
std::string mMesh = "cube.mesh";
|
||||
std::string mId;
|
||||
std::string mMaterial;
|
||||
Ogre::Vector3 mDimensions;
|
||||
Ogre::Vector3 mPosition;
|
||||
Ogre::Entity* mEntity;
|
||||
Ogre::SceneNode* mNode;
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
#endif
|
||||
116  samples/C++/qsciprinter.cp  Normal file
@@ -0,0 +1,116 @@
|
||||
// This module defines interface to the QsciPrinter class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCIPRINTER_H
|
||||
#define QSCIPRINTER_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qprinter.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintilla.h>
|
||||
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
class QRect;
|
||||
class QPainter;
|
||||
QT_END_NAMESPACE
|
||||
|
||||
class QsciScintillaBase;
|
||||
|
||||
|
||||
//! \brief The QsciPrinter class is a sub-class of the Qt QPrinter class that
|
||||
//! is able to print the text of a Scintilla document.
|
||||
//!
|
||||
//! The class can be further sub-classed to alter to layout of the text, adding
|
||||
//! headers and footers for example.
|
||||
class QSCINTILLA_EXPORT QsciPrinter : public QPrinter
|
||||
{
|
||||
public:
|
||||
//! Constructs a printer paint device with mode \a mode.
|
||||
QsciPrinter(PrinterMode mode = ScreenResolution);
|
||||
|
||||
//! Destroys the QsciPrinter instance.
|
||||
virtual ~QsciPrinter();
|
||||
|
||||
//! Format a page, by adding headers and footers for example, before the
|
||||
//! document text is drawn on it. \a painter is the painter to be used to
|
||||
//! add customised text and graphics. \a drawing is true if the page is
|
||||
//! actually being drawn rather than being sized. \a painter drawing
|
||||
//! methods must only be called when \a drawing is true. \a area is the
|
||||
//! area of the page that will be used to draw the text. This should be
|
||||
//! modified if it is necessary to reserve space for any customised text or
|
||||
//! graphics. By default the area is relative to the printable area of the
|
||||
//! page. Use QPrinter::setFullPage() because calling printRange() if you
|
||||
//! want to try and print over the whole page. \a pagenr is the number of
|
||||
//! the page. The first page is numbered 1.
|
||||
virtual void formatPage(QPainter &painter, bool drawing, QRect &area,
|
||||
int pagenr);
|
||||
|
||||
//! Return the number of points to add to each font when printing.
|
||||
//!
|
||||
//! \sa setMagnification()
|
||||
int magnification() const {return mag;}
|
||||
|
||||
//! Sets the number of points to add to each font when printing to \a
|
||||
//! magnification.
|
||||
//!
|
||||
//! \sa magnification()
|
||||
virtual void setMagnification(int magnification);
|
||||
|
||||
//! Print a range of lines from the Scintilla instance \a qsb. \a from is
|
||||
//! the first line to print and a negative value signifies the first line
|
||||
//! of text. \a to is the last line to print and a negative value
|
||||
//! signifies the last line of text. true is returned if there was no
|
||||
//! error.
|
||||
virtual int printRange(QsciScintillaBase *qsb, int from = -1, int to = -1);
|
||||
|
||||
//! Return the line wrap mode used when printing. The default is
|
||||
//! QsciScintilla::WrapWord.
|
||||
//!
|
||||
//! \sa setWrapMode()
|
||||
QsciScintilla::WrapMode wrapMode() const {return wrap;}
|
||||
|
||||
//! Sets the line wrap mode used when printing to \a wmode.
|
||||
//!
|
||||
//! \sa wrapMode()
|
||||
virtual void setWrapMode(QsciScintilla::WrapMode wmode);
|
||||
|
||||
private:
|
||||
int mag;
|
||||
QsciScintilla::WrapMode wrap;
|
||||
|
||||
QsciPrinter(const QsciPrinter &);
|
||||
QsciPrinter &operator=(const QsciPrinter &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
47  samples/C/bitmap.h  Normal file
@@ -0,0 +1,47 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2010 Christoph Sünderhauf
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include "generic.h"
|
||||
|
||||
typedef struct {
|
||||
uint32_t numbits;
|
||||
/* an array large enough for numbits to fit in. Might
|
||||
* (if numbits%8!=0) have some spare bits at the end
|
||||
*/
|
||||
uint32_t* bits;
|
||||
} bitmap_t;
|
||||
|
||||
|
||||
// creates a new bitmap.
|
||||
// CONTENT IS RANDOM! - use bitmap_clearall() to clear the bitmap.
|
||||
bitmap_t bitmap_init(uint32_t numbits);
|
||||
|
||||
// returns 1 or 0
|
||||
uint8_t bitmap_get(bitmap_t bitmap, uint32_t bitnum);
|
||||
// sets a bit (to 1)
|
||||
void bitmap_set(bitmap_t bitmap, uint32_t bitnum);
|
||||
// clears a bit (to 0)
|
||||
void bitmap_clear(bitmap_t bitmap, uint32_t bitnum);
|
||||
|
||||
// clears every bit to 0
|
||||
void bitmap_clearAll(bitmap_t bitmap);
|
||||
|
||||
// finds the first bit set to 0 returns 0 if no cleared bit found (0 is also returned if the first bit is cleared)
|
||||
uint32_t bitmap_findFirstClear(bitmap_t bitmap);
|
||||
44  samples/C/color.h  Normal file
@@ -0,0 +1,44 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
|
||||
typedef struct {
|
||||
uint32_t background;
|
||||
uint32_t foreground;
|
||||
} console_color_t;
|
||||
|
||||
#define CONSOLE_COLOR_BLACK 0x0
|
||||
#define CONSOLE_COLOR_BLUE 0x1
|
||||
#define CONSOLE_COLOR_GREEN 0x2
|
||||
#define CONSOLE_COLOR_CYAN 0x3
|
||||
#define CONSOLE_COLOR_RED 0x4
|
||||
#define CONSOLE_COLOR_MAGENTA 0x5
|
||||
#define CONSOLE_COLOR_BROWN 0x6
|
||||
#define CONSOLE_COLOR_LGREY 0x7
|
||||
#define CONSOLE_COLOR_DGREY 0x8
|
||||
#define CONSOLE_COLOR_LBLUE 0x9
|
||||
#define CONSOLE_COLOR_LGREEN 0xa
|
||||
#define CONSOLE_COLOR_LCYAN 0xb
|
||||
#define CONSOLE_COLOR_LRED 0xc
|
||||
#define CONSOLE_COLOR_LMAGENTA 0xd
|
||||
#define CONSOLE_COLOR_YELLOW 0xe
|
||||
#define CONSOLE_COLOR_WHITE 0xf
|
||||
|
||||
52  samples/C/driver.h  Normal file
@@ -0,0 +1,52 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <console/info.h>
|
||||
|
||||
#define CONSOLE_DRV_CAP_CLEAR 0x01
|
||||
#define CONSOLE_DRV_CAP_SCROLL 0x02
|
||||
#define CONSOLE_DRV_CAP_SET_CURSOR 0x04
|
||||
|
||||
// Input modifier keys
|
||||
typedef struct {
|
||||
bool shift_left:1;
|
||||
bool shift_right:1;
|
||||
bool control_left:1;
|
||||
bool control_right:1;
|
||||
bool alt:1;
|
||||
bool super:1;
|
||||
} console_modifiers_t;
|
||||
|
||||
typedef struct {
|
||||
char character;
|
||||
console_modifiers_t* modifiers;
|
||||
} console_read_t;
|
||||
|
||||
typedef struct {
|
||||
int (*write)(console_info_t*, char);
|
||||
console_read_t* (*read)(console_info_t*);
|
||||
|
||||
int capabilities;
|
||||
|
||||
int (*_clear)(console_info_t*);
|
||||
int (*scroll)(console_info_t*, int32_t);
|
||||
void (*setCursor)(console_info_t*, uint32_t, uint32_t);
|
||||
} console_driver_t;
|
||||
70  samples/C/elf.h  Normal file
@@ -0,0 +1,70 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <tasks/scheduler.h>
|
||||
|
||||
#define ELF_TYPE_NONE 0
|
||||
#define ELF_TYPE_REL 1
|
||||
#define ELF_TYPE_EXEC 2
|
||||
#define ELF_TYPE_DYN 3
|
||||
#define ELF_TYPE_CORE 4
|
||||
|
||||
#define ELF_ARCH_NONE 0
|
||||
#define ELF_ARCH_386 3
|
||||
|
||||
#define ELF_VERSION_CURRENT 1
|
||||
|
||||
typedef struct {
|
||||
unsigned char magic[4];
|
||||
/* Note: There _is_ other stuff in here, but we don't need it */
|
||||
unsigned char pad[12];
|
||||
} __attribute__((packed)) elf_ident_t;
|
||||
|
||||
typedef struct {
|
||||
uint32_t type;
|
||||
uint32_t offset;
|
||||
void* virtaddr;
|
||||
void* physaddr;
|
||||
uint32_t filesize;
|
||||
uint32_t memsize;
|
||||
uint32_t flags;
|
||||
uint32_t alignment;
|
||||
} __attribute__((packed)) elf_program_t;
|
||||
|
||||
typedef struct {
|
||||
elf_ident_t ident;
|
||||
uint16_t type; /* Object file type */
|
||||
uint16_t machine; /* Architecture */
|
||||
uint32_t version; /* Object file version */
|
||||
void* entry; /* Entry point virtual address */
|
||||
uint32_t phoff; /* Program header table file offset */
|
||||
uint32_t shoff; /* Section header table file offset */
|
||||
uint32_t flags; /* Processor-specific flags */
|
||||
uint16_t ehsize; /* ELF header size in bytes */
|
||||
uint16_t phentsize; /* Program header table entry size */
|
||||
uint16_t phnum; /* Program header table entry count */
|
||||
uint16_t shentsize; /* Section header table entry size */
|
||||
uint16_t shnum; /* Section header table entry count */
|
||||
uint16_t shstrndx; /* Section header string table index */
|
||||
} __attribute__((packed)) elf_t;
|
||||
|
||||
task_t* elf_load(elf_t* bin, char* name, char** environ, char** argv, int argc);
|
||||
task_t* elf_load_file(char* path, char** environ, char** argv, int argc);
|
||||
45  samples/C/filter.h  Normal file
@@ -0,0 +1,45 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <console/info.h>
|
||||
#include <console/driver.h>
|
||||
|
||||
struct console_filter {
|
||||
// General callback for all actions etc.
|
||||
// Preferred prototype:
|
||||
// char <name>(char c, console_info_t *info, console_driver_t *input, console_driver_t *output);
|
||||
char (*callback)(char, console_info_t*, console_driver_t*, console_driver_t*);
|
||||
|
||||
// Specific callbacks for read and write
|
||||
// Preferred prototype:
|
||||
// char <name>(char c, console_info_t *info, console_driver_t *input);
|
||||
char (*read_callback)(char, console_info_t*, console_driver_t*);
|
||||
|
||||
// Preferred prototype:
|
||||
// char <name>(char c, console_info_t *info, console_driver_t *output);
|
||||
char (*write_callback)(char, console_info_t*, console_driver_t*);
|
||||
|
||||
// The next filter in the filter chain
|
||||
struct console_filter* next;
|
||||
};
|
||||
|
||||
typedef struct console_filter console_filter_t;
|
||||
|
||||
44  samples/C/info.h  Normal file
@@ -0,0 +1,44 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <console/color.h>
|
||||
|
||||
typedef struct {
|
||||
uint32_t cursor_x;
|
||||
uint32_t cursor_y;
|
||||
|
||||
uint32_t rows;
|
||||
uint32_t columns;
|
||||
|
||||
uint32_t tabstop;
|
||||
|
||||
console_color_t default_color;
|
||||
console_color_t current_color;
|
||||
|
||||
uint8_t nonblocking;
|
||||
uint8_t reverse_video;
|
||||
uint8_t bold;
|
||||
uint8_t blink;
|
||||
uint8_t underline;
|
||||
uint8_t newline_mode;
|
||||
uint8_t auto_echo;
|
||||
uint8_t handle_backspace;
|
||||
} console_info_t;
|
||||
47  samples/C/interface.h  Normal file
@@ -0,0 +1,47 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <console/info.h>
|
||||
#include <console/filter.h>
|
||||
#include <console/driver.h>
|
||||
|
||||
typedef struct {
|
||||
console_info_t info;
|
||||
|
||||
console_filter_t* input_filter;
|
||||
console_filter_t* output_filter;
|
||||
|
||||
console_driver_t* input_driver;
|
||||
console_driver_t* output_driver;
|
||||
} console_t;
|
||||
|
||||
console_t* default_console;
|
||||
|
||||
// Generate raw console, connected to the Display, Keyboard and the
|
||||
// ECMA-48-Filter
|
||||
void console_init();
|
||||
|
||||
size_t console_write(console_t* console, const char* buffer, int32_t length);
|
||||
#define console_write2(console, buffer) console_write(console, buffer, strlen(buffer))
|
||||
size_t console_read(console_t* console, char* buffer, size_t length);
|
||||
size_t console_scroll(console_t* console, int32_t pages);
|
||||
|
||||
void console_clear(console_t* console);
|
||||
50  samples/C/ip4.h  Normal file
@@ -0,0 +1,50 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <net/net.h>
|
||||
|
||||
#define IP4_TOS_ICMP 0
|
||||
|
||||
typedef uint32_t ip4_addr_t;
|
||||
|
||||
typedef struct {
|
||||
unsigned int hl:4; /* both fields are 4 bits */
|
||||
unsigned int version:4;
|
||||
uint8_t tos;
|
||||
uint16_t len;
|
||||
uint16_t id;
|
||||
uint16_t off;
|
||||
uint8_t ttl;
|
||||
uint8_t p;
|
||||
uint16_t checksum;
|
||||
ip4_addr_t src;
|
||||
ip4_addr_t dst;
|
||||
} ip4_header_t;
|
||||
|
||||
typedef struct {
|
||||
uint8_t type;
|
||||
uint8_t code;
|
||||
uint16_t checksum;
|
||||
uint16_t id;
|
||||
uint16_t sequence;
|
||||
} ip4_icmp_header_t;
|
||||
|
||||
void ip4_receive(net_device_t* origin, net_l2proto_t proto, size_t size, void* raw);
|
||||
110  samples/C/multiboot.h  Normal file
@@ -0,0 +1,110 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2010, 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
|
||||
#define MULTIBOOT_KERNELMAGIC 0x2BADB002
|
||||
|
||||
#define MULTIBOOT_FLAG_MEM 0x001
|
||||
#define MULTIBOOT_FLAG_DEVICE 0x002
|
||||
#define MULTIBOOT_FLAG_CMDLINE 0x004
|
||||
#define MULTIBOOT_FLAG_MODS 0x008
|
||||
#define MULTIBOOT_FLAG_AOUT 0x010
|
||||
#define MULTIBOOT_FLAG_ELF 0x020
|
||||
#define MULTIBOOT_FLAG_MMAP 0x040
|
||||
#define MULTIBOOT_FLAG_CONFIG 0x080
|
||||
#define MULTIBOOT_FLAG_LOADER 0x100
|
||||
#define MULTIBOOT_FLAG_APM 0x200
|
||||
#define MULTIBOOT_FLAG_VBE 0x400
|
||||
|
||||
// The symbol table for a.out.
|
||||
typedef struct
|
||||
{
|
||||
uint32_t tabSize;
|
||||
uint32_t strSize;
|
||||
uint32_t addr;
|
||||
uint32_t reserved;
|
||||
} __attribute__((packed)) multiboot_aoutSymbolTable_t;
|
||||
|
||||
// The section header table for ELF.
|
||||
typedef struct
|
||||
{
|
||||
uint32_t num;
|
||||
uint32_t size;
|
||||
uint32_t addr;
|
||||
uint32_t shndx;
|
||||
} __attribute__((packed)) multiboot_elfSectionHeaderTable_t;
|
||||
|
||||
typedef struct
|
||||
{
|
||||
uint32_t size;
|
||||
uint64_t addr;
|
||||
uint64_t length;
|
||||
uint32_t type;
|
||||
} __attribute__((packed)) multiboot_memoryMap_t;
|
||||
|
||||
typedef struct
|
||||
{
|
||||
uint32_t start;
|
||||
uint32_t end;
|
||||
char* cmdLine;
|
||||
uint32_t reserved;
|
||||
} __attribute__((packed)) multiboot_module_t;
|
||||
|
||||
typedef struct
|
||||
{
|
||||
uint32_t flags;
|
||||
uint32_t memLower;
|
||||
uint32_t memUpper;
|
||||
uint32_t bootDevice;
|
||||
char* cmdLine;
|
||||
uint32_t modsCount;
|
||||
multiboot_module_t* modsAddr;
|
||||
|
||||
union
|
||||
{
|
||||
multiboot_aoutSymbolTable_t aoutSym;
|
||||
multiboot_elfSectionHeaderTable_t elfSec;
|
||||
} u;
|
||||
|
||||
uint32_t mmapLength;
|
||||
uint32_t mmapAddr;
|
||||
|
||||
uint32_t drivesLength;
|
||||
uint32_t drivesAddr;
|
||||
|
||||
// ROM configuration table
|
||||
uint32_t configTable;
|
||||
|
||||
char* bootLoaderName;
|
||||
uint32_t apmTable;
|
||||
|
||||
// Video
|
||||
uint32_t vbeControlInfo;
|
||||
uint32_t vbeModeInfo;
|
||||
uint16_t vbeMode;
|
||||
uint16_t vbeInterfaceSeg;
|
||||
uint16_t vbeInterfaceOff;
|
||||
uint16_t vbeInterfaceLen;
|
||||
} __attribute__((packed)) multiboot_info_t;
|
||||
|
||||
multiboot_info_t* multiboot_info;
|
||||
|
||||
void arch_multiboot_printInfo();
|
||||
43  samples/C/portio.h  Normal file
@@ -0,0 +1,43 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <lib/stdint.h>
|
||||
|
||||
// Legacy
|
||||
#define outb(args...) portio_out8(args)
|
||||
#define outw(args...) portio_out16(args)
|
||||
#define outl(args...) portio_out32(args)
|
||||
#define outq(args...) portio_out64(args)
|
||||
|
||||
#define inb(args...) portio_in8(args)
|
||||
#define inw(args...) portio_in16(args)
|
||||
#define inl(args...) portio_in32(args)
|
||||
#define inq(args...) portio_in64(args)
|
||||
|
||||
void portio_out8(uint16_t port, uint8_t value);
|
||||
void portio_out16(uint16_t port, uint16_t value);
|
||||
void portio_out32(uint16_t port, uint32_t value);
|
||||
void portio_out64(uint16_t port, uint64_t value);
|
||||
|
||||
uint8_t portio_in8(uint16_t port);
|
||||
uint16_t portio_in16(uint16_t port);
|
||||
uint32_t portio_in32(uint16_t port);
|
||||
uint64_t portio_in64(uint16_t port);
|
||||
69  samples/C/scheduler.h  Normal file
@@ -0,0 +1,69 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <hw/cpu.h>
|
||||
#include <memory/vmem.h>
|
||||
|
||||
#define SCHEDULER_MAXNAME 256
|
||||
#define SCHEDULER_TASK_PATH_MAX 256
|
||||
|
||||
// Single linked list
|
||||
typedef struct task {
|
||||
uint32_t pid;
|
||||
char name[SCHEDULER_MAXNAME];
|
||||
struct task *parent;
|
||||
cpu_state_t* state;
|
||||
struct task* next;
|
||||
struct task* previous;
|
||||
|
||||
void* stack;
|
||||
void* entry;
|
||||
struct vmem_context *memory_context;
|
||||
|
||||
// Current task state
|
||||
enum {
|
||||
TASK_STATE_KILLED,
|
||||
TASK_STATE_TERMINATED,
|
||||
TASK_STATE_BLOCKING,
|
||||
TASK_STATE_STOPPED,
|
||||
TASK_STATE_RUNNING
|
||||
} task_state;
|
||||
|
||||
char** environ;
|
||||
char** argv;
|
||||
int argc;
|
||||
|
||||
// TODO Is this actually the same as PATH_MAX in our toolchain?
|
||||
char cwd[SCHEDULER_TASK_PATH_MAX + 1];
|
||||
} task_t;
|
||||
|
||||
int scheduler_state;
|
||||
|
||||
task_t* scheduler_new(void* entry, task_t* parent, char name[SCHEDULER_MAXNAME],
|
||||
char** environ, char** argv, int argc, struct vmem_context* memory_context, bool map_structs);
|
||||
void scheduler_add(task_t *task);
|
||||
void scheduler_terminate_current();
|
||||
task_t* scheduler_get_current();
|
||||
task_t* scheduler_select(cpu_state_t* lastRegs);
|
||||
void scheduler_init();
|
||||
void scheduler_yield();
|
||||
void scheduler_remove(task_t *t);
|
||||
task_t* scheduler_fork(task_t* to_fork, cpu_state_t* state);
|
||||
95  samples/C/syscalls.h  Normal file
@@ -0,0 +1,95 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
#include <tasks/syscall.h>
|
||||
|
||||
#include "syscalls/write.h"
|
||||
#include "syscalls/exit.h"
|
||||
#include "syscalls/getpid.h"
|
||||
#include "syscalls/getppid.h"
|
||||
#include "syscalls/read.h"
|
||||
#include "syscalls/brk.h"
|
||||
#include "syscalls/mmap.h"
|
||||
#include "syscalls/munmap.h"
|
||||
#include "syscalls/test.h"
|
||||
#include "syscalls/hostname.h"
|
||||
#include "syscalls/uname.h"
|
||||
#include "syscalls/open.h"
|
||||
#include "syscalls/execve.h"
|
||||
#include "syscalls/seek.h"
|
||||
#include "syscalls/opendir.h"
|
||||
#include "syscalls/readdir.h"
|
||||
#include "syscalls/kill.h"
|
||||
#include "syscalls/getexecdata.h"
|
||||
#include "syscalls/cwd.h"
|
||||
#include "syscalls/fork.h"
|
||||
|
||||
syscall_t syscall_table[] = {
|
||||
NULL,
|
||||
sys_exit, // 1
|
||||
sys_read, // 2
|
||||
sys_write, // 3
|
||||
sys_getpid, // 4
|
||||
sys_brk, // 5
|
||||
sys_getppid, // 6
|
||||
sys_mmap, // 7
|
||||
sys_munmap, // 8
|
||||
sys_test, // 9
|
||||
sys_get_hostname, // 10
|
||||
sys_set_hostname, // 11
|
||||
sys_uname, // 12
|
||||
sys_open, // 13
|
||||
sys_execve, // 14
|
||||
sys_seek, // 15
|
||||
sys_opendir, // 16
|
||||
sys_readdir, // 17
|
||||
sys_kill, // 18
|
||||
sys_getexecdata, // 19
|
||||
sys_chdir, // 20
|
||||
sys_getcwd, // 21
|
||||
sys_fork, // 22
|
||||
};
|
||||
|
||||
char* syscall_name_table[] = {
|
||||
NULL,
|
||||
"exit", // 1
|
||||
"read", // 2
|
||||
"write", // 3
|
||||
"getpid", // 4
|
||||
"brk", // 5
|
||||
"getppid", // 6
|
||||
"mmap", // 7
|
||||
"munmap", // 8
|
||||
"test", // 9
|
||||
"get_hostname", // 10
|
||||
"set_hostname", // 11
|
||||
"uname", // 12
|
||||
"open", // 13
|
||||
"execve", // 14
|
||||
"seek", // 15
|
||||
"opendir", // 16
|
||||
"readdir", // 17
|
||||
"kill", // 18
|
||||
"getexecdata", // 19
|
||||
"chdir", // 20
|
||||
"getcwd", // 21
|
||||
"fork", // 22
|
||||
};
|
||||
56  samples/C/vfs.h  Normal file
@@ -0,0 +1,56 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2010, 2011 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
|
||||
#define VFS_SEEK_SET 0
|
||||
#define VFS_SEEK_CUR 1
|
||||
#define VFS_SEEK_END 2
|
||||
|
||||
typedef struct {
|
||||
uint64_t num;
|
||||
char path[512];
|
||||
char mount_path[512];
|
||||
uint32_t offset;
|
||||
uint32_t mountpoint;
|
||||
} vfs_file_t;
|
||||
|
||||
typedef struct {
|
||||
uint64_t num;
|
||||
char path[512];
|
||||
char mount_path[512];
|
||||
uint32_t mountpoint;
|
||||
} vfs_dir_t;
|
||||
|
||||
typedef void* (*vfs_read_callback_t)(char* path, uint32_t offset, uint32_t size);
|
||||
typedef char* (*vfs_read_dir_callback_t)(char* path, uint32_t offset);
|
||||
|
||||
|
||||
// Used to always store the last read/write attempt (used for kernel panic debugging)
|
||||
char vfs_last_read_attempt[512];
|
||||
|
||||
vfs_file_t* vfs_get_from_id(uint32_t id);
|
||||
vfs_dir_t* vfs_get_dir_from_id(uint32_t id);
|
||||
void* vfs_read(vfs_file_t* fp, uint32_t size);
|
||||
char* vfs_dir_read(vfs_dir_t* dir, uint32_t offset);
|
||||
void vfs_seek(vfs_file_t* fp, uint32_t offset, int origin);
|
||||
vfs_file_t* vfs_open(char* path);
|
||||
vfs_dir_t* vfs_dir_open(char* path);
|
||||
int vfs_mount(char* path, vfs_read_callback_t read_callback, vfs_read_dir_callback_t read_dir_callback);
|
||||
94  samples/C/vmem.h  Normal file
@@ -0,0 +1,94 @@
|
||||
#pragma once
|
||||
|
||||
/* Copyright © 2011 Fritz Grimpen
|
||||
* Copyright © 2013 Lukas Martini
|
||||
*
|
||||
* This file is part of Xelix.
|
||||
*
|
||||
* Xelix is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* Xelix is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with Xelix. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
#include <lib/generic.h>
|
||||
|
||||
struct vmem_context;
|
||||
|
||||
struct vmem_page
|
||||
{
|
||||
enum
|
||||
{
|
||||
VMEM_SECTION_STACK, /* Initial stack */
|
||||
VMEM_SECTION_CODE, /* Contains program code and is read-only */
|
||||
VMEM_SECTION_DATA, /* Contains static data */
|
||||
VMEM_SECTION_HEAP, /* Allocated by brk(2) at runtime */
|
||||
VMEM_SECTION_MMAP, /* Allocated by mmap(2) at runtime */
|
||||
VMEM_SECTION_KERNEL, /* Contains kernel-internal data */
|
||||
VMEM_SECTION_UNMAPPED /* Unmapped */
|
||||
} section;
|
||||
|
||||
bool readonly:1;
|
||||
bool cow:1; /* Copy-on-Write mechanism */
|
||||
bool allocated:1;
|
||||
|
||||
void *cow_src_addr;
|
||||
void *virt_addr;
|
||||
void *phys_addr;
|
||||
};
|
||||
|
||||
typedef void (*vmem_iterator_t)(struct vmem_context *, struct vmem_page *, uint32_t);
|
||||
|
||||
/* Initialize vmem_kernelContext for paging_init() */
|
||||
void vmem_init();
|
||||
struct vmem_context *vmem_kernelContext;
|
||||
struct vmem_context *vmem_currentContext;
|
||||
struct vmem_context *vmem_processContext;
|
||||
void *vmem_faultAddress;
|
||||
|
||||
/* Some callbacks for magic functions */
|
||||
void (*vmem_applyPage)(struct vmem_context *, struct vmem_page *);
|
||||
|
||||
/* Generate new page context */
|
||||
struct vmem_context *vmem_new();
|
||||
struct vmem_page *vmem_new_page();
|
||||
|
||||
int vmem_add_page(struct vmem_context *ctx, struct vmem_page *pg);
|
||||
|
||||
struct vmem_page *vmem_get_page_phys(struct vmem_context *ctx, void *phys_addr);
|
||||
struct vmem_page *vmem_get_page_virt(struct vmem_context *ctx, void *virt_addr);
|
||||
struct vmem_page *vmem_get_page(struct vmem_context *ctx, uint32_t offset);
|
||||
|
||||
/* Remove pages in a specific context by physical or virtual address */
|
||||
struct vmem_page *vmem_rm_page_phys(struct vmem_context *ctx, void *phys_addr);
|
||||
struct vmem_page *vmem_rm_page_virt(struct vmem_context *ctx, void *virt_addr);
|
||||
|
||||
/* Iterator */
|
||||
int vmem_iterate(struct vmem_context *ctx, vmem_iterator_t callback);
|
||||
|
||||
uint32_t vmem_count_pages(struct vmem_context *ctx);
|
||||
void vmem_dump_page(struct vmem_page *pg);
|
||||
void vmem_dump(struct vmem_context *ctx);
|
||||
void vmem_handle_fault(uint32_t code, void *addr, void *instruction);
|
||||
|
||||
/* Get/Set cached paging context */
|
||||
void vmem_set_cache(struct vmem_context *ctx, void *cache);
|
||||
void *vmem_get_cache(struct vmem_context *ctx);
|
||||
|
||||
#ifdef __i386__
|
||||
#define PAGE_SIZE 4096
|
||||
#define VMEM_ALIGN(x) (typeof(x))(((intptr_t)(x) & 0xFFFFF000) + 0x1000)
|
||||
#define VMEM_ALIGN_DOWN(x) (typeof(x))( \
|
||||
((intptr_t)(x) - ((intptr_t)(x) % PAGE_SIZE)))
|
||||
#else
|
||||
#define PAGE_SIZE 0
|
||||
#define VMEM_ALIGN(x) (x)
|
||||
#endif
|
||||
343
samples/CLIPS/demo.clp
Normal file
@@ -0,0 +1,343 @@
|
||||
;;;***************************
|
||||
;;;* DEFFACTS KNOWLEDGE BASE *
|
||||
;;;***************************
|
||||
|
||||
(deffacts MAIN::knowledge-base
|
||||
(welcome (message WelcomeMessage))
|
||||
(goal (variable type.animal))
|
||||
(legalanswers (values yes no))
|
||||
(displayanswers (values "Yes" "No"))
|
||||
(rule (if backbone is yes)
|
||||
(then superphylum is backbone))
|
||||
(rule (if backbone is no)
|
||||
(then superphylum is jellyback))
|
||||
(question (variable backbone)
|
||||
(query backbone.query))
|
||||
(rule (if superphylum is backbone and
|
||||
warm.blooded is yes)
|
||||
(then phylum is warm))
|
||||
(rule (if superphylum is backbone and
|
||||
warm.blooded is no)
|
||||
(then phylum is cold))
|
||||
(question (variable warm.blooded)
|
||||
(query warm.blooded.query))
|
||||
(rule (if superphylum is jellyback and
|
||||
live.prime.in.soil is yes)
|
||||
(then phylum is soil))
|
||||
(rule (if superphylum is jellyback and
|
||||
live.prime.in.soil is no)
|
||||
(then phylum is elsewhere))
|
||||
(question (variable live.prime.in.soil)
|
||||
(query live.prime.in.soil.query))
|
||||
(rule (if phylum is warm and
|
||||
has.breasts is yes)
|
||||
(then class is breasts))
|
||||
(rule (if phylum is warm and
|
||||
has.breasts is no)
|
||||
(then type.animal is bird))
|
||||
(question (variable has.breasts)
|
||||
(query has.breasts.query))
|
||||
(rule (if phylum is cold and
|
||||
always.in.water is yes)
|
||||
(then class is water))
|
||||
(rule (if phylum is cold and
|
||||
always.in.water is no)
|
||||
(then class is dry))
|
||||
(question (variable always.in.water)
|
||||
(query always.in.water.query))
|
||||
(rule (if phylum is soil and
|
||||
flat.bodied is yes)
|
||||
(then type.animal is flatworm))
|
||||
(rule (if phylum is soil and
|
||||
flat.bodied is no)
|
||||
(then type.animal is worm.leech))
|
||||
(question (variable flat.bodied)
|
||||
(query flat.bodied.query))
|
||||
(rule (if phylum is elsewhere and
|
||||
body.in.segments is yes)
|
||||
(then class is segments))
|
||||
(rule (if phylum is elsewhere and
|
||||
body.in.segments is no)
|
||||
(then class is unified))
|
||||
(question (variable body.in.segments)
|
||||
(query body.in.segments.query))
|
||||
(rule (if class is breasts and
|
||||
can.eat.meat is yes)
|
||||
(then order is meat))
|
||||
(rule (if class is breasts and
|
||||
can.eat.meat is no)
|
||||
(then order is vegy))
|
||||
(question (variable can.eat.meat)
|
||||
(query can.eat.meat.query))
|
||||
(rule (if class is water and
|
||||
boney is yes)
|
||||
(then type.animal is fish))
|
||||
(rule (if class is water and
|
||||
boney is no)
|
||||
(then type.animal is shark.ray))
|
||||
(question (variable boney)
|
||||
(query boney.query))
|
||||
(rule (if class is dry and
|
||||
scaly is yes)
|
||||
(then order is scales))
|
||||
(rule (if class is dry and
|
||||
scaly is no)
|
||||
(then order is soft))
|
||||
(question (variable scaly)
|
||||
(query scaly.query))
|
||||
(rule (if class is segments and
|
||||
shell is yes)
|
||||
(then order is shell))
|
||||
(rule (if class is segments and
|
||||
shell is no)
|
||||
(then type.animal is centipede.millipede.insect))
|
||||
(question (variable shell)
|
||||
(query shell.query))
|
||||
(rule (if class is unified and
|
||||
digest.cells is yes)
|
||||
(then order is cells))
|
||||
(rule (if class is unified and
|
||||
digest.cells is no)
|
||||
(then order is stomach))
|
||||
(question (variable digest.cells)
|
||||
(query digest.cells.query))
|
||||
(rule (if order is meat and
|
||||
fly is yes)
|
||||
(then type.animal is bat))
|
||||
(rule (if order is meat and
|
||||
fly is no)
|
||||
(then family is nowings))
|
||||
(question (variable fly)
|
||||
(query fly.query))
|
||||
(rule (if order is vegy and
|
||||
hooves is yes)
|
||||
(then family is hooves))
|
||||
(rule (if order is vegy and
|
||||
hooves is no)
|
||||
(then family is feet))
|
||||
(question (variable hooves)
|
||||
(query hooves.query))
|
||||
(rule (if order is scales and
|
||||
rounded.shell is yes)
|
||||
(then type.animal is turtle))
|
||||
(rule (if order is scales and
|
||||
rounded.shell is no)
|
||||
(then family is noshell))
|
||||
(question (variable rounded.shell)
|
||||
(query rounded.shell.query))
|
||||
(rule (if order is soft and
|
||||
jump is yes)
|
||||
(then type.animal is frog))
|
||||
(rule (if order is soft and
|
||||
jump is no)
|
||||
(then type.animal is salamander))
|
||||
(question (variable jump)
|
||||
(query jump.query))
|
||||
(rule (if order is shell and
|
||||
tail is yes)
|
||||
(then type.animal is lobster))
|
||||
(rule (if order is shell and
|
||||
tail is no)
|
||||
(then type.animal is crab))
|
||||
(question (variable tail)
|
||||
(query tail.query))
|
||||
(rule (if order is cells and
|
||||
stationary is yes)
|
||||
(then family is stationary))
|
||||
(rule (if order is cells and
|
||||
stationary is no)
|
||||
(then type.animal is jellyfish))
|
||||
(question (variable stationary)
|
||||
(query stationary.query))
|
||||
(rule (if order is stomach and
|
||||
multicelled is yes)
|
||||
(then family is multicelled))
|
||||
(rule (if order is stomach and
|
||||
multicelled is no)
|
||||
(then type.animal is protozoa))
|
||||
(question (variable multicelled)
|
||||
(query multicelled.query))
|
||||
(rule (if family is nowings and
|
||||
opposing.thumb is yes)
|
||||
(then genus is thumb))
|
||||
(rule (if family is nowings and
|
||||
opposing.thumb is no)
|
||||
(then genus is nothumb))
|
||||
(question (variable opposing.thumb)
|
||||
(query opposing.thumb.query))
|
||||
(rule (if family is hooves and
|
||||
two.toes is yes)
|
||||
(then genus is twotoes))
|
||||
(rule (if family is hooves and
|
||||
two.toes is no)
|
||||
(then genus is onetoe))
|
||||
(question (variable two.toes)
|
||||
(query two.toes.query))
|
||||
(rule (if family is feet and
|
||||
live.in.water is yes)
|
||||
(then genus is water))
|
||||
(rule (if family is feet and
|
||||
live.in.water is no)
|
||||
(then genus is dry))
|
||||
(question (variable live.in.water)
|
||||
(query live.in.water.query))
|
||||
(rule (if family is noshell and
|
||||
limbs is yes)
|
||||
(then type.animal is crocodile.alligator))
|
||||
(rule (if family is noshell and
|
||||
limbs is no)
|
||||
(then type.animal is snake))
|
||||
(question (variable limbs)
|
||||
(query limbs.query))
|
||||
(rule (if family is stationary and
|
||||
spikes is yes)
|
||||
(then type.animal is sea.anemone))
|
||||
(rule (if family is stationary and
|
||||
spikes is no)
|
||||
(then type.animal is coral.sponge))
|
||||
(question (variable spikes)
|
||||
(query spikes.query))
|
||||
(rule (if family is multicelled and
|
||||
spiral.shell is yes)
|
||||
(then type.animal is snail))
|
||||
(rule (if family is multicelled and
|
||||
spiral.shell is no)
|
||||
(then genus is noshell))
|
||||
(question (variable spiral.shell)
|
||||
(query spiral.shell.query))
|
||||
(rule (if genus is thumb and
|
||||
prehensile.tail is yes)
|
||||
(then type.animal is monkey))
|
||||
(rule (if genus is thumb and
|
||||
prehensile.tail is no)
|
||||
(then species is notail))
|
||||
(question (variable prehensile.tail)
|
||||
(query prehensile.tail.query))
|
||||
(rule (if genus is nothumb and
|
||||
over.400 is yes)
|
||||
(then species is 400))
|
||||
(rule (if genus is nothumb and
|
||||
over.400 is no)
|
||||
(then species is under400))
|
||||
(question (variable over.400)
|
||||
(query over.400.query))
|
||||
(rule (if genus is twotoes and
|
||||
horns is yes)
|
||||
(then species is horns))
|
||||
(rule (if genus is twotoes and
|
||||
horns is no)
|
||||
(then species is nohorns))
|
||||
(question (variable horns)
|
||||
(query horns.query))
|
||||
(rule (if genus is onetoe and
|
||||
plating is yes)
|
||||
(then type.animal is rhinoceros))
|
||||
(rule (if genus is onetoe and
|
||||
plating is no)
|
||||
(then type.animal is horse.zebra))
|
||||
(question (variable plating)
|
||||
(query plating.query))
|
||||
(rule (if genus is water and
|
||||
hunted is yes)
|
||||
(then type.animal is whale))
|
||||
(rule (if genus is water and
|
||||
hunted is no)
|
||||
(then type.animal is dolphin.porpoise))
|
||||
(question (variable hunted)
|
||||
(query hunted.query))
|
||||
(rule (if genus is dry and
|
||||
front.teeth is yes)
|
||||
(then species is teeth))
|
||||
(rule (if genus is dry and
|
||||
front.teeth is no)
|
||||
(then species is noteeth))
|
||||
(question (variable front.teeth)
|
||||
(query front.teeth.query))
|
||||
(rule (if genus is noshell and
|
||||
bivalve is yes)
|
||||
(then type.animal is clam.oyster))
|
||||
(rule (if genus is noshell and
|
||||
bivalve is no)
|
||||
(then type.animal is squid.octopus))
|
||||
(question (variable bivalve)
|
||||
(query bivalve.query))
|
||||
(rule (if species is notail and
|
||||
nearly.hairless is yes)
|
||||
(then type.animal is man))
|
||||
(rule (if species is notail and
|
||||
nearly.hairless is no)
|
||||
(then subspecies is hair))
|
||||
(question (variable nearly.hairless)
|
||||
(query nearly.hairless.query))
|
||||
(rule (if species is 400 and
|
||||
land.based is yes)
|
||||
(then type.animal is bear.tiger.lion))
|
||||
(rule (if species is 400 and
|
||||
land.based is no)
|
||||
(then type.animal is walrus))
|
||||
(question (variable land.based)
|
||||
(query land.based.query))
|
||||
(rule (if species is under400 and
|
||||
thintail is yes)
|
||||
(then type.animal is cat))
|
||||
(rule (if species is under400 and
|
||||
thintail is no)
|
||||
(then type.animal is coyote.wolf.fox.dog))
|
||||
(question (variable thintail)
|
||||
(query thintail.query))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is yes)
|
||||
(then type.animal is camel))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is no and
|
||||
semi.aquatic is no)
|
||||
(then type.animal is giraffe))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is no and
|
||||
semi.aquatic is yes)
|
||||
(then type.animal is hippopotamus))
|
||||
(question (variable lives.in.desert)
|
||||
(query lives.in.desert.query))
|
||||
(question (variable semi.aquatic)
|
||||
(query semi.aquatic.query))
|
||||
(rule (if species is teeth and
|
||||
large.ears is yes)
|
||||
(then type.animal is rabbit))
|
||||
(rule (if species is teeth and
|
||||
large.ears is no)
|
||||
(then type.animal is rat.mouse.squirrel.beaver.porcupine))
|
||||
(question (variable large.ears)
|
||||
(query large.ears.query))
|
||||
(rule (if species is noteeth and
|
||||
pouch is yes)
|
||||
(then type.animal is kangaroo.koala.bear))
|
||||
(rule (if species is noteeth and
|
||||
pouch is no)
|
||||
(then type.animal is mole.shrew.elephant))
|
||||
(question (variable pouch)
|
||||
(query pouch.query))
|
||||
(rule (if subspecies is hair and
|
||||
long.powerful.arms is yes)
|
||||
(then type.animal is orangutan.gorilla.chimpanzee))
|
||||
(rule (if subspecies is hair and
|
||||
long.powerful.arms is no)
|
||||
(then type.animal is baboon))
|
||||
(question (variable long.powerful.arms)
|
||||
(query long.powerful.arms.query))
|
||||
(rule (if species is horns and
|
||||
fleece is yes)
|
||||
(then type.animal is sheep.goat))
|
||||
(rule (if species is horns and
|
||||
fleece is no)
|
||||
(then subsubspecies is nofleece))
|
||||
(question (variable fleece)
|
||||
(query fleece.query))
|
||||
(rule (if subsubspecies is nofleece and
|
||||
domesticated is yes)
|
||||
(then type.animal is cow))
|
||||
(rule (if subsubspecies is nofleece and
|
||||
domesticated is no)
|
||||
(then type.animal is deer.moose.antelope))
|
||||
(question (variable domesticated)
|
||||
(query domesticated.query))
|
||||
(answer (prefix "I think your animal is a ") (variable type.animal) (postfix ".")))
|
||||
281
samples/CLIPS/sudoku.clp
Normal file
@@ -0,0 +1,281 @@
|
||||
;;; http://www.angusj.com/sudoku/hints
|
||||
;;; http://www.scanraid.com/BasicStrategies.htm
|
||||
;;; http://www.sudokuoftheday.com/pages/techniques-overview
|
||||
;;; http://www.sudokuonline.us/sudoku_solving_techniques
|
||||
;;; http://www.sadmansoftware.com/sudoku/techniques.htm
|
||||
;;; http://www.krazydad.com/blog/2005/09/29/an-index-of-sudoku-strategies/
|
||||
|
||||
;;; #######################
|
||||
;;; DEFTEMPLATES & DEFFACTS
|
||||
;;; #######################
|
||||
|
||||
(deftemplate possible
|
||||
(slot row)
|
||||
(slot column)
|
||||
(slot value)
|
||||
(slot group)
|
||||
(slot id))
|
||||
|
||||
(deftemplate impossible
|
||||
(slot id)
|
||||
(slot value)
|
||||
(slot priority)
|
||||
(slot reason))
|
||||
|
||||
(deftemplate technique-employed
|
||||
(slot reason)
|
||||
(slot priority))
|
||||
|
||||
(deftemplate technique
|
||||
(slot name)
|
||||
(slot priority))
|
||||
|
||||
(deffacts startup
|
||||
(phase grid-values))
|
||||
|
||||
(deftemplate size-value
|
||||
(slot size)
|
||||
(slot value))
|
||||
|
||||
(deffacts values
|
||||
(size-value (size 1) (value 1))
|
||||
(size-value (size 2) (value 2))
|
||||
(size-value (size 2) (value 3))
|
||||
(size-value (size 2) (value 4))
|
||||
(size-value (size 3) (value 5))
|
||||
(size-value (size 3) (value 6))
|
||||
(size-value (size 3) (value 7))
|
||||
(size-value (size 3) (value 8))
|
||||
(size-value (size 3) (value 9))
|
||||
(size-value (size 4) (value 10))
|
||||
(size-value (size 4) (value 11))
|
||||
(size-value (size 4) (value 12))
|
||||
(size-value (size 4) (value 13))
|
||||
(size-value (size 4) (value 14))
|
||||
(size-value (size 4) (value 15))
|
||||
(size-value (size 4) (value 16))
|
||||
(size-value (size 5) (value 17))
|
||||
(size-value (size 5) (value 18))
|
||||
(size-value (size 5) (value 19))
|
||||
(size-value (size 5) (value 20))
|
||||
(size-value (size 5) (value 21))
|
||||
(size-value (size 5) (value 22))
|
||||
(size-value (size 5) (value 23))
|
||||
(size-value (size 5) (value 24))
|
||||
(size-value (size 5) (value 25)))
|
||||
|
||||
;;; ###########
|
||||
;;; SETUP RULES
|
||||
;;; ###########
|
||||
|
||||
;;; ***********
|
||||
;;; stress-test
|
||||
;;; ***********
|
||||
|
||||
(defrule stress-test
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase match)
|
||||
|
||||
(stress-test)
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(technique (priority ?next&:(> ?next ?last)))
|
||||
|
||||
(not (technique (priority ?p&:(> ?p ?last)&:(< ?p ?next))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority ?next)))
|
||||
|
||||
;;; *****************
|
||||
;;; enable-techniques
|
||||
;;; *****************
|
||||
|
||||
(defrule enable-techniques
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase match)
|
||||
|
||||
(size ?)
|
||||
|
||||
(not (possible (value any)))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority 1)))
|
||||
|
||||
;;; **********
|
||||
;;; expand-any
|
||||
;;; **********
|
||||
|
||||
(defrule expand-any
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase expand-any)
|
||||
|
||||
?f <- (possible (row ?r) (column ?c) (value any) (group ?g) (id ?id))
|
||||
|
||||
(not (possible (value any) (id ?id2&:(< ?id2 ?id))))
|
||||
|
||||
(size ?s)
|
||||
|
||||
(size-value (size ?as&:(<= ?as ?s)) (value ?v))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v)))
|
||||
|
||||
(not (and (size-value (value ?v2&:(< ?v2 ?v)))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v2)))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (possible (row ?r) (column ?c) (value ?v) (group ?g) (id ?id))))
|
||||
|
||||
;;; *****************
|
||||
;;; position-expanded
|
||||
;;; *****************
|
||||
|
||||
(defrule position-expanded
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase expand-any)
|
||||
|
||||
?f <- (possible (row ?r) (column ?c) (value any) (group ?g) (id ?id))
|
||||
|
||||
(size ?s)
|
||||
|
||||
(not (and (size-value (size ?as&:(<= ?as ?s)) (value ?v))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v)))))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f))
|
||||
|
||||
;;; ###########
|
||||
;;; PHASE RULES
|
||||
;;; ###########
|
||||
|
||||
;;; ***************
|
||||
;;; expand-any-done
|
||||
;;; ***************
|
||||
|
||||
(defrule expand-any-done
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
?f <- (phase expand-any)
|
||||
|
||||
(not (possible (value any)))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase initial-output))
|
||||
(assert (print-position 1 1)))
|
||||
|
||||
;;; ***********
|
||||
;;; begin-match
|
||||
;;; ***********
|
||||
|
||||
(defrule begin-match
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase initial-output)
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase match)))
|
||||
|
||||
;;; *****************
|
||||
;;; begin-elimination
|
||||
;;; *****************
|
||||
|
||||
(defrule begin-elimination
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase match)
|
||||
|
||||
(not (not (impossible)))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase elimination)))
|
||||
|
||||
;;; *************
|
||||
;;; next-priority
|
||||
;;; *************
|
||||
|
||||
(defrule next-priority
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
(phase match)
|
||||
|
||||
(not (impossible))
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(technique (priority ?next&:(> ?next ?last)))
|
||||
|
||||
(not (technique (priority ?p&:(> ?p ?last)&:(< ?p ?next))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority ?next)))
|
||||
|
||||
;;; ************
|
||||
;;; begin-output
|
||||
;;; ************
|
||||
|
||||
(defrule begin-output
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase match)
|
||||
|
||||
(not (impossible))
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(not (technique (priority ?next&:(> ?next ?last))))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase final-output))
|
||||
(assert (print-position 1 1)))
|
||||
12
samples/CMake/filenames/CMakeLists.txt
Normal file
@@ -0,0 +1,12 @@
|
||||
cmake_minimum_required(VERSION 2.8)
|
||||
|
||||
project(Foo)
|
||||
|
||||
set(CMAKE_SKIP_RPATH TRUE)
|
||||
set(CMAKE_INSTALL_PREFIX "/usr/local")
|
||||
|
||||
add_subdirectory(bar)
|
||||
|
||||
add_executable(foo foo.c)
|
||||
target_link_libraries(foo pthread)
|
||||
install(TARGETS foo DESTINATION bin)
|
||||
15
samples/CMake/sample1.cmake
Normal file
@@ -0,0 +1,15 @@
|
||||
cmake_minimum_required(VERSION 2.6)
|
||||
|
||||
enable_testing()
|
||||
|
||||
set(CMAKE_BUILD_TYPE debug)
|
||||
|
||||
include_directories("/usr/local/include")
|
||||
|
||||
find_library(ssl_LIBRARY NAMES ssl PATHS "/usr/local/lib")
|
||||
|
||||
add_custom_command(OUTPUT "ver.c" "ver.h" COMMAND ./ver.sh)
|
||||
|
||||
add_executable(foo foo.c bar.c baz.c ver.c)
|
||||
|
||||
target_link_libraries(foo ${ssl_LIBRARY})
|
||||
25
samples/CMake/sample2.cmake
Normal file
@@ -0,0 +1,25 @@
|
||||
cmake_minimum_required(VERSION 2.8 FATAL_ERROR)
|
||||
|
||||
project(PCLVisualizer)
|
||||
target_link_libraries (PCLVisualizer ${PCL_LIBRARIES})
|
||||
|
||||
#it seems it's needed only on OS X 10.9
|
||||
find_package(GLEW REQUIRED)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I/usr/include -v")
|
||||
|
||||
find_package(PCL 1.7 REQUIRED)
|
||||
|
||||
include_directories(${PCL_INCLUDE_DIRS})
|
||||
link_directories(${PCL_LIBRARY_DIRS})
|
||||
add_definitions(${PCL_DEFINITIONS})
|
||||
|
||||
set(PCL_BUILD_TYPE Release)
|
||||
|
||||
file(GLOB PCL_openni_viewer_SRC
|
||||
"src/*.h"
|
||||
"src/*.cpp"
|
||||
)
|
||||
add_executable(PCLVisualizer ${PCL_openni_viewer_SRC})
|
||||
|
||||
#add this line to solve problem in mac os x 10.9
|
||||
target_link_libraries(PCLVisualizer ${PCL_COMMON_LIBRARIES} ${PCL_IO_LIBRARIES} ${PCL_VISUALIZATION_LIBRARIES} ${PCL_FEATURES_LIBRARIES})
|
||||
33
samples/CMake/sample3.cmake
Normal file
@@ -0,0 +1,33 @@
|
||||
# Specifications for building user and development documentation.
|
||||
#
|
||||
# ====================================================================
|
||||
# Copyright (c) 2009 Ian Blumel. All rights reserved.
|
||||
#
|
||||
# This software is licensed as described in the file LICENSE, which
|
||||
# you should have received as part of this distribution.
|
||||
# ====================================================================
|
||||
|
||||
CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
|
||||
|
||||
FIND_FILE( SPHINX sphinx-build.exe)
|
||||
|
||||
# If we are windows call to the make.bat file, otherwise rely on the Makefile
|
||||
# to handle the processing.
|
||||
IF(WIN32)
|
||||
SET(SPHINX_MAKE make.bat)
|
||||
ELSE(WIN32)
|
||||
SET(SPHINX_MAKE make)
|
||||
ENDIF(WIN32)
|
||||
|
||||
ADD_CUSTOM_TARGET(
|
||||
doc_usr
|
||||
COMMAND ${SPHINX_MAKE} html
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/usr
|
||||
)
|
||||
|
||||
ADD_CUSTOM_TARGET(
|
||||
doc_dev
|
||||
COMMAND ${SPHINX_MAKE} html
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/dev
|
||||
)
|
||||
|
||||
33
samples/CMake/sample4.cmake
Normal file
@@ -0,0 +1,33 @@
|
||||
cmake_minimum_required (VERSION 2.6)
|
||||
|
||||
set (CMAKE_RUNTIME_OUTPUT_DIRECTORY "bin")
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/vala)
|
||||
find_package(Vala REQUIRED)
|
||||
include(ValaPrecompile)
|
||||
include(ValaVersion)
|
||||
ensure_vala_version("0.11.0" MINIMUM)
|
||||
|
||||
project (template C)
|
||||
|
||||
find_package(PkgConfig)
|
||||
|
||||
pkg_check_modules(GOBJECT REQUIRED gobject-2.0)
|
||||
add_definitions(${GOBJECT_CFLAGS} ${GOBJECT_CFLAGS_OTHER})
|
||||
link_libraries(${GOBJECT_LIBRARIES})
|
||||
link_directories(${GOBJECT_LIBRARY_DIRS})
|
||||
|
||||
|
||||
vala_precompile(VALA_C
|
||||
src/template.vala
|
||||
PACKAGES
|
||||
OPTIONS
|
||||
--thread
|
||||
CUSTOM_VAPIS
|
||||
GENERATE_VAPI
|
||||
GENERATE_HEADER
|
||||
DIRECTORY
|
||||
gen
|
||||
)
|
||||
|
||||
add_executable("template" ${VALA_C})
|
||||
89
samples/CMake/sample5.cmake
Normal file
@@ -0,0 +1,89 @@
|
||||
# - Check if the STDCALL function exists.
|
||||
# This works for non-cdecl functions (kernel32 functions, for example)
|
||||
# CHECK_STDCALL_FUNCTION_EXISTS(FUNCTION FUNCTION_DUMMY_ARGS VARIABLE)
|
||||
# - macro which checks if the stdcall function exists
|
||||
# FUNCTION_DECLARATION - the definition of the function ( e.g.: Sleep(500) )
|
||||
# VARIABLE - variable to store the result
|
||||
#
|
||||
# The following variables may be set before calling this macro to
|
||||
# modify the way the check is run:
|
||||
#
|
||||
# CMAKE_REQUIRED_FLAGS = string of compile command line flags
|
||||
# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar)
|
||||
# CMAKE_REQUIRED_INCLUDES = list of include directories
|
||||
# CMAKE_REQUIRED_LIBRARIES = list of libraries to link
|
||||
# CMAKE_EXTRA_INCLUDE_FILES = list of extra includes to check in
|
||||
|
||||
MACRO(CHECK_STDCALL_FUNCTION_EXISTS FUNCTION_DECLARATION VARIABLE)
|
||||
IF("${VARIABLE}" MATCHES "^${VARIABLE}$")
|
||||
#get includes
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN)
|
||||
FOREACH(def ${CMAKE_EXTRA_INCLUDE_FILES})
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"${def}\"\n")
|
||||
ENDFOREACH(def)
|
||||
|
||||
#add some default includes
|
||||
IF ( HAVE_WINDOWS_H )
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"windows.h\"\n")
|
||||
ENDIF ( HAVE_WINDOWS_H )
|
||||
IF ( HAVE_UNISTD_H )
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"unistd.h\"\n")
|
||||
ENDIF ( HAVE_UNISTD_H )
|
||||
IF ( HAVE_DIRECT_H )
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"direct.h\"\n")
|
||||
ENDIF ( HAVE_DIRECT_H )
|
||||
IF ( HAVE_IO_H )
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"io.h\"\n")
|
||||
ENDIF ( HAVE_IO_H )
|
||||
IF ( HAVE_SYS_TIMEB_H )
|
||||
SET(CHECK_STDCALL_FUNCTION_PREMAIN "${CHECK_STDCALL_FUNCTION_PREMAIN}#include \"sys/timeb.h\"\n")
|
||||
ENDIF ( HAVE_SYS_TIMEB_H )
|
||||
|
||||
STRING(REGEX REPLACE "(\\(.*\\))" "" CHECK_STDCALL_FUNCTION_EXISTS_FUNCTION ${FUNCTION_DECLARATION} )
|
||||
|
||||
SET(MACRO_CHECK_STDCALL_FUNCTION_DEFINITIONS "${CMAKE_REQUIRED_FLAGS}")
|
||||
MESSAGE(STATUS "Looking for ${CHECK_STDCALL_FUNCTION_EXISTS_FUNCTION}")
|
||||
|
||||
IF(CMAKE_REQUIRED_LIBRARIES)
|
||||
SET(CHECK_STDCALL_FUNCTION_EXISTS_ADD_LIBRARIES
|
||||
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
|
||||
ELSE(CMAKE_REQUIRED_LIBRARIES)
|
||||
SET(CHECK_STDCALL_FUNCTION_EXISTS_ADD_LIBRARIES)
|
||||
ENDIF(CMAKE_REQUIRED_LIBRARIES)
|
||||
|
||||
IF(CMAKE_REQUIRED_INCLUDES)
|
||||
SET(CHECK_STDCALL_FUNCTION_EXISTS_ADD_INCLUDES
|
||||
"-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}")
|
||||
ELSE(CMAKE_REQUIRED_INCLUDES)
|
||||
SET(CHECK_STDCALL_FUNCTION_EXISTS_ADD_INCLUDES)
|
||||
ENDIF(CMAKE_REQUIRED_INCLUDES)
|
||||
|
||||
SET(CHECK_STDCALL_FUNCTION_DECLARATION ${FUNCTION_DECLARATION})
|
||||
CONFIGURE_FILE("${clucene-shared_SOURCE_DIR}/cmake/CheckStdCallFunctionExists.cpp.in"
|
||||
"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/CheckStdCallFunctionExists.cpp" IMMEDIATE @ONLY)
|
||||
FILE(READ "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/CheckStdCallFunctionExists.cpp"
|
||||
CHECK_STDCALL_FUNCTION_CONTENT)
|
||||
|
||||
TRY_COMPILE(${VARIABLE}
|
||||
${CMAKE_BINARY_DIR}
|
||||
"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/CheckStdCallFunctionExists.cpp"
|
||||
COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS}
|
||||
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_STDCALL_FUNCTION_DEFINITIONS}
|
||||
"${CHECK_STDCALL_FUNCTION_EXISTS_ADD_LIBRARIES}"
|
||||
"${CHECK_STDCALL_FUNCTION_EXISTS_ADD_INCLUDES}"
|
||||
OUTPUT_VARIABLE OUTPUT)
|
||||
IF(${VARIABLE})
|
||||
SET(${VARIABLE} 1 CACHE INTERNAL "Have function ${FUNCTION_DECLARATION}")
|
||||
MESSAGE(STATUS "Looking for ${FUNCTION_DECLARATION} - found")
|
||||
FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
|
||||
"Determining if the stdcall function ${FUNCTION_DECLARATION} exists passed with the following output:\n"
|
||||
"${OUTPUT}\nCheckStdCallFunctionExists.cpp:\n${CHECK_STDCALL_FUNCTION_CONTENT}\n\n")
|
||||
ELSE(${VARIABLE})
|
||||
MESSAGE(STATUS "Looking for ${FUNCTION_DECLARATION} - not found")
|
||||
SET(${VARIABLE} "" CACHE INTERNAL "Have function ${FUNCTION_DECLARATION}")
|
||||
FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
|
||||
"Determining if the stdcall function ${FUNCTION_DECLARATION} exists failed with the following output:\n"
|
||||
"${OUTPUT}\nCheckStdCallFunctionExists.cpp:\n${CHECK_STDCALL_FUNCTION_CONTENT}\n\n")
|
||||
ENDIF(${VARIABLE})
|
||||
ENDIF("${VARIABLE}" MATCHES "^${VARIABLE}$")
|
||||
ENDMACRO(CHECK_STDCALL_FUNCTION_EXISTS)
|
||||
22
samples/CMake/uninstall.cmake.in
Normal file
@@ -0,0 +1,22 @@
|
||||
IF (NOT EXISTS "@PROJECT_BINARY_DIR@/install_manifest.txt")
|
||||
MESSAGE (FATAL_ERROR "Cannot find install manifest: \"@PROJECT_BINARY_DIR@/install_manifest.txt\"")
|
||||
ENDIF (NOT EXISTS "@PROJECT_BINARY_DIR@/install_manifest.txt")
|
||||
|
||||
FILE (READ "@PROJECT_BINARY_DIR@/install_manifest.txt" files)
|
||||
STRING (REGEX REPLACE "\n" ";" files "${files}")
|
||||
FOREACH (file ${files})
|
||||
MESSAGE (STATUS "Uninstalling \"$ENV{DESTDIR}${file}\"")
|
||||
IF (EXISTS "$ENV{DESTDIR}${file}")
|
||||
EXEC_PROGRAM (
|
||||
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
|
||||
OUTPUT_VARIABLE rm_out
|
||||
RETURN_VALUE rm_retval
|
||||
)
|
||||
IF (NOT "${rm_retval}" STREQUAL 0)
|
||||
MESSAGE (FATAL_ERROR "Problem when removing \"$ENV{DESTDIR}${file}\"")
|
||||
ENDIF (NOT "${rm_retval}" STREQUAL 0)
|
||||
ELSE (EXISTS "$ENV{DESTDIR}${file}")
|
||||
MESSAGE (STATUS "File \"$ENV{DESTDIR}${file}\" does not exist.")
|
||||
ENDIF (EXISTS "$ENV{DESTDIR}${file}")
|
||||
ENDFOREACH (file)
|
||||
|
||||
1879
samples/CartoCSS/amenity-points.mss
Normal file
File diff suppressed because it is too large
21
samples/Common Lisp/sample.lsp
Normal file
@@ -0,0 +1,21 @@
|
||||
;;;; -*- lisp -*-
|
||||
|
||||
(in-package :foo)
|
||||
|
||||
;;; Header comment.
|
||||
(defvar *foo*)
|
||||
|
||||
(eval-when (:execute :compile-toplevel :load-toplevel)
|
||||
(defun add (x &optional y &key z)
|
||||
(declare (ignore z))
|
||||
;; Inline comment.
|
||||
(+ x (or y 1))))
|
||||
|
||||
#|
|
||||
Multi-line comment.
|
||||
|#
|
||||
|
||||
(defmacro foo (x &body b)
|
||||
(if x
|
||||
`(1+ ,x) ;After-line comment.
|
||||
42))
|
||||
29
samples/Emacs Lisp/.emacs.desktop
Normal file
@@ -0,0 +1,29 @@
|
||||
;; -*- mode: emacs-lisp; coding: emacs-mule; -*-
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Desktop File for Emacs
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Created Sat Jan 3 12:46:35 2015
|
||||
;; Desktop file format version 206
|
||||
;; Emacs version 24.3.1
|
||||
|
||||
;; Global section:
|
||||
(setq desktop-missing-file-warning nil)
|
||||
(setq tags-file-name nil)
|
||||
(setq tags-table-list nil)
|
||||
(setq search-ring nil)
|
||||
(setq regexp-search-ring nil)
|
||||
(setq register-alist nil)
|
||||
(setq file-name-history nil)
|
||||
|
||||
;; Buffer section -- buffers listed in same order as in buffer list:
|
||||
(desktop-create-buffer 206
|
||||
"/home/foo/bar"
|
||||
"bar"
|
||||
'fundamental-mode
|
||||
nil
|
||||
11572
|
||||
'(11554 nil)
|
||||
nil
|
||||
nil
|
||||
'((buffer-file-coding-system . undecided-unix)))
|
||||
|
||||
49
samples/F#/Combinators.fs
Normal file
@@ -0,0 +1,49 @@
|
||||
namespace Nessos.FsPickler.Combinators
|
||||
|
||||
open Nessos.FsPickler
|
||||
open Nessos.FsPickler.Json
|
||||
|
||||
/// Json pickling methods
|
||||
[<RequireQualifiedAccess>]
|
||||
module Json =
|
||||
|
||||
let private jsonSerializer = lazy(FsPickler.CreateJson(omitHeader = true))
|
||||
|
||||
/// <summary>
|
||||
/// Pickles a value to Json.
|
||||
/// </summary>
|
||||
/// <param name="pickler">utilized pickler.</param>
|
||||
/// <param name="value">input value.</param>
|
||||
let pickle (pickler : Pickler<'T>) (value : 'T) : string =
|
||||
jsonSerializer.Value.PickleToString (pickler, value)
|
||||
|
||||
/// <summary>
|
||||
/// Unpickles a value from Json.
|
||||
/// </summary>
|
||||
/// <param name="pickler">utilized pickler.</param>
|
||||
/// <param name="pickle">input pickle.</param>
|
||||
let unpickle (pickler : Pickler<'T>) (pickle : string) : 'T =
|
||||
jsonSerializer.Value.UnPickleOfString (pickler, pickle)
|
||||
|
||||
|
||||
/// Bson pickling methods
|
||||
[<RequireQualifiedAccess>]
|
||||
module Bson =
|
||||
|
||||
let private bsonPickler = lazy(FsPickler.CreateBson())
|
||||
|
||||
/// <summary>
|
||||
/// Pickles a value to Bson.
|
||||
/// </summary>
|
||||
/// <param name="pickler">utilized pickler.</param>
|
||||
/// <param name="value">input value.</param>
|
||||
let pickle (pickler : Pickler<'T>) (value : 'T) : byte [] =
|
||||
bsonPickler.Value.Pickle (pickler, value)
|
||||
|
||||
/// <summary>
|
||||
/// Unpickles a value from bson.
|
||||
/// </summary>
|
||||
/// <param name="pickler">utilized pickler.</param>
|
||||
/// <param name="pickle">input pickle.</param>
|
||||
let unpickle (pickler : Pickler<'T>) (pickle : byte []) : 'T =
|
||||
bsonPickler.Value.UnPickle (pickler, pickle)
|
||||
65
samples/F#/JsonFormat.fs
Normal file
@@ -0,0 +1,65 @@
|
||||
namespace Nessos.FsPickler.Json
|
||||
|
||||
open System
|
||||
open System.IO
|
||||
open System.Text
|
||||
|
||||
open Newtonsoft.Json
|
||||
|
||||
open Nessos.FsPickler
|
||||
|
||||
/// <summary>
|
||||
/// Factory methods for the Json serialization format.
|
||||
/// </summary>
|
||||
type JsonPickleFormatProvider internal (indent, omitHeader) as self =
|
||||
|
||||
let isCustomSeq isTopLevelSequence =
|
||||
isTopLevelSequence && self.OmitHeader && self.UseCustomTopLevelSequenceSeparator
|
||||
|
||||
let mutable sequenceSeparator = " "
|
||||
|
||||
member val Indent = indent with get,set
|
||||
member val OmitHeader = omitHeader with get,set
|
||||
member val UseCustomTopLevelSequenceSeparator = false with get,set
|
||||
|
||||
member __.SequenceSeparator
|
||||
with get () = sequenceSeparator
|
||||
and set sep =
|
||||
if sep <> null && String.IsNullOrWhiteSpace sep then
|
||||
sequenceSeparator <- sep
|
||||
else
|
||||
invalidArg "SequenceSeparator" "should be non-null whitespace."
|
||||
|
||||
interface ITextPickleFormatProvider with
|
||||
member __.Name = "Json"
|
||||
|
||||
// see discussion : https://github.com/nessos/FsPickler/issues/17
|
||||
member __.DefaultEncoding = new UTF8Encoding(false) :> Encoding
|
||||
|
||||
member __.CreateWriter (stream, encoding, isTopLevelSequence, leaveOpen) =
|
||||
#if NET40
|
||||
if leaveOpen then raise <| new NotSupportedException("'leaveOpen' not supported in .NET 40.")
|
||||
let sw = new StreamWriter(stream, encoding)
|
||||
#else
|
||||
let sw = new StreamWriter(stream, encoding, 1024, leaveOpen)
|
||||
#endif
|
||||
let jw = new JsonTextWriter(sw)
|
||||
new JsonPickleWriter(jw, __.OmitHeader, __.Indent, isCustomSeq isTopLevelSequence, sequenceSeparator, leaveOpen) :> _
|
||||
|
||||
member __.CreateReader (stream, encoding, isTopLevelSequence, leaveOpen) =
|
||||
#if NET40
|
||||
if leaveOpen then raise <| new NotSupportedException("'leaveOpen' not supported in .NET 40.")
|
||||
let sr = new StreamReader(stream, encoding)
|
||||
#else
|
||||
let sr = new StreamReader(stream, encoding, true, 1024, leaveOpen)
|
||||
#endif
|
||||
let jr = new JsonTextReader(sr)
|
||||
new JsonPickleReader(jr, __.OmitHeader, isCustomSeq isTopLevelSequence, leaveOpen) :> _
|
||||
|
||||
member __.CreateWriter (textWriter, isTopLevelSequence, leaveOpen) =
|
||||
let jw = new JsonTextWriter(textWriter)
|
||||
new JsonPickleWriter(jw, __.OmitHeader, __.Indent, isCustomSeq isTopLevelSequence, sequenceSeparator, leaveOpen) :> _
|
||||
|
||||
member __.CreateReader (textReader, isTopLevelSequence, leaveOpen) =
|
||||
let jr = new JsonTextReader(textReader)
|
||||
new JsonPickleReader(jr, __.OmitHeader, isCustomSeq isTopLevelSequence, leaveOpen) :> _
|
||||
202
samples/F#/JsonReader.fs
Normal file
@@ -0,0 +1,202 @@
|
||||
namespace Nessos.FsPickler.Json
|
||||
|
||||
open System
|
||||
open System.Collections.Generic
|
||||
open System.Globalization
|
||||
open System.IO
|
||||
open System.Numerics
|
||||
open System.Text
|
||||
|
||||
open Newtonsoft.Json
|
||||
|
||||
open Nessos.FsPickler
|
||||
|
||||
/// <summary>
|
||||
/// Json format deserializer
|
||||
/// </summary>
|
||||
type internal JsonPickleReader (jsonReader : JsonReader, omitHeader, isTopLevelSequence, leaveOpen) =
|
||||
|
||||
do
|
||||
jsonReader.CloseInput <- not leaveOpen
|
||||
jsonReader.SupportMultipleContent <- isTopLevelSequence
|
||||
|
||||
let isBsonReader = match jsonReader with :? Bson.BsonReader -> true | _ -> false
|
||||
|
||||
let mutable depth = 0
|
||||
let arrayStack = new Stack<int> ()
|
||||
do arrayStack.Push Int32.MinValue
|
||||
|
||||
// do not write tag if omitting header or array element
|
||||
let omitTag () = (omitHeader && depth = 0) || arrayStack.Peek() = depth - 1
|
||||
|
||||
interface IPickleFormatReader with
|
||||
|
||||
member __.BeginReadRoot (tag : string) =
|
||||
do jsonReader.Read() |> ignore
|
||||
|
||||
if omitHeader then () else
|
||||
|
||||
if jsonReader.TokenType <> JsonToken.StartObject then raise <| new FormatException("invalid json root object.")
|
||||
else
|
||||
do jsonReader.MoveNext()
|
||||
let version = jsonReader.ReadPrimitiveAs<string> false "FsPickler"
|
||||
if version <> jsonFormatVersion then
|
||||
let v = Version(version)
|
||||
raise <| new FormatException(sprintf "Invalid FsPickler format version %O." version)
|
||||
|
||||
let sTag = jsonReader.ReadPrimitiveAs<string> false "type"
|
||||
if tag <> sTag then
|
||||
raise <| new InvalidPickleTypeException(tag, sTag)
|
||||
|
||||
member __.EndReadRoot () =
|
||||
if not omitHeader then jsonReader.Read() |> ignore
|
||||
|
||||
member __.BeginReadObject (tag : string) =
|
||||
|
||||
if not <| omitTag () then
|
||||
jsonReader.ReadProperty tag
|
||||
jsonReader.MoveNext ()
|
||||
|
||||
if isTopLevelSequence && depth = 0 then
|
||||
arrayStack.Push depth
|
||||
depth <- depth + 1
|
||||
ObjectFlags.IsSequenceHeader
|
||||
|
||||
else
|
||||
match jsonReader.TokenType with
|
||||
| JsonToken.Null -> ObjectFlags.IsNull
|
||||
| JsonToken.StartArray ->
|
||||
jsonReader.MoveNext()
|
||||
arrayStack.Push depth
|
||||
depth <- depth + 1
|
||||
ObjectFlags.IsSequenceHeader
|
||||
|
||||
| JsonToken.StartObject ->
|
||||
do jsonReader.MoveNext()
|
||||
depth <- depth + 1
|
||||
|
||||
if jsonReader.ValueAs<string> () = "_flags" then
|
||||
jsonReader.MoveNext()
|
||||
let csvFlags = jsonReader.ValueAs<string>()
|
||||
jsonReader.MoveNext()
|
||||
parseFlagCsv csvFlags
|
||||
else
|
||||
ObjectFlags.None
|
||||
|
||||
| token -> raise <| new FormatException(sprintf "expected start of Json object but was '%O'." token)
|
||||
|
||||
|
||||
member __.EndReadObject () =
|
||||
if isTopLevelSequence && depth = 1 then
|
||||
arrayStack.Pop () |> ignore
|
||||
depth <- depth - 1
|
||||
jsonReader.Read() |> ignore
|
||||
else
|
||||
match jsonReader.TokenType with
|
||||
| JsonToken.Null -> ()
|
||||
| JsonToken.EndObject -> depth <- depth - 1
|
||||
| JsonToken.EndArray ->
|
||||
arrayStack.Pop() |> ignore
|
||||
depth <- depth - 1
|
||||
|
||||
| token -> raise <| new FormatException(sprintf "expected end of Json object but was '%O'." token)
|
||||
|
||||
if omitHeader && depth = 0 then ()
|
||||
else jsonReader.Read() |> ignore
|
||||
|
||||
member __.SerializeUnionCaseNames = true
|
||||
|
||||
member __.PreferLengthPrefixInSequences = false
|
||||
member __.ReadNextSequenceElement () =
|
||||
if isTopLevelSequence && depth = 1 then
|
||||
jsonReader.TokenType <> JsonToken.None
|
||||
else
|
||||
jsonReader.TokenType <> JsonToken.EndArray
|
||||
|
||||
member __.ReadCachedObjectId () = jsonReader.ReadPrimitiveAs<int64> false "id"
|
||||
|
||||
member __.ReadBoolean tag = jsonReader.ReadPrimitiveAs<bool> (omitTag ()) tag
|
||||
member __.ReadByte tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> byte
|
||||
member __.ReadSByte tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> sbyte
|
||||
|
||||
member __.ReadInt16 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> int16
|
||||
member __.ReadInt32 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> int
|
||||
member __.ReadInt64 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag
|
||||
|
||||
member __.ReadUInt16 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> uint16
|
||||
member __.ReadUInt32 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> uint32
|
||||
member __.ReadUInt64 tag = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag |> uint64
|
||||
|
||||
member __.ReadSingle tag =
|
||||
if not <| omitTag () then
|
||||
jsonReader.ReadProperty tag
|
||||
jsonReader.MoveNext()
|
||||
|
||||
let value =
|
||||
match jsonReader.TokenType with
|
||||
| JsonToken.Float -> jsonReader.ValueAs<double> () |> single
|
||||
| JsonToken.String -> Single.Parse(jsonReader.ValueAs<string>(), CultureInfo.InvariantCulture)
|
||||
| _ -> raise <| new FormatException("not a float.")
|
||||
|
||||
jsonReader.Read() |> ignore
|
||||
value
|
||||
|
||||
member __.ReadDouble tag =
|
||||
if not <| omitTag () then
|
||||
jsonReader.ReadProperty tag
|
||||
jsonReader.MoveNext()
|
||||
|
||||
let value =
|
||||
match jsonReader.TokenType with
|
||||
| JsonToken.Float -> jsonReader.ValueAs<double> ()
|
||||
| JsonToken.String -> Double.Parse(jsonReader.ValueAs<string>(), CultureInfo.InvariantCulture)
|
||||
| _ -> raise <| new FormatException("not a float.")
|
||||
|
||||
jsonReader.Read() |> ignore
|
||||
value
|
||||
|
||||
member __.ReadChar tag = let value = jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag in value.[0]
|
||||
member __.ReadString tag = jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag
|
||||
member __.ReadBigInteger tag = jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag |> BigInteger.Parse
|
||||
|
||||
member __.ReadGuid tag =
|
||||
if isBsonReader then
|
||||
jsonReader.ReadPrimitiveAs<Guid> (omitTag ()) tag
|
||||
else
|
||||
jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag |> Guid.Parse
|
||||
|
||||
member __.ReadTimeSpan tag = jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag |> TimeSpan.Parse
|
||||
member __.ReadDecimal tag = jsonReader.ReadPrimitiveAs<string> (omitTag ()) tag |> decimal
|
||||
|
||||
// BSON spec mandates the use of Unix time;
|
||||
// this has millisecond precision which results in loss of accuracy w.r.t. ticks
|
||||
// since the goal of FsPickler is to offer faithful representations of .NET objects
|
||||
// we choose to override the spec and serialize ticks outright.
|
||||
// see also https://json.codeplex.com/discussions/212067
|
||||
member __.ReadDate tag =
|
||||
if isBsonReader then
|
||||
let ticks = jsonReader.ReadPrimitiveAs<int64> (omitTag ()) tag
|
||||
DateTime(ticks)
|
||||
else
|
||||
jsonReader.ReadPrimitiveAs<DateTime> (omitTag ()) tag
|
||||
|
||||
member __.ReadBytes tag =
|
||||
if not <| omitTag () then
|
||||
jsonReader.ReadProperty tag
|
||||
jsonReader.Read() |> ignore
|
||||
|
||||
let bytes =
|
||||
if jsonReader.TokenType = JsonToken.Null then null
|
||||
elif isBsonReader then jsonReader.ValueAs<byte []> ()
|
||||
else
|
||||
let base64 = jsonReader.ValueAs<string> ()
|
||||
Convert.FromBase64String base64
|
||||
|
||||
jsonReader.Read() |> ignore
|
||||
|
||||
bytes
|
||||
|
||||
member __.IsPrimitiveArraySerializationSupported = false
|
||||
member __.ReadPrimitiveArray _ _ = raise <| new NotImplementedException()
|
||||
|
||||
member __.Dispose () = (jsonReader :> IDisposable).Dispose()
|
||||
85
samples/F#/JsonSerializer.fs
Normal file
@@ -0,0 +1,85 @@
|
||||
namespace Nessos.FsPickler.Json
|
||||
|
||||
open System
|
||||
|
||||
open Nessos.FsPickler
|
||||
|
||||
type internal OAttribute = System.Runtime.InteropServices.OptionalAttribute
|
||||
type internal DAttribute = System.Runtime.InteropServices.DefaultParameterValueAttribute
|
||||
|
||||
/// <summary>
|
||||
/// Json pickler instance.
|
||||
/// </summary>
|
||||
type JsonSerializer =
|
||||
inherit FsPicklerTextSerializer
|
||||
|
||||
val private format : JsonPickleFormatProvider
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new Json pickler instance.
|
||||
/// </summary>
|
||||
/// <param name="indent">indent out Json pickles.</param>
|
||||
/// <param name="omitHeader">omit FsPickler header in Json pickles.</param>
|
||||
/// <param name="typeConverter">specify a custom type name converter.</param>
|
||||
new ([<O;D(null)>] ?indent, [<O;D(null)>] ?omitHeader, [<O;D(null)>] ?typeConverter) =
|
||||
let indent = defaultArg indent false
|
||||
let omitHeader = defaultArg omitHeader false
|
||||
let json = new JsonPickleFormatProvider(indent, omitHeader)
|
||||
{
|
||||
inherit FsPicklerTextSerializer(json, ?typeConverter = typeConverter)
|
||||
format = json
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets whether Json output should be indented.
|
||||
/// </summary>
|
||||
member x.Indent
|
||||
with get () = x.format.Indent
|
||||
and set b = x.format.Indent <- b
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets whether FsPickler headers should be ignored in pickle format.
|
||||
/// </summary>
|
||||
member x.OmitHeader
|
||||
with get () = x.format.OmitHeader
|
||||
and set b = x.format.OmitHeader <- b
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a non-null whitespace string that serves as a custom, top-level sequence separator.
|
||||
/// </summary>
|
||||
member x.SequenceSeparator
|
||||
with get () = x.format.SequenceSeparator
|
||||
and set sep = x.format.SequenceSeparator <- sep
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets whether top-level sequences should be serialized using the custom separator.
|
||||
/// </summary>
|
||||
member x.UseCustomTopLevelSequenceSeparator
|
||||
with get () = x.format.UseCustomTopLevelSequenceSeparator
|
||||
and set e = x.format.UseCustomTopLevelSequenceSeparator <- e
|
||||
|
||||
/// <summary>
|
||||
/// BSON pickler instance.
|
||||
/// </summary>
|
||||
type BsonSerializer([<O;D(null)>] ?typeConverter) =
|
||||
inherit FsPicklerSerializer(new BsonPickleFormatProvider(), ?typeConverter = typeConverter)
|
||||
|
||||
|
||||
/// FsPickler static methods.
|
||||
type FsPickler =
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new Json pickler instance.
|
||||
/// </summary>
|
||||
/// <param name="indent">indent out Json pickles.</param>
|
||||
/// <param name="omitHeader">omit FsPickler header in Json pickles.</param>
|
||||
/// <param name="typeConverter">specify a custom type name converter.</param>
|
||||
static member CreateJson([<O;D(null)>] ?indent, [<O;D(null)>] ?omitHeader, [<O;D(null)>] ?typeConverter) =
|
||||
new JsonSerializer(?indent = indent, ?omitHeader = omitHeader, ?typeConverter = typeConverter)
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new Bson pickler instance.
|
||||
/// </summary>
|
||||
/// <param name="typeConverter">specify a custom type name converter.</param>
|
||||
static member CreateBson([<O;D(null)>] ?typeConverter) =
|
||||
new BsonSerializer(?typeConverter = typeConverter)
|
||||
142
samples/F#/JsonWriter.fs
Normal file
@@ -0,0 +1,142 @@
|
||||
namespace Nessos.FsPickler.Json
|
||||
|
||||
open System
|
||||
open System.IO
|
||||
open System.Collections.Generic
|
||||
|
||||
open Newtonsoft.Json
|
||||
|
||||
open Nessos.FsPickler
|
||||
|
||||
/// <summary>
|
||||
/// Json format serializer.
|
||||
/// </summary>
|
||||
type internal JsonPickleWriter (jsonWriter : JsonWriter, omitHeader, indented, isTopLevelSequence, separator, leaveOpen) =
|
||||
|
||||
do
|
||||
jsonWriter.Formatting <- if indented then Formatting.Indented else Formatting.None
|
||||
jsonWriter.CloseOutput <- not leaveOpen
|
||||
|
||||
let isBsonWriter = match jsonWriter with :? Bson.BsonWriter -> true | _ -> false
|
||||
|
||||
let mutable depth = 0
|
||||
let mutable isTopLevelSequenceHead = false
|
||||
let mutable currentValueIsNull = false
|
||||
|
||||
let arrayStack = new Stack<int> ()
|
||||
do arrayStack.Push Int32.MinValue
|
||||
|
||||
// do not write tag if omitting header or array element
|
||||
let omitTag () = (omitHeader && depth = 0) || arrayStack.Peek() = depth - 1
|
||||
|
||||
interface IPickleFormatWriter with
|
||||
|
||||
member __.BeginWriteRoot (tag : string) =
|
||||
if omitHeader then () else
|
||||
|
||||
jsonWriter.WriteStartObject()
|
||||
writePrimitive jsonWriter false "FsPickler" jsonFormatVersion
|
||||
writePrimitive jsonWriter false "type" tag
|
||||
|
||||
member __.EndWriteRoot () =
|
||||
if not omitHeader then jsonWriter.WriteEnd()
|
||||
|
||||
member __.BeginWriteObject (tag : string) (flags : ObjectFlags) =
|
||||
|
||||
if not <| omitTag () then
|
||||
jsonWriter.WritePropertyName tag
|
||||
|
||||
if flags.HasFlag ObjectFlags.IsNull then
|
||||
currentValueIsNull <- true
|
||||
jsonWriter.WriteNull()
|
||||
|
||||
elif flags.HasFlag ObjectFlags.IsSequenceHeader then
|
||||
if isTopLevelSequence && depth = 0 then
|
||||
isTopLevelSequenceHead <- true
|
||||
else
|
||||
jsonWriter.WriteStartArray()
|
||||
|
||||
arrayStack.Push depth
|
||||
depth <- depth + 1
|
||||
else
|
||||
jsonWriter.WriteStartObject()
|
||||
depth <- depth + 1
|
||||
|
||||
if flags = ObjectFlags.None then ()
|
||||
else
|
||||
let flagCsv = mkFlagCsv flags
|
||||
writePrimitive jsonWriter false "_flags" flagCsv
|
||||
|
||||
member __.EndWriteObject () =
|
||||
if currentValueIsNull then
|
||||
currentValueIsNull <- false
|
||||
else
|
||||
depth <- depth - 1
|
||||
if arrayStack.Peek () = depth then
|
||||
if isTopLevelSequence && depth = 0 then ()
|
||||
else
|
||||
jsonWriter.WriteEndArray()
|
||||
|
||||
arrayStack.Pop () |> ignore
|
||||
else
|
||||
jsonWriter.WriteEndObject()
|
||||
|
||||
member __.SerializeUnionCaseNames = true
|
||||
|
||||
member __.PreferLengthPrefixInSequences = false
|
||||
member __.WriteNextSequenceElement hasNext =
|
||||
if isTopLevelSequence && depth = 1 then
|
||||
if isTopLevelSequenceHead then
|
||||
isTopLevelSequenceHead <- false
|
||||
else
|
||||
jsonWriter.WriteWhitespace separator
|
||||
|
||||
member __.WriteCachedObjectId id = writePrimitive jsonWriter false "id" id
|
||||
|
||||
member __.WriteBoolean (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteByte (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteSByte (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
|
||||
member __.WriteInt16 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteInt32 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteInt64 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
|
||||
member __.WriteUInt16 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteUInt32 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteUInt64 (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
|
||||
member __.WriteSingle (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteDouble (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteDecimal (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag (string value)
|
||||
|
||||
member __.WriteChar (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteString (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteBigInteger (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag (string value)
|
||||
|
||||
member __.WriteGuid (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag value
|
||||
member __.WriteTimeSpan (tag : string) value = writePrimitive jsonWriter (omitTag ()) tag (string value)
|
||||
|
||||
// BSON spec mandates the use of Unix time;
|
||||
// this has millisecond precision which results in loss of accuracy w.r.t. ticks
|
||||
// since the goal of FsPickler is to offer faithful representations of .NET objects
|
||||
// we choose to override the spec and serialize ticks outright.
|
||||
// see also https://json.codeplex.com/discussions/212067
|
||||
member __.WriteDate (tag : string) value =
|
||||
if isBsonWriter then
|
||||
writePrimitive jsonWriter (omitTag ()) tag value.Ticks
|
||||
else
|
||||
writePrimitive jsonWriter (omitTag ()) tag value
|
||||
|
||||
member __.WriteBytes (tag : string) (value : byte []) =
|
||||
if not <| omitTag () then
|
||||
jsonWriter.WritePropertyName tag
|
||||
|
||||
if obj.ReferenceEquals(value, null) then
|
||||
jsonWriter.WriteNull()
|
||||
else
|
||||
jsonWriter.WriteValue value
|
||||
|
||||
member __.IsPrimitiveArraySerializationSupported = false
|
||||
member __.WritePrimitiveArray _ _ = raise <| NotSupportedException()
|
||||
|
||||
member __.Dispose () = jsonWriter.Flush()
|
||||
68
samples/F#/PerformanceTesters.fs
Normal file
@@ -0,0 +1,68 @@
|
||||
namespace Nessos.FsPickler.Tests
|
||||
|
||||
open PerfUtil
|
||||
open PerfUtil.NUnit
|
||||
|
||||
open NUnit.Framework
|
||||
|
||||
open Nessos.FsPickler
|
||||
open Nessos.FsPickler.Json
|
||||
|
||||
[<AbstractClass>]
|
||||
type PerfTester () =
|
||||
inherit NUnitPerf<Serializer> ()
|
||||
|
||||
let tests = PerfTest.OfModuleMarker<PerformanceTests.Marker> ()
|
||||
|
||||
override __.PerfTests = tests
|
||||
|
||||
|
||||
type ``Serializer Comparison`` () =
|
||||
inherit PerfTester()
|
||||
|
||||
let fsp = FsPickler.initBinary()
|
||||
let bfs = new BinaryFormatterSerializer() :> Serializer
|
||||
let ndc = new NetDataContractSerializer() :> Serializer
|
||||
let jdn = new JsonDotNetSerializer() :> Serializer
|
||||
let bdn = new JsonDotNetBsonSerializer () :> Serializer
|
||||
let pbn = new ProtoBufSerializer() :> Serializer
|
||||
let ssj = new ServiceStackJsonSerializer() :> Serializer
|
||||
let sst = new ServiceStackTypeSerializer() :> Serializer
|
||||
|
||||
let comparer = new WeightedComparer(spaceFactor = 0.2, leastAcceptableImprovementFactor = 1.)
|
||||
let tester = new ImplementationComparer<_>(fsp, [bfs;ndc;jdn;bdn;pbn;ssj;sst], throwOnError = true, warmup = true, comparer = comparer)
|
||||
|
||||
override __.PerfTester = tester :> _
|
||||
|
||||
|
||||
type ``FsPickler Formats Comparison`` () =
|
||||
inherit PerfTester ()
|
||||
|
||||
let binary = FsPickler.initBinary()
|
||||
let json = FsPickler.initJson()
|
||||
let bson = FsPickler.initBson()
|
||||
let xml = FsPickler.initXml()
|
||||
|
||||
let tester = new ImplementationComparer<_>(binary, [json ; bson; xml], warmup = true, throwOnError = false)
|
||||
|
||||
override __.PerfTester = tester :> _
|
||||
|
||||
|
||||
type ``Past FsPickler Versions Comparison`` () =
|
||||
inherit PerfTester ()
|
||||
|
||||
let persistResults = true
|
||||
let persistenceFile = "fspPerf.xml"
|
||||
|
||||
let fsp = FsPickler.initBinary()
|
||||
let version = typeof<FsPickler>.Assembly.GetName().Version
|
||||
let comparer = new WeightedComparer(spaceFactor = 0.2, leastAcceptableImprovementFactor = 0.8)
|
||||
let tester =
|
||||
new PastImplementationComparer<Serializer>(
|
||||
fsp, version, historyFile = persistenceFile, throwOnError = true, warmup = true, comparer = comparer)
|
||||
|
||||
override __.PerfTester = tester :> _
|
||||
|
||||
[<TestFixtureTearDown>]
|
||||
member __.Persist() =
|
||||
if persistResults then tester.PersistCurrentResults ()
|
||||
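The fixtures above all follow one pattern: pick a reference serializer, a list of competitors, and a comparer that weighs run time against payload size (`spaceFactor`), then run the `roundtrip` tests that follow. A rough Python sketch of the idea (illustrative only; `WeightedComparer` and `ImplementationComparer` are PerfUtil types, and the weighting formula below is just one plausible choice, not PerfUtil's):

```python
import json, pickle, time

def roundtrip_cost(dumps, loads, value, iterations=1000):
    """Return (seconds, payload size) for a serialize/deserialize loop."""
    start = time.perf_counter()
    for _ in range(iterations):
        data = dumps(value)
        loads(data)
    return time.perf_counter() - start, len(data)

def weighted_score(seconds, size, space_factor=0.2):
    # Smaller is better; payload size contributes with a configurable weight.
    return (1 - space_factor) * seconds + space_factor * size / 1e6

value = {"xs": list(range(1000)), "s": "lorem ipsum"}
candidates = {"pickle": (pickle.dumps, pickle.loads),
              "json": (lambda v: json.dumps(v).encode(), json.loads)}
for name, (dumps, loads) in candidates.items():
    secs, size = roundtrip_cost(dumps, loads, value)
    print(name, round(weighted_score(secs, size), 4))
```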
samples/F#/PerformanceTests.fs (new file, 207 lines)
@@ -0,0 +1,207 @@
namespace Nessos.FsPickler.Tests

open System
open System.Collections.Generic

open PerfUtil

open Nessos.FsPickler
open Nessos.FsPickler.Tests.Serializer
open Nessos.FsPickler.Tests.TestTypes

module PerformanceTests =

    type Marker = class end

    let guid = Guid.NewGuid()

    [<PerfTest(1000)>]
    let ``Value: Guid`` s = roundtrip guid s

    let date = DateTime.Now

    [<PerfTest(1000)>]
    let ``Value: DateTime`` s = roundtrip date s

    [<PerfTest(10000)>]
    let ``Value: String`` s = roundtrip stringValue s


    let boxed = box ([| 1 .. 1000 |], "lorem ipsum")

    [<PerfTest(1000)>]
    let ``Boxed Object`` s = roundtrip boxed s

    let fsClass = new Class(42, stringValue)

    [<PerfTest(10000)>]
    let ``Class: Simple F# Class`` s = roundtrip fsClass s

    let serializableClass = new SerializableClass<_>(42, stringValue, [|1..1000|])

    [<PerfTest(10000)>]
    let ``Class: ISerializable`` s = roundtrip serializableClass s

    let boxedClass = box(Some 42)

    [<PerfTest(10000)>]
    let ``Subtype Resolution`` s = roundtrip boxedClass s

    let floatArray = Array.init 100000 (fun i -> float i)

    [<PerfTest(10)>]
    let ``Array: Float`` s = roundtrip floatArray s

    let intArray = Array.init 100000 id

    [<PerfTest(10)>]
    let ``Array: Int`` s = roundtrip intArray s

    let stringArray = Array.init 10000 (fun i -> stringValue + string i)

    [<PerfTest(100)>]
    let ``Array: String`` s = roundtrip stringArray s

    let kvarr = [|1..10000|] |> Array.map (fun i -> i, string i)

    [<PerfTest(100)>]
    let ``Array: Key-Value Pairs`` s = roundtrip kvarr s

    let duArray = [| for i in 1 .. 10000 -> (Something ("asdasdasdas", i)) |]

    [<PerfTest(100)>]
    let ``Array: Discriminated Unions`` s = roundtrip duArray s

    let objArray =
        [|
            box 2; box 3; box "hello" ; box <| Some 3; box(2,3) ;
            box <| new Class(2, stringValue) ; box <| new SerializableClass<int option>(2, stringValue, Some 12);
            box stringValue
        |]

    [<PerfTest(1000)>]
    let ``Array: Objects`` s = roundtrip objArray s


    let array3D = Array3D.init 100 100 100 (fun i j k -> float (i * j + k))

    [<PerfTest(10)>]
    let ``Array: Rank-3 Float`` s = roundtrip array3D s

    let bclDict = dict [ for i in 1 .. 1000 -> (string i, i)]

    [<PerfTest(100)>]
    let ``.NET Dictionary`` s = roundtrip bclDict s

    let bclStack = new Stack<string>([for i in 1 .. 1000 -> string i])

    [<PerfTest(100)>]
    let ``.NET Stack`` s = roundtrip bclStack s

    let bclList = new List<string * int>([for i in 1 .. 1000 -> string i, i])

    [<PerfTest(100)>]
    let ``.NET List`` s = roundtrip bclList s

    let bclSet = new SortedSet<_>([for i in 1 .. 1000 -> string i])

    [<PerfTest(100)>]
    let ``.NET Set`` s = roundtrip bclSet s

    let smallTuple = (1, DateTime.Now,"hello")

    [<PerfTest(10000)>]
    let ``FSharp: Tuple Small`` s = roundtrip smallTuple s

    let largeTuple = (stringValue, 1, 2, 3, true, "", Some(3.14, [2]), 3, 2, 1, stringValue)

    [<PerfTest(10000)>]
    let ``FSharp: Tuple Large`` s =
        roundtrip largeTuple s

    let intList = [1..1000]

    [<PerfTest(1000)>]
    let ``FSharp: List Int`` s = roundtrip intList s

    let stringList = [ for i in 1 .. 1000 -> stringValue + string i ]

    [<PerfTest(1000)>]
    let ``FSharp: List String`` s = roundtrip stringList s

    let pairList = [ for i in 1 .. 1000 -> (string i, i) ]

    [<PerfTest(1000)>]
    let ``FSharp: List Key-Value`` s = roundtrip pairList s

    let nestedLst = let n = [1..1000] in [for _ in 1 .. 100 -> n]

    [<PerfTest(1000)>]
    let ``FSharp: List Nested`` s = roundtrip nestedLst s

    let union = SomethingElse(stringValue, 42, box (Some 42))

    [<PerfTest(10000)>]
    let ``FSharp: Union`` s = roundtrip union s

    let record = { Int = 42 ; String = stringValue ; Tuple = (13, "") }

    [<PerfTest(10000)>]
    let ``FSharp: Record`` s = roundtrip record s

    let peano = int2Peano 100

    [<PerfTest(100)>]
    let ``FSharp: Peano Rectype`` s = roundtrip peano s

    let closure = (@) [ Some([1..100], Set.ofList [1..100]) ]

    [<PerfTest(1000)>]
    let ``FSharp: Curried Function`` s = roundtrip closure s

    let binTree = mkTree 10

    [<PerfTest(100)>]
    let ``FSharp: Binary Tree`` s = roundtrip binTree s

    let intSet = [1..1000] |> List.map string |> set

    [<PerfTest(1000)>]
    let ``FSharp: Set`` s = roundtrip intSet s

    let fsMap = [1..1000] |> Seq.map (fun i -> (string i,i)) |> Map.ofSeq

    [<PerfTest(1000)>]
    let ``FSharp: Map`` s = roundtrip fsMap s

    let testType = typeof<int * string option * Map<int * string [], string ref option>>

    [<PerfTest(1000)>]
    let ``Reflection: Type`` s = roundtrip testType s

    let quotationSmall = <@ fun x -> pown 2 x @>

    let quotationLarge =
        <@
            async {
                let rec fibAsync n =
                    async {
                        match n with
                        | _ when n < 0 -> return invalidArg "negative" "n"
                        | _ when n < 2 -> return n
                        | n ->
                            let! fn = fibAsync (n-1)
                            let! fnn = fibAsync (n-2)
                            return fn + fnn
                    }

                let! values = [1..100] |> Seq.map fibAsync |> Async.Parallel
                return Seq.sum values
            }
        @>

    [<PerfTest(10000)>]
    let ``FSharp: Quotation Small`` s = roundtrip quotationSmall s

    [<PerfTest(1000)>]
    let ``FSharp: Quotation Large`` s = roundtrip quotationLarge s
samples/Forth/asm.fr (new file, 244 lines)
@@ -0,0 +1,244 @@
\ Copyright 2013-2014 Lars Brinkhoff

\ Assembler for x86.

\ Adds to FORTH vocabulary: ASSEMBLER CODE ;CODE.
\ Creates ASSEMBLER vocabulary with: END-CODE and x86 opcodes.

\ Conventional prefix syntax: "<source> <destination> <opcode>,".
\ Addressing modes:
\ - immediate: "n #"
\ - direct: n
\ - register: <reg>
\ - indirect: "<reg> )"
\ - indirect with displacement: "n <reg> )#"
\ - indexed: not supported yet

require lib/common.fth
require search.fth

vocabulary assembler
also assembler definitions

\ Access to the target image.
' header, defer header, is header,
' cell defer cell is cell
' dp defer dp is dp
0 value delta

: aligned cell + 1 - cell negate nand invert ;
: align dp @ aligned dp ! ;
: allot dp +! ;
: here dp @ ;
: cells cell * ;
: c! delta + c! ;
: c, here c! 1 allot ;
: h, dup c, 8 rshift c, ;
: , dup h, 16 rshift h, ;

base @ hex

\ This constant signals that an operand is not a direct address.
deadbeef constant -addr

\ Assembler state.
variable opcode
variable d
variable s
variable dir?
variable mrrm defer ?mrrm,
variable sib defer ?sib,
variable disp defer ?disp,
variable imm defer ?imm,
defer imm,
defer immediate-opcode
defer reg
defer ?opsize

\ Set opcode. And destination: register or memory.
: opcode! 3@ is immediate-opcode >r opcode ! ;
: !reg dir? @ if 2 d ! then dir? off ;
: !mem dir? off ;

\ Set bits in mod/reg/rm byte.
: -mrrm ['] nop is ?mrrm, ;
: mod! mrrm c0 !bits ;
: reg@ mrrm 38 @bits ;
: reg! mrrm 38 !bits ;
: rm@ mrrm 7 @bits ;
: rm! rm@ 3 lshift reg! mrrm 7 !bits ;
: reg>opcode rm@ opcode 07 !bits ;
: opcode>reg opcode @ dup 3 rshift rm! 8 rshift opcode ! ;

\ Write parts of instruction to memory.
: ds d @ s @ + ;
: ?twobyte dup FF > if dup 8 rshift c, then ;
: opcode, opcode @ ?twobyte ds + c, ;
: mrrm, mrrm @ c, ;
: sib, sib @ c, ;
: imm8, imm @ c, ;
: imm16, imm @ h, ;
: imm32, imm @ , ;
: disp8, disp @ c, ;
: disp32, disp @ , ;

\ Set operand size.
: -opsize 2drop r> drop ;
: opsize! is imm, s ! ['] -opsize is ?opsize ;
: !op8 0 ['] imm8, ?opsize ;
: !op32 1 ['] imm32, ?opsize ;
: !op16 1 ['] imm16, ?opsize 66 c, ;

\ Set SIB byte.
: !sib ['] sib, is ?sib, ;
: sib! 3 lshift + sib ! !sib ;

\ Set displacement.
: byte? -80 80 within ;
: disp! is ?disp, disp ! ;
: !disp8 ['] disp8, disp! ;
: !disp32 ['] disp32, disp! ;
: !disp ( a -- u ) dup byte? if !disp8 40 else !disp32 80 then ;
: -pc here 5 + negate ;
: relative -pc disp +! ;

\ Set immediate operand.
: imm! imm ! ['] imm, is ?imm, ;

\ Implements addressing modes: register, indirect, indexed, and direct.
: reg1 rm! !reg ;
: reg2 3 lshift reg! ;
: !reg2 ['] reg2 is reg ;
: ind dup mod! rm! !mem !reg2 ;
: ind# swap !disp + ind ;
: idx 04 ind sib! ;
: idx# rot !disp 04 + ind sib! ;
: addr !disp32 05 ind ;

\ Reset assembler state.
: 0opsize ['] opsize! is ?opsize ;
: 0ds d off s off ;
: 0reg ['] reg1 is reg ;
: 0mrrm c0 mrrm ! ['] mrrm, is ?mrrm, ;
: 0sib ['] nop is ?sib, ;
: 0disp ['] nop is ?disp, ;
: 0imm imm off ['] nop is ?imm, 0 is imm, ;
: 0asm 0imm 0disp 0reg 0ds 0mrrm 0sib 0opsize dir? on ;

\ Enter and exit assembler mode.
: start-code also assembler 0asm ;
: end-code align previous ;

\ Implements addressing mode: immediate.
: imm8? imm @ byte? ;
: ?sign-extend d off imm8? if 2 d ! ['] imm8, is ?imm, then ;
: alu# opcode @ reg! 80 opcode ! ?sign-extend ;
: mov# B0 s @ 3 lshift + rm@ + opcode ! 0ds -mrrm ;
: push# imm8? if ['] imm8, 6A else ['] imm32, 68 then dup opcode ! rm! is ?imm, ;
: test# F6 opcode ! ;
: imm-op imm! immediate-opcode ;

\ Process one operand. All operands except a direct address
\ have the stack picture ( n*x xt -addr ).
: addr? dup -addr <> ;
: op addr? if addr else drop execute then ;

\ Define instruction formats.
: instruction, opcode! opcode, ?mrrm, ?sib, ?disp, ?imm, 0asm ;
: mnemonic ( u a "name" -- ) create ['] nop 3, does> instruction, ;
: format: create ] !csp does> mnemonic ;
: immediate: ' latestxt >body ! ;

\ Instruction formats.
format: 0op -mrrm ;
format: 1reg op reg>opcode 0ds -mrrm ;
format: 1op opcode>reg op d off ;
format: 2op op op ;
format: 2op-d op op d off ;
format: 2op-ds op op 0ds ;
format: 1addr op relative -mrrm ;
format: 1imm8 !op8 op -mrrm ;

\ Instruction mnemonics.
00 2op add, immediate: alu#
08 2op or, immediate: alu#
0F44 2op-ds cmove, \ Todo: other condition codes.
0FB6 2op-ds movzx,
0FBE 2op-ds movsx,
10 2op adc, immediate: alu#
18 2op sbb, immediate: alu#
20 2op and, immediate: alu#
26 0op es,
28 2op sub, immediate: alu#
2E 0op cs,
30 2op xor, immediate: alu#
36 0op ss,
38 2op cmp, immediate: alu#
3E 0op ds,
50 1reg push, immediate: push#
58 1reg pop,
64 0op fs,
65 0op gs,
\ 70 jcc
84 2op-d test, immediate: test#
86 2op-d xchg,
88 2op mov, immediate: mov#
8D 2op-ds lea,
\ 8F/0 pop, rm
90 0op nop,
C3 0op ret,
\ C6/0 immediate mov to r/m
\ C7/0 immediate mov to r/m
CD 1imm8 int,
E8 1addr call,
E9 1addr jmp,
\ EB jmp rel8
F0 0op lock,
F2 0op rep,
F3 0op repz,
F4 0op hlt,
F5 0op cmc,
F610 1op not,
F618 1op neg,
F8 0op clc,
F9 0op stc,
FA 0op cli,
FB 0op sti,
FC 0op cld,
FD 0op std,
\ FE 0 inc rm
\ FF 1 dec rm
\ FF 2 call rm
\ FF 4 jmp rm
\ FF 6 push rm

: sp? dup 4 = ;

\ Addressing mode syntax: immediate, indirect, and displaced indirect.
: # ['] imm-op -addr ;
: ) 2drop sp? if 4 ['] idx else ['] ind then -addr 0reg 0opsize ;
: )# 2drop sp? if 4 ['] idx# else ['] ind# then -addr 0reg 0opsize ;

\ Define registers.
: reg8 create , does> @ ['] reg -addr !op8 ;
: reg16 create , does> @ ['] reg -addr !op16 ;
: reg32 create , does> @ ['] reg -addr !op32 ;
: reg: dup reg8 dup reg16 dup reg32 1+ ;

\ Register names.
0
reg: al ax eax reg: cl cx ecx reg: dl dx edx reg: bl bx ebx
reg: ah sp esp reg: ch bp ebp reg: dh si esi reg: bh di edi
drop

\ Runtime for ;CODE. CODE! is defined elsewhere.
: (;code) r> code! ;

base ! only forth definitions also assembler

\ Standard assembler entry points.
: code parse-name header, ?code, start-code ;
: ;code postpone (;code) reveal postpone [ ?csp start-code ; immediate

0asm
previous
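A note on the mod/reg/rm words in asm.fr above: `mod!`, `reg!` and `rm!` poke fields into a single ModRM byte through the masks `c0`, `38` and `7`. For readers less used to x86 encoding, a small Python sketch (illustrative only) of how those three fields pack into one byte:

```python
def modrm(mod: int, reg: int, rm: int) -> int:
    """Pack the x86 ModRM byte: mod in bits 7-6, reg in bits 5-3, rm in bits 2-0."""
    assert 0 <= mod < 4 and 0 <= reg < 8 and 0 <= rm < 8
    return (mod << 6) | (reg << 3) | rm

# mod = 0b11 is register-direct mode; "mov eax, ecx" (opcode 89 /r)
# uses reg = ecx (1) and rm = eax (0), giving ModRM 0xC8.
print(hex(modrm(0b11, 1, 0)))  # 0xc8
```

The masks in the Forth code correspond exactly to those fields: 0xC0 covers bits 7-6, 0x38 bits 5-3, and 0x07 bits 2-0.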
samples/Forth/tools.4TH (new file, 133 lines)
@@ -0,0 +1,133 @@
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff

( Tools words. )

: .s ( -- )
    [char] < emit depth (.) ." > "
    'SP @ >r r@ depth 1- cells +
    begin
        dup r@ <>
    while
        dup @ .
        /cell -
    repeat r> 2drop ;

: ? @ . ;

: c? c@ . ;

: dump bounds do i ? /cell +loop cr ;

: cdump bounds do i c? loop cr ;

: again postpone branch , ; immediate

: see-find ( caddr -- end xt )
    >r here lastxt @
    begin
        dup 0= abort" Undefined word"
        dup r@ word= if r> drop exit then
        nip dup >nextxt
    again ;

: cabs ( char -- |char| ) dup 127 > if 256 swap - then ;

: xt. ( xt -- )
    ( >name ) count cabs type ;

: xt? ( xt -- flag )
    >r lastxt @ begin
        ?dup
    while
        dup r@ = if r> 2drop -1 exit then
        >nextxt
    repeat r> drop 0 ;

: disassemble ( x -- )
    dup xt? if
        ( >name ) count
        dup 127 > if ." postpone " then
        cabs type
    else
        .
    then ;

: .addr dup . ;

: see-line ( addr -- )
    cr ." ( " .addr ." ) " @ disassemble ;

: see-word ( end xt -- )
    >r ." : " r@ xt.
    r@ >body do i see-line /cell +loop
    ." ;" r> c@ 127 > if ." immediate" then ;

: see bl word see-find see-word cr ;

: #body bl word see-find >body - ;

: type-word ( end xt -- flag )
    xt. space drop 0 ;

: traverse-dictionary ( in.. xt -- out.. )
    \ xt execution: ( in.. end xt2 -- in.. 0 | in.. end xt2 -- out.. true )
    >r here lastxt @ begin
        ?dup
    while
        r> 2dup >r >r execute
        if r> r> 2drop exit then
        r> dup >nextxt
    repeat r> 2drop ;

: words ( -- )
    ['] type-word traverse-dictionary cr ;

\ ----------------------------------------------------------------------

( Tools extension words. )

\ ;code

\ assembler

\ in kernel: bye

\ code

\ cs-pick

\ cs-roll

\ editor

: forget ' dup >nextxt lastxt ! 'here ! reveal ;

\ Kernel: state

\ [else]

\ [if]

\ [then]

\ ----------------------------------------------------------------------

( Forth2012 tools extension words. )

\ TODO: n>r

\ TODO: nr>

\ TODO: synonym

: [undefined] bl-word find nip 0= ; immediate

: [defined] postpone [undefined] invert ; immediate

\ ----------------------------------------------------------------------

: @+ ( addr -- addr+/cell x ) dup cell+ swap @ ;

: !+ ( x addr -- addr+/cell ) tuck ! cell+ ;

: -rot swap >r swap r> ;
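In tools.4TH above, `traverse-dictionary` walks the linked list of words from `lastxt` toward older definitions, handing each entry to a callback that can stop the walk by returning true; `words` is just that walk with a printing callback. A rough Python analogue of the pattern (illustrative only; the class and names are made up):

```python
class Word:
    """A dictionary entry: a name plus a link to the previously defined word."""
    def __init__(self, name, prev):
        self.name, self.prev = name, prev

def traverse_dictionary(latest, callback):
    """Visit words newest-first; stop early if the callback returns True."""
    word = latest
    while word is not None:
        if callback(word):
            return
        word = word.prev

# Usage: the equivalent of WORDS is a traversal with a printing callback.
older = Word("dup", None)
latest = Word("swap", older)
traverse_dictionary(latest, lambda w: print(w.name) or False)
```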
samples/GAP/bugfix.tst (new file, 161 lines)
@@ -0,0 +1,161 @@
gap> START_TEST("Test for various former bugs");

gap> # The following used to trigger an error starting with:
gap> # "SolutionMat: matrix and vector incompatible called from"
gap> K:=AbelianPcpGroup([3,3,3]);;
gap> A:=Subgroup(K,[K.1]);;
gap> cr:=CRRecordBySubgroup(K,A);;
gap> ExtensionsCR(cr);;

# Comparing homomorphisms used to be broken
gap> K:=AbelianPcpGroup(1,[3]);;
gap> hom1:=GroupHomomorphismByImages(K,K,[K.1],[K.1]);;
gap> hom2:=GroupHomomorphismByImages(K,K,[K.1^2],[K.1^2]);;
gap> hom1=hom2;
true
gap> hom1=IdentityMapping(K);
true
gap> hom2=IdentityMapping(K);
true

gap> # The following incorrectly triggered an error at some point
gap> IsTorsionFree(ExamplesOfSomePcpGroups(5));
true

gap> # Verify IsGeneratorsOfMagmaWithInverses warnings are silenced
gap> IsGeneratorsOfMagmaWithInverses(GeneratorsOfGroup(ExamplesOfSomePcpGroups(5)));
true

gap> # Check for a bug reported 2012-01-19 by Robert Morse
gap> g := PcGroupToPcpGroup(SmallGroup(48,1));
Pcp-group with orders [ 2, 2, 2, 2, 3 ]
gap> # The next two commands used to trigger errors
gap> NonAbelianTensorSquare(Centre(g));
Pcp-group with orders [ 8 ]
gap> NonAbelianExteriorSquare(Centre(g));
Pcp-group with orders [ ]

gap> # Check for a bug reported 2012-01-19 by Robert Morse
gap> F := FreeGroup("x","y");
<free group on the generators [ x, y ]>
gap> x := F.1;; y := F.2;;
gap> G := F/[x^2/y^24, y^24, y^x/y^23];
<fp group on the generators [ x, y ]>
gap> iso := IsomorphismPcGroup(G);
[ x, y ] -> [ f1, f2*f5 ]
gap> iso1 := IsomorphismPcpGroup(Image(iso));
[ f1, f2, f3, f4, f5 ] -> [ g1, g2, g3, g4, g5 ]
gap> G := Image(iso*iso1);
Pcp-group with orders [ 2, 2, 2, 2, 3 ]
gap> # The next command used to trigger an error
gap> NonAbelianTensorSquare(Image(iso*iso1));
Pcp-group with orders [ 2, 2, 3, 2, 2, 2, 2 ]

gap> # The problem with the previous example is/was that Igs(G)
gap> # is set to a non-standard value:
gap> Igs(G);
[ g1, g2*g5, g3*g4*g5^2, g4*g5, g5 ]
gap> # Unfortunately, it seems that a lot of code that
gap> # really should be using Ngs or Cgs is using Igs incorrectly.
gap> # For example, direct products could return *invalid* embeddings:
gap> D := DirectProduct(G, G);
Pcp-group with orders [ 2, 2, 2, 2, 3, 2, 2, 2, 2, 3 ]
gap> hom:=Embedding(D,1);;
gap> mapi:=MappingGeneratorsImages(hom);;
gap> GroupHomomorphismByImages(Source(hom),Range(hom),mapi[1],mapi[2]) <> fail;
true
gap> hom:=Projection(D,1);;
gap> mapi:=MappingGeneratorsImages(hom);;
gap> GroupHomomorphismByImages(Source(hom),Range(hom),mapi[1],mapi[2]) <> fail;
true

gap> # Check for bug computing Schur extension of infinite cyclic groups,
gap> # found by Max Horn 2012-05-25
gap> G:=AbelianPcpGroup(1,[0]);
Pcp-group with orders [ 0 ]
gap> # The next command used to trigger an error
gap> SchurExtension(G);
Pcp-group with orders [ 0 ]

gap> # Check for bug computing Schur extensions of subgroups, found by MH 2012-05-25.
gap> G:=HeisenbergPcpGroup(2);
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
gap> H:=Subgroup(G,[G.2^3*G.3^2, G.1^9]);
Pcp-group with orders [ 0, 0, 0 ]
gap> # The next command used to trigger an error
gap> SchurExtension(H);
Pcp-group with orders [ 0, 0, 0, 0, 0, 0 ]

gap> # Check for bug computing Schur extensions of subgroups, found by MH 2012-05-25.
gap> G:=HeisenbergPcpGroup(2);
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
gap> H:=Subgroup(G,[G.1, G.2]);
Pcp-group with orders [ 0, 0 ]
gap> # The next command used to trigger an error
gap> SchurExtension(H);
Pcp-group with orders [ 0, 0, 0 ]

gap> # Check for bug computing normalizer of two subgroups, found by MH 2012-05-30.
gap> # The problem was caused by incorrect resp. overly restrictive use of Parent().
gap> G:=HeisenbergPcpGroup(2);
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
gap> A:=Subgroup(Subgroup(G,[G.2,G.3,G.4,G.5]), [G.3]);
Pcp-group with orders [ 0 ]
gap> B:=Subgroup(Subgroup(G,[G.1,G.4,G.5]), [G.4]);
Pcp-group with orders [ 0 ]
gap> Normalizer(A,B);
Pcp-group with orders [ 0 ]
gap> # The following used to trigger the error "arguments must have a common parent group"
gap> Normalizer(B,A);
Pcp-group with orders [ 0 ]

gap> # In polycyclic 2.9 and 2.10, the code for 2-cohomology computations was broken.
gap> G := UnitriangularPcpGroup(3,0);
Pcp-group with orders [ 0, 0, 0 ]
gap> mats := G!.mats;
[ [ [ 1, 1, 0 ], [ 0, 1, 0 ], [ 0, 0, 1 ] ],
  [ [ 1, 0, 0 ], [ 0, 1, 1 ], [ 0, 0, 1 ] ],
  [ [ 1, 0, 1 ], [ 0, 1, 0 ], [ 0, 0, 1 ] ] ]
gap> C := CRRecordByMats(G,mats);;
gap> cc := TwoCohomologyCR(C);;
gap> cc.factor.rels;
[ 2, 0, 0 ]
gap> c := cc.factor.prei[2];
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1, 1 ]
gap> cc.gcb;
[ [ 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
  [ 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
  [ 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1 ],
  [ -1, 0, 1, 1, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
  [ 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 1 ] ]
gap> cc.gcc;
[ [ 1, 0, 0, 0, 0, -2, -1, 0, 1, 1, -1, -1, 0, 0, 0, 0, 0, 0 ],
  [ 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
  [ 0, 0, 1, 0, 0, -2, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
  [ 0, 0, 0, 1, 0, 0, -1, -1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
  [ 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
  [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1, 1 ],
  [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1 ] ]

gap> # LowerCentralSeriesOfGroup for non-nilpotent pcp-groups used to trigger
gap> # an infinite recursion
gap> G := PcGroupToPcpGroup(SmallGroup(6,1));
Pcp-group with orders [ 2, 3 ]
gap> LowerCentralSeriesOfGroup(G);
[ Pcp-group with orders [ 2, 3 ], Pcp-group with orders [ 3 ] ]

gap> STOP_TEST( "bugfix.tst", 10000000);
samples/GAP/factor.tst (new file, 21 lines)
@@ -0,0 +1,21 @@
gap> START_TEST("Test of factor groups and natural homomorphisms");

gap> G:=HeisenbergPcpGroup(2);
Pcp-group with orders [ 0, 0, 0, 0, 0 ]

gap> H:=Subgroup(G,[G.2,G.3,G.4,G.5]);
gap> K:=G/H;
gap> NaturalHomomorphism(K);

gap> A:=Subgroup(H, [G.3]);
Pcp-group with orders [ 0 ]
gap> B:=Subgroup(Subgroup(G,[G.1,G.4,G.5]), [G.4]);
Pcp-group with orders [ 0 ]
gap> Normalizer(A,B);
Pcp-group with orders [ 0 ]
gap> # The following used to trigger the error "arguments must have a common parent group"
gap> Normalizer(B,A);
Pcp-group with orders [ 0 ]

gap> STOP_TEST( "factor.tst", 10000000);
samples/HTML/index.html.hl (new file, 328 lines)
@@ -0,0 +1,328 @@
|
||||
<script type="text/hoplon">
|
||||
(page "index.html")
|
||||
|
||||
(defn mouse-loc->vec
|
||||
"Given a Google Closure normalized DOM mouse event return the
|
||||
mouse x and y position as a two element vector."
|
||||
[e]
|
||||
[(.-clientX e) (.-clientY e)])
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 1
|
||||
|
||||
(defc ex1-content ["Waiting for a click ...."])
|
||||
(defc ex1-click-count 0)
|
||||
(defn ex1 []
|
||||
(when (< @ex1-click-count 1)
|
||||
(swap! ex1-click-count inc)
|
||||
(swap! ex1-content conj "Got a click!")))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 2
|
||||
|
||||
(defc ex2-content ["Waiting for a click ...."])
|
||||
(defc ex2-click-count 0)
|
||||
(defn ex2 []
|
||||
(when (= @ex2-click-count 1)
|
||||
(swap! ex2-click-count inc)
|
||||
(swap! ex2-content conj "Done"))
|
||||
(when (= @ex2-click-count 0)
|
||||
(swap! ex2-click-count inc)
|
||||
(swap! ex2-content conj "Got a Click!" "Waiting for another click ....")))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 3
|
||||
|
||||
(defc ex3-content ["Waiting for a click from Button A ....."])
|
||||
(defc ex3-click-count-a 0)
|
||||
(defc ex3-click-count-b 0)
|
||||
(defn ex3a []
|
||||
(when (= @ex3-click-count-a 0)
|
||||
(swap! ex3-click-count-a inc)
|
||||
(swap! ex3-content conj "Got a click!" "Waiting for a click from Button B ....")) )
|
||||
(defn ex3b []
|
||||
(when (and (= @ex3-click-count-a 1) (= @ex3-click-count-b 0))
|
||||
(swap! ex3-click-count-b inc)
|
||||
(swap! ex3-content conj "Done!")))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 6
|
||||
|
||||
(defc ex6-content ["Click the button to start tracking the mouse."])
|
||||
(defc ex6-button-name "GO!")
|
||||
(defn ex6-toggle []
|
||||
(let [new-name (if (= @ex6-button-name "GO!") "STOP!" "GO!")]
|
||||
(reset! ex6-button-name new-name)))
|
||||
(defn ex6 [e]
|
||||
(when (= @ex6-button-name "STOP!")
|
||||
(swap! ex6-content conj (str (mouse-loc->vec e)))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 7
|
||||
|
||||
(defc ex7-content ["Click the button to start tracking the mouse."])
|
||||
(defc ex7-button-name "GO!")
|
||||
(defn ex7-toggle []
|
||||
(let [new-name (if (= @ex7-button-name "GO!") "STOP!" "GO!")]
|
||||
(reset! ex7-button-name new-name)))
|
||||
(defn ex7 [e]
|
||||
(when (= @ex7-button-name "STOP!")
|
||||
(let [[x y :as m] (mouse-loc->vec e)]
|
||||
(when (zero? (mod y 5))
|
||||
(swap! ex7-content conj (str m))))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 8
|
||||
|
||||
(defc ex8-content ["Click the button ten times."])
|
||||
(defc ex8-click-count 0)
|
||||
(defn ex8 []
|
||||
(when (< @ex8-click-count 10)
|
||||
(swap! ex8-click-count inc)
|
||||
(when (= @ex8-click-count 1)
|
||||
(swap! ex8-content conj "1 Click!"))
|
||||
(when (> @ex8-click-count 1)
|
||||
(swap! ex8-content conj (str @ex8-click-count " clicks!")))
|
||||
(when (= @ex8-click-count 10)
|
||||
(swap! ex8-content conj "Done."))))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 9
|
||||
|
||||
(defc ex9-index 0)
|
||||
(defc ex9-animals [:aardvark :beetle :cat :dog :elk :ferret
|
||||
:goose :hippo :ibis :jellyfish :kangaroo])
|
||||
(defc= ex9-card (nth ex9-animals ex9-index))
|
||||
(defn ex9-prev []
|
||||
(when (> @ex9-index 0)
|
||||
(swap! ex9-index dec)))
|
||||
(defn ex9-next []
|
||||
(when (< @ex9-index (dec (count @ex9-animals)))
|
||||
(swap! ex9-index inc)))
|
||||
|
||||
;; =============================================================================
|
||||
;; Example 10
|
||||
|
||||
(defc ex10-button-name "START!")
|
||||
(defc ex10-index 0)
|
||||
(defn ex10 []
|
||||
(let [the-name @ex10-button-name]
|
||||
(when (= the-name"START!")
|
||||
(reset! ex10-button-name "STOP!"))
|
||||
(when (= the-name"STOP!")
|
||||
(reset! ex10-button-name "DONE!"))))
|
||||
(defc ex10-animals [:aardvark :beetle :cat :dog :elk :ferret
|
||||
:goose :hippo :ibis :jellyfish :kangaroo])
|
||||
(defc= ex10-max (dec (count ex10-animals)))
|
||||
(defc= ex10-card (nth ex10-animals ex10-index))
|
||||
(defn ex10-prev []
|
||||
(if (> @ex10-index 0)
|
||||
(swap! ex10-index dec)
|
||||
(reset! ex10-index @ex10-max)))
|
||||
(defn ex10-next []
|
||||
(if (< @ex10-index @ex10-max)
|
||||
(swap! ex10-index inc)
|
||||
(reset! ex10-index 0)))
|
||||
(defn ex10-nav [k]
|
||||
(when (= @ex10-button-name "STOP!")
|
||||
(when (= k :next)
|
||||
(ex10-next))
|
||||
(when (= k :prev)
|
||||
(ex10-prev))))
|
||||
|
||||
(defn ex10-keys [e]
|
||||
(when (= @ex10-button-name "STOP!")
|
||||
(if (= (.-keyCode e) 39) (ex10-nav :next))
|
||||
(if (= (.-keyCode e) 37) (ex10-nav :prev))
|
||||
)
|
||||
)
|
||||
</script>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" type="text/css" href="css/main.css" />
|
||||
</head>
|
||||
<body>
|
||||
<!-- Example 1 -->
|
||||
<div id="ex1" class="example">
|
||||
<h2>Example 1</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex1-button" on-click='{{ #(ex1) }}'>Click me</button>
|
||||
</td>
|
||||
<td id="ex1-display" class="display">
|
||||
<div id="ex1-messages">
|
||||
<loop-tpl bindings='{{ [x ex1-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 2 -->
|
||||
<div id="ex2" class="example">
|
||||
<h2>Example 2</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex2-button" on-click='{{ #(ex2) }}'>Click me</button>
|
||||
</td>
|
||||
<td id="ex2-display" class="display">
|
||||
<div id="ex2-messages">
|
||||
<loop-tpl bindings='{{ [x ex2-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 3 -->
|
||||
<div id="ex3" class="example">
|
||||
<h2>Example 3</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex3-button-a" on-click='{{ #(ex3a) }}'>Button A</button>
|
||||
<button id="ex3-button-b" on-click='{{ #(ex3b) }}'>Button B</button>
|
||||
</td>
|
||||
<td id="ex3-display" class="display">
|
||||
<div id="ex3-messages">
|
||||
<loop-tpl bindings='{{ [x ex3-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 4 -->
|
||||
<div id="ex4" class="example">
|
||||
<h2>Example 4</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex4-button-a">Go!</button>
|
||||
</td>
|
||||
<td id="ex4-display" class="display">
|
||||
<div id="ex4-messages"></div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 5 -->
|
||||
<div id="ex5" class="example">
|
||||
<h2>Example 5</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex5-button">Go!</button>
|
||||
</td>
|
||||
<td id="ex5-display" class="display">
|
||||
<div id="ex5-messages"></div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 6 -->
|
||||
<div id="ex6" class="example" on-mousemove='{{ #(ex6 %) }}' >
|
||||
<h2>Example 6</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex6-button" do-text='{{ ex6-button-name }}' on-click='{{ #(ex6-toggle)}}' ></button>
|
||||
</td>
|
||||
<td id="ex6-display" class="display">
|
||||
<div class="scrolling">
|
||||
<div id="ex6-messages">
|
||||
<loop-tpl bindings='{{ [x ex6-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 7 -->
|
||||
<div id="ex7" class="example" on-mousemove='{{ #(ex7 %) }}'>
|
||||
<h2>Example 7</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex7-button" do-text='{{ ex7-button-name }}' on-click='{{ #(ex7-toggle)}}'></button>
|
||||
</td>
|
||||
<td id="ex7-display" class="display">
|
||||
<div class="scrolling">
|
||||
<div id="ex7-messages">
|
||||
<loop-tpl bindings='{{ [x ex7-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 8 -->
|
||||
<div id="ex8" class="example">
|
||||
<h2>Example 8</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex8-button" on-click='{{ #(ex8) }}'>Click me!</button>
|
||||
</td>
|
||||
<td id="ex8-display" class="display card">
|
||||
<div class="scrolling">
|
||||
<div id="ex8-messages">
|
||||
<loop-tpl bindings='{{ [x ex8-content] }}'>
|
||||
<p><text>~{x}</text></p>
|
||||
</loop-tpl>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 9 -->
|
||||
<div id="ex9" class="example">
|
||||
<h2>Example 9</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex9-button-prev" on-click='{{ #(ex9-prev) }}' do-class='{{ (cell= {:disabled (= ex9-index 0)})}}'>Previous</button>
|
||||
<button id="ex9-button-next" on-click='{{ #(ex9-next) }}' do-class='{{ (cell= {:disabled (= ex9-index (dec (count ex9-animals)))}) }}'>Next</button>
|
||||
</td>
|
||||
<td id="ex9-card" class="display card" do-text='{{ ex9-card }}'></td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Example 10 -->
|
||||
<div id="ex10" class="example" on-keydown='{{ #(ex10-keys %) }}'>
|
||||
<h2>Example 10</h2>
|
||||
<table>
|
||||
<tr>
|
||||
<td class="left">
|
||||
<button id="ex10-button-start-stop" do-text='{{ ex10-button-name}}' on-click='{{ #(ex10) }}'></button>
|
||||
<button id="ex10-button-prev" on-click='{{ #(ex10-nav :prev) }}'
|
||||
do-class='{{ (cell= {:disabled (not= ex10-button-name "STOP!")}) }}'>Previous
|
||||
</button>
|
||||
<button id="ex10-button-next" on-click='{{ #(ex10-nav :next) }}' do-class='{{ (cell= {:disabled (not= ex10-button-name "STOP!")}) }}'>Next</button>
|
||||
</td>
|
||||
<td id="ex10-card" class="display card" do-text='{{ ex10-card }}'></td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
samples/Inno Setup/expat.iss (new file, 69 lines)
@@ -0,0 +1,69 @@
; Basic setup script for the Inno Setup installer builder. For more
; information on the free installer builder, see www.jrsoftware.org.
;
; This script was contributed by Tim Peters.
; It was designed for Inno Setup 2.0.19 but works with later versions as well.

[Setup]
AppName=Expat
AppId=expat
AppVersion=2.1.0
AppVerName=Expat 2.1.0
AppCopyright=Copyright 1998-2012 Thai Open Source Software Center, Clark Cooper, and the Expat maintainers
AppPublisher=The Expat Developers
AppPublisherURL=http://www.libexpat.org/
AppSupportURL=http://www.libexpat.org/
AppUpdatesURL=http://www.libexpat.org/
UninstallDisplayName=Expat XML Parser 2.1.0
VersionInfoVersion=2.1.0

DefaultDirName={pf}\Expat 2.1.0
UninstallFilesDir={app}\Uninstall

Compression=lzma
SolidCompression=yes
SourceDir=..
OutputDir=win32
DisableStartupPrompt=yes
AllowNoIcons=yes
DisableProgramGroupPage=yes
DisableReadyPage=yes

[Files]
Flags: ignoreversion; Source: win32\bin\Release\xmlwf.exe; DestDir: "{app}\Bin"
Flags: ignoreversion; Source: win32\MANIFEST.txt; DestDir: "{app}"
Flags: ignoreversion; Source: Changes; DestDir: "{app}"; DestName: Changes.txt
Flags: ignoreversion; Source: COPYING; DestDir: "{app}"; DestName: COPYING.txt
Flags: ignoreversion; Source: README; DestDir: "{app}"; DestName: README.txt
Flags: ignoreversion; Source: doc\*.html; DestDir: "{app}\Doc"
Flags: ignoreversion; Source: doc\*.css; DestDir: "{app}\Doc"
Flags: ignoreversion; Source: doc\*.png; DestDir: "{app}\Doc"
Flags: ignoreversion; Source: win32\bin\Release\*.dll; DestDir: "{app}\Bin"
Flags: ignoreversion; Source: win32\bin\Release\*.lib; DestDir: "{app}\Bin"
Flags: ignoreversion; Source: expat.dsw; DestDir: "{app}\Source"
Flags: ignoreversion; Source: win32\README.txt; DestDir: "{app}\Source"
Flags: ignoreversion; Source: bcb5\*.bp*; DestDir: "{app}\Source\bcb5"
Flags: ignoreversion; Source: bcb5\*.mak; DestDir: "{app}\Source\bcb5"
Flags: ignoreversion; Source: bcb5\*.def; DestDir: "{app}\Source\bcb5"
Flags: ignoreversion; Source: bcb5\*.txt; DestDir: "{app}\Source\bcb5"
Flags: ignoreversion; Source: bcb5\*.bat; DestDir: "{app}\Source\bcb5"
Flags: ignoreversion; Source: lib\*.c; DestDir: "{app}\Source\lib"
Flags: ignoreversion; Source: lib\*.h; DestDir: "{app}\Source\lib"
Flags: ignoreversion; Source: lib\*.def; DestDir: "{app}\Source\lib"
Flags: ignoreversion; Source: lib\*.dsp; DestDir: "{app}\Source\lib"
Flags: ignoreversion; Source: examples\*.c; DestDir: "{app}\Source\examples"
Flags: ignoreversion; Source: examples\*.dsp; DestDir: "{app}\Source\examples"
Flags: ignoreversion; Source: tests\*.c; DestDir: "{app}\Source\tests"
Flags: ignoreversion; Source: tests\*.cpp; DestDir: "{app}\Source\tests"
Flags: ignoreversion; Source: tests\*.h; DestDir: "{app}\Source\tests"
Flags: ignoreversion; Source: tests\README.txt; DestDir: "{app}\Source\tests"
Flags: ignoreversion; Source: tests\benchmark\*.c; DestDir: "{app}\Source\tests\benchmark"
Flags: ignoreversion; Source: tests\benchmark\*.ds*; DestDir: "{app}\Source\tests\benchmark"
Flags: ignoreversion; Source: tests\benchmark\README.txt; DestDir: "{app}\Source\tests\benchmark"
Flags: ignoreversion; Source: xmlwf\*.c*; DestDir: "{app}\Source\xmlwf"
Flags: ignoreversion; Source: xmlwf\*.h; DestDir: "{app}\Source\xmlwf"
Flags: ignoreversion; Source: xmlwf\*.dsp; DestDir: "{app}\Source\xmlwf"

[Messages]
WelcomeLabel1=Welcome to the Expat XML Parser Setup Wizard
WelcomeLabel2=This will install [name/ver] on your computer.%n%nExpat is an XML parser with a C-language API, and is primarily made available to allow developers to build applications which use XML using a portable API and fast implementation.%n%nIt is strongly recommended that you close all other applications you have running before continuing. This will help prevent any conflicts during the installation process.
samples/J/stwij.ijs (new file, 73 lines)
@@ -0,0 +1,73 @@
NB. From "Continuing to write in J".
NB. See http://www.jsoftware.com/help/jforc/continuing_to_write_in_j.htm

empno=: 316 317 319 320
payrate=: 60 42 44 54
billrate=: 120 90 90 108
clientlist=: 10011 10012 10025
emp_client=: 10012 10025 10012 10025
hoursworked=: 4 31 $ 8 0 3 10 9 8 8 9 4 0 8 7 10 10 12 9 0 6 8 9 9 9 0 0 10 11 9 7 10 2 0 8 0 0 9 9 8 9 10 0 0 8 8 10 7 10 0 0 7 8 9 8 9 0 4 9 8 9 8 9 0 0 5 0 0 8 9 9 9 9 0 0 8 7 0 0 9 0 2 10 10 9 11 8 0 0 8 9 10 8 9 0 0 9 0 0 9 10 8 6 6 8 0 9 8 10 6 9 7 0 6 8 8 8 9 0 5 8 9 8 8 12 0 0

NB. Finds the number of hours each employee worked in the given month.
emphours=: 3 : '+/"1 hoursworked'

NB. Determines the wages earned by each employee in the given month.
empearnings=: 3 : 'payrate * +/"1 hoursworked'

NB. Determines the profit brought in by each employee.
empprofit=: 3 : 0
(billrate - payrate) * +/"1 hoursworked
)

NB. Returns the amount to bill a given client.
billclient=: 3 : 0
mask=. emp_client = y
+/ (mask # billrate) * +/"1 mask # hoursworked
)

NB. Finds for each day of the month the employee who billed the most hours.
dailydrudge=: 3 : 0
((|: hoursworked) i."1 0 >./ hoursworked) { empno
)

NB. Returns the employees, in descending order of the profit brought in by each.
producers=: 3 : 'empno \: empprofit 0'

NB. Returns the clients, in descending order of the profit generated by each.
custbyprofit=: 3 : 0
clientlist \: +/ (clientlist ="1 0 emp_client) * empprofit 0
)

NB. Calculates withholding tax on each employee's earnings.
renderuntocaesar=: 3 : 0
bktmin=. 0 6000 10000 20000 NB. Four brackets, 0..6000..10000..20000.._
bktrate=. 0.05 0.10 0.20 0.30
bktearns=. 0 >. ((1 |.!._ bktmin) <."1 0 empearnings'') -"1 bktmin
+/"1 bktrate *"1 bktearns
)

NB. Main

echo 'Problem 1'
echo emphours''

echo 'Problem 2'
echo empearnings''

echo 'Problem 3'
echo empprofit''

echo 'Problem 4'
echo billclient 10025

echo 'Problem 5'
echo dailydrudge''

echo 'Problem 6'
echo producers''

echo 'Problem 7'
echo custbyprofit''

echo 'Problem 8'
echo 0j2 ": renderuntocaesar''
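`renderuntocaesar` in the J sample above computes a marginal (bracketed) tax: each employee's earnings are split across the bracket boundaries 0, 6000, 10000 and 20000, and each slice is taxed at its own rate. The same computation written out in Python (illustrative only; the numbers mirror the J verb):

```python
BRACKET_MINS = [0, 6000, 10000, 20000]      # lower edge of each bracket
BRACKET_RATES = [0.05, 0.10, 0.20, 0.30]    # rate applied within that bracket

def withholding(earnings: float) -> float:
    tax = 0.0
    for i, (lo, rate) in enumerate(zip(BRACKET_MINS, BRACKET_RATES)):
        hi = BRACKET_MINS[i + 1] if i + 1 < len(BRACKET_MINS) else float("inf")
        slice_ = max(0.0, min(earnings, hi) - lo)   # earnings falling in this bracket
        tax += rate * slice_
    return tax

print(withholding(12_000))  # 0.05*6000 + 0.10*4000 + 0.20*2000 = 1100.0
```

The `(1 |.!._ bktmin)` trick in the J code plays the role of `hi` here: it rotates the bracket minimums by one, filling the last slot with infinity.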
samples/Jasmin/if1.j (new file, 56 lines)
@@ -0,0 +1,56 @@
.class public if1
.super java/lang/Object
;
; standard initializer (calls java.lang.Object's initializer)
;
.method public <init>()V
    aload_0
    invokenonvirtual java/lang/Object/<init>()V
    return
.end method

.method public static main([Ljava/lang/String;)V

    .limit locals 1
    .limit stack 5
BeginGlobal:
    .line 2
    ldc 0x1
    ldc 0x1
    if_icmpeq If556261059
    goto IfElse556261059
If556261059:
    .line 3
    .line 3
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x1
    invokevirtual java/io/PrintStream/print(I)V
    goto IfDone556261059
IfElse556261059:
    .line 5
    .line 5
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x2
    invokevirtual java/io/PrintStream/print(I)V
IfDone556261059:

    .line 6
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x3
    invokevirtual java/io/PrintStream/print(I)V

    .line 7
    ldc 0x1
    ldc 0x1
    if_icmpne IfNot-920218690
    .line 8
    .line 8
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x4
    invokevirtual java/io/PrintStream/print(I)V
IfNot-920218690:

EndGlobal:
    return
.end method
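The if1.j sample above is machine-generated bytecode for a simple if/else followed by a one-armed if. A rough source-level reconstruction of the control flow it encodes (illustrative only, not the original source):

```python
# What if1.j prints, step by step:
if 1 == 1:
    print(1, end="")   # the If556261059 branch
else:
    print(2, end="")   # the IfElse556261059 branch
print(3, end="")       # unconditional print after the if/else
if 1 == 1:
    print(4, end="")   # the one-armed if at .line 7 (if_icmpne skips it when false)
```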
samples/Jasmin/if2.j (new file, 167 lines)
@@ -0,0 +1,167 @@
|
||||
.class public if2
|
||||
.super java/lang/Object
|
||||
;
|
||||
; standard initializer (calls java.lang.Object's initializer)
|
||||
;
|
||||
.method public <init>()V
|
||||
aload_0
|
||||
invokenonvirtual java/lang/Object/<init>()V
|
||||
return
|
||||
.end method
|
||||
|
||||
.method public static main([Ljava/lang/String;)V
|
||||
|
||||
.limit locals 1
|
||||
.limit stack 5
|
||||
BeginGlobal:
|
||||
.line 2
|
||||
ldc 0x1
|
||||
ldc 0x1
|
||||
if_icmpeq Cmp1893841232
|
||||
ldc 0x0
|
||||
goto CmpDone1893841232
|
||||
Cmp1893841232:
|
||||
ldc 0x1
|
||||
CmpDone1893841232:
|
||||
ldc 0x1
|
||||
if_icmpeq If-1736765035
|
||||
goto IfElse-1736765035
|
||||
If-1736765035:
|
||||
.line 2
|
||||
.line 3
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone-1736765035
|
||||
IfElse-1736765035:
|
||||
.line 4
|
||||
.line 4
|
||||
ldc 0x2
|
||||
ldc 0x1
|
||||
if_icmpeq Cmp-1460884369
|
||||
ldc 0x0
|
||||
goto CmpDone-1460884369
|
||||
Cmp-1460884369:
|
||||
ldc 0x1
|
||||
CmpDone-1460884369:
|
||||
ldc 0x1
|
||||
if_icmpeq If-247349760
|
||||
goto IfElse-247349760
|
||||
If-247349760:
|
||||
.line 4
|
||||
.line 5
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x2
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone-247349760
|
||||
IfElse-247349760:
|
||||
.line 6
|
||||
.line 7
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x3
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
IfDone-247349760:
|
||||
IfDone-1736765035:
|
||||
|
||||
.line 10
|
||||
ldc 0x1
|
||||
ldc 0x2
|
||||
if_icmpeq Cmp933554851
|
||||
ldc 0x0
|
||||
goto CmpDone933554851
|
||||
Cmp933554851:
|
||||
ldc 0x1
|
||||
CmpDone933554851:
|
||||
ldc 0x1
|
||||
if_icmpeq If1623625546
|
||||
goto IfElse1623625546
|
||||
If1623625546:
|
||||
.line 10
|
||||
.line 11
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone1623625546
|
||||
IfElse1623625546:
|
||||
.line 12
|
||||
.line 12
|
||||
ldc 0x2
|
||||
ldc 0x2
|
||||
if_icmpeq Cmp1572138409
|
||||
ldc 0x0
|
||||
goto CmpDone1572138409
|
||||
Cmp1572138409:
|
||||
ldc 0x1
|
||||
CmpDone1572138409:
|
||||
ldc 0x1
|
||||
if_icmpeq If126354425
|
||||
goto IfElse126354425
|
||||
If126354425:
|
||||
.line 12
|
||||
.line 13
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x2
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone126354425
|
||||
IfElse126354425:
|
||||
.line 14
|
||||
.line 15
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x3
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
IfDone126354425:
|
||||
IfDone1623625546:
|
||||
|
||||
.line 18
|
||||
ldc 0x1
|
||||
ldc 0x2
|
||||
if_icmpeq Cmp126493150
|
||||
ldc 0x0
|
||||
goto CmpDone126493150
|
||||
Cmp126493150:
|
||||
ldc 0x1
|
||||
CmpDone126493150:
|
||||
ldc 0x1
|
||||
if_icmpeq If1522284422
|
||||
goto IfElse1522284422
|
||||
If1522284422:
|
||||
.line 18
|
||||
.line 19
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone1522284422
|
||||
IfElse1522284422:
|
||||
.line 20
|
||||
.line 20
|
||||
ldc 0x2
|
||||
ldc 0x1
|
||||
if_icmpeq Cmp-906666545
|
||||
ldc 0x0
|
||||
goto CmpDone-906666545
|
||||
Cmp-906666545:
|
||||
ldc 0x1
|
||||
CmpDone-906666545:
|
||||
ldc 0x1
|
||||
if_icmpeq If1083939031
|
||||
goto IfElse1083939031
|
||||
If1083939031:
|
||||
.line 20
|
||||
.line 21
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x2
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone1083939031
|
||||
IfElse1083939031:
|
||||
.line 22
|
||||
.line 23
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x3
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
IfDone1083939031:
|
||||
IfDone1522284422:
|
||||
|
||||
EndGlobal:
|
||||
return
|
||||
.end method
|
||||
|
||||
samples/Jasmin/if3.j (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
.class public if3
|
||||
.super java/lang/Object
|
||||
;
|
||||
; standard initializer (calls java.lang.Object's initializer)
|
||||
;
|
||||
.method public <init>()V
|
||||
aload_0
|
||||
invokenonvirtual java/lang/Object/<init>()V
|
||||
return
|
||||
.end method
|
||||
|
||||
.method public static main([Ljava/lang/String;)V
|
||||
|
||||
.limit locals 1
|
||||
.limit stack 5
|
||||
BeginGlobal:
|
||||
.line 2
|
||||
ldc 0x1
|
||||
ldc 0x1
|
||||
if_icmpeq If-811796083
|
||||
goto IfElse-811796083
|
||||
If-811796083:
|
||||
.line 3
|
||||
.line 3
|
||||
ldc 0x0
|
||||
ldc 0x1
|
||||
if_icmpeq If-1001319390
|
||||
goto IfElse-1001319390
|
||||
If-1001319390:
|
||||
.line 4
|
||||
.line 4
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
goto IfDone-1001319390
|
||||
IfElse-1001319390:
|
||||
.line 6
|
||||
.line 6
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x2
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
IfDone-1001319390:
|
||||
goto IfDone-811796083
|
||||
IfElse-811796083:
|
||||
.line 8
|
||||
.line 8
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x3
|
||||
invokevirtual java/io/PrintStream/print(I)V
|
||||
IfDone-811796083:
|
||||
|
||||
EndGlobal:
|
||||
return
|
||||
.end method
|
||||
|
||||
samples/Jasmin/if4.j (new file, 37 lines)
@@ -0,0 +1,37 @@
.class public if4
.super java/lang/Object
;
; standard initializer (calls java.lang.Object's initializer)
;
.method public <init>()V
    aload_0
    invokenonvirtual java/lang/Object/<init>()V
    return
.end method

.method public static main([Ljava/lang/String;)V

    .limit locals 1
    .limit stack 5
BeginGlobal:
    .line 2
    ldc 0x1
    ldc 0x1
    if_icmpne IfNot1919266740
    .line 2
    .line 2
    ldc 0x1
    ldc 0x1
    if_icmpne IfNot613368541
    .line 2
    .line 2
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x1
    invokevirtual java/io/PrintStream/print(I)V
IfNot613368541:
IfNot1919266740:

EndGlobal:
    return
.end method
samples/Jasmin/op1.j (new file, 54 lines)
@@ -0,0 +1,54 @@
.class public op1
.super java/lang/Object
;
; standard initializer (calls java.lang.Object's initializer)
;
.method public <init>()V
    aload_0
    invokenonvirtual java/lang/Object/<init>()V
    return
.end method

.method public static main([Ljava/lang/String;)V

    .limit locals 1
    .limit stack 5
BeginGlobal:
    .line 2
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x1
    ldc 0x1
    iadd
    invokevirtual java/io/PrintStream/println(I)V

    .line 3
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0xa
    ldc 0x5
    isub
    invokevirtual java/io/PrintStream/println(I)V

    .line 4
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x15
    ldc 0x3
    idiv
    invokevirtual java/io/PrintStream/println(I)V

    .line 5
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x159
    ldc 0x38
    imul
    invokevirtual java/io/PrintStream/println(I)V

    .line 6
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x52
    ldc 0x9
    irem
    invokevirtual java/io/PrintStream/println(I)V

EndGlobal:
    return
.end method
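The operands in op1.j are hex literals, which can make the expected output hard to read at a glance. A small Python check of the same arithmetic (illustrative only):

```python
# The hex operands of op1.j in decimal, and what each println emits:
print(0x1 + 0x1)     # 2
print(0xa - 0x5)     # 5
print(0x15 // 0x3)   # 7      (idiv is integer division)
print(0x159 * 0x38)  # 19320
print(0x52 % 0x9)    # 1      (irem is the integer remainder)
```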
samples/Jasmin/op2.j (new file, 34 lines)
@@ -0,0 +1,34 @@
.class public op2
.super java/lang/Object
;
; standard initializer (calls java.lang.Object's initializer)
;
.method public <init>()V
    aload_0
    invokenonvirtual java/lang/Object/<init>()V
    return
.end method

.method public static main([Ljava/lang/String;)V

    .limit locals 1
    .limit stack 5
BeginGlobal:
    .line 2
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x1
    ldc 0x0
    iand
    invokevirtual java/io/PrintStream/println(Z)V

    .line 3
    getstatic java/lang/System/out Ljava/io/PrintStream;
    ldc 0x1
    ldc 0x0
    ior
    invokevirtual java/io/PrintStream/println(Z)V

EndGlobal:
    return
.end method
samples/Jasmin/op3.j (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
.class public op3
|
||||
.super java/lang/Object
|
||||
;
|
||||
; standard initializer (calls java.lang.Object's initializer)
|
||||
;
|
||||
.method public <init>()V
|
||||
aload_0
|
||||
invokenonvirtual java/lang/Object/<init>()V
|
||||
return
|
||||
.end method
|
||||
|
||||
.method public static main([Ljava/lang/String;)V
|
||||
|
||||
.limit locals 1
|
||||
.limit stack 5
|
||||
BeginGlobal:
|
||||
.line 2
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x0
|
||||
ldc 0x1
|
||||
if_icmpeq Cmp-1307183590
|
||||
ldc 0x0
|
||||
goto CmpDone-1307183590
|
||||
Cmp-1307183590:
|
||||
ldc 0x1
|
||||
CmpDone-1307183590:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 3
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x0
|
||||
ldc 0x0
|
||||
if_icmpeq Cmp-1443270821
|
||||
ldc 0x0
|
||||
goto CmpDone-1443270821
|
||||
Cmp-1443270821:
|
||||
ldc 0x1
|
||||
CmpDone-1443270821:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 4
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
ldc 0x1
|
||||
if_icmpeq Cmp1759327329
|
||||
ldc 0x0
|
||||
goto CmpDone1759327329
|
||||
Cmp1759327329:
|
||||
ldc 0x1
|
||||
CmpDone1759327329:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 5
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
ldc 0x0
|
||||
if_icmpeq Cmp-678570146
|
||||
ldc 0x0
|
||||
goto CmpDone-678570146
|
||||
Cmp-678570146:
|
||||
ldc 0x1
|
||||
CmpDone-678570146:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
EndGlobal:
|
||||
return
|
||||
.end method
|
||||
|
||||
samples/Jasmin/op4.j (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
.class public op4
|
||||
.super java/lang/Object
|
||||
;
|
||||
; standard initializer (calls java.lang.Object's initializer)
|
||||
;
|
||||
.method public <init>()V
|
||||
aload_0
|
||||
invokenonvirtual java/lang/Object/<init>()V
|
||||
return
|
||||
.end method
|
||||
|
||||
.method public static main([Ljava/lang/String;)V
|
||||
|
||||
.limit locals 1
|
||||
.limit stack 5
|
||||
BeginGlobal:
|
||||
.line 2
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x0
|
||||
ldc 0x1
|
||||
if_icmpne Cmp-191731100
|
||||
ldc 0x0
|
||||
goto CmpDone-191731100
|
||||
Cmp-191731100:
|
||||
ldc 0x1
|
||||
CmpDone-191731100:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 3
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x0
|
||||
ldc 0x0
|
||||
if_icmpne Cmp-901585603
|
||||
ldc 0x0
|
||||
goto CmpDone-901585603
|
||||
Cmp-901585603:
|
||||
ldc 0x1
|
||||
CmpDone-901585603:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 4
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
ldc 0x1
|
||||
if_icmpne Cmp1522577937
|
||||
ldc 0x0
|
||||
goto CmpDone1522577937
|
||||
Cmp1522577937:
|
||||
ldc 0x1
|
||||
CmpDone1522577937:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
.line 5
|
||||
getstatic java/lang/System/out Ljava/io/PrintStream;
|
||||
ldc 0x1
|
||||
ldc 0x0
|
||||
if_icmpne Cmp-1653028684
|
||||
ldc 0x0
|
||||
goto CmpDone-1653028684
|
||||
Cmp-1653028684:
|
||||
ldc 0x1
|
||||
CmpDone-1653028684:
|
||||
invokevirtual java/io/PrintStream/println(Z)V
|
||||
|
||||
EndGlobal:
|
||||
return
|
||||
.end method
samples/JavaScript/namespace.js (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
(function(root, factory) {
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
define(['lodash'], factory);
|
||||
} else if (typeof exports !== 'undefined') {
|
||||
module.exports = factory(require('lodash'));
|
||||
} else {
|
||||
root.Namespace = factory(root._);
|
||||
}
|
||||
})(this, function(_) {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* @module namespace
|
||||
* @class namespace
|
||||
*/
|
||||
function Namespace() {}
|
||||
|
||||
/**
|
||||
* Regex for splitting keypaths into arrays.
|
||||
*
|
||||
* @private
|
||||
* @const {RegExp}
|
||||
* @type
|
||||
*/
|
||||
var KEYPATH_SPLITTER = /\./g;
|
||||
|
||||
/**
|
||||
* An internal cache to avoid calculating a keypath more than once.
|
||||
*
|
||||
* @private
|
||||
* @type {Object}
|
||||
*/
|
||||
var _keypaths = {};
|
||||
|
||||
_.extend(Namespace.prototype, {
|
||||
|
||||
/**
|
||||
* Adds a definition to the namespace object.
|
||||
*
|
||||
* @public
|
||||
* @instance
|
||||
* @method add
|
||||
* @param {String} keypath - The keypath for the definition to be added at.
|
||||
* @param {Function|Object} definition - The definition to be added.
|
||||
* @return {Function|Object} - The definition.
|
||||
*/
|
||||
add: function(keypath, definition) {
|
||||
return this._walk(keypath, function(memo, name, index, keypath) {
|
||||
if (index + 1 === keypath.length) {
|
||||
memo[name] = _.extend(definition, memo[name]);
|
||||
}
|
||||
return memo[name] || (memo[name] = {});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Retrieves a definition from the namespace safely.
|
||||
*
|
||||
* @public
|
||||
* @instance
|
||||
* @method get
|
||||
* @param {String} keypath - The keypath to lookup a definition for.
|
||||
* @returns {Function|Object|undefined} - The definition if it exists, otherwise `undefined`.
|
||||
*/
|
||||
get: function(keypath) {
|
||||
return this._walk(keypath);
|
||||
},
|
||||
|
||||
/**
|
||||
* An internal function for walking a keypath.
|
||||
*
|
||||
* @private
|
||||
* @instance
|
||||
* @method _walk
|
||||
* @param {String} keypath - The keypath to walk through.
|
||||
* @param {Function} [callback] - An optional callback to be called at each item in the path.
|
||||
* @returns {function|Object|undefined} - The reduced keypath.
|
||||
*/
|
||||
_walk: function(keypath, callback) {
|
||||
return _.reduce(
|
||||
_keypaths[keypath] || (_keypaths[keypath] = keypath.split(KEYPATH_SPLITTER)),
|
||||
callback || function(memo, name) {
|
||||
return memo && memo[name];
|
||||
},
|
||||
this
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
return Namespace;
|
||||
});
|
||||
|
||||
//# sourceMappingURL=namespace.js.map
samples/Mathematica/HeyexImport.m (new file, 344 lines)
@@ -0,0 +1,344 @@
|
||||
(* Mathematica Package *)
|
||||
(* Created with IntelliJ IDEA and the Mathematica Language plugin *)
|
||||
|
||||
(* :Title: Importer for the RAW data-format of the Heidelberg Eye Explorer (known as HEYEX) *)
|
||||
|
||||
(* :Context: HeyexImport` *)
|
||||
|
||||
(* :Author: Patrick Scheibe pscheibe@trm.uni-leipzig.de *)
|
||||
|
||||
(* :Package Version: 1.0 *)
|
||||
|
||||
(* :Mathematica Version: 8.0 *)
|
||||
|
||||
(* :Copyright: Patrick Scheibe, 2013-2015 *)
|
||||
|
||||
(* :Discussion: This package registers a new importer which can load the RAW data-format exported by a
|
||||
Heidelberg Spectralis OCT. The import-functionality can access different information contained
|
||||
in a file:
|
||||
1. The file header which contains meta data like when the patient was scanned etc
|
||||
2. The scanned volume data
|
||||
3. Images which represent slices of the scanned volume
|
||||
4. The Scanning laser ophthalmoscopy (SLO) image which is taken with every scanned patient
|
||||
5. The segmentation data for different retina layers provided by the software
|
||||
|
||||
*)
|
||||
|
||||
(* :Keywords: Import, Heyex, OCT, Spectralis, Heidelberg Engineering *)
|
||||
|
||||
BeginPackage[ "HeyexImport`" ]
|
||||
|
||||
HeyexEyePosition::usage = "HeyexEyePosition[file] tries to extract which eye was scanned, left or right.";
|
||||
|
||||
HeyexImport::wrongHdr = "Error importing OCT data. Broken/Wrong file?";
|
||||
|
||||
|
||||
Begin[ "`Private`" ];
|
||||
|
||||
(*
|
||||
Registration of all import possibilities for the Heidelberg OCT.
|
||||
*)
|
||||
|
||||
ImportExport`RegisterImport[
|
||||
"Heyex" ,
|
||||
{
|
||||
"FileHeader" :> importHeader,
|
||||
{ "Data" , n_Integer} :> (importData[n][##]&),
|
||||
"Data" :> importData,
|
||||
{ "Images" , n_Integer} :> (importImages[n][##]&),
|
||||
"Images" :> importImages,
|
||||
"SLOImage" :> importSLOImage,
|
||||
"SegmentationData" :> importSegmentation,
|
||||
{ "SegmentationData" , n_Integer} :> (importSegmentation[n][##]&),
|
||||
"DataSize" :> importDataSize,
|
||||
importData
|
||||
},
|
||||
|
||||
{
|
||||
"Image3D" :> (Image3D["Data" /. #1]&)
|
||||
},
|
||||
|
||||
"AvailableElements" -> {"FileHeader", "Data", "DataSize", "Images", "SLOImage", "SegmentationData", "Image3D"}
|
||||
];
|
||||
|
||||
|
||||
If[Quiet[Check[TrueQ[Compile[{}, 0, CompilationTarget -> "C"][] == 0], False]],
|
||||
$compileTarget = CompilationTarget -> "C",
|
||||
$compileTarget = CompilationTarget -> "MVM"
|
||||
];
|
||||
|
||||
|
||||
(*
|
||||
Helper function which reads data from a stream. This is
|
||||
only a unification so I can map the read function over a
|
||||
list.
|
||||
*)
|
||||
read[{id_String, type_String}, str_] :=
|
||||
id -> BinaryRead[str, type];
|
||||
read[{type_String, n_Integer}, str_] := BinaryReadList[str, type, n];
|
||||
read[{id_String, {type_String, n_Integer}}, str_] := id -> BinaryReadList[str, type, n];
|
||||
(*
|
||||
Note that when reading bytes explicitly I convert them to
|
||||
a string and remove any zeroes at the end.
|
||||
*)
|
||||
read[{id_String, { "Byte" , n_Integer}}, str_] :=
|
||||
id -> StringJoin[
|
||||
FromCharacterCode /@ (Rest[
|
||||
NestList[BinaryRead[str, "Byte" ] &, Null,
|
||||
n]] /. {chars___Integer, Longest[0 ...]} :> {chars})];
|
||||
|
||||
(*
|
||||
The layout of a file exported with "Raw Export"
|
||||
|
||||
*****************
|
||||
* File Header *
|
||||
*****************
|
||||
* SLO Image *
|
||||
*****************
|
||||
* B-Scan #0 *
|
||||
*****************
|
||||
* ..... *
|
||||
*****************
|
||||
* B-Scan #n-1 *
|
||||
*****************
|
||||
*)
|
||||
|
||||
With[{i = "Integer32", f = "Real32", d = "Real64", b = "Byte"},
|
||||
|
||||
$fileHeaderInfo = Transpose[{
|
||||
{
|
||||
"Version" , "SizeX" , "NumBScans" , "SizeZ" , "ScaleX" , "Distance" ,
|
||||
"ScaleZ" , "SizeXSlo" , "SizeYSlo" , "ScaleXSlo" , "ScaleYSlo" ,
|
||||
"FieldSizeSlo" , "ScanFocus" , "ScanPosition" , "ExamTime" ,
|
||||
"ScanPattern" , "BScanHdrSize" , "ID" , "ReferenceID" , "PID" ,
|
||||
"PatientID" , "Padding" , "DOB" , "VID" , "VisitID" , "VisitDate" ,
|
||||
"Spare"
|
||||
},
|
||||
{
|
||||
{b, 12}, i, i, i, d, d, d, i, i, d, d, i, d, {b, 4}, {i, 2}, i, i,
|
||||
{b, 16}, {b, 16}, i, {b, 21}, {b, 3}, d, i, {b, 24}, d, {b, 1840}
|
||||
}
|
||||
}];
|
||||
|
||||
$bScanHeaderInfo = Transpose[{
|
||||
{
|
||||
"Version" , "BScanHdrSize" , "StartX" , "StartY" , "EndX" , "EndY" ,
|
||||
"NumSeg" , "OffSeg" , "Quality" , "Spare"
|
||||
},
|
||||
{{b, 12}, i, d, d, d, d, i, i, f, {b, 196}}
|
||||
}];
|
||||
];
|
||||
|
||||
|
||||
isHeyexRawFormat[{"Version" -> version_String, "SizeX" -> _Integer, "NumBScans" -> _Integer, _Rule..}] /; StringMatchQ[version, "HSF-OCT" ~~__] := True ;
|
||||
isHeyexRawFormat[___] := False;
|
||||
|
||||
readFileHeader[str_InputStream] := With[{hdr = Quiet[read[#, str]] & /@ $fileHeaderInfo},
|
||||
hdr /; TrueQ[isHeyexRawFormat[hdr]]
|
||||
];
|
||||
readFileHeader[___] := (Message[HeyexImport::wrongHdr]; Throw[$Failed]);
|
||||
|
||||
|
||||
(* Reads the camera image of the retina. Note that you must have the
|
||||
information from the fileheader and you must be at the right position
|
||||
of the file stream for this.*)
|
||||
readSLOImage[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Image[Partition[
|
||||
BinaryReadList[str, "Byte" , "SizeXSlo" * "SizeYSlo" /. fileHdr],
|
||||
"SizeXSlo" /. fileHdr], "Byte" ];
|
||||
|
||||
skipSLOImage[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "SizeXSlo" * "SizeYSlo" /. fileHdr];
|
||||
|
||||
|
||||
(* One single BScan consists itself again of a header and a data part *)
|
||||
readBScanHeader[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Module[{i = "Integer32", f = "Real32", d = "Real64", b = "Byte",
|
||||
bScanHdr},
|
||||
bScanHdr = read[#, str] & /@ Transpose[{
|
||||
{ "Version" , "BScanHdrSize" , "StartX" , "StartY" , "EndX" , "EndY" ,
|
||||
"NumSeg" , "OffSeg" , "Quality" , "Spare" },
|
||||
{{b, 12}, i, d, d, d, d, i, i, f, {b, 196}}}
|
||||
];
|
||||
AppendTo[bScanHdr,
|
||||
read[{ "SegArray" , { "Real32" ,
|
||||
"NumSeg" * "SizeX" /. bScanHdr /. fileHdr}}, str]
|
||||
];
|
||||
(*
|
||||
This is horrible slow, therefore I just skip the fillbytes
|
||||
|
||||
AppendTo[bScanHdr,
|
||||
read[{"Fillbytes", {"Byte",
|
||||
"BScanHdrSize" - 256 - "NumSeg"*"SizeX"*4 /. bScanHdr /.
|
||||
fileHdr}}, str]
|
||||
]
|
||||
*)
|
||||
Skip[str, "Byte" , "BScanHdrSize" - 256 - "NumSeg" * "SizeX" * 4 /. bScanHdr /. fileHdr];
|
||||
AppendTo[bScanHdr, "FillBytes" -> None]
|
||||
]
|
||||
|
||||
skipBScanHeader[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "BScanHdrSize" /. fileHdr];
|
||||
|
||||
readBScanData[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Module[{},
|
||||
Developer`ToPackedArray[
|
||||
Partition[read[{ "Real32" , "SizeX" * "SizeZ" /. fileHdr}, str],
|
||||
"SizeX" /. fileHdr]]
|
||||
];
|
||||
|
||||
skipBScanData[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "SizeX" * "SizeZ" * 4 /. fileHdr];
|
||||
|
||||
skipBScanBlocks[str_InputStream, fileHdr : {(_String -> _) ..}, n_Integer] :=
|
||||
Skip[str, "Byte" , n * ("BScanHdrSize" + "SizeX" * "SizeZ" * 4) /. fileHdr];
|
||||
|
||||
|
||||
importHeader[filename_String, ___] := Module[
|
||||
{str, header},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
Close[str];
|
||||
"FileHeader" -> header
|
||||
];
|
||||
|
||||
|
||||
(* Imports the dimension of the scanned volume. *)
|
||||
importDataSize[filename_String, r___] := Module[{header = importHeader[filename]},
|
||||
"DataSize" -> ({"NumBScans", "SizeZ", "SizeXSlo"} /. ("FileHeader" /. header))
|
||||
]
|
||||
|
||||
importSLOImage[filename_String, ___] := Module[
|
||||
{str, header, slo},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
slo = readSLOImage[str, header];
|
||||
Close[str];
|
||||
"SLOImage" -> slo
|
||||
]
|
||||
|
||||
importData[filename_String, ___] := Module[
|
||||
{str, header, nx, n, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
{nx, n} = { "SizeX" , "SizeX" * "SizeZ"} /. header;
|
||||
skipSLOImage[str, header];
|
||||
data = Table[
|
||||
skipBScanHeader[str, header];
|
||||
Partition[read[{ "Real32" , n}, str], nx],
|
||||
{"NumBScans" /. header}
|
||||
];
|
||||
Close[str];
|
||||
"Data" -> Developer`ToPackedArray[data]
|
||||
];
|
||||
|
||||
importData[num_Integer][filename_String, ___] := Module[
|
||||
{str, header, nx, n, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
{nx, n} = { "SizeX" , "SizeX" * "SizeZ"} /. header;
|
||||
skipSLOImage[str, header];
|
||||
skipBScanBlocks[str, header, Max[Min["NumBScans" /. header, num - 1], 0] ];
|
||||
skipBScanHeader[str, header];
|
||||
data = Partition[read[{ "Real32" , n}, str], nx];
|
||||
Close[str];
|
||||
{"Data" -> {num -> Developer`ToPackedArray[data]}}
|
||||
];
|
||||
|
||||
(*
|
||||
As suggested in the Heidelberg OCT Manual the importer will adjust
|
||||
the graylevels when importing images. Since this is very time-consuming
|
||||
for the whole scanned volume, I use an optimized version of this function.
|
||||
*)
|
||||
With[{$compileTarget = $compileTarget}, $adjustGraylevelFunc := ($adjustGraylevelFunc = Compile[{{values, _Real, 2}},
|
||||
Map[Floor[255.0 * Min[Max[0.0, #], 1.0]^(0.25) + 0.5] &, values, {2}],
|
||||
RuntimeAttributes -> {Listable},
|
||||
Parallelization -> True,
|
||||
RuntimeOptions -> "Speed",
|
||||
$compileTarget
|
||||
])];
|
||||
|
||||
importImages[filename_String, ___] := Module[
|
||||
{data},
|
||||
data = "Data" /. importData[filename];
|
||||
"Images" -> (Image[#, "Byte" ]& /@ $adjustGraylevelFunc[data])
|
||||
]
|
||||
|
||||
importImages[imageNumber_Integer][filename_String, ___] := Module[
|
||||
{data},
|
||||
data = {imageNumber /. ("Data" /. importData[imageNumber][filename])};
|
||||
{"Images" -> {imageNumber -> (Image[#, "Byte" ]& @@ $adjustGraylevelFunc[data])}}
|
||||
];
|
||||
|
||||
importSegmentation[filename_String, ___] := Module[
|
||||
{str, header, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
skipSLOImage[str, header];
|
||||
data = Table[
|
||||
Module[{bScanHeader, t},
|
||||
{t, bScanHeader} = Timing@readBScanHeader[str, header];
|
||||
skipBScanData[str, header];
|
||||
bScanHeader
|
||||
], {"NumBScans" /. header}
|
||||
];
|
||||
Close[str];
|
||||
(*
|
||||
The BScanHeaderData contain the segmentation vectors as a single list
|
||||
of numbers. Before returning the result, I check how many segmentations
|
||||
there are inside the BScan an I transform the segmentation value list
|
||||
into separate vectors and call them "ILM", "RPE" and "NFL" like described
|
||||
in the manual
|
||||
*)
|
||||
"SegmentationData" -> Function[{bhdr},
|
||||
Block[{numVecs = "NumSeg" /. bhdr, vecNames, nx = "SizeX" /. header},
|
||||
If[numVecs > 0,
|
||||
vecNames = Take[{ "ILM" , "RPE" , "NFL" }, numVecs];
|
||||
bhdr /. ("SegArray" -> vec_) :> Sequence @@ (Rule @@@ Transpose[{vecNames, Partition[vec, nx]} ]),
|
||||
bhdr
|
||||
]
|
||||
]] /@ data
|
||||
]
|
||||
|
||||
importSegmentation[num_Integer][filename_String, ___] := Module[
|
||||
{str, header, bhdr},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
skipSLOImage[str, header];
|
||||
skipBScanBlocks[str, header, Max[Min["NumBScans" /. header, num - 1], 0] ];
|
||||
bhdr = readBScanHeader[str, header];
|
||||
Close[str];
|
||||
(* See doc above *)
|
||||
{"SegmentationData" -> {num -> Block[
|
||||
{numVecs = "NumSeg" /. bhdr, vecNames, nx = "SizeX" /. header},
|
||||
If[ numVecs > 0,
|
||||
vecNames = Take[{ "ILM" , "RPE" , "NFL" }, numVecs];
|
||||
bhdr /. ("SegArray" -> vec_) :> Sequence @@ (Rule @@@ Transpose[{vecNames, Partition[vec, nx]} ]),
|
||||
bhdr
|
||||
]
|
||||
]
|
||||
}}
|
||||
]
|
||||
|
||||
(* Extracts which eye was scanned. This is stored in the header of the file *)
|
||||
(* OD stands for oculus dexter which is latin for "right eye" and OS stands
|
||||
for oculus sinister which is latin for "left eye" *)
|
||||
HeyexEyePosition[file_String /; FileExistsQ[file]] := Module[{position},
|
||||
Check[
|
||||
position = "ScanPosition" /. Import[file, { "Heyex" , "FileHeader" }];
|
||||
Switch[
|
||||
position,
|
||||
"OD" ,
|
||||
Right,
|
||||
"OS" ,
|
||||
Left,
|
||||
_,
|
||||
$Failed
|
||||
],
|
||||
$Failed
|
||||
]
|
||||
];
|
||||
|
||||
End[]
|
||||
|
||||
EndPackage[]
samples/Mercury/switch_detection_bug.m (new file, 46 lines)
@@ -0,0 +1,46 @@
% This is a regression test for a bug in switch detection
% where it was preferring incomplete switches to complete
% one-case switches, and hence inferring the wrong determinism.

%------------------------------------------------------------------------------%

:- module switch_detection_bug.

:- interface.

:- type note ---> note(rank, modifier, octave).

:- type rank ---> c ; d ; e ; f ; g ; a ; b .

:- type modifier ---> natural ; sharp ; flat .

:- type octave == int.

:- type qualifier ---> maj ; min .

:- pred next_topnote(note, qualifier, note).
:- mode next_topnote(in, in, out) is multi.

%------------------------------------------------------------------------------%

:- implementation.

next_topnote(note(c, _, Oct), _, note(d, natural, Oct)).
next_topnote(note(d, _, Oct), _, note(c, natural, Oct)).
next_topnote(note(d, _, Oct), maj, note(e, natural, Oct)).
next_topnote(note(d, _, Oct), min, note(e, flat, Oct)).
next_topnote(note(e, _, Oct), _, note(d, natural, Oct)).
next_topnote(note(e, _, Oct), _, note(f, natural, Oct)).
next_topnote(note(f, _, Oct), maj, note(e, natural, Oct)).
next_topnote(note(f, _, Oct), min, note(e, flat, Oct)).
next_topnote(note(g, _, Oct), _, note(f, natural, Oct)).
next_topnote(note(g, _, Oct), min, note(a, flat, Oct)).
next_topnote(note(g, _, Oct), maj, note(a, natural, Oct)).
next_topnote(note(a, _, Oct), _, note(g, natural, Oct)).
next_topnote(note(a, _, Oct), min, note(b, flat, Oct)).
next_topnote(note(a, _, Oct), maj, note(b, natural, Oct)).
next_topnote(note(b, _, Oct), maj, note(a, natural, Oct)).
next_topnote(note(b, _, Oct), min, note(a, flat, Oct)).

%------------------------------------------------------------------------------%
samples/Modelica/Translational.mo (new file, 5281 lines; diff suppressed because it is too large)
samples/Modelica/modelica.mo (new file, 285 lines)
@@ -0,0 +1,285 @@
|
||||
within Modelica.Electrical.Analog;
|
||||
package Sensors "Potential, voltage, current, and power sensors"
|
||||
|
||||
extends Modelica.Icons.SensorsPackage;
|
||||
|
||||
model PotentialSensor "Sensor to measure the potential"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "pin to be measured" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput phi
|
||||
"Absolute voltage potential as output signal"
|
||||
annotation (Placement(transformation(extent={{100,-10},{120,10}},
|
||||
rotation=0)));
|
||||
equation
|
||||
p.i = 0;
|
||||
phi = p.v;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="V"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Line(points={{100,0},{70,0}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{100,0},{70,0}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The potential sensor converts the voltage of a node (with respect to the ground node) into a real valued signal. It does not influence the current sum at the node which voltage is measured, therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end PotentialSensor;
|
||||
|
||||
model VoltageSensor "Sensor to measure the voltage between two pins"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "positive pin" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Interfaces.NegativePin n "negative pin" annotation (Placement(
|
||||
transformation(extent={{90,-10},{110,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput v
|
||||
"Voltage between pin p and n (= p.v - n.v) as output signal"
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-100},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
|
||||
equation
|
||||
p.i = 0;
|
||||
n.i = 0;
|
||||
v = p.v - n.v;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="V"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{90,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{96,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The voltage sensor converts the voltage between the two connectors into a real valued signal. It does not influence the current sum at the nodes in between the voltage is measured, therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end VoltageSensor;
|
||||
|
||||
model CurrentSensor "Sensor to measure the current in a branch"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "positive pin" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Interfaces.NegativePin n "negative pin" annotation (Placement(
|
||||
transformation(extent={{90,-10},{110,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput i
|
||||
"current in the branch from p to n as output signal"
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-100},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
|
||||
equation
|
||||
p.v = n.v;
|
||||
p.i = i;
|
||||
n.i = -i;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="A"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{70,0},{90,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-153,79},{147,119}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{96,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The current sensor converts the current flowing between the two connectors into a real valued signal. The two connectors are in the sensor connected like a short cut. The sensor has to be placed within an electrical connection in series. It does not influence the current sum at the connected nodes. Therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end CurrentSensor;
|
||||
|
||||
model PowerSensor "Sensor to measure the power"
|
||||
|
||||
Modelica.Electrical.Analog.Interfaces.PositivePin pc
|
||||
"Positive pin, current path"
|
||||
annotation (Placement(transformation(extent={{-90,-10},{-110,10}}, rotation=
|
||||
0)));
|
||||
Modelica.Electrical.Analog.Interfaces.NegativePin nc
|
||||
"Negative pin, current path"
|
||||
annotation (Placement(transformation(extent={{110,-10},{90,10}}, rotation=0)));
|
||||
Modelica.Electrical.Analog.Interfaces.PositivePin pv
|
||||
"Positive pin, voltage path"
|
||||
annotation (Placement(transformation(extent={{-10,110},{10,90}}, rotation=0)));
|
||||
Modelica.Electrical.Analog.Interfaces.NegativePin nv
|
||||
"Negative pin, voltage path"
|
||||
annotation (Placement(transformation(extent={{10,-110},{-10,-90}}, rotation=
|
||||
0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput power
|
||||
annotation (Placement(transformation(
|
||||
origin={-80,-110},
|
||||
extent={{-10,10},{10,-10}},
|
||||
rotation=270)));
|
||||
Modelica.Electrical.Analog.Sensors.VoltageSensor voltageSensor
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-30},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
Modelica.Electrical.Analog.Sensors.CurrentSensor currentSensor
|
||||
annotation (Placement(transformation(extent={{-50,-10},{-30,10}}, rotation=
|
||||
0)));
|
||||
Modelica.Blocks.Math.Product product
|
||||
annotation (Placement(transformation(
|
||||
origin={-30,-50},
|
||||
extent={{-10,-10},{10,10}},
|
||||
rotation=270)));
|
||||
|
||||
equation
|
||||
connect(pv, voltageSensor.p) annotation (Line(points={{0,100},{0,-20},{
|
||||
6.12323e-016,-20}}, color={0,0,255}));
|
||||
connect(voltageSensor.n, nv) annotation (Line(points={{-6.12323e-016,-40},{
|
||||
-6.12323e-016,-63},{0,-63},{0,-100}}, color={0,0,255}));
|
||||
connect(pc, currentSensor.p)
|
||||
annotation (Line(points={{-100,0},{-50,0}}, color={0,0,255}));
|
||||
connect(currentSensor.n, nc)
|
||||
annotation (Line(points={{-30,0},{100,0}}, color={0,0,255}));
|
||||
connect(currentSensor.i, product.u2) annotation (Line(points={{-40,-10},{-40,
|
||||
-30},{-36,-30},{-36,-38}}, color={0,0,127}));
|
||||
connect(voltageSensor.v, product.u1) annotation (Line(points={{10,-30},{-24,
|
||||
-30},{-24,-38}}, color={0,0,127}));
|
||||
connect(product.y, power) annotation (Line(points={{-30,-61},{-30,-80},{-80,
|
||||
-80},{-80,-110}}, color={0,0,127}));
|
||||
annotation (Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={2,2}), graphics={
|
||||
Ellipse(
|
||||
extent={{-70,70},{70,-70}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={255,255,255},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Line(points={{0,100},{0,70}}, color={0,0,255}),
|
||||
Line(points={{0,-70},{0,-100}}, color={0,0,255}),
|
||||
Line(points={{-80,-100},{-80,0}}, color={0,0,255}),
|
||||
Line(points={{-100,0},{100,0}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{150,120},{-150,160}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{0,70},{0,40}}, color={0,0,0}),
|
||||
Line(points={{22.9,32.8},{40.2,57.3}}, color={0,0,0}),
|
||||
Line(points={{-22.9,32.8},{-40.2,57.3}}, color={0,0,0}),
|
||||
Line(points={{37.6,13.7},{65.8,23.9}}, color={0,0,0}),
|
||||
Line(points={{-37.6,13.7},{-65.8,23.9}}, color={0,0,0}),
|
||||
Line(points={{0,0},{9.02,28.6}}, color={0,0,0}),
|
||||
Polygon(
|
||||
points={{-0.48,31.6},{18,26},{18,57.2},{-0.48,31.6}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={0,0,0},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Ellipse(
|
||||
extent={{-5,5},{5,-5}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={0,0,0},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="P")}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={2,2}), graphics),
|
||||
Documentation(info="<html>
|
||||
<p>This power sensor measures instantaneous electrical power of a singlephase system and has a separated voltage and current path. The pins of the voltage path are pv and nv, the pins of the current path are pc and nc. The internal resistance of the current path is zero, the internal resistance of the voltage path is infinite.</p>
|
||||
</html>", revisions="<html>
|
||||
<ul>
|
||||
<li><i>January 12, 2006</i> by Anton Haumer implemented</li>
|
||||
</ul>
|
||||
</html>"));
|
||||
end PowerSensor;
|
||||
annotation (
|
||||
Documentation(info="<html>
|
||||
<p>This package contains potential, voltage, and current sensors. The sensors can be used to convert voltages or currents into real signal values o be connected to components of the Blocks package. The sensors are designed in such a way that they do not influence the electrical behavior.</p>
|
||||
</html>",
|
||||
revisions="<html>
|
||||
<dl>
|
||||
<dt>
|
||||
<b>Main Authors:</b>
|
||||
<dd>
|
||||
Christoph Clauß
|
||||
<<a href=\"mailto:Christoph.Clauss@eas.iis.fraunhofer.de\">Christoph.Clauss@eas.iis.fraunhofer.de</a>><br>
|
||||
André Schneider
|
||||
<<a href=\"mailto:Andre.Schneider@eas.iis.fraunhofer.de\">Andre.Schneider@eas.iis.fraunhofer.de</a>><br>
|
||||
Fraunhofer Institute for Integrated Circuits<br>
|
||||
Design Automation Department<br>
|
||||
Zeunerstraße 38<br>
|
||||
D-01069 Dresden<br>
|
||||
<p>
|
||||
<dt>
|
||||
<b>Copyright:</b>
|
||||
<dd>
|
||||
Copyright © 1998-2010, Modelica Association and Fraunhofer-Gesellschaft.<br>
|
||||
<i>The Modelica package is <b>free</b> software; it can be redistributed and/or modified
|
||||
under the terms of the <b>Modelica license</b>, see the license conditions
|
||||
and the accompanying <b>disclaimer</b> in the documentation of package
|
||||
Modelica in file \"Modelica/package.mo\".</i><br>
|
||||
<p>
|
||||
</dl>
|
||||
</html>"));
|
||||
end Sensors;
samples/NewLisp/irc.lsp (new file, 239 lines)
@@ -0,0 +1,239 @@
|
||||
#!/usr/bin/env newlisp
|
||||
|
||||
;; @module IRC
|
||||
;; @description a basic irc library
|
||||
;; @version early alpha! 0.1 2013-01-02 20:11:22
|
||||
;; @author cormullion
|
||||
;; Usage:
|
||||
;; (IRC:init "newlithper") ; a username/nick (not that one obviously :-)
|
||||
;; (IRC:connect "irc.freenode.net" 6667) ; irc/server
|
||||
;; (IRC:join-channel {#newlisp}) ; join a room
|
||||
;; either (IRC:read-irc-loop) ; loop - monitor only, no input
|
||||
;; or (IRC:session) ; a command-line session, end with /QUIT
|
||||
|
||||
(context 'IRC)
|
||||
(define Inickname)
|
||||
(define Ichannels)
|
||||
(define Iserver)
|
||||
(define Iconnected)
|
||||
(define Icallbacks '())
|
||||
(define Idle-time 400) ; seconds
|
||||
(define Itime-stamp) ; time since last message was processed
|
||||
|
||||
(define (register-callback callback-name callback-function)
|
||||
(println {registering callback for } callback-name { : } (sym (term callback-function) (prefix callback-function)))
|
||||
(push (list callback-name (sym (term callback-function) (prefix callback-function))) Icallbacks))
|
||||
|
||||
(define (deregister-callback callback-name)
|
||||
(println {deregistering callback for } callback-name)
|
||||
(setf (assoc "idle-event" Icallbacks) nil)
|
||||
(println {current callbacks: } Icallbacks))
|
||||
|
||||
(define (do-callback callback-name data)
|
||||
(when (set 'func (lookup callback-name Icallbacks)) ; find first callback
|
||||
(if-not (catch (apply func (list data)) 'error)
|
||||
(println {error in callback } callback-name {: } error))))
|
||||
|
||||
(define (do-callbacks callback-name data)
|
||||
(dolist (rf (ref-all callback-name Icallbacks))
|
||||
(set 'callback-entry (Icallbacks (first rf)))
|
||||
(when (set 'func (last callback-entry))
|
||||
(if-not (catch (apply func (list data)) 'error)
|
||||
(println {error in callback } callback-name {: } error)))))
|
||||
|
||||
(define (init str)
|
||||
(set 'Inickname str)
|
||||
(set 'Iconnected nil)
|
||||
(set 'Ichannels '())
|
||||
(set 'Itime-stamp (time-of-day)))
|
||||
|
||||
(define (connect server port)
|
||||
(set 'Iserver (net-connect server port))
|
||||
(net-send Iserver (format "USER %s %s %s :%s\r\n" Inickname Inickname Inickname Inickname))
|
||||
(net-send Iserver (format "NICK %s \r\n" Inickname))
|
||||
(set 'Iconnected true)
|
||||
(do-callbacks "connect" (list (list "server" server) (list "port" port))))
|
||||
|
||||
(define (identify password)
|
||||
(net-send Iserver (format "PRIVMSG nickserv :identify %s\r\n" password)))
|
||||
|
||||
(define (join-channel channel)
|
||||
(when (net-send Iserver (format "JOIN %s \r\n" channel))
|
||||
(push channel Ichannels)
|
||||
(do-callbacks "join-channel" (list (list "channel" channel) (list "nickname" Inickname)))))
|
||||
|
||||
(define (part chan)
|
||||
(if-not (empty? chan)
|
||||
; leave specified
|
||||
(begin
|
||||
(net-send Iserver (format "PART %s\r\n" chan))
|
||||
(replace channel Ichannels)
|
||||
(do-callbacks "part" (list (list "channel" channel))))
|
||||
; leave all
|
||||
(begin
|
||||
(dolist (channel Ichannels)
|
||||
(net-send Iserver (format "PART %s\r\n" channel))
|
||||
(replace channel Ichannels)
|
||||
(do-callbacks "part" (list (list "channel" channel)))))))
|
||||
|
||||
(define (do-quit message)
|
||||
(do-callbacks "quit" '()) ; chance to do stuff before quit...
|
||||
(net-send Iserver (format "QUIT :%s\r\n" message))
|
||||
(sleep 1000)
|
||||
(set 'Ichannels '())
|
||||
(close Iserver)
|
||||
(set 'Iconnected nil))
|
||||
|
||||
(define (privmsg user message)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" user message)))
|
||||
|
||||
(define (notice user message)
|
||||
(net-send Iserver (format "NOTICE %s :%s\r\n" user message)))
|
||||
|
||||
(define (send-to-server message (channel nil))
|
||||
(cond
|
||||
((starts-with message {/}) ; default command character
|
||||
(set 'the-message (replace "^/" (copy message) {} 0)) ; keep original
|
||||
(net-send Iserver (format "%s \r\n" the-message)) ; send it
|
||||
; do a quit
|
||||
(if (starts-with (lower-case the-message) "quit")
|
||||
(do-quit { enough})))
|
||||
(true
|
||||
(if (nil? channel)
|
||||
; say to all channels
|
||||
(dolist (c Ichannels)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" c message)))
|
||||
; say to specified channel
|
||||
(if (find channel Ichannels)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" channel message))))))
|
||||
(do-callbacks "send-to-server" (list (list "channel" channel) (list "message" message))))
|
||||
|
||||
(define (process-command sender command text)
|
||||
(cond
|
||||
((= sender "PING")
|
||||
(net-send Iserver (format "PONG %s\r\n" command)))
|
||||
((or (= command "NOTICE") (= command "PRIVMSG"))
|
||||
(process-message sender command text))
|
||||
((= command "JOIN")
|
||||
(set 'username (first (clean empty? (parse sender {!|:} 0))))
|
||||
(set 'channel (last (clean empty? (parse sender {!|:} 0))))
|
||||
(println {username } username { joined } channel)
|
||||
(do-callbacks "join" (list (list "channel" channel) (list "username" username))))
|
||||
(true
|
||||
nil)))
|
||||
|
||||
(define (process-message sender command text)
|
||||
(let ((username {} target {} message {}))
|
||||
(set 'username (first (clean empty? (parse sender {!|:} 0))))
|
||||
(set 'target (trim (first (clean empty? (parse text {!|:} 0)))))
|
||||
(set 'message (slice text (+ (find {:} text) 1)))
|
||||
(cond
|
||||
((starts-with message "\001")
|
||||
(process-ctcp username target message))
|
||||
((find target Ichannels)
|
||||
(cond
|
||||
((= command {PRIVMSG})
|
||||
(do-callbacks "channel-message" (list (list "channel" target) (list "username" username) (list "message" message))))
|
||||
((= command {NOTICE})
|
||||
(do-callbacks "channel-notice" (list (list "channel" target) (list "username" username) (list "message" message))))))
|
||||
((= target Inickname)
|
||||
(cond
|
||||
((= command {PRIVMSG})
|
||||
(do-callbacks "private-message" (list (list "username" username) (list "message" message))))
|
||||
((= command {NOTICE})
|
||||
(do-callbacks "private-notice" (list (list "username" username) (list "message" message))))))
|
||||
(true
|
||||
nil))))
|
||||
|
||||
(define (process-ctcp username target message)
|
||||
(cond
|
||||
((starts-with message "\001VERSION\001")
|
||||
(net-send Iserver (format "NOTICE %s :\001VERSION %s\001\r\n" username message)))
|
||||
((starts-with message "\001PING")
|
||||
(set 'data (first (rest (clean empty? (parse message { } 0)))))
|
||||
(set 'data (trim data "\001" "\001"))
|
||||
(net-send Iserver (format "NOTICE %s :\001PING %s\001\r\n" username data)))
|
||||
((starts-with message "\001ACTION")
|
||||
; (set 'data (first (rest (clean empty? (parse message { } 0)))))
|
||||
; (set 'data (join data { }))
|
||||
; (set 'data (trim data "\001" "\001"))
|
||||
(if (find target Ichannels)
|
||||
(do-callbacks "channel-action" (list (list "username" username) (list "message" message))))
|
||||
(if (= target Inickname)
|
||||
(do-callbacks "private-action" (list (list "username" username) (list "message" message)))))
|
||||
((starts-with message "\001TIME\001")
|
||||
(net-send Iserver (format "NOTICE %s:\001TIME :%s\001\r\n" username (date))))))
|
||||
|
||||
(define (parse-buffer raw-buffer)
|
||||
(let ((messages (clean empty? (parse raw-buffer "\r\n" 0)))
|
||||
(sender {} command {} text {}))
|
||||
; check for elapsed time since last activity
|
||||
(when (> (sub (time-of-day) Itime-stamp) (mul Idle-time 1000))
|
||||
(do-callbacks "idle-event")
|
||||
(set 'Itime-stamp (time-of-day)))
|
||||
(dolist (message messages)
|
||||
(set 'message-parts (parse message { }))
|
||||
(unless (empty? message-parts)
|
||||
(set 'sender (first message-parts))
|
||||
(catch (set 'command (first (rest message-parts))) 'error)
|
||||
(catch (set 'text (join (rest (rest message-parts)) { })) 'error))
|
||||
(process-command sender command text))))
|
||||
|
||||
(define (read-irc)
|
||||
(let ((buffer {}))
|
||||
(when (!= (net-peek Iserver) 0)
|
||||
(net-receive Iserver buffer 8192 "\n")
|
||||
(unless (empty? buffer)
|
||||
(parse-buffer buffer)))))
|
||||
|
||||
(define (read-irc-loop) ; monitoring
|
||||
(let ((buffer {}))
|
||||
(while Iconnected
|
||||
(read-irc)
|
||||
(sleep 1000))))
|
||||
|
||||
(define (print-raw-message data) ; example of using a callback
|
||||
(set 'raw-data (lookup "message" data))
|
||||
(set 'channel (lookup "channel" data))
|
||||
(set 'message-text raw-data)
|
||||
(println (date (date-value) 0 {%H:%M:%S }) username {> } message-text))
|
||||
|
||||
(define (print-outgoing-message data)
|
||||
(set 'raw-data (lookup "message" data))
|
||||
(set 'channel (lookup "channel" data))
|
||||
(set 'message-text raw-data)
|
||||
(println (date (date-value) 0 {%H:%M:%S }) Inickname {> } message-text))
|
||||
|
||||
(define (session); interactive terminal
|
||||
; must add callbacks to display messages
|
||||
(register-callback "channel-message" 'print-raw-message)
|
||||
(register-callback "send-to-server" 'print-outgoing-message)
|
||||
(while Iconnected
|
||||
(while (zero? (peek 0))
|
||||
(read-irc)
|
||||
(sleep 1000))
|
||||
(send-to-server (string (read-line 0))))
|
||||
(println {finished session } (date))
|
||||
(exit))
|
||||
|
||||
; end of IRC code
|
||||
|
||||
[text]
|
||||
|
||||
simple bot code:
|
||||
(load (string (env {HOME}) {/projects/programming/newlisp-projects/irc.lsp}))
|
||||
(context 'BOT)
|
||||
(define bot-name "bot")
|
||||
(define (join-channel data)
|
||||
(println {in BOT:join-channel with data: } data))
|
||||
(define (process-message data)
|
||||
????)
|
||||
(IRC:register-callback "join-channel" 'join-channel)
|
||||
(IRC:register-callback "channel-message" 'process-message)
|
||||
(IRC:register-callback "idle-event" 'do-idle-event)
|
||||
(IRC:register-callback "send-to-server" 'do-send-event)
|
||||
(IRC:init bot-name)
|
||||
(IRC:connect "irc.freenode.net" 6667)
|
||||
(IRC:join-channel {#newlisp})
|
||||
(IRC:read-irc-loop)
|
||||
[/text]
samples/NewLisp/log-to-database.lisp (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
(module "sqlite3.lsp") ; loads the SQLite3 database module
|
||||
|
||||
; FUNCTIONS-------------------------------------------------
|
||||
|
||||
(define (displayln str-to-display)
|
||||
(println str-to-display)
|
||||
)
|
||||
|
||||
(define (open-database sql-db-to-open)
|
||||
(if (sql3:open (string sql-db-to-open ".db"))
|
||||
(displayln "")
|
||||
(displayln "There was a problem opening the database " sql-db-to-open ": " (sql3:error))))
|
||||
|
||||
(define (close-database)
|
||||
(if (sql3:close)
|
||||
(displayln "")
|
||||
(displayln "There was a problem closing the database: " (sql3:error))))
|
||||
|
||||
;====== SAFE-FOR-SQL ===============================================================
|
||||
; this function makes strings safe for inserting into SQL statements
|
||||
; to avoid SQL injection issues
|
||||
; it's simple right now but will add to it later
|
||||
;===================================================================================
|
||||
(define (safe-for-sql str-sql-query)
|
||||
(if (string? str-sql-query) (begin
|
||||
(replace "&" str-sql-query "&")
|
||||
(replace "'" str-sql-query "'")
|
||||
(replace "\"" str-sql-query """)
|
||||
))
|
||||
(set 'result str-sql-query))
|
||||
|
||||
(define (query sql-text)
|
||||
(set 'sqlarray (sql3:sql sql-text)) ; results of query
|
||||
(if sqlarray
|
||||
(setq query-return sqlarray)
|
||||
(if (sql3:error)
|
||||
(displayln (sql3:error) " query problem ")
|
||||
(setq query-return nil))))
|
||||
|
||||
(define-macro (create-record)
|
||||
; first save the values
|
||||
(set 'temp-record-values nil)
|
||||
(set 'temp-table-name (first (args)))
|
||||
;(displayln "<BR>Arguments: " (args))
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1))
|
||||
; now save the arguments as symbols under the context "DB"
|
||||
(dolist (s (rest (args)))
|
||||
(set 'temp-index-num (string $idx)) ; we need to number the symbols to keep them in the correct order
|
||||
(if (= (length temp-index-num) 1) (set 'temp-index-num (string "0" temp-index-num))) ; leading 0 keeps the max at 100.
|
||||
(sym (string temp-index-num s) 'DB))
|
||||
; now create the sql query
|
||||
(set 'temp-sql-query (string "INSERT INTO " temp-table-name " ("))
|
||||
;(displayln "<P>TABLE NAME: " temp-table-name)
|
||||
;(displayln "<P>SYMBOLS: " (symbols DB))
|
||||
;(displayln "<BR>VALUES: " temp-record-values)
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (rest (rest (string d)))))) ", "))
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
(extend temp-sql-query ") VALUES (")
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ", ")) ; all values are sanitized to avoid SQL injection
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
(extend temp-sql-query ");")
|
||||
;(displayln "<p>***** SQL QUERY: " temp-sql-query)
|
||||
(displayln (query temp-sql-query)) ; actually run the query against the database
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
)
|
||||
|
||||
(define-macro (update-record)
|
||||
; first save the values
|
||||
(set 'temp-record-values nil)
|
||||
(set 'temp-table-name (first (args)))
|
||||
(set 'continue true) ; debugging
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1))
|
||||
; now save the arguments as symbols under the context "D2"
|
||||
(dolist (st (rest (args)))
|
||||
(set 'temp-index-num (string $idx)) ; we need to number the symbols to keep them in the correct order
|
||||
(if (= (length temp-index-num) 1) (set 'temp-index-num (string "0" temp-index-num))) ; leading 0 keeps the max at 100.
|
||||
;(displayln "<br>SYMBOL>>>>" (string temp-index-num st) "<<<") ; debugging
|
||||
(sym (string temp-index-num st) 'D2)
|
||||
)
|
||||
(if continue (begin ; --- temporary debugging
|
||||
; now create the sql query
|
||||
(set 'temp-sql-query (string "UPDATE " temp-table-name " SET "))
|
||||
;(displayln "<P>TABLE NAME: " temp-table-name)
|
||||
;(displayln "<P>SYMBOLS: " (symbols D2))
|
||||
;(displayln "<BR>VALUES: " temp-record-values)
|
||||
(dolist (d (rest (symbols D2))) ; ignore the first argument, as it will be the ConditionColumn for later
|
||||
(extend temp-sql-query (rest (rest (rest (rest (rest (string d)))))) "=")
|
||||
(set 'q (temp-record-values (+ $idx 1)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ", ") ; all values are sanitized to avoid SQL injection
|
||||
)
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
; okay now add the ConditionColumn value
|
||||
(extend temp-sql-query (string " WHERE " (rest (rest (rest (rest (rest (string (first (symbols D2)))))))) "="))
|
||||
(if (string? (first temp-record-values)) (extend temp-sql-query "'"))
|
||||
(extend temp-sql-query (string (safe-for-sql (first temp-record-values))))
|
||||
(if (string? (first temp-record-values)) (extend temp-sql-query "'"))
|
||||
(extend temp-sql-query ";")
|
||||
;(displayln "<p>***** SQL QUERY: " temp-sql-query)
|
||||
(query temp-sql-query) ; actually run the query against the database
|
||||
(delete 'D2) ; we're done, so delete all symbols in the DB context.
|
||||
)) ; --- end temporary debugging
|
||||
)
|
||||
|
||||
(define-macro (delete-record)
|
||||
(set 'temp-table-name (first (args)))
|
||||
(set 'temp-record-values nil)
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1)) ; only one value for NOW...
|
||||
(sym (first (rest (args))) 'DB) ; put the second argument (for now) into a symbol in the DB context
|
||||
; this will have to be in a dolist loop of (rest (args)) when I add more
|
||||
(set 'temp-sql-query (string "DELETE FROM " temp-table-name " WHERE "))
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (string d))))))
|
||||
(extend temp-sql-query "=")
|
||||
; why am I doing a loop here? There should be only one value, right? But maybe for future extension...
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'"))) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ";")
|
||||
;(displayln "TEMP-DELETE-QUERY: " temp-sql-query)
|
||||
(query temp-sql-query)
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
)
|
||||
|
||||
(define-macro (get-record)
|
||||
(set 'temp-table-name (first (args)))
|
||||
; if you have more arguments than just the table name, they become the elements of the WHERE clause
|
||||
(if (> (length (args)) 1) (begin
|
||||
(set 'temp-record-values nil)
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1)) ; only one value for NOW...
|
||||
(sym (first (rest (args))) 'DB) ; put the second argument (for now) into a symbol in the DB context
|
||||
; this will have to be in a dolist loop of (rest (args)) when I add more
|
||||
(set 'temp-sql-query (string "SELECT * FROM " temp-table-name " WHERE "))
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (string d))))))
|
||||
(extend temp-sql-query "=")
|
||||
; why am I doing a loop here? There should be only one value, right? But maybe for future extension...
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'"))) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ";")
|
||||
)
|
||||
; otherwise, just get everything in that table
|
||||
(set 'temp-sql-query (string "SELECT * FROM " temp-table-name ";"))
|
||||
)
|
||||
;(displayln "TEMP-GET-QUERY: " temp-sql-query)
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
(set 'return-value (query temp-sql-query)) ; this returns a list of everything in the record
|
||||
)
|
||||
|
||||
; END FUNCTIONS ===================
|
||||
|
||||
|
||||
(open-database "SERVER-LOGS")
|
||||
(query "CREATE TABLE Logs (Id INTEGER PRIMARY KEY, IP TEXT, UserId TEXT, UserName TEXT, Date DATE, Request TEXT, Result TEXT, Size INTEGER, Referrer TEXT, UserAgent TEXT)")
|
||||
;(print (query "SELECT * from SQLITE_MASTER;"))
|
||||
(set 'access-log (read-file "/var/log/apache2/access.log"))
|
||||
(set 'access-list (parse access-log "\n"))
|
||||
(set 'max-items (integer (first (first (query "select count(*) from Logs")))))
|
||||
(println "Number of items in database: " max-items)
|
||||
(println "Number of lines in log: " (length access-list))
|
||||
(dolist (line access-list)
|
||||
(set 'line-list (parse line))
|
||||
;(println "Line# " $idx " - " line-list)
|
||||
;(println "Length of line: " (length line-list))
|
||||
(if (> (length line-list) 0) (begin
|
||||
(++ max-items)
|
||||
(set 'Id max-items) (print $idx "/" (length access-list))
|
||||
(set 'IP (string (line-list 0) (line-list 1) (line-list 2)))
|
||||
(set 'UserId (line-list 3))
|
||||
(set 'UserName (line-list 4))
|
||||
(set 'Date (line-list 5))
|
||||
(set 'Date (trim Date "["))
|
||||
(set 'Date (trim Date "]"))
|
||||
;(println "DATE: " Date)
|
||||
(set 'date-parsed (date-parse Date "%d/%b/%Y:%H:%M:%S -0700"))
|
||||
;(println "DATE-PARSED: " date-parsed)
|
||||
(set 'Date (date date-parsed 0 "%Y-%m-%dT%H:%M:%S"))
|
||||
(println " " Date)
|
||||
(set 'Request (line-list 6))
|
||||
(set 'Result (line-list 7))
|
||||
(set 'Size (line-list 8))
|
||||
(set 'Referrer (line-list 9))
|
||||
(set 'UserAgent (line-list 10))
|
||||
(create-record "Logs" Id IP UserId UserName Date Request Result Size Referrer UserAgent)
|
||||
))
|
||||
)
|
||||
(close-database)
|
||||
(exit)
samples/Nit/file.nit (new file, 798 lines)
@@ -0,0 +1,798 @@
|
||||
# This file is part of NIT ( http://www.nitlanguage.org ).
|
||||
#
|
||||
# Copyright 2004-2008 Jean Privat <jean@pryen.org>
|
||||
# Copyright 2008 Floréal Morandat <morandat@lirmm.fr>
|
||||
# Copyright 2008 Jean-Sébastien Gélinas <calestar@gmail.com>
|
||||
#
|
||||
# This file is free software, which comes along with NIT. This software is
|
||||
# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
|
||||
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
|
||||
# PARTICULAR PURPOSE. You can modify it is you want, provided this header
|
||||
# is kept unaltered, and a notification of the changes is added.
|
||||
# You are allowed to redistribute it and sell it, alone or is a part of
|
||||
# another product.
|
||||
|
||||
# File manipulations (create, read, write, etc.)
|
||||
module file
|
||||
|
||||
intrude import stream
|
||||
intrude import ropes
|
||||
import string_search
|
||||
import time
|
||||
|
||||
in "C Header" `{
|
||||
#include <dirent.h>
|
||||
#include <string.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <stdio.h>
|
||||
#include <poll.h>
|
||||
#include <errno.h>
|
||||
`}
|
||||
|
||||
# File Abstract Stream
|
||||
abstract class FStream
|
||||
super IOS
|
||||
# The path of the file.
|
||||
var path: nullable String = null
|
||||
|
||||
# The FILE *.
|
||||
private var file: nullable NativeFile = null
|
||||
|
||||
fun file_stat: FileStat do return _file.file_stat
|
||||
|
||||
# File descriptor of this file
|
||||
fun fd: Int do return _file.fileno
|
||||
end
|
||||
|
||||
# File input stream
|
||||
class IFStream
|
||||
super FStream
|
||||
super BufferedIStream
|
||||
super PollableIStream
|
||||
# Misc
|
||||
|
||||
# Open the same file again.
|
||||
# The original path is reused, therefore the reopened file can be a different file.
|
||||
fun reopen
|
||||
do
|
||||
if not eof and not _file.address_is_null then close
|
||||
_file = new NativeFile.io_open_read(path.to_cstring)
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Error: Opening file at '{path.as(not null)}' failed with '{sys.errno.strerror}'")
|
||||
end_reached = true
|
||||
return
|
||||
end
|
||||
end_reached = false
|
||||
_buffer_pos = 0
|
||||
_buffer.clear
|
||||
end
|
||||
|
||||
redef fun close
|
||||
do
|
||||
if _file.address_is_null then return
|
||||
var i = _file.io_close
|
||||
_buffer.clear
|
||||
end_reached = true
|
||||
end
|
||||
|
||||
redef fun fill_buffer
|
||||
do
|
||||
var nb = _file.io_read(_buffer.items, _buffer.capacity)
|
||||
if nb <= 0 then
|
||||
end_reached = true
|
||||
nb = 0
|
||||
end
|
||||
_buffer.length = nb
|
||||
_buffer_pos = 0
|
||||
end
|
||||
# End of file?
|
||||
redef var end_reached: Bool = false
|
||||
|
||||
# Open the file at `path` for reading.
|
||||
init open(path: String)
|
||||
do
|
||||
self.path = path
|
||||
prepare_buffer(10)
|
||||
_file = new NativeFile.io_open_read(path.to_cstring)
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'")
|
||||
end_reached = true
|
||||
end
|
||||
end
|
||||
|
||||
init from_fd(fd: Int) do
|
||||
self.path = ""
|
||||
prepare_buffer(10)
|
||||
_file = fd_to_stream(fd, read_only)
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Error: Converting fd {fd} to stream failed with '{sys.errno.strerror}'")
|
||||
end_reached = true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# File output stream
|
||||
class OFStream
|
||||
super FStream
|
||||
super OStream
|
||||
|
||||
redef fun write(s)
|
||||
do
|
||||
if last_error != null then return
|
||||
if not _is_writable then
|
||||
last_error = new IOError("Cannot write to non-writable stream")
|
||||
return
|
||||
end
|
||||
if s isa FlatText then
|
||||
write_native(s.to_cstring, s.length)
|
||||
else
|
||||
for i in s.substrings do write_native(i.to_cstring, i.length)
|
||||
end
|
||||
end
|
||||
|
||||
redef fun close
|
||||
do
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Cannot close non-existing write stream")
|
||||
_is_writable = false
|
||||
return
|
||||
end
|
||||
var i = _file.io_close
|
||||
_is_writable = false
|
||||
end
|
||||
redef var is_writable = false
|
||||
|
||||
# Write `len` bytes from `native`.
|
||||
private fun write_native(native: NativeString, len: Int)
|
||||
do
|
||||
if last_error != null then return
|
||||
if not _is_writable then
|
||||
last_error = new IOError("Cannot write to non-writable stream")
|
||||
return
|
||||
end
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Writing on a null stream")
|
||||
_is_writable = false
|
||||
return
|
||||
end
|
||||
var err = _file.io_write(native, len)
|
||||
if err != len then
|
||||
# Big problem
|
||||
last_error = new IOError("Problem in writing : {err} {len} \n")
|
||||
end
|
||||
end
|
||||
|
||||
# Open the file at `path` for writing.
|
||||
init open(path: String)
|
||||
do
|
||||
_file = new NativeFile.io_open_write(path.to_cstring)
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Error: Opening file at '{path}' failed with '{sys.errno.strerror}'")
|
||||
self.path = path
|
||||
is_writable = false
return
|
||||
end
|
||||
self.path = path
|
||||
_is_writable = true
|
||||
end
|
||||
|
||||
# Creates a new File stream from a file descriptor
|
||||
init from_fd(fd: Int) do
|
||||
self.path = ""
|
||||
_file = fd_to_stream(fd, wipe_write)
|
||||
_is_writable = true
|
||||
if _file.address_is_null then
|
||||
last_error = new IOError("Error: Opening stream from file descriptor {fd} failed with '{sys.errno.strerror}'")
|
||||
_is_writable = false
|
||||
end
|
||||
end
|
||||
end
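# An illustrative round-trip through the two stream classes above: a minimal
# sketch, not part of the original library code, and the path is hypothetical.
#
#     var o = new OFStream.open("/tmp/example.txt")
#     o.write "hello\n"
#     o.close
#
#     var i = new IFStream.open("/tmp/example.txt")
#     print i.read_line
#     i.close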
|
||||
|
||||
redef interface Object
|
||||
|
||||
private fun read_only: NativeString do return "r".to_cstring
|
||||
|
||||
private fun wipe_write: NativeString do return "w".to_cstring
|
||||
|
||||
private fun fd_to_stream(fd: Int, mode: NativeString): NativeFile `{
|
||||
return fdopen(fd, mode);
|
||||
`}
|
||||
|
||||
# Return the first stream that is ready to be read from or written to.
|
||||
# Return `null` on interruption (possibly by a signal).
|
||||
protected fun poll( streams : Sequence[FStream] ) : nullable FStream
|
||||
do
|
||||
var in_fds = new Array[Int]
|
||||
var out_fds = new Array[Int]
|
||||
var fd_to_stream = new HashMap[Int,FStream]
|
||||
for s in streams do
|
||||
var fd = s.fd
|
||||
if s isa IFStream then in_fds.add( fd )
|
||||
if s isa OFStream then out_fds.add( fd )
|
||||
|
||||
fd_to_stream[fd] = s
|
||||
end
|
||||
|
||||
var polled_fd = intern_poll( in_fds, out_fds )
|
||||
|
||||
if polled_fd == null then
|
||||
return null
|
||||
else
|
||||
return fd_to_stream[polled_fd]
|
||||
end
|
||||
end
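# A minimal usage sketch for `poll` (illustrative only, not part of the
# original library; the fifo path is hypothetical):
#
#     var fifo = new IFStream.open("/tmp/some_fifo")
#     var streams = new Array[FStream]
#     streams.add(fifo)
#     var ready = poll(streams)
#     if ready != null and ready isa IFStream then print ready.read_line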
|
||||
|
||||
private fun intern_poll(in_fds: Array[Int], out_fds: Array[Int]) : nullable Int is extern import Array[Int].length, Array[Int].[], Int.as(nullable Int) `{
|
||||
int in_len, out_len, total_len;
|
||||
struct pollfd *c_fds;
|
||||
sigset_t sigmask;
|
||||
int i;
|
||||
int first_polled_fd = -1;
|
||||
int result;
|
||||
|
||||
in_len = Array_of_Int_length( in_fds );
|
||||
out_len = Array_of_Int_length( out_fds );
|
||||
total_len = in_len + out_len;
|
||||
c_fds = malloc( sizeof(struct pollfd) * total_len );
|
||||
|
||||
/* input streams */
|
||||
for ( i=0; i<in_len; i ++ ) {
|
||||
int fd;
|
||||
fd = Array_of_Int__index( in_fds, i );
|
||||
|
||||
c_fds[i].fd = fd;
|
||||
c_fds[i].events = POLLIN;
|
||||
}
|
||||
|
||||
/* output streams */
|
||||
for ( i=0; i<out_len; i ++ ) {
|
||||
int fd;
|
||||
fd = Array_of_Int__index( out_fds, i );
|
||||
|
||||
c_fds[i].fd = fd;
|
||||
c_fds[i].events = POLLOUT;
|
||||
}
|
||||
|
||||
/* poll all fds, unlimited timeout */
|
||||
result = poll( c_fds, total_len, -1 );
|
||||
|
||||
if ( result > 0 ) {
|
||||
/* analyse results */
|
||||
for ( i=0; i<total_len; i++ )
|
||||
if ( c_fds[i].revents & c_fds[i].events || /* awaited event */
|
||||
c_fds[i].revents & POLLHUP ) /* closed */
|
||||
{
|
||||
first_polled_fd = c_fds[i].fd;
|
||||
break;
|
||||
}
|
||||
|
||||
free( c_fds ); /* release the pollfd array allocated above */
return Int_as_nullable( first_polled_fd );
|
||||
}
|
||||
else if ( result < 0 )
|
||||
fprintf( stderr, "Error in Stream:poll: %s\n", strerror( errno ) );
|
||||
|
||||
free( c_fds );
return null_Int();
|
||||
`}
|
||||
end
|
||||
|
||||
###############################################################################
|
||||
|
||||
class Stdin
|
||||
super IFStream
|
||||
|
||||
init do
|
||||
_file = new NativeFile.native_stdin
|
||||
path = "/dev/stdin"
|
||||
prepare_buffer(1)
|
||||
end
|
||||
|
||||
redef fun poll_in: Bool is extern "file_stdin_poll_in"
|
||||
end
|
||||
|
||||
class Stdout
|
||||
super OFStream
|
||||
init do
|
||||
_file = new NativeFile.native_stdout
|
||||
path = "/dev/stdout"
|
||||
_is_writable = true
|
||||
end
|
||||
end
|
||||
|
||||
class Stderr
|
||||
super OFStream
|
||||
init do
|
||||
_file = new NativeFile.native_stderr
|
||||
path = "/dev/stderr"
|
||||
_is_writable = true
|
||||
end
|
||||
end
|
||||
|
||||
###############################################################################
|
||||
|
||||
redef class Streamable
|
||||
# Like `write_to`, but takes care of creating the file
|
||||
fun write_to_file(filepath: String)
|
||||
do
|
||||
var stream = new OFStream.open(filepath)
|
||||
write_to(stream)
|
||||
stream.close
|
||||
end
|
||||
end
|
||||
|
||||
redef class String
|
||||
# Return `true` if a file with this name exists
|
||||
fun file_exists: Bool do return to_cstring.file_exists
|
||||
|
||||
# The status of a file. See POSIX stat(2).
|
||||
fun file_stat: FileStat do return to_cstring.file_stat
|
||||
|
||||
# The status of a file or of a symlink. See POSIX lstat(2).
|
||||
fun file_lstat: FileStat do return to_cstring.file_lstat
|
||||
|
||||
# Remove a file, return `true` on success
|
||||
fun file_delete: Bool do return to_cstring.file_delete
|
||||
|
||||
# Copy content of file at `self` to `dest`
|
||||
fun file_copy_to(dest: String)
|
||||
do
|
||||
var input = new IFStream.open(self)
|
||||
var output = new OFStream.open(dest)
|
||||
|
||||
while not input.eof do
|
||||
var buffer = input.read(1024)
|
||||
output.write buffer
|
||||
end
|
||||
|
||||
input.close
|
||||
output.close
|
||||
end
|
||||
|
||||
# Remove the trailing extension `ext`.
|
||||
#
|
||||
# `ext` usually starts with a dot but could be anything.
|
||||
#
|
||||
# assert "file.txt".strip_extension(".txt") == "file"
|
||||
# assert "file.txt".strip_extension("le.txt") == "fi"
|
||||
# assert "file.txt".strip_extension("xt") == "file.t"
|
||||
#
|
||||
# if `ext` is not present, `self` is returned unmodified.
|
||||
#
|
||||
# assert "file.txt".strip_extension(".tar.gz") == "file.txt"
|
||||
fun strip_extension(ext: String): String
|
||||
do
|
||||
if has_suffix(ext) then
|
||||
return substring(0, length - ext.length)
|
||||
end
|
||||
return self
|
||||
end
|
||||
|
||||
# Extract the basename of a path and remove the extension
|
||||
#
|
||||
# assert "/path/to/a_file.ext".basename(".ext") == "a_file"
|
||||
# assert "path/to/a_file.ext".basename(".ext") == "a_file"
|
||||
# assert "path/to".basename(".ext") == "to"
|
||||
# assert "path/to/".basename(".ext") == "to"
|
||||
# assert "path".basename("") == "path"
|
||||
# assert "/path".basename("") == "path"
|
||||
# assert "/".basename("") == "/"
|
||||
# assert "".basename("") == ""
|
||||
fun basename(ext: String): String
|
||||
do
|
||||
var l = length - 1 # Index of the last char
|
||||
while l > 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/`
|
||||
if l == 0 then return "/"
|
||||
var pos = chars.last_index_of_from('/', l)
|
||||
var n = self
|
||||
if pos >= 0 then
|
||||
n = substring(pos+1, l-pos)
|
||||
end
|
||||
return n.strip_extension(ext)
|
||||
end
|
||||
|
||||
# Extract the dirname of a path
|
||||
#
|
||||
# assert "/path/to/a_file.ext".dirname == "/path/to"
|
||||
# assert "path/to/a_file.ext".dirname == "path/to"
|
||||
# assert "path/to".dirname == "path"
|
||||
# assert "path/to/".dirname == "path"
|
||||
# assert "path".dirname == "."
|
||||
# assert "/path".dirname == "/"
|
||||
# assert "/".dirname == "/"
|
||||
# assert "".dirname == "."
|
||||
fun dirname: String
|
||||
do
|
||||
var l = length - 1 # Index of the last char
|
||||
while l > 0 and self.chars[l] == '/' do l -= 1 # remove all trailing `/`
|
||||
var pos = chars.last_index_of_from('/', l)
|
||||
if pos > 0 then
|
||||
return substring(0, pos)
|
||||
else if pos == 0 then
|
||||
return "/"
|
||||
else
|
||||
return "."
|
||||
end
|
||||
end
|
||||
|
||||
# Return the canonicalized absolute pathname (see POSIX function `realpath`)
|
||||
fun realpath: String do
|
||||
var cs = to_cstring.file_realpath
|
||||
var res = cs.to_s_with_copy
|
||||
# cs.free_malloc # FIXME memory leak
|
||||
return res
|
||||
end
|
||||
|
||||
# Simplify a file path by removing useless ".", collapsing "//", and resolving ".."
|
||||
# ".." are not resolved if they start the path
|
||||
# a starting "/" is not removed
|
||||
# a trailing "/" is removed
|
||||
#
|
||||
# Note that the method only works on the string:
|
||||
# * no I/O access is performed
|
||||
# * the validity of the path is not checked
|
||||
#
|
||||
# assert "some/./complex/../../path/from/../to/a////file//".simplify_path == "path/to/a/file"
|
||||
# assert "../dir/file".simplify_path == "../dir/file"
|
||||
# assert "dir/../../".simplify_path == ".."
|
||||
# assert "dir/..".simplify_path == "."
|
||||
# assert "//absolute//path/".simplify_path == "/absolute/path"
|
||||
# assert "//absolute//../".simplify_path == "/"
|
||||
fun simplify_path: String
|
||||
do
|
||||
var a = self.split_with("/")
|
||||
var a2 = new Array[String]
|
||||
for x in a do
|
||||
if x == "." then continue
|
||||
if x == "" and not a2.is_empty then continue
|
||||
if x == ".." and not a2.is_empty and a2.last != ".." then
|
||||
a2.pop
|
||||
continue
|
||||
end
|
||||
a2.push(x)
|
||||
end
|
||||
if a2.is_empty then return "."
|
||||
if a2.length == 1 and a2.first == "" then return "/"
|
||||
return a2.join("/")
|
||||
end
|
||||
|
||||
# Correctly join two paths using the directory separator.
|
||||
#
|
||||
# Using a standard "{self}/{path}" does not work in the following cases:
|
||||
#
|
||||
# * `self` is empty.
|
||||
# * `path` ends with `'/'`.
|
||||
# * `path` starts with `'/'`.
|
||||
#
|
||||
# This method ensures that the join is valid.
|
||||
#
|
||||
# assert "hello".join_path("world") == "hello/world"
|
||||
# assert "hel/lo".join_path("wor/ld") == "hel/lo/wor/ld"
|
||||
# assert "".join_path("world") == "world"
|
||||
# assert "hello".join_path("/world") == "/world"
|
||||
# assert "hello/".join_path("world") == "hello/world"
|
||||
# assert "hello/".join_path("/world") == "/world"
|
||||
#
|
||||
# Note: You may want to use `simplify_path` on the result.
|
||||
#
|
||||
# Note: This method works only with POSIX paths.
|
||||
fun join_path(path: String): String
|
||||
do
|
||||
if path.is_empty then return self
|
||||
if self.is_empty then return path
|
||||
if path.chars[0] == '/' then return path
|
||||
if self.last == '/' then return "{self}{path}"
|
||||
return "{self}/{path}"
|
||||
end
|
||||
|
||||
# Convert the path (`self`) to a program name.
|
||||
#
|
||||
# Ensure the path (`self`) will be treated as-is by POSIX shells when it is
|
||||
# used as a program name. In order to do that, prepend `./` if needed.
|
||||
#
|
||||
# assert "foo".to_program_name == "./foo"
|
||||
# assert "/foo".to_program_name == "/foo"
|
||||
# assert "".to_program_name == "./" # At least, your shell will detect the error.
|
||||
fun to_program_name: String do
|
||||
if self.has_prefix("/") then
|
||||
return self
|
||||
else
|
||||
return "./{self}"
|
||||
end
|
||||
end
|
||||
|
||||
# Alias for `join_path`
|
||||
#
|
||||
# assert "hello" / "world" == "hello/world"
|
||||
# assert "hel/lo" / "wor/ld" == "hel/lo/wor/ld"
|
||||
# assert "" / "world" == "world"
|
||||
# assert "/hello" / "/world" == "/world"
|
||||
#
|
||||
# This operator is quite useful for chaining path changes,
|
||||
# each one being relative to the previous one.
|
||||
#
|
||||
# var a = "foo"
|
||||
# var b = "/bar"
|
||||
# var c = "baz/foobar"
|
||||
# assert a/b/c == "/bar/baz/foobar"
|
||||
fun /(path: String): String do return join_path(path)
|
||||
|
||||
# Returns the relative path needed to go from `self` to `dest`.
|
||||
#
|
||||
# assert "/foo/bar".relpath("/foo/baz") == "../baz"
|
||||
# assert "/foo/bar".relpath("/baz/bar") == "../../baz/bar"
|
||||
#
|
||||
# If `self` or `dest` is relative, it is considered relative to `getcwd`.
|
||||
#
|
||||
# In some cases, the result is still independent of the current directory:
|
||||
#
|
||||
# assert "foo/bar".relpath("..") == "../../.."
|
||||
#
|
||||
# In other cases, parts of the current directory may appear in the result:
|
||||
#
|
||||
# var p = "../foo/bar".relpath("baz")
|
||||
# var c = getcwd.basename("")
|
||||
# assert p == "../../{c}/baz"
|
||||
#
|
||||
# For path resolution independent of the current directory (e.g. for paths in URLs),
|
||||
# or to use a starting directory other than the current directory,
|
||||
# just force absolute paths:
|
||||
#
|
||||
# var start = "/a/b/c/d"
|
||||
# var p2 = (start/"../foo/bar").relpath(start/"baz")
|
||||
# assert p2 == "../../d/baz"
|
||||
#
|
||||
#
|
||||
# Neither `self` nor `dest` has to be a real path or to exist in a directory, since
|
||||
# the resolution is only done with string manipulations and without any access to
|
||||
# the underlying file system.
|
||||
#
|
||||
# If `self` and `dest` are the same directory, the empty string is returned:
|
||||
#
|
||||
# assert "foo".relpath("foo") == ""
|
||||
# assert "foo/../bar".relpath("bar") == ""
|
||||
#
|
||||
# The empty string and "." both designate the current directory:
|
||||
#
|
||||
# assert "".relpath("foo/bar") == "foo/bar"
|
||||
# assert ".".relpath("foo/bar") == "foo/bar"
|
||||
# assert "foo/bar".relpath("") == "../.."
|
||||
# assert "/" + "/".relpath(".") == getcwd
|
||||
fun relpath(dest: String): String
|
||||
do
|
||||
var cwd = getcwd
|
||||
var from = (cwd/self).simplify_path.split("/")
|
||||
if from.last.is_empty then from.pop # case for the root directory
|
||||
var to = (cwd/dest).simplify_path.split("/")
|
||||
if to.last.is_empty then to.pop # case for the root directory
|
||||
|
||||
# Remove common prefixes
|
||||
while not from.is_empty and not to.is_empty and from.first == to.first do
|
||||
from.shift
|
||||
to.shift
|
||||
end
|
||||
|
||||
# Result is going up in `from` with ".." then going down following `to`
|
||||
var from_len = from.length
|
||||
if from_len == 0 then return to.join("/")
|
||||
var up = "../"*(from_len-1) + ".."
|
||||
if to.is_empty then return up
|
||||
var res = up + "/" + to.join("/")
|
||||
return res
|
||||
end
|
||||
|
||||
# Create a directory (and all intermediate directories if needed)
|
||||
fun mkdir
|
||||
do
|
||||
var dirs = self.split_with("/")
|
||||
var path = new FlatBuffer
|
||||
if dirs.is_empty then return
|
||||
if dirs[0].is_empty then
|
||||
# it was a starting /
|
||||
path.add('/')
|
||||
end
|
||||
for d in dirs do
|
||||
if d.is_empty then continue
|
||||
path.append(d)
|
||||
path.add('/')
|
||||
path.to_s.to_cstring.file_mkdir
|
||||
end
|
||||
end
|
||||
|
||||
# Delete a directory and all of its content, return `true` on success
|
||||
#
|
||||
# Does not go through symbolic links and may get stuck in a cycle if there
|
||||
# is a cycle in the filesystem.
|
||||
fun rmdir: Bool
|
||||
do
|
||||
var ok = true
|
||||
for file in self.files do
|
||||
var file_path = self.join_path(file)
|
||||
var stat = file_path.file_lstat
|
||||
if stat.is_dir then
|
||||
ok = file_path.rmdir and ok
|
||||
else
|
||||
ok = file_path.file_delete and ok
|
||||
end
|
||||
stat.free
|
||||
end
|
||||
|
||||
# Delete the directory itself
|
||||
if ok then to_cstring.rmdir
|
||||
|
||||
return ok
|
||||
end
|
||||
|
||||
# Change the current working directory
|
||||
#
|
||||
# "/etc".chdir
|
||||
# assert getcwd == "/etc"
|
||||
# "..".chdir
|
||||
# assert getcwd == "/"
|
||||
#
|
||||
# TODO: errno
|
||||
fun chdir do to_cstring.file_chdir
|
||||
|
||||
# Return the right-most extension (without the dot)
|
||||
#
|
||||
# Only the last extension is returned.
|
||||
# There is no special case for combined extensions.
|
||||
#
|
||||
# assert "file.txt".file_extension == "txt"
|
||||
# assert "file.tar.gz".file_extension == "gz"
|
||||
#
|
||||
# For a file without an extension, `null` is returned.
|
||||
# However, for a trailing dot, `""` is returned.
|
||||
#
|
||||
# assert "file".file_extension == null
|
||||
# assert "file.".file_extension == ""
|
||||
#
|
||||
# The starting dot of hidden files is never considered.
|
||||
#
|
||||
# assert ".file.txt".file_extension == "txt"
|
||||
# assert ".file".file_extension == null
|
||||
fun file_extension: nullable String
|
||||
do
|
||||
var last_dot = chars.last_index_of('.')
|
||||
if last_dot > 0 then
|
||||
return substring( last_dot+1, length )
|
||||
else
|
||||
return null
|
||||
end
|
||||
end
|
||||
|
||||
# Return the names of the entries in the directory at `self` ("." and ".." excluded)
|
||||
fun files : Set[ String ] is extern import HashSet[String], HashSet[String].add, NativeString.to_s, String.to_cstring, HashSet[String].as(Set[String]) `{
|
||||
char *dir_path;
|
||||
DIR *dir;
|
||||
|
||||
dir_path = String_to_cstring( recv );
|
||||
if ((dir = opendir(dir_path)) == NULL)
|
||||
{
|
||||
perror( dir_path );
|
||||
exit( 1 );
|
||||
}
|
||||
else
|
||||
{
|
||||
HashSet_of_String results;
|
||||
String file_name;
|
||||
struct dirent *de;
|
||||
|
||||
results = new_HashSet_of_String();
|
||||
|
||||
while ( ( de = readdir( dir ) ) != NULL )
|
||||
if ( strcmp( de->d_name, ".." ) != 0 &&
|
||||
strcmp( de->d_name, "." ) != 0 )
|
||||
{
|
||||
file_name = NativeString_to_s( strdup( de->d_name ) );
|
||||
HashSet_of_String_add( results, file_name );
|
||||
}
|
||||
|
||||
closedir( dir );
|
||||
return HashSet_of_String_as_Set_of_String( results );
|
||||
}
|
||||
`}
|
||||
end
|
||||
|
||||
redef class NativeString
|
||||
private fun file_exists: Bool is extern "string_NativeString_NativeString_file_exists_0"
|
||||
private fun file_stat: FileStat is extern "string_NativeString_NativeString_file_stat_0"
|
||||
private fun file_lstat: FileStat `{
|
||||
struct stat* stat_element;
|
||||
int res;
|
||||
stat_element = malloc(sizeof(struct stat));
|
||||
res = lstat(recv, stat_element);
|
||||
if (res == -1) return NULL;
|
||||
return stat_element;
|
||||
`}
|
||||
private fun file_mkdir: Bool is extern "string_NativeString_NativeString_file_mkdir_0"
|
||||
private fun rmdir: Bool `{ return rmdir(recv); `}
|
||||
private fun file_delete: Bool is extern "string_NativeString_NativeString_file_delete_0"
|
||||
private fun file_chdir is extern "string_NativeString_NativeString_file_chdir_0"
|
||||
private fun file_realpath: NativeString is extern "file_NativeString_realpath"
|
||||
end
|
||||
|
||||
# This class is system dependent ... must reify the vfs
|
||||
extern class FileStat `{ struct stat * `}
|
||||
# Returns the permission bits of file
|
||||
fun mode: Int is extern "file_FileStat_FileStat_mode_0"
|
||||
# Returns the last access time
|
||||
fun atime: Int is extern "file_FileStat_FileStat_atime_0"
|
||||
# Returns the last status change time
|
||||
fun ctime: Int is extern "file_FileStat_FileStat_ctime_0"
|
||||
# Returns the last modification time
|
||||
fun mtime: Int is extern "file_FileStat_FileStat_mtime_0"
|
||||
# Returns the size
|
||||
fun size: Int is extern "file_FileStat_FileStat_size_0"
|
||||
|
||||
# Returns true if it is a regular file (not a device file, pipe, socket, ...)
|
||||
fun is_reg: Bool `{ return S_ISREG(recv->st_mode); `}
|
||||
# Returns true if it is a directory
|
||||
fun is_dir: Bool `{ return S_ISDIR(recv->st_mode); `}
|
||||
# Returns true if it is a character device
|
||||
fun is_chr: Bool `{ return S_ISCHR(recv->st_mode); `}
|
||||
# Returns true if it is a block device
|
||||
fun is_blk: Bool `{ return S_ISBLK(recv->st_mode); `}
|
||||
# Returns true if the type is fifo
|
||||
fun is_fifo: Bool `{ return S_ISFIFO(recv->st_mode); `}
|
||||
# Returns true if the type is a link
|
||||
fun is_lnk: Bool `{ return S_ISLNK(recv->st_mode); `}
|
||||
# Returns true if the type is a socket
|
||||
fun is_sock: Bool `{ return S_ISSOCK(recv->st_mode); `}
|
||||
end
|
||||
|
||||
# Instances of this class are standard FILE * pointers
|
||||
private extern class NativeFile `{ FILE* `}
|
||||
fun io_read(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_read_2"
|
||||
fun io_write(buf: NativeString, len: Int): Int is extern "file_NativeFile_NativeFile_io_write_2"
|
||||
fun io_close: Int is extern "file_NativeFile_NativeFile_io_close_0"
|
||||
fun file_stat: FileStat is extern "file_NativeFile_NativeFile_file_stat_0"
|
||||
fun fileno: Int `{ return fileno(recv); `}
|
||||
|
||||
new io_open_read(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_read_1"
|
||||
new io_open_write(path: NativeString) is extern "file_NativeFileCapable_NativeFileCapable_io_open_write_1"
|
||||
new native_stdin is extern "file_NativeFileCapable_NativeFileCapable_native_stdin_0"
|
||||
new native_stdout is extern "file_NativeFileCapable_NativeFileCapable_native_stdout_0"
|
||||
new native_stderr is extern "file_NativeFileCapable_NativeFileCapable_native_stderr_0"
|
||||
end
|
||||
|
||||
redef class Sys
|
||||
|
||||
# Standard input
|
||||
var stdin: PollableIStream = new Stdin is protected writable
|
||||
|
||||
# Standard output
|
||||
var stdout: OStream = new Stdout is protected writable
|
||||
|
||||
# Standard output for errors
|
||||
var stderr: OStream = new Stderr is protected writable
|
||||
|
||||
end
|
||||
|
||||
# Print `objects` on the standard output (`stdout`).
|
||||
protected fun printn(objects: Object...)
|
||||
do
|
||||
sys.stdout.write(objects.to_s)
|
||||
end
|
||||
|
||||
# Print an `object` on the standard output (`stdout`) and add a newline.
|
||||
protected fun print(object: Object)
|
||||
do
|
||||
sys.stdout.write(object.to_s)
|
||||
sys.stdout.write("\n")
|
||||
end
|
||||
|
||||
# Read a character from the standard input (`stdin`).
|
||||
protected fun getc: Char
|
||||
do
|
||||
return sys.stdin.read_char.ascii
|
||||
end
|
||||
|
||||
# Read a line from the standard input (`stdin`).
|
||||
protected fun gets: String
|
||||
do
|
||||
return sys.stdin.read_line
|
||||
end
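# A small interactive sketch combining the helpers above (illustrative only,
# not part of the original library):
#
#     printn "What is your name? "
#     var name = gets
#     print "Hello, {name}!"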
|
||||
|
||||
# Return the working (current) directory
|
||||
protected fun getcwd: String do return file_getcwd.to_s
|
||||
private fun file_getcwd: NativeString is extern "string_NativeString_NativeString_file_getcwd_0"
|
||||
376
samples/Nit/meetup.nit
Normal file
@@ -0,0 +1,376 @@
|
||||
# This file is part of NIT ( http://www.nitlanguage.org ).
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License
|
||||
|
||||
# Shows a meetup and allows its participants to be modified
|
||||
module meetup
|
||||
|
||||
import opportunity_model
|
||||
import boilerplate
|
||||
import welcome
|
||||
import template
|
||||
|
||||
# Shows a meetup and allows its participants to be modified
|
||||
class OpportunityMeetupPage
|
||||
super OpportunityPage
|
||||
|
||||
# Meetup the page is supposed to show
|
||||
var meetup: nullable Meetup = null
|
||||
# Answer mode for the meetup: 0 for a plain yes/no poll, any other value also allows a "maybe" answer
|
||||
var mode = 0
|
||||
|
||||
init from_id(id: String) do
|
||||
var db = new OpportunityDB.open("opportunity")
|
||||
meetup = db.find_meetup_by_id(id)
|
||||
db.close
|
||||
if meetup != null then mode = meetup.answer_mode
|
||||
init
|
||||
end
|
||||
|
||||
init do
|
||||
header.page_js = "mode = {mode};\n"
|
||||
header.page_js += """
|
||||
function update_scores(){
|
||||
var anss = $('.answer');
|
||||
var count = {};
|
||||
var scores = {};
|
||||
var answers = [];
|
||||
var maxscore = 0;
|
||||
for(i=0; i < anss.length; i++){
|
||||
var incscore = 0;
|
||||
var inccount = 0;
|
||||
var idparts = anss[i].id.split("_");
|
||||
var ansid = idparts[1];
|
||||
var html = anss[i].innerHTML;
|
||||
if(html === "<center>✔</center>"){
|
||||
inccount = 1;
|
||||
incscore = 2;
|
||||
}else if(html === "<center>❓</center>"){
|
||||
incscore = 1;
|
||||
}
|
||||
var intansid = parseInt(ansid)
|
||||
if(answers.indexOf(intansid) == -1){
|
||||
answers.push(intansid);
|
||||
}
|
||||
if(ansid in count){
|
||||
count[ansid] += inccount;
|
||||
}else{
|
||||
count[ansid] = inccount;
|
||||
}
|
||||
if(ansid in scores){
|
||||
scores[ansid] += incscore;
|
||||
}else{
|
||||
scores[ansid] = incscore;
|
||||
}
|
||||
if(scores[ansid] > maxscore){
|
||||
maxscore = scores[ansid];
|
||||
}
|
||||
}
|
||||
for(i=0; i < answers.length; i++){
|
||||
var ansid = answers[i].toString();
|
||||
var el = $('#total'+ansid)[0];
|
||||
var ins = "<center>"+count[ansid];
|
||||
if(scores[ansid] >= maxscore){
|
||||
ins += "<br/><span style=\\"color:blue\\">★</span>";
|
||||
}
|
||||
ins += "</center>";
|
||||
el.innerHTML = ins;
|
||||
}
|
||||
}
|
||||
function change_answer(ele, id){
|
||||
// modify only the currently selected entry
|
||||
if (in_modification_id != id) return;
|
||||
|
||||
var e = document.getElementById(ele.id);
|
||||
var i = e.innerHTML;
|
||||
var ans = true;"""
|
||||
if mode == 0 then
|
||||
header.page_js += """
|
||||
if(i === "<center>✔</center>"){
|
||||
ans = 0;
|
||||
e.innerHTML = "<center>✘</center>"
|
||||
e.style.color = "red";
|
||||
}else{
|
||||
ans = 1;
|
||||
e.innerHTML = "<center>✔</center>";
|
||||
e.style.color = "green";
|
||||
}"""
|
||||
|
||||
else
|
||||
header.page_js += """
|
||||
if(i === "<center>✔</center>"){
|
||||
ans = 1;
|
||||
e.innerHTML = "<center>❓</center>"
|
||||
e.style.color = "#B8860B";
|
||||
}else if(i === "<center>❓</center>"){
|
||||
ans = 0;
|
||||
e.innerHTML = "<center>✘</center>"
|
||||
e.style.color = "red";
|
||||
}else{
|
||||
ans = 2;
|
||||
e.innerHTML = "<center>✔</center>";
|
||||
e.style.color = "green";
|
||||
}"""
|
||||
end
|
||||
header.page_js += """
|
||||
var a = ele.id.split('_')
|
||||
var pid = a[1]
|
||||
var aid = a[2]
|
||||
update_scores();
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: "./rest/answer",
|
||||
data: {
|
||||
answer_id: aid,
|
||||
pers_id: pid,
|
||||
answer: ans
|
||||
}
|
||||
});
|
||||
}
|
||||
function change_temp_answer(ele){
|
||||
var e = document.getElementById(ele.id);
|
||||
var i = e.innerHTML;"""
|
||||
if mode == 0 then
|
||||
header.page_js += """
|
||||
if(i === "<center>✔</center>"){
|
||||
e.innerHTML = "<center>✘</center>"
|
||||
e.style.color = "red";
|
||||
}else{
|
||||
e.innerHTML = "<center>✔</center>";
|
||||
e.style.color = "green";
|
||||
}
|
||||
"""
|
||||
else
|
||||
header.page_js += """
|
||||
if(i === "<center>✔</center>"){
|
||||
e.innerHTML = "<center>❓</center>";
|
||||
e.style.color = "#B8860B";
|
||||
}else if(i === "<center>❓</center>"){
|
||||
e.innerHTML = "<center>✘</center>"
|
||||
e.style.color = "red";
|
||||
}else{
|
||||
e.innerHTML = "<center>✔</center>";
|
||||
e.style.color = "green";
|
||||
}
|
||||
"""
|
||||
end
|
||||
header.page_js += """
|
||||
update_scores();
|
||||
}
|
||||
function add_part(ele){
|
||||
var e = document.getElementById(ele.id);
|
||||
var pname = document.getElementById("new_name").value;
|
||||
var arr = e.id.split("_");
|
||||
var mid = arr[1];
|
||||
var ans = $('#' + ele.id).parent().parent().parent().children(".answer");
|
||||
ansmap = {};
|
||||
for(i=0;i<ans.length;i++){
|
||||
var curr = ans.eq(i)
|
||||
"""
|
||||
if mode == 0 then
|
||||
header.page_js += """
|
||||
if(curr[0].innerHTML === "<center>✔</center>"){
|
||||
ansmap[curr.attr('id')] = 1
|
||||
}else{
|
||||
ansmap[curr.attr('id')] = 0
|
||||
}"""
|
||||
else
|
||||
header.page_js += """
|
||||
if(curr[0].innerHTML === "<center>✔</center>"){
|
||||
ansmap[curr.attr('id')] = 2
|
||||
}else if(curr[0].innerHTML === "<center>❓</center>"){
|
||||
ansmap[curr.attr('id')] = 1
|
||||
}else{
|
||||
ansmap[curr.attr('id')] = 0
|
||||
}"""
|
||||
end
|
||||
header.page_js += """
|
||||
}
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: "./rest/meetup/new_pers",
|
||||
data: {
|
||||
meetup_id: mid,
|
||||
persname: pname,
|
||||
answers: $.param(ansmap)
|
||||
}
|
||||
})
|
||||
.done(function(data){
|
||||
location.reload();
|
||||
})
|
||||
.fail(function(data){
|
||||
//TODO: Notify of failure
|
||||
});
|
||||
}
|
||||
function remove_people(ele){
|
||||
var arr = ele.id.split("_")
|
||||
var pid = arr[1]
|
||||
$('#' + ele.id).parent().parent().parent().remove();
|
||||
update_scores();
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: "./rest/people",
|
||||
data: {
|
||||
method: "DELETE",
|
||||
p_id: pid
|
||||
}
|
||||
});
|
||||
}
|
||||
// ID of line currently open for modification
|
||||
var in_modification_id = null;
|
||||
function modify_people(ele, id){
|
||||
if (in_modification_id != null) {
|
||||
// reset to normal values
|
||||
$('#modify_'+in_modification_id).text("Modify or delete");
|
||||
$('#modify_'+in_modification_id).attr("class", "btn btn-xs btn-warning");
|
||||
$('#line_'+in_modification_id).css("background-color", "");
|
||||
$('#delete_'+in_modification_id).css("display", "none");
|
||||
}
|
||||
if (in_modification_id != id) {
|
||||
// activate modifiable mode
|
||||
$('#modify_'+id).text("Done");
|
||||
$('#modify_'+id).attr("class", "btn btn-xs btn-success");
|
||||
$('#line_'+id).css("background-color", "LightYellow");
|
||||
$('#delete_'+id).show();
|
||||
|
||||
in_modification_id = id;
|
||||
} else {
|
||||
in_modification_id = null;
|
||||
}
|
||||
}
|
||||
"""
|
||||
end
|
||||
|
||||
redef fun rendering do
|
||||
if meetup == null then
|
||||
add((new OpportunityHomePage).write_to_string)
|
||||
return
|
||||
end
|
||||
add header
|
||||
var db = new OpportunityDB.open("opportunity")
|
||||
add meetup.to_html(db)
|
||||
db.close
|
||||
add footer
|
||||
end
|
||||
end
|
||||
|
||||
redef class Meetup
|
||||
# Build the HTML for `self`
|
||||
fun to_html(db: OpportunityDB): Streamable do
|
||||
var t = new Template
|
||||
t.add """
|
||||
<div class="container">
|
||||
<div class="page-header">
|
||||
<center><h1>{{{name}}}</h1></center>
|
||||
"""
|
||||
if not date.is_empty then t.add """
|
||||
<center><h4>When: {{{date}}}</h4></center>"""
|
||||
|
||||
if not place.is_empty then t.add """
|
||||
<center><h4>Where: {{{place}}}</h4></center>"""
|
||||
|
||||
t.add """
|
||||
</div>
|
||||
<table class="table">
|
||||
"""
|
||||
t.add "<th>Participant name</th>"
|
||||
for i in answers(db) do
|
||||
t.add "<th class=\"text-center\">"
|
||||
t.add i.to_s
|
||||
t.add "</th>"
|
||||
end
|
||||
t.add "<th></th>"
|
||||
t.add "</tr>"
|
||||
for i in participants(db) do
|
||||
i.load_answers(db, self)
|
||||
t.add "<tr id=\"line_{i.id}\">"
|
||||
t.add "<td>"
|
||||
t.add i.to_s
|
||||
t.add "</td>"
|
||||
for j, k in i.answers do
|
||||
var color
|
||||
if answer_mode == 0 then
|
||||
if k == 1 then
|
||||
color = "green"
|
||||
else
|
||||
color = "red"
|
||||
end
|
||||
else
|
||||
if k == 2 then
|
||||
color = "green"
|
||||
else if k == 1 then
|
||||
color = "#B8860B"
|
||||
else
|
||||
color = "red"
|
||||
end
|
||||
end
|
||||
t.add """<td class="answer" onclick="change_answer(this, {{{i.id}}})" id="answer_{{{j.id}}}_{{{i.id}}}" style="color:{{{color}}}">"""
|
||||
t.add "<center>"
|
||||
if answer_mode == 0 then
|
||||
if k == 1 then
|
||||
t.add "✔"
|
||||
else
|
||||
t.add "✘"
|
||||
end
|
||||
else
|
||||
if k == 2 then
|
||||
t.add "✔"
|
||||
else if k == 1 then
|
||||
t.add "❓"
|
||||
else
|
||||
t.add "✘"
|
||||
end
|
||||
end
|
||||
t.add "</center></td>"
|
||||
end
|
||||
t.add """<td class="opportunity-action"><center><button class="btn btn-xs btn-warning" type="button" onclick="modify_people(this, {{{i.id}}})" id="modify_{{{i.id}}}">Modify or delete</button> """
|
||||
t.add """<button class="btn btn-xs btn-danger" type="button" onclick="remove_people(this)" id="delete_{{{i.id}}}" style="display: none;">Delete</button></center></td>"""
|
||||
t.add "</tr>"
|
||||
end
|
||||
t.add """
|
||||
<tr id="newrow" style="background-color: LightYellow">
|
||||
<td><input id="new_name" type="text" placeholder="Your name" class="input-large"></td>
|
||||
"""
|
||||
for i in answers(db) do
|
||||
t.add "<td class=\"answer\" id=\"newans_{i.id}\" onclick=\"change_temp_answer(this)\" style=\"color:red;\"><center>✘</center></td>"
|
||||
end
|
||||
t.add """
|
||||
<td><center><span id="add_{{{id}}}" onclick="add_part(this)" style="color:green;" class="action"><button class="btn btn-xs btn-success" type="button">Done</button></span></center></td>"""
|
||||
t.add "</tr>"
|
||||
# Compute score for each answer
|
||||
var scores = new HashMap[Int, Int]
|
||||
var maxsc = 0
|
||||
for i in answers(db) do
|
||||
scores[i.id] = i.score(db)
|
||||
if scores[i.id] > maxsc then maxsc = scores[i.id]
|
||||
end
|
||||
t.add """
|
||||
<tr id="total">
|
||||
<th>Total</th>
|
||||
"""
|
||||
for i in answers(db) do
|
||||
t.add """<th id="total{{{i.id}}}"><center>{{{i.count(db)}}}"""
|
||||
if scores.has_key(i.id) and scores[i.id] >= maxsc then
|
||||
t.add """<br/><span style="color:blue">★</span>"""
|
||||
end
|
||||
t.add "</center></th>"
|
||||
end
|
||||
t.add "</th>"
|
||||
t.add """
|
||||
<th></th>
|
||||
</tr>"""
|
||||
t.add "</table>"
|
||||
t.add "</div>"
|
||||
return t
|
||||
end
|
||||
end
|
||||
434
samples/Objective-J/AppController.j
Normal file
@@ -0,0 +1,434 @@
|
||||
//
|
||||
// AppController.j
|
||||
// FlickrPhoto
|
||||
//
|
||||
// Created by Ross Boucher.
|
||||
// Copyright 2008 - 2010, 280 North, Inc. All rights reserved.
|
||||
|
||||
@import <Foundation/Foundation.j>
|
||||
@import <AppKit/AppKit.j>
|
||||
|
||||
var SliderToolbarItemIdentifier = "SliderToolbarItemIdentifier",
|
||||
AddToolbarItemIdentifier = "AddToolbarItemIdentifier",
|
||||
RemoveToolbarItemIdentifier = "RemoveToolbarItemIdentifier";
|
||||
|
||||
/*
|
||||
Important note about CPJSONPConnection: CPJSONPConnection is ONLY for JSONP APIs.
|
||||
If you aren't sure you NEED JSONP (see http://ajaxian.com/archives/jsonp-json-with-padding ),
|
||||
you most likely don't want to use CPJSONPConnection, but rather the more standard
|
||||
CPURLConnection. CPJSONPConnection is designed for cross-domain
|
||||
connections, and if you are making requests to the same domain (as most web
|
||||
applications do), you do not need it.
|
||||
*/
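/*
    A minimal sketch of the same-domain alternative mentioned above, using
    CPURLConnection instead of CPJSONPConnection. Illustrative only and not part
    of this sample; the URL is hypothetical, and the response arrives as a plain
    string that the delegate must parse itself (e.g. with JSON.parse):

        var request = [CPURLRequest requestWithURL:"/services/photos.json"],
            connection = [CPURLConnection connectionWithRequest:request delegate:self];

        // delegate callback:
        // - (void)connection:(CPURLConnection)aConnection didReceiveData:(CPString)data
*/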
|
||||
|
||||
@implementation AppController : CPObject
|
||||
{
|
||||
CPString lastIdentifier;
|
||||
CPDictionary photosets;
|
||||
|
||||
CPCollectionView listCollectionView;
|
||||
CPCollectionView photosCollectionView;
|
||||
}
|
||||
|
||||
- (void)applicationDidFinishLaunching:(CPNotification)aNotification
|
||||
{
|
||||
//the first thing we need to do is create a window to take up the full screen
|
||||
//we'll also create a toolbar to go with it, and grab its size for future reference
|
||||
|
||||
var theWindow = [[CPWindow alloc] initWithContentRect:CGRectMakeZero() styleMask:CPBorderlessBridgeWindowMask],
|
||||
contentView = [theWindow contentView],
|
||||
toolbar = [[CPToolbar alloc] initWithIdentifier:"Photos"],
|
||||
bounds = [contentView bounds];
|
||||
|
||||
//we tell the toolbar that we want to be its delegate and attach it to theWindow
|
||||
[toolbar setDelegate:self];
|
||||
[toolbar setVisible:true];
|
||||
[theWindow setToolbar:toolbar];
|
||||
|
||||
photosets = [CPDictionary dictionary]; //storage for our sets of photos returned from Flickr
|
||||
|
||||
//now we create a scroll view to contain the list of collections of photos (photosets)
|
||||
//inside the scroll view, we'll place our collection view, which manages a collection of "cells"
|
||||
//each cell will represent one photo collection, and choosing cells will select that collection
|
||||
|
||||
var listScrollView = [[CPScrollView alloc] initWithFrame:CGRectMake(0, 0, 200, CGRectGetHeight(bounds) - 58)];
|
||||
[listScrollView setAutohidesScrollers:YES];
|
||||
[listScrollView setAutoresizingMask:CPViewHeightSizable];
|
||||
[[listScrollView contentView] setBackgroundColor:[CPColor colorWithRed:213.0 / 255.0 green:221.0 / 255.0 blue:230.0 / 255.0 alpha:1.0]];
|
||||
|
||||
//we create the collection view cells by creating a single prototype (CPCollectionViewItem) and setting its view.
|
||||
//the CPCollectionView class will then duplicate this item as many times as it needs
|
||||
|
||||
var photosListItem = [[CPCollectionViewItem alloc] init];
|
||||
[photosListItem setView:[[PhotosListCell alloc] initWithFrame:CGRectMakeZero()]];
|
||||
|
||||
listCollectionView = [[CPCollectionView alloc] initWithFrame:CGRectMake(0, 0, 200, 0)];
|
||||
|
||||
[listCollectionView setDelegate:self]; //we want delegate methods
|
||||
[listCollectionView setItemPrototype:photosListItem]; //set the item prototype
|
||||
|
||||
[listCollectionView setMinItemSize:CGSizeMake(20.0, 45.0)];
|
||||
[listCollectionView setMaxItemSize:CGSizeMake(1000.0, 45.0)];
|
||||
[listCollectionView setMaxNumberOfColumns:1]; //setting a single column will make this appear as a vertical list
|
||||
|
||||
[listCollectionView setVerticalMargin:0.0];
|
||||
[listCollectionView setAutoresizingMask:CPViewWidthSizable];
|
||||
|
||||
//finally, we put our collection view inside the scroll view as its document view, so it can be scrolled
|
||||
[listScrollView setDocumentView:listCollectionView];
|
||||
|
||||
//and we add it to the window's content view, so it will show up on the screen
|
||||
[contentView addSubview:listScrollView];
|
||||
|
||||
//repeat the process with another collection view for the actual photos
|
||||
//this time we'll use a different view for the prototype (PhotoCell)
|
||||
|
||||
var photoItem = [[CPCollectionViewItem alloc] init];
|
||||
[photoItem setView:[[PhotoCell alloc] initWithFrame:CGRectMake(0, 0, 150, 150)]];
|
||||
|
||||
var scrollView = [[CPScrollView alloc] initWithFrame:CGRectMake(200, 0, CGRectGetWidth(bounds) - 200, CGRectGetHeight(bounds) - 58)];
|
||||
|
||||
photosCollectionView = [[CPCollectionView alloc] initWithFrame:CGRectMake(0, 0, CGRectGetWidth(bounds) - 200, 0)];
|
||||
|
||||
[photosCollectionView setDelegate:self];
|
||||
[photosCollectionView setItemPrototype:photoItem];
|
||||
|
||||
[photosCollectionView setMinItemSize:CGSizeMake(150, 150)];
|
||||
[photosCollectionView setMaxItemSize:CGSizeMake(150, 150)];
|
||||
[photosCollectionView setAutoresizingMask:CPViewWidthSizable];
|
||||
|
||||
[scrollView setAutoresizingMask:CPViewHeightSizable | CPViewWidthSizable];
|
||||
[scrollView setDocumentView:photosCollectionView];
|
||||
[scrollView setAutohidesScrollers:YES];
|
||||
|
||||
[[scrollView contentView] setBackgroundColor:[CPColor colorWithCalibratedWhite:0.25 alpha:1.0]];
|
||||
|
||||
[contentView addSubview:scrollView];
|
||||
|
||||
//bring forward the window to display it
|
||||
[theWindow orderFront:self];
|
||||
|
||||
//get the most interesting photos on flickr
|
||||
var request = [CPURLRequest requestWithURL:"http://www.flickr.com/services/rest/?method=flickr.interestingness.getList&per_page=20&format=json&api_key=ca4dd89d3dfaeaf075144c3fdec76756"];
|
||||
|
||||
// see important note about CPJSONPConnection above
|
||||
var connection = [CPJSONPConnection sendRequest:request callback:"jsoncallback" delegate:self];
|
||||
|
||||
lastIdentifier = "Interesting Photos";
|
||||
}
|
||||
|
||||
- (void)add:(id)sender
|
||||
{
|
||||
var string = prompt("Enter a tag to search Flickr for photos.");
|
||||
|
||||
if (string)
|
||||
{
|
||||
//create a new request for the photos with the tag returned from the javascript prompt
|
||||
var request = [CPURLRequest requestWithURL:"http://www.flickr.com/services/rest/?"+
|
||||
"method=flickr.photos.search&tags="+encodeURIComponent(string)+
|
||||
"&media=photos&machine_tag_mode=any&per_page=20&format=json&api_key=ca4dd89d3dfaeaf075144c3fdec76756"];
|
||||
|
||||
// see important note about CPJSONPConnection above
|
||||
[CPJSONPConnection sendRequest:request callback:"jsoncallback" delegate:self];
|
||||
|
||||
lastIdentifier = string;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)remove:(id)sender
|
||||
{
|
||||
//remove the currently selected photo list
|
||||
[self removeImageListWithIdentifier:[[photosets allKeys] objectAtIndex:[[listCollectionView selectionIndexes] firstIndex]]];
|
||||
}
|
||||
|
||||
- (void)addImageList:(CPArray)images withIdentifier:(CPString)aString
|
||||
{
|
||||
[photosets setObject:images forKey:aString];
|
||||
|
||||
[listCollectionView setContent:[[photosets allKeys] copy]];
|
||||
[listCollectionView setSelectionIndexes:[CPIndexSet indexSetWithIndex:[[photosets allKeys] indexOfObject:aString]]];
|
||||
}
|
||||
|
||||
- (void)removeImageListWithIdentifier:(CPString)aString
|
||||
{
|
||||
var nextIndex = MAX([[listCollectionView content] indexOfObject:aString] - 1, 0);
|
||||
|
||||
[photosets removeObjectForKey:aString];
|
||||
|
||||
[listCollectionView setContent:[[photosets allKeys] copy]];
|
||||
[listCollectionView setSelectionIndexes:[CPIndexSet indexSetWithIndex:nextIndex]];
|
||||
}
|
||||
|
||||
- (void)adjustImageSize:(id)sender
|
||||
{
|
||||
var newSize = [sender value];
|
||||
|
||||
[photosCollectionView setMinItemSize:CGSizeMake(newSize, newSize)];
|
||||
[photosCollectionView setMaxItemSize:CGSizeMake(newSize, newSize)];
|
||||
}
|
||||
|
||||
- (void)collectionViewDidChangeSelection:(CPCollectionView)aCollectionView
|
||||
{
|
||||
if (aCollectionView == listCollectionView)
|
||||
{
|
||||
var listIndex = [[listCollectionView selectionIndexes] firstIndex];
|
||||
|
||||
if (listIndex === CPNotFound)
|
||||
return;
|
||||
|
||||
var key = [listCollectionView content][listIndex];
|
||||
|
||||
[photosCollectionView setContent:[photosets objectForKey:key]];
|
||||
[photosCollectionView setSelectionIndexes:[CPIndexSet indexSet]];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)connection:(CPJSONPConnection)aConnection didReceiveData:(CPString)data
|
||||
{
|
||||
//this method is called when the network request returns. the data is the returned
|
||||
//information from flickr. we set the array of photo urls as the data to our collection view
|
||||
|
||||
[self addImageList:data.photos.photo withIdentifier:lastIdentifier];
|
||||
}
|
||||
|
||||
- (void)connection:(CPJSONPConnection)aConnection didFailWithError:(CPString)error
|
||||
{
|
||||
alert(error); //a network error occurred
|
||||
}
|
||||
|
||||
//these two methods are the toolbar delegate methods, and tell the toolbar what it should display to the user
|
||||
|
||||
- (CPArray)toolbarAllowedItemIdentifiers:(CPToolbar)aToolbar
|
||||
{
|
||||
return [self toolbarDefaultItemIdentifiers:aToolbar];
|
||||
}
|
||||
|
||||
- (CPArray)toolbarDefaultItemIdentifiers:(CPToolbar)aToolbar
|
||||
{
|
||||
return [AddToolbarItemIdentifier, RemoveToolbarItemIdentifier, CPToolbarFlexibleSpaceItemIdentifier, SliderToolbarItemIdentifier];
|
||||
}
|
||||
|
||||
//this delegate method returns the actual toolbar item for the given identifier
|
||||
|
||||
- (CPToolbarItem)toolbar:(CPToolbar)aToolbar itemForItemIdentifier:(CPString)anItemIdentifier willBeInsertedIntoToolbar:(BOOL)aFlag
|
||||
{
|
||||
var toolbarItem = [[CPToolbarItem alloc] initWithItemIdentifier:anItemIdentifier];
|
||||
|
||||
if (anItemIdentifier == SliderToolbarItemIdentifier)
|
||||
{
|
||||
[toolbarItem setView:[[PhotoResizeView alloc] initWithFrame:CGRectMake(0, 0, 180, 32)]];
|
||||
[toolbarItem setMinSize:CGSizeMake(180, 32)];
|
||||
[toolbarItem setMaxSize:CGSizeMake(180, 32)];
|
||||
[toolbarItem setLabel:"Scale"];
|
||||
}
|
||||
else if (anItemIdentifier == AddToolbarItemIdentifier)
|
||||
{
|
||||
var image = [[CPImage alloc] initWithContentsOfFile:[[CPBundle mainBundle] pathForResource:"add.png"] size:CPSizeMake(30, 25)],
|
||||
highlighted = [[CPImage alloc] initWithContentsOfFile:[[CPBundle mainBundle] pathForResource:"addHighlighted.png"] size:CPSizeMake(30, 25)];
|
||||
|
||||
[toolbarItem setImage:image];
|
||||
[toolbarItem setAlternateImage:highlighted];
|
||||
|
||||
[toolbarItem setTarget:self];
|
||||
[toolbarItem setAction:@selector(add:)];
|
||||
[toolbarItem setLabel:"Add Photo List"];
|
||||
|
||||
[toolbarItem setMinSize:CGSizeMake(32, 32)];
|
||||
[toolbarItem setMaxSize:CGSizeMake(32, 32)];
|
||||
}
|
||||
else if (anItemIdentifier == RemoveToolbarItemIdentifier)
|
||||
{
|
||||
var image = [[CPImage alloc] initWithContentsOfFile:[[CPBundle mainBundle] pathForResource:"remove.png"] size:CPSizeMake(30, 25)],
|
||||
highlighted = [[CPImage alloc] initWithContentsOfFile:[[CPBundle mainBundle] pathForResource:"removeHighlighted.png"] size:CPSizeMake(30, 25)];
|
||||
|
||||
[toolbarItem setImage:image];
|
||||
[toolbarItem setAlternateImage:highlighted];
|
||||
|
||||
[toolbarItem setTarget:self];
|
||||
[toolbarItem setAction:@selector(remove:)];
|
||||
[toolbarItem setLabel:"Remove Photo List"];
|
||||
|
||||
[toolbarItem setMinSize:CGSizeMake(32, 32)];
|
||||
[toolbarItem setMaxSize:CGSizeMake(32, 32)];
|
||||
}
|
||||
|
||||
return toolbarItem;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
/*
|
||||
This code demonstrates how to add a category to an existing class.
|
||||
In this case, we are adding the class method +flickr_labelWithText: to
|
||||
the CPTextField class. Later on, we can call [CPTextField flickr_labelWithText:"foo"]
|
||||
to return a new text field with the string foo.
|
||||
Best practices suggest prefixing category methods with your unique prefix, to prevent collisions.
|
||||
*/
|
||||
|
||||
@implementation CPTextField (CreateLabel)
|
||||
|
||||
+ (CPTextField)flickr_labelWithText:(CPString)aString
|
||||
{
|
||||
var label = [[CPTextField alloc] initWithFrame:CGRectMakeZero()];
|
||||
|
||||
[label setStringValue:aString];
|
||||
[label sizeToFit];
|
||||
[label setTextShadowColor:[CPColor whiteColor]];
|
||||
[label setTextShadowOffset:CGSizeMake(0, 1)];
|
||||
|
||||
return label;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// This class wraps our slider + labels combo
|
||||
|
||||
@implementation PhotoResizeView : CPView
|
||||
{
|
||||
}
|
||||
|
||||
- (id)initWithFrame:(CGRect)aFrame
|
||||
{
|
||||
self = [super initWithFrame:aFrame];
|
||||
|
||||
var slider = [[CPSlider alloc] initWithFrame:CGRectMake(30, CGRectGetHeight(aFrame) / 2.0 - 8, CGRectGetWidth(aFrame) - 65, 24)];
|
||||
|
||||
[slider setMinValue:50.0];
|
||||
[slider setMaxValue:250.0];
|
||||
[slider setIntValue:150.0];
|
||||
[slider setAction:@selector(adjustImageSize:)];
|
||||
|
||||
[self addSubview:slider];
|
||||
|
||||
var label = [CPTextField flickr_labelWithText:"50"];
|
||||
[label setFrameOrigin:CGPointMake(0, CGRectGetHeight(aFrame) / 2.0 - 4.0)];
|
||||
[self addSubview:label];
|
||||
|
||||
label = [CPTextField flickr_labelWithText:"250"];
|
||||
[label setFrameOrigin:CGPointMake(CGRectGetWidth(aFrame) - CGRectGetWidth([label frame]), CGRectGetHeight(aFrame) / 2.0 - 4.0)];
|
||||
[self addSubview:label];
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// This class displays a single photo collection inside our list of photo collections
|
||||
|
||||
@implementation PhotosListCell : CPView
|
||||
{
|
||||
CPTextField label;
|
||||
CPView highlightView;
|
||||
}
|
||||
|
||||
- (void)setRepresentedObject:(JSObject)anObject
|
||||
{
|
||||
if (!label)
|
||||
{
|
||||
label = [[CPTextField alloc] initWithFrame:CGRectInset([self bounds], 4, 4)];
|
||||
|
||||
[label setFont:[CPFont systemFontOfSize:16.0]];
|
||||
[label setTextShadowColor:[CPColor whiteColor]];
|
||||
[label setTextShadowOffset:CGSizeMake(0, 1)];
|
||||
|
||||
[self addSubview:label];
|
||||
}
|
||||
|
||||
[label setStringValue:anObject];
|
||||
[label sizeToFit];
|
||||
|
||||
[label setFrameOrigin:CGPointMake(10,CGRectGetHeight([label bounds]) / 2.0)];
|
||||
}
|
||||
|
||||
- (void)setSelected:(BOOL)flag
|
||||
{
|
||||
if (!highlightView)
|
||||
{
|
||||
highlightView = [[CPView alloc] initWithFrame:CGRectCreateCopy([self bounds])];
|
||||
[highlightView setBackgroundColor:[CPColor blueColor]];
|
||||
}
|
||||
|
||||
if (flag)
|
||||
{
|
||||
[self addSubview:highlightView positioned:CPWindowBelow relativeTo:label];
|
||||
[label setTextColor:[CPColor whiteColor]];
|
||||
[label setTextShadowColor:[CPColor blackColor]];
|
||||
}
|
||||
else
|
||||
{
|
||||
[highlightView removeFromSuperview];
|
||||
[label setTextColor:[CPColor blackColor]];
|
||||
[label setTextShadowColor:[CPColor whiteColor]];
|
||||
}
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// This class displays a single photo from our collection
|
||||
|
||||
@implementation PhotoCell : CPView
|
||||
{
|
||||
CPImage image;
|
||||
CPImageView imageView;
|
||||
CPView highlightView;
|
||||
}
|
||||
|
||||
- (void)setRepresentedObject:(JSObject)anObject
|
||||
{
|
||||
if (!imageView)
|
||||
{
|
||||
imageView = [[CPImageView alloc] initWithFrame:CGRectMakeCopy([self bounds])];
|
||||
[imageView setAutoresizingMask:CPViewWidthSizable | CPViewHeightSizable];
|
||||
[imageView setImageScaling:CPScaleProportionally];
|
||||
[imageView setHasShadow:YES];
|
||||
[self addSubview:imageView];
|
||||
}
|
||||
|
||||
[image setDelegate:nil];
|
||||
|
||||
image = [[CPImage alloc] initWithContentsOfFile:thumbForFlickrPhoto(anObject)];
|
||||
|
||||
[image setDelegate:self];
|
||||
|
||||
if ([image loadStatus] == CPImageLoadStatusCompleted)
|
||||
[imageView setImage:image];
|
||||
else
|
||||
[imageView setImage:nil];
|
||||
}
|
||||
|
||||
- (void)imageDidLoad:(CPImage)anImage
|
||||
{
|
||||
[imageView setImage:anImage];
|
||||
}
|
||||
|
||||
- (void)setSelected:(BOOL)flag
|
||||
{
|
||||
if (!highlightView)
|
||||
{
|
||||
highlightView = [[CPView alloc] initWithFrame:[self bounds]];
|
||||
[highlightView setBackgroundColor:[CPColor colorWithCalibratedWhite:0.8 alpha:0.6]];
|
||||
[highlightView setAutoresizingMask:CPViewWidthSizable | CPViewHeightSizable];
|
||||
}
|
||||
|
||||
if (flag)
|
||||
{
|
||||
[highlightView setFrame:[self bounds]];
|
||||
[self addSubview:highlightView positioned:CPWindowBelow relativeTo:imageView];
|
||||
}
|
||||
else
|
||||
[highlightView removeFromSuperview];
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
// helper javascript functions for turning a Flickr photo object into a URL for getting the image
|
||||
|
||||
function urlForFlickrPhoto(photo)
|
||||
{
|
||||
return "http://farm" + photo.farm + ".static.flickr.com/" + photo.server + "/" + photo.id+"_" + photo.secret + ".jpg";
|
||||
}
|
||||
|
||||
function thumbForFlickrPhoto(photo)
|
||||
{
|
||||
return "http://farm" + photo.farm + ".static.flickr.com/" + photo.server + "/" + photo.id + "_" + photo.secret + "_m.jpg";
|
||||
}
|
||||
88
samples/Objective-J/LightsOff.j
Normal file
@@ -0,0 +1,88 @@
|
||||
@import <Foundation/CPObject.j>
|
||||
@import <AppKit/CPView.j>
|
||||
@import <AppKit/CPButton.j>
|
||||
@import <AppKit/CPWebView.j>
|
||||
@import "LOBoard.j"
|
||||
|
||||
@implementation LOInfoView : CPView
|
||||
{
|
||||
}
|
||||
|
||||
- (void)drawRect:(CGRect)r
|
||||
{
|
||||
[[CPColor whiteColor] setFill];
|
||||
var path = [CPBezierPath bezierPath];
|
||||
[path appendBezierPathWithRoundedRect:CGRectMake(5, 0, CGRectGetWidth([self bounds]) - 10.0, CGRectGetHeight([self bounds])) xRadius:10 yRadius:10];
|
||||
[path fill];
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation AppController : CPObject
|
||||
{
|
||||
}
|
||||
|
||||
- (CPPanel)initInfoWindow
|
||||
{
|
||||
var infoWindow = [[CPPanel alloc] initWithContentRect:CGRectMake(400, 50, 320, 480) styleMask:CPHUDBackgroundWindowMask | CPResizableWindowMask];
|
||||
[infoWindow setFloatingPanel:YES];
|
||||
|
||||
var _infoContent = [infoWindow contentView],
|
||||
_iconImage = [[CPImage alloc] initWithContentsOfFile:"Resources/icon.png" size:CPSizeMake(59, 60)],
|
||||
_iconView = [[CPImageView alloc] initWithFrame:CGRectMake(125, 0, 59, 60)];
|
||||
|
||||
[_iconView setImage:_iconImage];
|
||||
[_infoContent addSubview:_iconView];
|
||||
|
||||
var _infoView = [[LOInfoView alloc] initWithFrame:CGRectMake(0, 65, 320, 395)],
|
||||
_webView = [[CPWebView alloc] initWithFrame:CGRectMake(20, 0, 270, 370)];
|
||||
|
||||
[_webView loadHTMLString:@"<center><h3>Lights Off</h3></center> <p>Lights Off is a fantastic game exclusively for iPhone and iPod touch and inspired by Tiger Electronic's 'Lights Out'.</p> <p>The goal of the game is simply to switch all of the lights off, but it's harder than it looks! Give the first few levels a try in the playable demo to the left.</p><center><img src='Resources/avail_on_app_store.png'></center>"];
|
||||
|
||||
[_infoView addSubview:_webView];
|
||||
|
||||
[_infoContent addSubview:_infoView];
|
||||
|
||||
return infoWindow;
|
||||
}
|
||||
|
||||
- (void)applicationDidFinishLaunching:(CPNotification)aNotification
|
||||
{
|
||||
/* Enable Logging (DEBUG) */
|
||||
// CPLogRegister(CPLogPopup);
|
||||
|
||||
var rootWindow = [[CPWindow alloc] initWithContentRect:CGRectMakeZero() styleMask:CPBorderlessBridgeWindowMask];
|
||||
[rootWindow setBackgroundColor:[CPColor grayColor]];
|
||||
[rootWindow orderFront:self];
|
||||
|
||||
var infoWindow = [self initInfoWindow],
|
||||
gameWindow = [[CPPanel alloc] initWithContentRect:CGRectMake(50, 50, 324, 482) styleMask:CPHUDBackgroundWindowMask];
|
||||
[gameWindow setFloatingPanel:YES];
|
||||
[gameWindow setTitle:@"Lights Off"];
|
||||
|
||||
contentView = [gameWindow contentView];
|
||||
|
||||
var _board = [[LOBoard alloc] initWithFrame:CGRectMake(2, 0, 320, 480)],
|
||||
_bgImage = [[CPImage alloc] initWithContentsOfFile:"Resources/lo-background.png" size:CPSizeMake(320, 480)];
|
||||
[_board setImage:_bgImage];
|
||||
[_board resetBoard];
|
||||
|
||||
var _buttonImage = [[CPImage alloc] initWithContentsOfFile:"Resources/button-reset.png" size:CPSizeMake(90, 28)],
|
||||
_buttonPressImage = [[CPImage alloc] initWithContentsOfFile:"Resources/button-reset-press.png" size:CPSizeMake(90, 28)],
|
||||
_resetButton = [[CPButton alloc] initWithFrame:CGRectMake(195, 422, 90, 28)];
|
||||
|
||||
[_resetButton setImage:_buttonImage];
|
||||
[_resetButton setAlternateImage:_buttonPressImage];
|
||||
[_resetButton setBordered:NO];
|
||||
|
||||
[contentView addSubview:_board];
|
||||
[contentView addSubview:_resetButton];
|
||||
|
||||
[_resetButton setTarget:_board];
|
||||
[_resetButton setAction:@selector(resetBoard)];
|
||||
|
||||
[gameWindow orderFront:self];
|
||||
[infoWindow orderFront:self];
|
||||
}
|
||||
|
||||
@end
|
||||
47
samples/Objective-J/iTunesLayout.j
Normal file
@@ -0,0 +1,47 @@
|
||||
|
||||
@import <Foundation/CPObject.j>
|
||||
|
||||
|
||||
@implementation AppController : CPObject
|
||||
{
|
||||
}
|
||||
|
||||
- (void)applicationDidFinishLaunching:(CPNotification)aNotification
|
||||
{
|
||||
// The end result of this layout will be the kind of master/detail/auxiliary view
|
||||
// found in iTunes, Mail, and many other apps.
|
||||
|
||||
var theWindow = [[CPWindow alloc] initWithContentRect:CGRectMakeZero() styleMask:CPBorderlessBridgeWindowMask],
|
||||
contentView = [theWindow contentView];
|
||||
|
||||
var navigationArea = [[CPView alloc] initWithFrame:CGRectMake(0.0, 0.0, 150.0, CGRectGetHeight([contentView bounds]) - 150.0)];
|
||||
|
||||
[navigationArea setBackgroundColor:[CPColor redColor]];
|
||||
|
||||
// This view will grow in height, but stay fixed width attached to the left side of the screen.
|
||||
[navigationArea setAutoresizingMask:CPViewHeightSizable | CPViewMaxXMargin];
|
||||
|
||||
[contentView addSubview:navigationArea];
|
||||
|
||||
var metaDataArea = [[CPView alloc] initWithFrame:CGRectMake(0.0, CGRectGetMaxY([navigationArea frame]), 150.0, 150.0)];
|
||||
|
||||
[metaDataArea setBackgroundColor:[CPColor greenColor]];
|
||||
|
||||
// This view will stay the same size in both directions, and fixed to the lower left corner.
|
||||
[metaDataArea setAutoresizingMask:CPViewMinYMargin | CPViewMaxXMargin];
|
||||
|
||||
[contentView addSubview:metaDataArea];
|
||||
|
||||
var contentArea = [[CPView alloc] initWithFrame:CGRectMake(150.0, 0.0, CGRectGetWidth([contentView bounds]) - 150.0, CGRectGetHeight([contentView bounds]))];
|
||||
|
||||
[contentArea setBackgroundColor:[CPColor blueColor]];
|
||||
|
||||
// This view will grow in both height and width.
|
||||
[contentArea setAutoresizingMask:CPViewWidthSizable | CPViewHeightSizable];
|
||||
|
||||
[contentView addSubview:contentArea];
|
||||
|
||||
[theWindow orderFront:self];
|
||||
}
|
||||
|
||||
@end
|
||||
121
samples/Pascal/cwindirs.pp
Normal file
@@ -0,0 +1,121 @@
unit cwindirs;


interface

uses
  windows,
  strings;

Const
  CSIDL_PROGRAMS = $0002;
  CSIDL_PERSONAL = $0005;
  CSIDL_FAVORITES = $0006;
  CSIDL_STARTUP = $0007;
  CSIDL_RECENT = $0008;
  CSIDL_SENDTO = $0009;
  CSIDL_STARTMENU = $000B;
  CSIDL_MYMUSIC = $000D;
  CSIDL_MYVIDEO = $000E;
  CSIDL_DESKTOPDIRECTORY = $0010;
  CSIDL_NETHOOD = $0013;
  CSIDL_TEMPLATES = $0015;
  CSIDL_COMMON_STARTMENU = $0016;
  CSIDL_COMMON_PROGRAMS = $0017;
  CSIDL_COMMON_STARTUP = $0018;
  CSIDL_COMMON_DESKTOPDIRECTORY = $0019;
  CSIDL_APPDATA = $001A;
  CSIDL_PRINTHOOD = $001B;
  CSIDL_LOCAL_APPDATA = $001C;
  CSIDL_COMMON_FAVORITES = $001F;
  CSIDL_INTERNET_CACHE = $0020;
  CSIDL_COOKIES = $0021;
  CSIDL_HISTORY = $0022;
  CSIDL_COMMON_APPDATA = $0023;
  CSIDL_WINDOWS = $0024;
  CSIDL_SYSTEM = $0025;
  CSIDL_PROGRAM_FILES = $0026;
  CSIDL_MYPICTURES = $0027;
  CSIDL_PROFILE = $0028;
  CSIDL_PROGRAM_FILES_COMMON = $002B;
  CSIDL_COMMON_TEMPLATES = $002D;
  CSIDL_COMMON_DOCUMENTS = $002E;
  CSIDL_COMMON_ADMINTOOLS = $002F;
  CSIDL_ADMINTOOLS = $0030;
  CSIDL_COMMON_MUSIC = $0035;
  CSIDL_COMMON_PICTURES = $0036;
  CSIDL_COMMON_VIDEO = $0037;
  CSIDL_CDBURN_AREA = $003B;
  CSIDL_PROFILES = $003E;

  CSIDL_FLAG_CREATE = $8000;

Function GetWindowsSpecialDir(ID : Integer) : String;

implementation

uses
  sysutils;

Type
  PFNSHGetFolderPath = Function(Ahwnd: HWND; Csidl: Integer; Token: THandle; Flags: DWord; Path: PChar): HRESULT; stdcall;


var
  SHGetFolderPath : PFNSHGetFolderPath = Nil;
  CFGDLLHandle : THandle = 0;

Procedure InitDLL;

Var
  pathBuf: array[0..MAX_PATH-1] of char;
  pathLength: Integer;
begin
  { Load shfolder.dll using a full path, in order to prevent spoofing (Mantis #18185)
    Don't bother loading shell32.dll because shfolder.dll itself redirects SHGetFolderPath
    to shell32.dll whenever possible. }
  pathLength:=GetSystemDirectory(pathBuf, MAX_PATH);
  if (pathLength>0) and (pathLength<MAX_PATH-14) then
    begin
      StrLCopy(@pathBuf[pathLength],'\shfolder.dll',MAX_PATH-pathLength-1);
      CFGDLLHandle:=LoadLibrary(pathBuf);

      if (CFGDLLHandle<>0) then
        begin
          Pointer(ShGetFolderPath):=GetProcAddress(CFGDLLHandle,'SHGetFolderPathA');
          If @ShGetFolderPath=nil then
            begin
              FreeLibrary(CFGDLLHandle);
              CFGDllHandle:=0;
            end;
        end;
    end;
  If (@ShGetFolderPath=Nil) then
    Raise Exception.Create('Could not determine SHGetFolderPath Function');
end;

Function GetWindowsSpecialDir(ID : Integer) : String;

Var
  APath : Array[0..MAX_PATH] of char;

begin
  Result:='';
  if (CFGDLLHandle=0) then
    InitDLL;
  If (SHGetFolderPath<>Nil) then
    begin
      if SHGetFolderPath(0,ID or CSIDL_FLAG_CREATE,0,0,@APATH[0])=S_OK then
        Result:=IncludeTrailingPathDelimiter(StrPas(@APath[0]));
    end;
end;

Initialization

Finalization
  if CFGDLLHandle<>0 then
    FreeLibrary(CFGDllHandle);
end.
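For context, a minimal usage sketch of the unit above (not part of the diff): it assumes a Windows target with cwindirs.pp on the unit path, and the program name and chosen CSIDL constants are purely illustrative; only cwindirs, GetWindowsSpecialDir and the CSIDL_* constants come from the sample itself.

program showdirs;

{$mode objfpc}{$H+}

uses
  cwindirs;  { the unit added in this diff }

begin
  { GetWindowsSpecialDir resolves a special-folder path via SHGetFolderPath and
    returns it with a trailing path delimiter, or '' if resolution fails. }
  Writeln('AppData:   ', GetWindowsSpecialDir(CSIDL_APPDATA));
  Writeln('Documents: ', GetWindowsSpecialDir(CSIDL_PERSONAL));
end.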
@@ -1,51 +0,0 @@
{ $Id$ }
{
 ---------------------------------------------------------------------------
 gtkextra.pp - GTK(2) widgetset - additional gdk/gtk functions
 ---------------------------------------------------------------------------

 This unit contains missing gdk/gtk functions and defines for certain
 versions of gtk or fpc.

 ---------------------------------------------------------------------------

 @created(Sun Jan 28th WET 2006)
 @lastmod($Date$)
 @author(Marc Weustink <marc@@dommelstein.nl>)

 *****************************************************************************
  This file is part of the Lazarus Component Library (LCL)

  See the file COPYING.modifiedLGPL.txt, included in this distribution,
  for details about the license.
 *****************************************************************************
}

unit GtkExtra;

{$mode objfpc}{$H+}

interface

{$I gtkdefines.inc}

{$ifdef gtk1}
  {$I gtk1extrah.inc}
{$endif}

{$ifdef gtk2}
  {$I gtk2extrah.inc}
{$endif}


implementation

{$ifdef gtk1}
  {$I gtk1extra.inc}
{$endif}

{$ifdef gtk2}
  {$I gtk2extra.inc}
{$endif}

end.
Some files were not shown because too many files have changed in this diff.